import os
import yaml
import time
import shutil
import torch
import random
import argparse
import numpy as np

from torch.utils import data
from tqdm import tqdm

from ptsemseg.models import get_model
from ptsemseg.loss import get_loss_function
from ptsemseg.loader import get_loader
from ptsemseg.utils import get_logger, Table
from ptsemseg.metrics import runningScore, averageMeter
from ptsemseg.augmentations import get_composed_augmentations
from ptsemseg.schedulers import get_scheduler
from ptsemseg.optimizers import get_optimizer

from tensorboardX import SummaryWriter


def train(cfg, writer, logger):
    # Setup seeds
    torch.manual_seed(cfg.get("seed", 1337))
    torch.cuda.manual_seed(cfg.get("seed", 1337))
    np.random.seed(cfg.get("seed", 1337))
    random.seed(cfg.get("seed", 1337))

    # Setup device
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Setup augmentations
    augmentations = cfg["training"].get("augmentations", None)
    data_aug = get_composed_augmentations(augmentations)

    # Setup dataloaders
    data_loader = get_loader(cfg["data"]["dataset"])
    data_path = cfg["data"]["path"]

    t_loader = data_loader(
        data_path,
        is_transform=True,
        split=cfg["data"]["train_split"],
        img_size=(cfg["data"]["img_rows"], cfg["data"]["img_cols"]),
        augmentations=data_aug,
    )

    v_loader = data_loader(
        data_path,
        is_transform=True,
        split=cfg["data"]["val_split"],
        img_size=(cfg["data"]["img_rows"], cfg["data"]["img_cols"]),
    )

    n_classes = t_loader.n_classes

    trainloader = data.DataLoader(
        t_loader,
        batch_size=cfg["training"]["batch_size"],
        num_workers=cfg["training"]["n_workers"],
        shuffle=True,
    )

    valloader = data.DataLoader(
        v_loader,
        batch_size=cfg["training"]["batch_size"],
        num_workers=cfg["training"]["n_workers"],
    )

    # Setup metrics
    running_metrics_val = runningScore(n_classes)

    # Setup model
    model = get_model(cfg["model"], n_classes).to(device)
    model = torch.nn.DataParallel(model, device_ids=range(torch.cuda.device_count()))

    # Setup optimizer, lr_scheduler and loss function
    optimizer_cls = get_optimizer(cfg)
    optimizer_params = {k: v for k, v in cfg["training"]["optimizer"].items() if k != "name"}

    optimizer = optimizer_cls(model.parameters(), **optimizer_params)
    logger.info("Using optimizer {}".format(optimizer))

    scheduler = get_scheduler(optimizer, cfg["training"]["lr_schedule"])

    loss_fn = get_loss_function(cfg)
    logger.info("Using loss {}".format(loss_fn))

    start_iter = 0
    if cfg["training"]["resume"] is not None:
        if os.path.isfile(cfg["training"]["resume"]):
            logger.info(
                "Loading model and optimizer from checkpoint '{}'".format(cfg["training"]["resume"])
            )
            checkpoint = torch.load(cfg["training"]["resume"])
            model.load_state_dict(checkpoint["model_state"])
            optimizer.load_state_dict(checkpoint["optimizer_state"])
            scheduler.load_state_dict(checkpoint["scheduler_state"])
            start_iter = checkpoint["epoch"]
            logger.info(
                "Loaded checkpoint '{}' (iter {})".format(
                    cfg["training"]["resume"], checkpoint["epoch"]
                )
            )
        else:
            logger.info("No checkpoint found at '{}'".format(cfg["training"]["resume"]))

    val_loss_meter = averageMeter()
    time_meter = averageMeter()

    best_iou = -100.0
    i = start_iter
    flag = True

    score_table = Table(["Overall_Acc", "Mean_Acc", "FreqW_Acc", "Mean_IoU"])
    class_table = Table([i for i in range(t_loader.n_classes)])

    while i <= cfg["training"]["train_iters"] and flag:
        for (images, labels) in trainloader:
            i += 1
            start_ts = time.time()
            scheduler.step()
            model.train()
            images = images.to(device)
            labels = labels.to(device)

            optimizer.zero_grad()
            outputs = model(images)

            loss = loss_fn(input=outputs, target=labels)

            loss.backward()
            optimizer.step()

            time_meter.update(time.time() - start_ts)

            if (i + 1) % cfg["training"]["print_interval"] == 0:
                fmt_str = "Iter [{:d}/{:d}] Loss: {:.4f} Time/Image: {:.4f}"
                print_str = fmt_str.format(
                    i + 1,
                    cfg["training"]["train_iters"],
                    loss.item(),
                    time_meter.avg / cfg["training"]["batch_size"],
                )

                print(print_str)
                logger.info(print_str)
                writer.add_scalar("loss/train_loss", loss.item(), i + 1)
                time_meter.reset()

            if (i + 1) % cfg["training"]["val_interval"] == 0 or (i + 1) == cfg["training"]["train_iters"]:
                model.eval()  # switch BN/dropout layers to eval mode for validation
                with torch.no_grad():
                    for i_val, (images_val, labels_val) in tqdm(enumerate(valloader)):
                        images_val = images_val.to(device)
                        labels_val = labels_val.to(device)

                        outputs = model(images_val)
                        val_loss = loss_fn(input=outputs, target=labels_val)

                        pred = outputs.data.max(1)[1].cpu().numpy()
                        gt = labels_val.data.cpu().numpy()

                        running_metrics_val.update(gt, pred)
                        val_loss_meter.update(val_loss.item())

                writer.add_scalar("loss/val_loss", val_loss_meter.avg, i + 1)
                logger.info("Iter %d Loss: %.4f" % (i + 1, val_loss_meter.avg))

                score, class_iou = running_metrics_val.get_scores()
                score_table.update(score)

                for k, v in score.items():
                    writer.add_scalar("val_metrics/{}".format(k), v, i + 1)
                score_table.print_table()

                class_table.update(class_iou)
                for k, v in class_iou.items():
                    writer.add_scalar("val_metrics/cls_{}".format(k), v, i + 1)
                class_table.print_table()

                val_loss_meter.reset()
                running_metrics_val.reset()

                if score["Mean_IoU"] >= best_iou:
                    best_iou = score["Mean_IoU"]
                    state = {
                        "epoch": i + 1,
                        "model_state": model.state_dict(),
                        "optimizer_state": optimizer.state_dict(),
                        "scheduler_state": scheduler.state_dict(),
                        "best_iou": best_iou,
                    }
                    save_path = os.path.join(
                        cfg["training"]["ckpt_path"],
                        "{}_{}_best_model.pkl".format(cfg["model"]["arch"], cfg["data"]["dataset"]),
                    )
                    torch.save(state, save_path)

            if (i + 1) == cfg["training"]["train_iters"]:
                flag = False
                break


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="config")
    parser.add_argument(
        "--config",
        nargs="?",
        type=str,
        default="configs/fcn8s_pascal.yml",
        help="Configuration file to use",
    )

    args = parser.parse_args()

    with open(args.config) as fp:
        # an explicit Loader is required; bare yaml.load() is deprecated and unsafe
        cfg = yaml.load(fp, Loader=yaml.SafeLoader)

    run_id = random.randint(1, 100000)
    logdir = os.path.join("runs", os.path.basename(args.config)[:-4], str(run_id))
    writer = SummaryWriter(logdir=logdir)

    print("RUNDIR: {}".format(logdir))
    shutil.copy(args.config, logdir)

    logger = get_logger(logdir)
    logger.info("Let the games begin")

    train(cfg, writer, logger)
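For orientation, train() pulls everything from a single YAML config. Below is a hedged sketch of that structure, written as the Python dict yaml.load() would return: every key is one the script above actually reads, while the concrete values (arch, dataset, paths, learning rate, scheduler name) are illustrative assumptions, not defaults shipped with the project.

# Hypothetical config mirroring the keys accessed in train(); values are
# placeholders, not the project's shipped configuration.
cfg_example = {
    "seed": 1337,                          # read via cfg.get("seed", 1337)
    "model": {"arch": "fcn8s"},
    "data": {
        "dataset": "pascal",
        "path": "/datasets/VOCdevkit/VOC2012",  # assumed local dataset path
        "train_split": "train",
        "val_split": "val",
        "img_rows": 256,
        "img_cols": 256,
    },
    "training": {
        "train_iters": 30000,
        "batch_size": 4,
        "n_workers": 4,
        "print_interval": 50,
        "val_interval": 500,
        "optimizer": {"name": "sgd", "lr": 1.0e-3, "momentum": 0.9},
        "lr_schedule": {"name": "constant_lr"},  # scheduler name is an assumption
        "resume": None,
        "ckpt_path": "checkpoints",
        "augmentations": None,
    },
}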
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

#pragma once

#include <atomic>  // for std::atomic

#include "../profiler.h"

class GCAllocateProfiler : public Profiler
{
public:
    GCAllocateProfiler() : Profiler(),
        _gcLOHAllocations(0),
        _gcPOHAllocations(0),
        _failures(0)
    {}

    virtual GUID GetClsid();
    virtual HRESULT STDMETHODCALLTYPE Initialize(IUnknown* pICorProfilerInfoUnk);
    virtual HRESULT STDMETHODCALLTYPE ObjectAllocated(ObjectID objectId, ClassID classId);
    virtual HRESULT STDMETHODCALLTYPE Shutdown();

private:
    std::atomic<int> _gcLOHAllocations;
    std::atomic<int> _gcPOHAllocations;
    std::atomic<int> _failures;
};
#
# Copyright 2021 Janick Bergeron <janick@bergeron.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import asyncio
import logging
import binascii
import json
import os
import platform
import struct
import sys
import time
import uuid

import User
import Workout
import Display
import aiotkinter

workoutSession = None
testMode = None

User.defineUser()

# Command-line switches: -d compresses the time/distance units so a full
# workout elapses quickly in debug runs; -Tt / -Td select a timed or a
# distance-based test workout.
for arg in sys.argv:
    if arg == "-d":
        User.secsInOneMin = 1
        User.metersInOneKm = 10
    if arg == "-Tt":
        testMode = 'Time'
    if arg == "-Td":
        testMode = 'Distance'

window = Display.MainDisplay(User.screenSize['X'], User.screenSize['Y'])

workoutSession = Workout.Session(window)

if testMode == "Time":
    workoutSession.createSplits("Normal", 30, None)
if testMode == "Distance":
    workoutSession.createSplits("Normal", None, 5000)

workoutSession.startSplits()

# Simulate a session by alternating idle (0, 0, ...) and active (2, 20, ...)
# samples into the session twice a second.
interval = 0.5
for i in range(10):
    time.sleep(interval)
    workoutSession.update(0, 0, 78)
for i in range(20):
    time.sleep(interval)
    workoutSession.update(2, 20, 90)
for i in range(6):
    time.sleep(interval)
    workoutSession.update(0, 0, 85)
for i in range(20):
    time.sleep(interval)
    workoutSession.update(2, 20, 90)
for i in range(20):
    time.sleep(interval)
    workoutSession.update(0, 0, 80)
time.sleep(20)
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''=================================================
@IDE    : PyCharm
@Author : Valuebai
@Date   : 2020/2/3 19:31
@Desc   : Main entry point
==================================================='''
import os
import sys

# Make the project root importable so `base` and friends resolve when run
# from the command line (fixes "No module named 'base'"); this must happen
# before the project imports below.
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.append(rootPath)

from base.configHttp import RunMethod
from data.get_data import GetData
from data.get_dependent_data import DependentData
from common.common_util import CommonUtil
from common.logConf import logger


class Main:
    def __init__(self):
        self.run_method = RunMethod()
        self.get_data = GetData()
        self.common_util = CommonUtil()

    def run(self):
        result = None
        # Total number of rows in the worksheet (i.e. number of test cases)
        rows = self.get_data.get_case_lines()
        print('Total rows in the worksheet (test cases):', rows)

        # Read the test-case Excel sheet, skipping the header row
        for i in range(1, rows):
            # Only execute cases whose "run" flag is set
            is_run = self.get_data.get_is_run(i)
            if is_run:
                url = self.get_data.get_url(i)
                method = self.get_data.get_request_method(i)
                expect_data = self.get_data.get_expect_data(i)  # expected value
                header = self.get_data.is_header(i)

                # Handle test-case dependencies
                depend_case = self.get_data.is_depend(i)
                if depend_case is not None:
                    self.depend_data = DependentData(depend_case)
                    # Response data of the case this one depends on
                    depend_response_data = self.depend_data.get_data_from_key(i)
                    # The field the dependent value feeds into
                    depend_key = self.get_data.get_depend_field(i)
                    # NOTE: this self-assignment looks like a bug carried over
                    # from the original; it presumably should inject the
                    # dependent value into the request payload instead.
                    depend_response_data[depend_key] = depend_response_data

                # Fire the request through the wrapped requests client
                result = self.run_method.run_main(method, url, expect_data, header)
                # TODO: a header-related error needs handling here; using
                # Imooc-11 as a header dependency raises, so after handling it,
                # remind callers to pass {} or another suitable type.

                # Check whether the expected result is contained in the actual one
                if self.common_util.is_contain(expect_data, result):
                    self.get_data.write_result(i, 'pass')
                    print('Test #%d: pass' % i)
                else:
                    self.get_data.write_result(i, 'fail')
                    print('Test #%d: fail' % i)
        return result


if __name__ == "__main__":
    logger.info('INFO logging...')
    result = Main().run()
    print(result)
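The pass/fail decision above hinges on CommonUtil.is_contain. As a hedged illustration of the contract that call implies (the real implementation in common/common_util.py may well differ, e.g. plain substring matching), here is one self-contained reading of it:

import json

def is_contain(expect_data, actual_result):
    """Illustrative sketch only: report whether the expected payload is
    contained in the actual response. Tries a dict-subset comparison on JSON
    bodies and falls back to substring matching; these are assumed semantics,
    not the project's verified implementation."""
    try:
        expect = json.loads(expect_data) if isinstance(expect_data, str) else expect_data
        actual = json.loads(actual_result) if isinstance(actual_result, str) else actual_result
    except (TypeError, ValueError):
        # Unparseable payloads: fall back to plain substring matching.
        return str(expect_data) in str(actual_result)
    if isinstance(expect, dict) and isinstance(actual, dict):
        # Every expected key/value pair must appear in the actual response.
        return all(actual.get(k) == v for k, v in expect.items())
    return str(expect_data) in str(actual_result)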
module.controller('UserRoleMappingCtrl', function($scope, $http, realm, user, clients, client, Notifications,
                                                  RealmRoleMapping, ClientRoleMapping,
                                                  AvailableRealmRoleMapping, AvailableClientRoleMapping,
                                                  CompositeRealmRoleMapping, CompositeClientRoleMapping) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.selectedRealmRoles = [];
    $scope.selectedRealmMappings = [];
    $scope.realmMappings = [];
    $scope.clients = clients;
    $scope.client = client;
    $scope.clientRoles = [];
    $scope.clientComposite = [];
    $scope.selectedClientRoles = [];
    $scope.selectedClientMappings = [];
    $scope.clientMappings = [];
    $scope.dummymodel = [];

    $scope.realmMappings = RealmRoleMapping.query({realm : realm.realm, userId : user.id});
    $scope.realmRoles = AvailableRealmRoleMapping.query({realm : realm.realm, userId : user.id});
    $scope.realmComposite = CompositeRealmRoleMapping.query({realm : realm.realm, userId : user.id});

    $scope.addRealmRole = function() {
        var roles = $scope.selectedRealmRoles;
        $scope.selectedRealmRoles = [];
        $http.post(authUrl + '/admin/realms/' + realm.realm + '/users/' + user.id + '/role-mappings/realm',
                   roles).then(function() {
            $scope.realmMappings = RealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmRoles = AvailableRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmComposite = CompositeRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.selectedRealmMappings = [];
            $scope.selectRealmRoles = [];
            if ($scope.targetClient) {
                console.log('load available');
                $scope.clientComposite = CompositeClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.clientRoles = AvailableClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.clientMappings = ClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.selectedClientRoles = [];
                $scope.selectedClientMappings = [];
            }
            Notifications.success("Role mappings updated.");
        });
    };

    $scope.deleteRealmRole = function() {
        $http.delete(authUrl + '/admin/realms/' + realm.realm + '/users/' + user.id + '/role-mappings/realm',
                     {data : $scope.selectedRealmMappings, headers : {"content-type" : "application/json"}}).then(function() {
            $scope.realmMappings = RealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmRoles = AvailableRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmComposite = CompositeRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.selectedRealmMappings = [];
            $scope.selectRealmRoles = [];
            if ($scope.targetClient) {
                console.log('load available');
                $scope.clientComposite = CompositeClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.clientRoles = AvailableClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.clientMappings = ClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
                $scope.selectedClientRoles = [];
                $scope.selectedClientMappings = [];
            }
            Notifications.success("Role mappings updated.");
        });
    };

    $scope.addClientRole = function() {
        $http.post(authUrl + '/admin/realms/' + realm.realm + '/users/' + user.id + '/role-mappings/clients/' + $scope.targetClient.id,
                   $scope.selectedClientRoles).then(function() {
            $scope.clientMappings = ClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientRoles = AvailableClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientComposite = CompositeClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.selectedClientRoles = [];
            $scope.selectedClientMappings = [];
            $scope.realmComposite = CompositeRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmRoles = AvailableRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            Notifications.success("Role mappings updated.");
        });
    };

    $scope.deleteClientRole = function() {
        $http.delete(authUrl + '/admin/realms/' + realm.realm + '/users/' + user.id + '/role-mappings/clients/' + $scope.targetClient.id,
                     {data : $scope.selectedClientMappings, headers : {"content-type" : "application/json"}}).then(function() {
            $scope.clientMappings = ClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientRoles = AvailableClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientComposite = CompositeClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.selectedClientRoles = [];
            $scope.selectedClientMappings = [];
            $scope.realmComposite = CompositeRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            $scope.realmRoles = AvailableRealmRoleMapping.query({realm : realm.realm, userId : user.id});
            Notifications.success("Role mappings updated.");
        });
    };

    $scope.changeClient = function() {
        console.log('changeClient');
        if ($scope.targetClient) {
            console.log('load available');
            $scope.clientComposite = CompositeClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientRoles = AvailableClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
            $scope.clientMappings = ClientRoleMapping.query({realm : realm.realm, userId : user.id, client : $scope.targetClient.id});
        } else {
            $scope.clientRoles = null;
            $scope.clientMappings = null;
            $scope.clientComposite = null;
        }
        $scope.selectedClientRoles = [];
        $scope.selectedClientMappings = [];
    };
});

module.controller('UserSessionsCtrl', function($scope, realm, user, sessions, UserSessions, UserLogout, UserSessionLogout, Notifications) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.sessions = sessions;

    $scope.logoutAll = function() {
        UserLogout.save({realm : realm.realm, user: user.id}, function () {
            Notifications.success('Logged out user in all clients');
            UserSessions.query({realm: realm.realm, user: user.id}, function(updated) {
                $scope.sessions = updated;
            })
        });
    };

    $scope.logoutSession = function(sessionId) {
        console.log('here in logoutSession');
        UserSessionLogout.delete({realm : realm.realm, session: sessionId}, function() {
            UserSessions.query({realm: realm.realm, user: user.id}, function(updated) {
                $scope.sessions = updated;
                Notifications.success('Logged out session');
            })
        });
    }
});

module.controller('UserFederatedIdentityCtrl', function($scope, $location, realm, user, federatedIdentities, UserFederatedIdentity, Notifications, Dialog) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.federatedIdentities = federatedIdentities;

    $scope.hasAnyProvidersToCreate = function() {
        return realm.identityProviders.length - $scope.federatedIdentities.length > 0;
    }

    $scope.removeProviderLink = function(providerLink) {
        console.log("Removing provider link: " + providerLink.identityProvider);
        Dialog.confirmDelete(providerLink.identityProvider, 'Identity Provider Link', function() {
            UserFederatedIdentity.remove({ realm: realm.realm, user: user.id, provider: providerLink.identityProvider }, function() {
                Notifications.success("The provider link has been deleted.");
                var indexToRemove = $scope.federatedIdentities.indexOf(providerLink);
                $scope.federatedIdentities.splice(indexToRemove, 1);
            });
        });
    }
});

module.controller('UserFederatedIdentityAddCtrl', function($scope, $location, realm, user, federatedIdentities, UserFederatedIdentity, Notifications) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.federatedIdentity = {};

    var getAvailableProvidersToCreate = function() {
        var realmProviders = [];
        for (var i = 0; i < realm.identityProviders.length; i++) {
            var providerAlias = realm.identityProviders[i].alias;
            realmProviders.push(providerAlias);
        }

        for (var i = 0; i < federatedIdentities.length; i++) {
            var providerAlias = federatedIdentities[i].identityProvider;
            var index = realmProviders.indexOf(providerAlias);
            realmProviders.splice(index, 1);
        }
        return realmProviders;
    }
    $scope.availableProvidersToCreate = getAvailableProvidersToCreate();

    $scope.save = function() {
        UserFederatedIdentity.save({
            realm : realm.realm,
            user: user.id,
            provider: $scope.federatedIdentity.identityProvider
        }, $scope.federatedIdentity, function(data, headers) {
            $location.url("/realms/" + realm.realm + '/users/' + $scope.user.id + '/federated-identity');
            Notifications.success("Provider link has been created.");
        });
    };

    $scope.cancel = function() {
        $location.url("/realms/" + realm.realm + '/users/' + $scope.user.id + '/federated-identity');
    };
});

module.controller('UserConsentsCtrl', function($scope, realm, user, userConsents, UserConsents, Notifications) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.userConsents = userConsents;

    $scope.revokeConsent = function(clientId) {
        UserConsents.delete({realm : realm.realm, user: user.id, client: clientId }, function () {
            UserConsents.query({realm: realm.realm, user: user.id}, function(updated) {
                $scope.userConsents = updated;
            })
            Notifications.success('Grant revoked successfully');
        }, function() {
            Notifications.error("Grant couldn't be revoked");
        });
        console.log("Revoke consent " + clientId);
    }
});

module.controller('UserOfflineSessionsCtrl', function($scope, $location, realm, user, client, offlineSessions) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.client = client;
    $scope.offlineSessions = offlineSessions;

    $scope.cancel = function() {
        $location.url("/realms/" + realm.realm + '/users/' + user.id + '/consents');
    };
});

module.controller('UserListCtrl', function($scope, realm, User, UserSearchState, UserImpersonation, BruteForce, Notifications, $route, Dialog) {
    $scope.init = function() {
        $scope.realm = realm;
        UserSearchState.query.realm = realm.realm;
        $scope.query = UserSearchState.query;
        if (!UserSearchState.isFirstSearch) $scope.searchQuery();
    };

    $scope.impersonate = function(userId) {
        UserImpersonation.save({realm : realm.realm, user: userId}, function (data) {
            if (data.sameRealm) {
                window.location = data.redirect;
            } else {
                window.open(data.redirect, "_blank");
            }
        });
    };

    $scope.unlockUsers = function() {
        BruteForce.delete({realm: realm.realm}, function(data) {
            Notifications.success("Any temporarily locked users are now unlocked.");
        });
    }

    $scope.firstPage = function() {
        $scope.query.first = 0;
        $scope.searchQuery();
    }

    $scope.previousPage = function() {
        $scope.query.first -= parseInt($scope.query.max);
        if ($scope.query.first < 0) {
            $scope.query.first = 0;
        }
        $scope.searchQuery();
    }

    $scope.nextPage = function() {
        $scope.query.first += parseInt($scope.query.max);
        $scope.searchQuery();
    }

    $scope.searchQuery = function() {
        console.log("query.search: " + $scope.query.search);
        $scope.searchLoaded = false;

        $scope.users = User.query($scope.query, function() {
            $scope.searchLoaded = true;
            $scope.lastSearch = $scope.query.search;
            UserSearchState.isFirstSearch = false;
        });
    };

    $scope.removeUser = function(user) {
        Dialog.confirmDelete(user.id, 'user', function() {
            user.$remove({
                realm : realm.realm,
                userId : user.id
            }, function() {
                $route.reload();

                if ($scope.users.length === 1 && $scope.query.first > 0) {
                    $scope.previousPage();
                }

                Notifications.success("The user has been deleted.");
            }, function() {
                Notifications.error("User couldn't be deleted");
            });
        });
    };
});

module.controller('UserTabCtrl', function($scope, $location, Dialog, Notifications, Current) {
    $scope.removeUser = function() {
        Dialog.confirmDelete($scope.user.id, 'user', function() {
            $scope.user.$remove({
                realm : Current.realm.realm,
                userId : $scope.user.id
            }, function() {
                $location.url("/realms/" + Current.realm.realm + "/users");
                Notifications.success("The user has been deleted.");
            }, function() {
                Notifications.error("User couldn't be deleted");
            });
        });
    };
});

module.controller('UserDetailCtrl', function($scope, realm, user, BruteForceUser, User, Components, UserImpersonation, RequiredActions, UserStorageOperations, $location, $http, Dialog, Notifications) {
    $scope.realm = realm;
    $scope.create = !user.id;
    $scope.editUsername = $scope.create || $scope.realm.editUsernameAllowed;

    if ($scope.create) {
        $scope.user = { enabled: true, attributes: {} }
    } else {
        if (!user.attributes) {
            user.attributes = {}
        }
        convertAttributeValuesToString(user);

        $scope.user = angular.copy(user);

        $scope.impersonate = function() {
            UserImpersonation.save({realm : realm.realm, user: $scope.user.id}, function (data) {
                if (data.sameRealm) {
                    window.location = data.redirect;
                } else {
                    window.open(data.redirect, "_blank");
                }
            });
        };

        if (user.federationLink) {
            console.log("federationLink is not null. It is " + user.federationLink);
            if ($scope.access.viewRealm) {
                Components.get({realm: realm.realm, componentId: user.federationLink}, function (link) {
                    $scope.federationLinkName = link.name;
                    $scope.federationLink = "#/realms/" + realm.realm + "/user-storage/providers/" + link.providerId + "/" + link.id;
                });
            } else {
                // KEYCLOAK-4328
                UserStorageOperations.simpleName.get({realm: realm.realm, componentId: user.federationLink}, function (link) {
                    $scope.federationLinkName = link.name;
                    $scope.federationLink = $location.absUrl();
                })
            }
        } else {
            console.log("federationLink is null");
        }

        if (user.origin) {
            if ($scope.access.viewRealm) {
                Components.get({realm: realm.realm, componentId: user.origin}, function (link) {
                    $scope.originName = link.name;
                    $scope.originLink = "#/realms/" + realm.realm + "/user-storage/providers/" + link.providerId + "/" + link.id;
                })
            } else {
                // KEYCLOAK-4328
                UserStorageOperations.simpleName.get({realm: realm.realm, componentId: user.origin}, function (link) {
                    $scope.originName = link.name;
                    $scope.originLink = $location.absUrl();
                })
            }
        } else {
            console.log("origin is null");
        }

        console.log('realm brute force? ' + realm.bruteForceProtected);
        $scope.temporarilyDisabled = false;

        var isDisabled = function () {
            BruteForceUser.get({realm: realm.realm, userId: user.id}, function(data) {
                console.log('here in isDisabled ' + data.disabled);
                $scope.temporarilyDisabled = data.disabled;
            });
        };

        console.log("check if disabled");
        isDisabled();

        $scope.unlockUser = function() {
            BruteForceUser.delete({realm: realm.realm, userId: user.id}, function(data) {
                isDisabled();
            });
        }
    }

    $scope.changed = false;

    if (user.requiredActions) {
        for (var i = 0; i < user.requiredActions.length; i++) {
            console.log("user require action: " + user.requiredActions[i]);
        }
    }

    // ID - Name map for required actions. IDs are enum names.
    RequiredActions.query({realm: realm.realm}, function(data) {
        $scope.userReqActionList = [];
        for (var i = 0; i < data.length; i++) {
            console.log("listed required action: " + data[i].name);
            if (data[i].enabled) {
                var item = data[i];
                $scope.userReqActionList.push(item);
            }
        }
        console.log("---------------------");
        console.log("ng-model: user.requiredActions=" + JSON.stringify($scope.user.requiredActions));
        console.log("---------------------");
        console.log("ng-repeat: userReqActionList=" + JSON.stringify($scope.userReqActionList));
        console.log("---------------------");
    });

    $scope.$watch('user', function() {
        if (!angular.equals($scope.user, user)) {
            $scope.changed = true;
        }
    }, true);

    $scope.save = function() {
        convertAttributeValuesToLists();

        if ($scope.create) {
            User.save({ realm: realm.realm }, $scope.user, function (data, headers) {
                $scope.changed = false;
                convertAttributeValuesToString($scope.user);
                user = angular.copy($scope.user);

                var l = headers().location;
                console.debug("Location == " + l);
                var id = l.substring(l.lastIndexOf("/") + 1);

                $location.url("/realms/" + realm.realm + "/users/" + id);
                Notifications.success("The user has been created.");
            });
        } else {
            User.update({ realm: realm.realm, userId: $scope.user.id }, $scope.user, function () {
                $scope.changed = false;
                convertAttributeValuesToString($scope.user);
                user = angular.copy($scope.user);
                Notifications.success("Your changes have been saved to the user.");
            });
        }
    };

    function convertAttributeValuesToLists() {
        var attrs = $scope.user.attributes;
        for (var attribute in attrs) {
            if (typeof attrs[attribute] === "string") {
                var attrVals = attrs[attribute].split("##");
                attrs[attribute] = attrVals;
            }
        }
    }

    function convertAttributeValuesToString(user) {
        var attrs = user.attributes;
        for (var attribute in attrs) {
            if (typeof attrs[attribute] === "object") {
                var attrVals = attrs[attribute].join("##");
                attrs[attribute] = attrVals;
            }
        }
    }

    $scope.reset = function() {
        $scope.user = angular.copy(user);
        $scope.changed = false;
    };

    $scope.cancel = function() {
        $location.url("/realms/" + realm.realm + "/users");
    };

    $scope.addAttribute = function() {
        $scope.user.attributes[$scope.newAttribute.key] = $scope.newAttribute.value;
        delete $scope.newAttribute;
    }

    $scope.removeAttribute = function(key) {
        delete $scope.user.attributes[key];
    }
});

module.controller('UserCredentialsCtrl', function($scope, realm, user, $route, RequiredActions, User, UserExecuteActionsEmail, UserCredentials, Notifications, Dialog, TimeUnit2) {
    console.log('UserCredentialsCtrl');
    $scope.realm = realm;
    $scope.user = angular.copy(user);
    $scope.temporaryPassword = true;

    $scope.isTotp = false;
    if (!!user.totp) {
        $scope.isTotp = user.totp;
    }

    // ID - Name map for required actions. IDs are enum names.
    RequiredActions.query({realm: realm.realm}, function(data) {
        $scope.userReqActionList = [];
        for (var i = 0; i < data.length; i++) {
            console.log("listed required action: " + data[i].name);
            if (data[i].enabled) {
                var item = data[i];
                $scope.userReqActionList.push(item);
            }
        }
    });

    $scope.resetPassword = function() {
        // hit enter without entering both fields - ignore
        if (!$scope.passwordAndConfirmPasswordEntered()) return;

        if ($scope.pwdChange) {
            if ($scope.password != $scope.confirmPassword) {
                Notifications.error("Password and confirmation do not match.");
                return;
            }
        }

        var msgTitle = 'Change password';
        var msg = 'Are you sure you want to change the user\'s password?';

        Dialog.confirm(msgTitle, msg, function() {
            UserCredentials.resetPassword({ realm: realm.realm, userId: user.id },
                                          { type : "password", value : $scope.password, temporary: $scope.temporaryPassword }, function() {
                Notifications.success("The password has been reset");
                $scope.password = null;
                $scope.confirmPassword = null;
                $route.reload();
            });
        }, function() {
            $scope.password = null;
            $scope.confirmPassword = null;
        });
    };

    $scope.passwordAndConfirmPasswordEntered = function() {
        return $scope.password && $scope.confirmPassword;
    }

    $scope.disableCredentialTypes = function() {
        Dialog.confirm('Disable credentials', 'Are you sure you want to disable these credentials for the user?', function() {
            UserCredentials.disableCredentialTypes({ realm: realm.realm, userId: user.id }, $scope.disableableCredentialTypes, function() {
                $route.reload();
                Notifications.success("Credentials disabled");
            }, function() {
                Notifications.error("Failed to disable credentials");
            });
        });
    };

    $scope.emailActions = [];
    $scope.emailActionsTimeout = TimeUnit2.asUnit(realm.actionTokenGeneratedByAdminLifespan);
    $scope.disableableCredentialTypes = [];

    $scope.sendExecuteActionsEmail = function() {
        if ($scope.changed) {
            Dialog.message("Cannot send email", "You must save your current changes before you can send an email");
            return;
        }
        Dialog.confirm('Send Email', 'Are you sure you want to send email to user?', function() {
            UserExecuteActionsEmail.update({ realm: realm.realm, userId: user.id, lifespan: $scope.emailActionsTimeout.toSeconds() },
                                           $scope.emailActions, function() {
                Notifications.success("Email sent to user");
                $scope.emailActions = [];
            }, function() {
                Notifications.error("Failed to send email to user");
            });
        });
    };

    $scope.$watch('user', function() {
        if (!angular.equals($scope.user, user)) {
            $scope.userChange = true;
        } else {
            $scope.userChange = false;
        }
    }, true);

    $scope.$watch('password', function() {
        if (!!$scope.password) {
            $scope.pwdChange = true;
        } else {
            $scope.pwdChange = false;
        }
    }, true);

    $scope.reset = function() {
        $scope.password = "";
        $scope.confirmPassword = "";
        $scope.user = angular.copy(user);
        $scope.isTotp = false;
        if (!!user.totp) {
            $scope.isTotp = user.totp;
        }
        $scope.pwdChange = false;
        $scope.userChange = false;
    };
});

module.controller('UserFederationCtrl', function($scope, $location, $route, realm, serverInfo, Components, Notifications, Dialog) {
    console.log('UserFederationCtrl ++++****');
    $scope.realm = realm;
    $scope.providers = serverInfo.componentTypes['org.keycloak.storage.UserStorageProvider'];
    $scope.instancesLoaded = false;

    if (!$scope.providers) $scope.providers = [];

    $scope.addProvider = function(provider) {
        console.log('Add provider: ' + provider.id);
        $location.url("/create/user-storage/" + realm.realm + "/providers/" + provider.id);
    };

    $scope.getInstanceLink = function(instance) {
        return "/realms/" + realm.realm + "/user-storage/providers/" + instance.providerId + "/" + instance.id;
    }

    $scope.getInstanceName = function(instance) {
        return instance.name;
    }

    $scope.getInstanceProvider = function(instance) {
        return instance.providerId;
    }

    $scope.isProviderEnabled = function(instance) {
        return !instance.config['enabled'] || instance.config['enabled'][0] == 'true';
    }

    $scope.getInstancePriority = function(instance) {
        if (!instance.config['priority']) {
            console.log('getInstancePriority is undefined');
        }
        return instance.config['priority'][0];
    }

    Components.query({realm: realm.realm, parent: realm.id, type: 'org.keycloak.storage.UserStorageProvider'}, function(data) {
        $scope.instances = data;
        $scope.instancesLoaded = true;
    });

    $scope.removeInstance = function(instance) {
        Dialog.confirmDelete(instance.name, 'user storage provider', function() {
            Components.remove({ realm : realm.realm, componentId : instance.id }, function() {
                $route.reload();
                Notifications.success("The provider has been deleted.");
            });
        });
    };
});

module.controller('GenericUserStorageCtrl', function($scope, $location, Notifications, $route, Dialog, realm, serverInfo, instance, providerId, Components, UserStorageOperations) {
    console.log('GenericUserStorageCtrl');
    console.log('providerId: ' + providerId);
    $scope.create = !instance.providerId;
    console.log('create: ' + $scope.create);

    var providers = serverInfo.componentTypes['org.keycloak.storage.UserStorageProvider'];
    console.log('providers length ' + providers.length);

    var providerFactory = null;
    for (var i = 0; i < providers.length; i++) {
        var p = providers[i];
        console.log('provider: ' + p.id);
        if (p.id == providerId) {
            $scope.providerFactory = p;
            providerFactory = p;
            break;
        }
    }
    $scope.showSync = false;
    $scope.changed = false;

    console.log("providerFactory: " + providerFactory.id);

    function initUserStorageSettings() {
        if ($scope.create) {
            $scope.changed = true;
            instance.name = providerFactory.id;
            instance.providerId = providerFactory.id;
            instance.providerType = 'org.keycloak.storage.UserStorageProvider';
            instance.parentId = realm.id;
            instance.config = {};
            instance.config['priority'] = ["0"];
            instance.config['enabled'] = ["true"];

            $scope.fullSyncEnabled = false;
            $scope.changedSyncEnabled = false;
            if (providerFactory.metadata.synchronizable) {
                instance.config['fullSyncPeriod'] = ['-1'];
                instance.config['changedSyncPeriod'] = ['-1'];
            }
            instance.config['cachePolicy'] = ['DEFAULT'];
            instance.config['evictionDay'] = [''];
            instance.config['evictionHour'] = [''];
            instance.config['evictionMinute'] = [''];
            instance.config['maxLifespan'] = [''];
            if (providerFactory.properties) {
                for (var i = 0; i < providerFactory.properties.length; i++) {
                    var configProperty = providerFactory.properties[i];
                    if (configProperty.defaultValue) {
                        instance.config[configProperty.name] = [configProperty.defaultValue];
                    } else {
                        instance.config[configProperty.name] = [''];
                    }
                }
            }
        } else {
            $scope.changed = false;
            $scope.fullSyncEnabled = (instance.config['fullSyncPeriod'] && instance.config['fullSyncPeriod'][0] > 0);
            $scope.changedSyncEnabled = (instance.config['changedSyncPeriod'] && instance.config['changedSyncPeriod'][0] > 0);
            if (providerFactory.metadata.synchronizable) {
                if (!instance.config['fullSyncPeriod']) {
                    console.log('setting to -1');
                    instance.config['fullSyncPeriod'] = ['-1'];
                }
                if (!instance.config['changedSyncPeriod']) {
                    console.log('setting to -1');
                    instance.config['changedSyncPeriod'] = ['-1'];
                }
            }
            if (!instance.config['enabled']) {
                instance.config['enabled'] = ['true'];
            }
            if (!instance.config['cachePolicy']) {
                instance.config['cachePolicy'] = ['DEFAULT'];
            }
            if (!instance.config['evictionDay']) {
                instance.config['evictionDay'] = [''];
            }
            if (!instance.config['evictionHour']) {
                instance.config['evictionHour'] = [''];
            }
            if (!instance.config['evictionMinute']) {
                instance.config['evictionMinute'] = [''];
            }
            if (!instance.config['maxLifespan']) {
                instance.config['maxLifespan'] = [''];
            }
            if (!instance.config['priority']) {
                instance.config['priority'] = ['0'];
            }
            if (providerFactory.properties) {
                for (var i = 0; i < providerFactory.properties.length; i++) {
                    var configProperty = providerFactory.properties[i];
                    if (!instance.config[configProperty.name]) {
                        instance.config[configProperty.name] = [''];
                    }
                }
            }
        }

        if (providerFactory.metadata.synchronizable) {
            if (instance.config && instance.config['importEnabled']) {
                $scope.showSync = instance.config['importEnabled'][0] == 'true';
            } else {
                $scope.showSync = true;
            }
        }
    }

    initUserStorageSettings();
    $scope.instance = angular.copy(instance);
    $scope.realm = realm;

    $scope.$watch('instance', function() {
        if (!angular.equals($scope.instance, instance)) {
            $scope.changed = true;
        }
    }, true);

    $scope.$watch('fullSyncEnabled', function(newVal, oldVal) {
        if (oldVal == newVal) {
            return;
        }
        $scope.instance.config['fullSyncPeriod'][0] = $scope.fullSyncEnabled ? "604800" : "-1";
        $scope.changed = true;
    });

    $scope.$watch('changedSyncEnabled', function(newVal, oldVal) {
        if (oldVal == newVal) {
            return;
        }
        $scope.instance.config['changedSyncPeriod'][0] = $scope.changedSyncEnabled ? "86400" : "-1";
        $scope.changed = true;
    });

    $scope.save = function() {
        console.log('save provider');
        $scope.changed = false;
        if ($scope.create) {
            console.log('saving new provider');
            Components.save({realm: realm.realm}, $scope.instance, function (data, headers) {
                var l = headers().location;
                var id = l.substring(l.lastIndexOf("/") + 1);
                $location.url("/realms/" + realm.realm + "/user-storage/providers/" + $scope.instance.providerId + "/" + id);
                Notifications.success("The provider has been created.");
            });
        } else {
            console.log('update existing provider');
            Components.update({realm: realm.realm, componentId: instance.id}, $scope.instance, function () {
                $route.reload();
                Notifications.success("The provider has been updated.");
            });
        }
    };

    $scope.reset = function() {
        $route.reload();
    };

    $scope.cancel = function() {
        console.log('cancel');
        if ($scope.create) {
            $location.url("/realms/" + realm.realm + "/user-federation");
        } else {
            $route.reload();
        }
    };

    $scope.triggerFullSync = function() {
        console.log('GenericCtrl: triggerFullSync');
        triggerSync('triggerFullSync');
    }

    $scope.triggerChangedUsersSync = function() {
        console.log('GenericCtrl: triggerChangedUsersSync');
        triggerSync('triggerChangedUsersSync');
    }

    function triggerSync(action) {
        UserStorageOperations.sync.save({ action: action, realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Sync of users finished successfully. " + syncResult.status);
        }, function() {
            $route.reload();
            Notifications.error("Error during sync of users");
        });
    }

    $scope.removeImportedUsers = function() {
        UserStorageOperations.removeImportedUsers.save({ realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Remove imported users finished successfully.");
        }, function() {
            $route.reload();
            Notifications.error("Error during remove");
        });
    };

    $scope.unlinkUsers = function() {
        UserStorageOperations.unlinkUsers.save({ realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Unlink of users finished successfully.");
        }, function() {
            $route.reload();
            Notifications.error("Error during unlink");
        });
    };
});

function removeGroupMember(groups, member) {
    for (var j = 0; j < groups.length; j++) {
        if (member.path == groups[j].path) {
            groups.splice(j, 1);
            break;
        }
        if (groups[j].subGroups && groups[j].subGroups.length > 0) {
            removeGroupMember(groups[j].subGroups, member);
        }
    }
}

module.controller('UserGroupMembershipCtrl', function($scope, $route, realm, groups, user, UserGroupMembership, UserGroupMapping, Notifications, $location, Dialog) {
    $scope.realm = realm;
    $scope.user = user;
    $scope.groupList = groups;
    $scope.selectedGroup = null;
    $scope.tree = [];

    UserGroupMembership.query({realm: realm.realm, userId: user.id}, function(data) {
        $scope.groupMemberships = data;
        for (var i = 0; i < data.length; i++) {
            var member = data[i];
            removeGroupMember(groups, member);
        }
    });

    $scope.joinGroup = function() {
        if (!$scope.tree.currentNode) {
            Notifications.error('Please select a group to add');
            return;
        }
        UserGroupMapping.update({realm: realm.realm, userId: user.id, groupId: $scope.tree.currentNode.id}, function() {
            Notifications.success('Added group membership');
            $route.reload();
        });
    };

    $scope.leaveGroup = function() {
        if (!$scope.selectedGroup) {
            return;
        }
        UserGroupMapping.remove({realm: realm.realm, userId: user.id, groupId: $scope.selectedGroup.id}, function() {
            Notifications.success('Removed group membership');
            $route.reload();
        });
    };

    var isLeaf = function(node) {
        return node.id != "realm" && (!node.subGroups || node.subGroups.length == 0);
    };

    $scope.getGroupClass = function(node) {
        if (node.id == "realm") {
            return 'pficon pficon-users';
        }
        if (isLeaf(node)) {
            return 'normal';
        }
        if (node.subGroups.length && node.collapsed) return 'collapsed';
        if (node.subGroups.length && !node.collapsed) return 'expanded';
        return 'collapsed';
    }

    $scope.getSelectedClass = function(node) {
        if (node.selected) {
            return 'selected';
        } else if ($scope.cutNode && $scope.cutNode.id == node.id) {
            return 'cut';
        }
        return undefined;
    }
});

module.controller('LDAPUserStorageCtrl', function($scope, $location, Notifications, $route, Dialog, realm, serverInfo, instance, Components, UserStorageOperations, RealmLDAPConnectionTester) {
    console.log('LDAPUserStorageCtrl');
    var providerId = 'ldap';
    console.log('providerId: ' + providerId);
    $scope.create = !instance.providerId;
    console.log('create: ' + $scope.create);

    var providers = serverInfo.componentTypes['org.keycloak.storage.UserStorageProvider'];
    console.log('providers length ' + providers.length);

    var providerFactory = null;
    for (var i = 0; i < providers.length; i++) {
        var p = providers[i];
        console.log('provider: ' + p.id);
        if (p.id == providerId) {
            $scope.providerFactory = p;
            providerFactory = p;
            break;
        }
    }
    $scope.provider = instance;
    $scope.showSync = false;

    if (serverInfo.profileInfo.name == 'community') {
        $scope.ldapVendors = [
            {"id": "ad", "name": "Active Directory"},
            {"id": "rhds", "name": "Red Hat Directory Server"},
            {"id": "tivoli", "name": "Tivoli"},
            {"id": "edirectory", "name": "Novell eDirectory"},
            {"id": "other", "name": "Other"}
        ];
    } else {
        $scope.ldapVendors = [
            {"id": "ad", "name": "Active Directory"},
            {"id": "rhds", "name": "Red Hat Directory Server"}
        ];
    }

    $scope.authTypes = [
        {"id": "none", "name": "none"},
        {"id": "simple", "name": "simple"}
    ];

    $scope.searchScopes = [
        {"id": "1", "name": "One Level"},
        {"id": "2", "name": "Subtree"}
    ];

    $scope.useTruststoreOptions = [
        {"id": "always", "name": "Always"},
        {"id": "ldapsOnly", "name": "Only for ldaps"},
        {"id": "never", "name": "Never"}
    ];

    var DEFAULT_BATCH_SIZE = "1000";

    console.log("providerFactory: " + providerFactory.id);

    $scope.changed = false;

    function initUserStorageSettings() {
        if ($scope.create) {
            $scope.changed = true;
            instance.name = 'ldap';
            instance.providerId = 'ldap';
            instance.providerType = 'org.keycloak.storage.UserStorageProvider';
            instance.parentId = realm.id;
            instance.config = {};
            instance.config['enabled'] = ["true"];
            instance.config['priority'] = ["0"];

            $scope.fullSyncEnabled = false;
            $scope.changedSyncEnabled = false;
            instance.config['fullSyncPeriod'] = ['-1'];
            instance.config['changedSyncPeriod'] = ['-1'];
            instance.config['cachePolicy'] = ['DEFAULT'];
            instance.config['evictionDay'] = [''];
            instance.config['evictionHour'] = [''];
            instance.config['evictionMinute'] = [''];
            instance.config['maxLifespan'] = [''];
            instance.config['batchSizeForSync'] = [DEFAULT_BATCH_SIZE];
            if (providerFactory.properties) {
                for (var i = 0; i < providerFactory.properties.length; i++) {
                    var configProperty = providerFactory.properties[i];
                    if (configProperty.defaultValue) {
                        instance.config[configProperty.name] = [configProperty.defaultValue];
                    } else {
                        instance.config[configProperty.name] = [''];
                    }
                }
            }
        } else {
            $scope.changed = false;
            $scope.fullSyncEnabled = (instance.config['fullSyncPeriod'] && instance.config['fullSyncPeriod'][0] > 0);
            $scope.changedSyncEnabled = (instance.config['changedSyncPeriod'] && instance.config['changedSyncPeriod'][0] > 0);
            if (!instance.config['fullSyncPeriod']) {
                console.log('setting to -1');
                instance.config['fullSyncPeriod'] = ['-1'];
            }
            if (!instance.config['enabled']) {
                instance.config['enabled'] = ['true'];
            }
            if (!instance.config['changedSyncPeriod']) {
                console.log('setting to -1');
                instance.config['changedSyncPeriod'] = ['-1'];
            }
            if (!instance.config['cachePolicy']) {
                instance.config['cachePolicy'] = ['DEFAULT'];
            }
            if (!instance.config['evictionDay']) {
                instance.config['evictionDay'] = [''];
            }
            if (!instance.config['evictionHour']) {
                instance.config['evictionHour'] = [''];
            }
            if (!instance.config['evictionMinute']) {
                instance.config['evictionMinute'] = [''];
            }
            if (!instance.config['maxLifespan']) {
                instance.config['maxLifespan'] = [''];
            }
            if (!instance.config['priority']) {
                instance.config['priority'] = ['0'];
            }
            if (!instance.config['importEnabled']) {
                instance.config['importEnabled'] = ['true'];
            }
            if (providerFactory.properties) {
                for (var i = 0; i < providerFactory.properties.length; i++) {
                    var configProperty = providerFactory.properties[i];
                    if (!instance.config[configProperty.name]) {
                        if (configProperty.defaultValue) {
                            instance.config[configProperty.name] = [configProperty.defaultValue];
                        } else {
                            instance.config[configProperty.name] = [''];
                        }
                    }
                }
            }
            for (var i = 0; i < $scope.ldapVendors.length; i++) {
                if ($scope.ldapVendors[i].id === instance.config['vendor'][0]) {
                    $scope.vendorName = $scope.ldapVendors[i].name;
                }
            }
        }

        if (instance.config && instance.config['importEnabled']) {
            $scope.showSync = instance.config['importEnabled'][0] == 'true';
        } else {
            $scope.showSync = true;
        }

        $scope.lastVendor = instance.config['vendor'][0];
    }
    initUserStorageSettings();
    $scope.instance = angular.copy(instance);
    $scope.realm = realm;

    $scope.$watch('instance', function() {
        if (!angular.equals($scope.instance, instance)) {
            $scope.changed = true;
        }

        if (!angular.equals($scope.instance.config['vendor'][0], $scope.lastVendor)) {
            console.log("LDAP vendor changed. Previous=" + $scope.lastVendor + " New=" + $scope.instance.config['vendor'][0]);
            $scope.lastVendor = $scope.instance.config['vendor'][0];
            if ($scope.lastVendor === "ad") {
                $scope.instance.config['usernameLDAPAttribute'][0] = "cn";
                $scope.instance.config['userObjectClasses'][0] = "person, organizationalPerson, user";
            } else {
                $scope.instance.config['usernameLDAPAttribute'][0] = "uid";
                $scope.instance.config['userObjectClasses'][0] = "inetOrgPerson, organizationalPerson";
            }
            $scope.instance.config['rdnLDAPAttribute'][0] = $scope.instance.config['usernameLDAPAttribute'][0];

            var vendorToUUID = {
                rhds: "nsuniqueid",
                tivoli: "uniqueidentifier",
                edirectory: "guid",
                ad: "objectGUID",
                other: "entryUUID"
            };
            $scope.instance.config['uuidLDAPAttribute'][0] = vendorToUUID[$scope.lastVendor];
        }
    }, true);

    $scope.$watch('fullSyncEnabled', function(newVal, oldVal) {
        if (oldVal == newVal) {
            return;
        }
        $scope.instance.config['fullSyncPeriod'][0] = $scope.fullSyncEnabled ? "604800" : "-1";
        $scope.changed = true;
    });

    $scope.$watch('changedSyncEnabled', function(newVal, oldVal) {
        if (oldVal == newVal) {
            return;
        }
        $scope.instance.config['changedSyncPeriod'][0] = $scope.changedSyncEnabled ? "86400" : "-1";
        $scope.changed = true;
    });

    $scope.save = function() {
        $scope.changed = false;
        if (!$scope.instance.config['batchSizeForSync'] || !parseInt($scope.instance.config['batchSizeForSync'][0])) {
            $scope.instance.config['batchSizeForSync'] = [DEFAULT_BATCH_SIZE];
        } else {
            $scope.instance.config['batchSizeForSync'][0] = parseInt($scope.instance.config['batchSizeForSync'][0]).toString();
        }

        if ($scope.create) {
            Components.save({realm: realm.realm}, $scope.instance, function (data, headers) {
                var l = headers().location;
                var id = l.substring(l.lastIndexOf("/") + 1);
                $location.url("/realms/" + realm.realm + "/user-storage/providers/" + $scope.instance.providerId + "/" + id);
                Notifications.success("The provider has been created.");
            });
        } else {
            Components.update({realm: realm.realm, componentId: instance.id}, $scope.instance, function () {
                $route.reload();
                Notifications.success("The provider has been updated.");
            });
        }
    };

    $scope.reset = function() {
        $route.reload();
    };

    $scope.cancel = function() {
        if ($scope.create) {
            $location.url("/realms/" + realm.realm + "/user-federation");
        } else {
            $route.reload();
        }
    };

    $scope.triggerFullSync = function() {
        console.log('GenericCtrl: triggerFullSync');
        triggerSync('triggerFullSync');
    }

    $scope.triggerChangedUsersSync = function() {
        console.log('GenericCtrl: triggerChangedUsersSync');
        triggerSync('triggerChangedUsersSync');
    }

    function triggerSync(action) {
        UserStorageOperations.sync.save({ action: action, realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Sync of users finished successfully. " + syncResult.status);
        }, function() {
            $route.reload();
            Notifications.error("Error during sync of users");
        });
    }

    $scope.removeImportedUsers = function() {
        UserStorageOperations.removeImportedUsers.save({ realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Remove imported users finished successfully.");
        }, function() {
            $route.reload();
            Notifications.error("Error during remove");
        });
    };

    $scope.unlinkUsers = function() {
        UserStorageOperations.unlinkUsers.save({ realm: $scope.realm.realm, componentId: $scope.instance.id }, {}, function(syncResult) {
            $route.reload();
            Notifications.success("Unlink of users finished successfully.");
        }, function() {
            $route.reload();
            Notifications.error("Error during unlink");
        });
    };

    var initConnectionTest = function(testAction, ldapConfig) {
        return {
            action: testAction,
            realm: $scope.realm.realm,
            connectionUrl: ldapConfig.connectionUrl,
            bindDn: ldapConfig.bindDn,
            bindCredential: ldapConfig.bindCredential,
            useTruststoreSpi: ldapConfig.useTruststoreSpi,
            connectionTimeout: ldapConfig.connectionTimeout,
            componentId: instance.id
        };
    };

    $scope.testConnection = function() {
        console.log('LDAPCtrl: testConnection');
        RealmLDAPConnectionTester.post(initConnectionTest("testConnection", $scope.instance.config), function() {
            Notifications.success("LDAP connection successful.");
        }, function() {
            Notifications.error("Error when trying to connect to LDAP. See server.log for details.");
        });
    }

    $scope.testAuthentication = function() {
        console.log('LDAPCtrl: testAuthentication');
        RealmLDAPConnectionTester.post(initConnectionTest("testAuthentication", $scope.instance.config), function() {
            Notifications.success("LDAP authentication successful.");
        }, function() {
            Notifications.error("LDAP authentication failed. See server.log for details");
        });
    }
});

module.controller('LDAPTabCtrl', function(Dialog, $scope, Current, Notifications, $location) {
    $scope.removeUserFederation = function() {
        Dialog.confirmDelete($scope.instance.name, 'ldap provider', function() {
            $scope.instance.$remove({ realm : Current.realm.realm, componentId : $scope.instance.id }, function() {
                $location.url("/realms/" + Current.realm.realm + "/user-federation");
                Notifications.success("The provider has been deleted.");
            });
        });
    };
});

module.controller('LDAPMapperListCtrl', function($scope, $location, Notifications, $route, Dialog, realm, provider, mappers) {
    console.log('LDAPMapperListCtrl');
    $scope.realm = realm;
    $scope.provider = provider;
    $scope.instance = provider;
    $scope.mappers = mappers;
});

module.controller('LDAPMapperCtrl', function($scope, $route, realm, provider, mapperTypes, mapper, clients, Components, LDAPMapperSync, Notifications, Dialog, $location) {
    console.log('LDAPMapperCtrl');
    $scope.realm = realm;
    $scope.provider = provider;
    $scope.clients = clients;
    $scope.create = false;
    $scope.changed = false;

    for (var i = 0; i < mapperTypes.length; i++) {
        console.log('mapper.providerId: ' + mapper.providerId);
        console.log('mapperTypes[i].id ' + mapperTypes[i].id);
        if (mapperTypes[i].id == mapper.providerId) {
            $scope.mapperType = mapperTypes[i];
            break;
        }
    }

    if ($scope.mapperType.properties) {
        for (var i = 0; i < $scope.mapperType.properties.length; i++) {
            var configProperty = $scope.mapperType.properties[i];
            if (!mapper.config[configProperty.name]) {
                if (configProperty.defaultValue) {
                    mapper.config[configProperty.name] = [configProperty.defaultValue];
                } else {
                    mapper.config[configProperty.name] = [''];
                }
            }
        }
    }

    $scope.mapper = angular.copy(mapper);

    $scope.$watch('mapper', function() {
        if (!angular.equals($scope.mapper, mapper)) {
            $scope.changed = true;
        }
    }, true);

    $scope.save = function() {
        Components.update({realm: realm.realm, componentId: mapper.id}, $scope.mapper, function () {
            $route.reload();
            Notifications.success("The mapper has been updated.");
        });
    };

    $scope.reset = function() {
        $scope.mapper = angular.copy(mapper);
        $scope.changed = false;
    };

    $scope.remove = function() {
        Dialog.confirmDelete($scope.mapper.name, 'ldap mapper', function() {
            Components.remove({ realm : realm.realm, componentId : mapper.id }, function() {
                $location.url("/realms/" + realm.realm + '/ldap-mappers/' + provider.id);
                Notifications.success("The mapper has been deleted.");
            });
        });
    };

    $scope.triggerFedToKeycloakSync = function() {
        triggerMapperSync("fedToKeycloak")
    }

    $scope.triggerKeycloakToFedSync = function() {
        triggerMapperSync("keycloakToFed");
    }

    function triggerMapperSync(direction) {
        LDAPMapperSync.save({ direction: direction, realm: realm.realm, parentId: provider.id, mapperId : $scope.mapper.id }, {}, function(syncResult) {
            Notifications.success("Data synced successfully. " + syncResult.status);
        }, function(error) {
            Notifications.error(error.data.errorMessage);
        });
    }
});

module.controller('LDAPMapperCreateCtrl', function($scope, realm, provider, mapperTypes, clients, Components, Notifications, Dialog, $location) {
    console.log('LDAPMapperCreateCtrl');
    $scope.realm = realm;
    $scope.provider = provider;
    $scope.clients = clients;
    $scope.create = true;
    $scope.mapper = { config: {} };
    $scope.mapperTypes = mapperTypes;
    $scope.mapperType = null;
    $scope.changed = true;

    $scope.$watch('mapperType', function() {
        if ($scope.mapperType != null) {
            $scope.mapper.config = {};
            if ($scope.mapperType.properties) {
                for (var i = 0; i < $scope.mapperType.properties.length; i++) {
                    var configProperty = $scope.mapperType.properties[i];
                    if (!$scope.mapper.config[configProperty.name]) {
                        if (configProperty.defaultValue) {
                            $scope.mapper.config[configProperty.name] = [configProperty.defaultValue];
                        } else {
                            $scope.mapper.config[configProperty.name] = [''];
                        }
                    }
                }
            }
        }
    }, true);

    $scope.save = function() {
        if ($scope.mapperType == null) {
            Notifications.error("You need to select a mapper type!");
            return;
        }

        $scope.mapper.providerId = $scope.mapperType.id;
        $scope.mapper.providerType = 'org.keycloak.storage.ldap.mappers.LDAPStorageMapper';
        $scope.mapper.parentId = provider.id;

        Components.save({realm: realm.realm}, $scope.mapper, function (data, headers) {
            var l = headers().location;
            var id = l.substring(l.lastIndexOf("/") + 1);
            $location.url("/realms/" + realm.realm + "/ldap-mappers/" + $scope.mapper.parentId + "/mappers/" + id);
            Notifications.success("The mapper has been created.");
        });
    };

    $scope.reset = function() {
        $location.url("/realms/" + realm.realm + '/ldap-mappers/' + provider.id);
    };
});
/**
 * check if selenium response contains an element result
 * @param  {Object}  result  response object from the driver
 * @return {Number}  returns
 *                   0 if response was not an element result
 *                   1 if response was an element result
 *                   2 if response was an elements result
 */
function hasElementResult (result) {
    /**
     * check for element call
     */
    if (result && result.value && result.value.ELEMENT) {
        return 1
    }

    /**
     * check for elements call
     */
    if (result && Array.isArray(result.value) && result.value.filter((r) => !r.ELEMENT).length === 0) {
        return 2
    }

    return 0
}

export default hasElementResult
import collections
import hashlib
import http.client
import json
import os
import random
import shutil
import time
import unittest
import urllib

from twisted.internet import reactor

from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusTags
from hydrus.core import HydrusText

from hydrus.client import ClientConstants as CC
from hydrus.client import ClientAPI
from hydrus.client import ClientManagers
from hydrus.client import ClientSearch
from hydrus.client import ClientServices
from hydrus.client.importing import ClientImportFiles
from hydrus.client.media import ClientMedia
from hydrus.client.media import ClientMediaManagers
from hydrus.client.media import ClientMediaResult
from hydrus.client.metadata import ClientTags
from hydrus.client.networking import ClientLocalServer
from hydrus.client.networking import ClientLocalServerResources
from hydrus.client.networking import ClientNetworkingContexts

class TestClientAPI( unittest.TestCase ):

    @classmethod
    def setUpClass( cls ):

        cls._client_api = ClientServices.GenerateService( CC.CLIENT_API_SERVICE_KEY, HC.CLIENT_API_SERVICE, 'client api' )
        cls._client_api_cors = ClientServices.GenerateService( CC.CLIENT_API_SERVICE_KEY, HC.CLIENT_API_SERVICE, 'client api' )

        cls._client_api_cors._support_cors = True

        def TWISTEDSetup():

            reactor.listenTCP( 45869, ClientLocalServer.HydrusServiceClientAPI( cls._client_api, allow_non_local_connections = False ) )
            reactor.listenTCP( 45899, ClientLocalServer.HydrusServiceClientAPI( cls._client_api_cors, allow_non_local_connections = False ) )

        reactor.callFromThread( TWISTEDSetup )

        time.sleep( 1 )

    def _compare_content_updates( self, service_keys_to_content_updates, expected_service_keys_to_content_updates ):

        self.assertEqual( len( service_keys_to_content_updates ), len( expected_service_keys_to_content_updates ) )

        for ( service_key, content_updates ) in service_keys_to_content_updates.items():

            expected_content_updates = expected_service_keys_to_content_updates[ service_key ]

            c_u_tuples = sorted( ( ( c_u.ToTuple(), c_u.GetReason() ) for c_u in content_updates ) )
            e_c_u_tuples = sorted( ( ( e_c_u.ToTuple(), e_c_u.GetReason() ) for e_c_u in expected_content_updates ) )

            self.assertEqual( c_u_tuples, e_c_u_tuples )

    def _test_basics( self, connection ):

        #

        connection.request( 'GET', '/' )

        response = connection.getresponse()

        data = response.read()

        self.assertEqual( response.status, 200 )

        #

        with open( os.path.join( HC.STATIC_DIR, 'hydrus.ico' ), 'rb' ) as f:

            favicon = f.read()

        connection.request( 'GET', '/favicon.ico' )

        response = connection.getresponse()

        data = response.read()

        self.assertEqual( data, favicon )

        time.sleep( 3 )

    def _test_client_api_basics( self, connection ):

        # /api_version

        connection.request( 'GET', '/api_version' )

        response = connection.getresponse()

        data = response.read()

        text = str( data, 'utf-8' )

        response_json = json.loads( text )

        self.assertEqual( response_json[ 'version' ], HC.CLIENT_API_VERSION )
        self.assertEqual( response_json[ 'hydrus_version' ], HC.SOFTWARE_VERSION )

        # /request_new_permissions

        def format_request_new_permissions_query( name, basic_permissions ):

            return '/request_new_permissions?name={}&basic_permissions={}'.format( urllib.parse.quote( name ), urllib.parse.quote( json.dumps( basic_permissions ) ) )

        # fail as dialog not open

        ClientAPI.api_request_dialog_open = False

        connection.request( 'GET', format_request_new_permissions_query( 'test', [ ClientAPI.CLIENT_API_PERMISSION_ADD_FILES ] ) )

        response = connection.getresponse()

        data = response.read()

        text = str( data, 'utf-8' )

        self.assertEqual( response.status, 409 )
        self.assertIn( 'dialog', text )

        # success

        permissions_to_set_up = []

        permissions_to_set_up.append( ( 'everything', list( ClientAPI.ALLOWED_PERMISSIONS ) ) )
        permissions_to_set_up.append( ( 'add_files', [ ClientAPI.CLIENT_API_PERMISSION_ADD_FILES ] ) )
        permissions_to_set_up.append( ( 'add_tags', [ ClientAPI.CLIENT_API_PERMISSION_ADD_TAGS ] ) )
        permissions_to_set_up.append( ( 'add_urls', [ ClientAPI.CLIENT_API_PERMISSION_ADD_URLS ] ) )
        permissions_to_set_up.append( ( 'manage_pages', [ ClientAPI.CLIENT_API_PERMISSION_MANAGE_PAGES ] ) )
        permissions_to_set_up.append( ( 'manage_cookies', [ ClientAPI.CLIENT_API_PERMISSION_MANAGE_COOKIES ] ) )
        permissions_to_set_up.append( ( 'search_all_files', [ ClientAPI.CLIENT_API_PERMISSION_SEARCH_FILES ] ) )
        permissions_to_set_up.append( ( 'search_green_files', [ ClientAPI.CLIENT_API_PERMISSION_SEARCH_FILES ] ) )

        set_up_permissions = {}

        for ( name, basic_permissions ) in permissions_to_set_up:

            ClientAPI.api_request_dialog_open = True

            connection.request( 'GET', format_request_new_permissions_query( name, basic_permissions ) )

            response = connection.getresponse()

            data = response.read()

            ClientAPI.api_request_dialog_open = False

            response_text = str( data, 'utf-8' )

            self.assertEqual( response.status, 200 )

            response_json = json.loads( response_text )

            access_key_hex = response_json[ 'access_key' ]

            self.assertEqual( len( access_key_hex ), 64 )

            access_key_hex = HydrusText.HexFilter( access_key_hex )

            self.assertEqual( len( access_key_hex ), 64 )

            api_permissions = ClientAPI.last_api_permissions_request

            if 'green' in name:

                search_tag_filter = HydrusTags.TagFilter()

                search_tag_filter.SetRule( '', HC.FILTER_BLACKLIST )
                search_tag_filter.SetRule( ':', HC.FILTER_BLACKLIST )
                search_tag_filter.SetRule( 'green', HC.FILTER_WHITELIST )

                api_permissions.SetSearchTagFilter( search_tag_filter )

            self.assertEqual( bytes.fromhex( access_key_hex ), api_permissions.GetAccessKey() )

            set_up_permissions[ name ] = api_permissions

            HG.test_controller.client_api_manager.AddAccess( api_permissions )

        # /verify_access_key

        # missing

        connection.request( 'GET', '/verify_access_key' )

        response = connection.getresponse()

        data = response.read()

        self.assertEqual( response.status, 401 )

        # fail header

        incorrect_headers = { 'Hydrus-Client-API-Access-Key' : 'abcd' }

        connection.request( 'GET', '/verify_access_key', headers = incorrect_headers )

        response = connection.getresponse()

        data = response.read()

        self.assertEqual( response.status, 403 )

        # fail get param

        connection.request( 'GET', '/verify_access_key?Hydrus-Client-API-Access-Key=abcd' )

        response = connection.getresponse()

        data = response.read()

        self.assertEqual( response.status, 403 )

        # success

        def do_good_verify_test( api_permissions, key_hex, key_name ):

            for request_type in ( 'header', 'get' ):

                if request_type == 'header':

                    headers = { key_name : key_hex }

                    connection.request( 'GET', '/verify_access_key', headers = headers )

                elif request_type == 'get':

                    connection.request( 'GET', '/verify_access_key?{}={}'.format( key_name, key_hex ) )

                response = connection.getresponse()

                data = response.read()

                text = str( data, 'utf-8' )

                self.assertEqual( response.status, 200 )

                body_dict = json.loads( text )

                self.assertEqual( set( body_dict[ 'basic_permissions' ] ), set( api_permissions.GetBasicPermissions() ) )
                self.assertEqual( body_dict[ 'human_description' ], api_permissions.ToHumanString() )

        for 
api_permissions in set_up_permissions.values(): access_key_hex = api_permissions.GetAccessKey().hex() do_good_verify_test( api_permissions, access_key_hex, 'Hydrus-Client-API-Access-Key' ) # /session_key # fail header incorrect_headers = { 'Hydrus-Client-API-Session-Key' : 'abcd' } connection.request( 'GET', '/verify_access_key', headers = incorrect_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 419 ) # fail get param connection.request( 'GET', '/verify_access_key?Hydrus-Client-API-Session-Key=abcd' ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 419 ) # success for api_permissions in set_up_permissions.values(): access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } connection.request( 'GET', '/session_key', headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) body_dict = json.loads( text ) self.assertEqual( response.status, 200 ) self.assertIn( 'session_key', body_dict ) session_key_hex = body_dict[ 'session_key' ] self.assertEqual( len( session_key_hex ), 64 ) do_good_verify_test( api_permissions, session_key_hex, 'Hydrus-Client-API-Session-Key' ) # test access in POST params # fail headers = { 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } hash = os.urandom( 32 ) hash_hex = hash.hex() HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'Hydrus-Client-API-Access-Key' : 'abcd', 'hash' : hash_hex, 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) body_dict = { 'Hydrus-Client-API-Session-Key' : 'abcd', 'hash' : hash_hex, 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 419 ) # success api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } connection.request( 'GET', '/session_key', headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) body_dict = json.loads( text ) session_key_hex = body_dict[ 'session_key' ] headers = { 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } hash = os.urandom( 32 ) hash_hex = hash.hex() HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'hash' : hash_hex, 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) # HG.test_controller.ClearWrites( 'content_updates' ) body_dict = { 'Hydrus-Client-API-Session-Key' : session_key_hex, 'hash' : hash_hex, 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( 
response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self.assertIn( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, service_keys_to_content_updates ) self.assertTrue( len( service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] ) > 0 ) # HG.test_controller.ClearWrites( 'content_updates' ) body_dict = { 'Hydrus-Client-API-Session-Key' : session_key_hex, 'hash' : hash_hex, 'service_keys_to_tags' : { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY.hex() : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self.assertIn( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, service_keys_to_content_updates ) self.assertTrue( len( service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] ) > 0 ) # HG.test_controller.ClearWrites( 'content_updates' ) body_dict = { 'Hydrus-Client-API-Session-Key' : session_key_hex, 'hash' : hash_hex, 'service_keys_to_actions_to_tags' : { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY.hex() : { str( HC.CONTENT_UPDATE_ADD ) : [ 'test', 'test2' ] } } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self.assertIn( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, service_keys_to_content_updates ) self.assertTrue( len( service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] ) > 0 ) # return set_up_permissions def _test_cors_fails( self, connection ): connection.request( 'OPTIONS', '/api_version' ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( response.getheader( 'Allow' ), 'GET' ) # connection.request( 'OPTIONS', '/api_version', headers = { 'Origin' : 'muhsite.com' } ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 401 ) def _test_cors_succeeds( self, connection ): connection.request( 'OPTIONS', '/api_version' ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( response.getheader( 'Allow' ), 'GET' ) # connection.request( 'OPTIONS', '/api_version', headers = { 'Origin' : 'muhsite.com' } ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( response.getheader( 'Access-Control-Allow-Methods' ), 'GET' ) self.assertEqual( response.getheader( 'Access-Control-Allow-Headers' ), '*' ) self.assertEqual( response.getheader( 'Access-Control-Allow-Origin' ), '*' ) def _test_get_services( self, connection, set_up_permissions ): should_work = { set_up_permissions[ 'everything' ], set_up_permissions[ 'add_files' ], set_up_permissions[ 'add_tags' ], set_up_permissions[ 'manage_pages' ], set_up_permissions[ 'search_all_files' ], set_up_permissions[ 'search_green_files' ] } should_break = { set_up_permissions[ 'add_urls' ], set_up_permissions[ 'manage_cookies' ] } expected_answer = { 'local_tags': [ { 'name': 'my tags', 'service_key': '6c6f63616c2074616773' } ], 'tag_repositories': [ { 'name': 'example tag repo', 'service_key': HG.test_controller.example_tag_repo_service_key.hex() } ], 
'local_files': [ { 'name': 'my files', 'service_key': '6c6f63616c2066696c6573' } ], 'file_repositories': [ ], 'all_local_files': [ { 'name': 'all local files', 'service_key': '616c6c206c6f63616c2066696c6573' } ], 'all_known_files': [ { 'name': 'all known files', 'service_key': '616c6c206b6e6f776e2066696c6573' } ], 'all_known_tags': [ { 'name': 'all known tags', 'service_key': '616c6c206b6e6f776e2074616773' } ], 'trash': [ { 'name': 'trash', 'service_key': '7472617368' } ] } for api_permissions in should_work.union( should_break ): access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # path = '/get_services' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() if api_permissions in should_work: text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_answer ) else: self.assertEqual( response.status, 403 ) def _test_add_files_add_file( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'add_files' ] access_key_hex = api_permissions.GetAccessKey().hex() # fail hash = bytes.fromhex( 'a593942cb7ea9ffcd8ccf2f0fa23c338e23bfecd9a3e508dfc0bcf07501ead08' ) f = ClientImportFiles.FileImportStatus.STATICGetUnknownStatus() f.hash = hash HG.test_controller.SetRead( 'hash_status', f ) headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_OCTET_STREAM ] } path = '/add_files/add_file' body = b'blarg' connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'status' ], CC.STATUS_ERROR ) self.assertEqual( response_json[ 'hash' ], hash.hex() ) self.assertIn( 'Traceback', response_json[ 'note' ] ) # success as body hash = b'\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3' f = ClientImportFiles.FileImportStatus.STATICGetUnknownStatus() f.hash = hash f.note = 'test note' HG.test_controller.SetRead( 'hash_status', f ) hydrus_png_path = os.path.join( HC.STATIC_DIR, 'hydrus.png' ) with open( hydrus_png_path, 'rb' ) as f: HYDRUS_PNG_BYTES = f.read() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_OCTET_STREAM ] } path = '/add_files/add_file' body = HYDRUS_PNG_BYTES connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) expected_result = { 'status' : CC.STATUS_SUCCESSFUL_AND_NEW, 'hash' : hash.hex() , 'note' : 'test note' } self.assertEqual( response_json, expected_result ) # do hydrus png as path f = ClientImportFiles.FileImportStatus.STATICGetUnknownStatus() f.hash = hash f.note = 'test note' HG.test_controller.SetRead( 'hash_status', f ) headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } path = '/add_files/add_file' body_dict = { 'path' : hydrus_png_path } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) 
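# --- hedged aside, not part of the original test suite ---
# A minimal standalone sketch of the /add_files/add_file call exercised above, as an
# external client might issue it. Assumptions: a hydrus client listening on
# localhost:45869, and access_key_hex standing in for a real 64-character access key.
def example_add_file( file_bytes, access_key_hex ):
    
    import http.client
    import json
    
    connection = http.client.HTTPConnection( 'localhost', 45869 )
    
    # the raw file bytes go straight into the POST body as application/octet-stream
    headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : 'application/octet-stream' }
    
    connection.request( 'POST', '/add_files/add_file', body = file_bytes, headers = headers )
    
    response = connection.getresponse()
    
    # per the assertions above, the body is JSON shaped like { 'status' : ..., 'hash' : ..., 'note' : ... }
    return json.loads( str( response.read(), 'utf-8' ) )
    
# --- end aside ---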
self.assertEqual( response.status, 200 ) response_json = json.loads( text ) expected_result = { 'status' : CC.STATUS_SUCCESSFUL_AND_NEW, 'hash' : hash.hex() , 'note' : 'test note' } self.assertEqual( response_json, expected_result ) def _test_add_files_other_actions( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'add_files' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } # hash = HydrusData.GenerateKey() hashes = { HydrusData.GenerateKey() for i in range( 10 ) } # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/delete_files' body_dict = { 'hash' : hash.hex() } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { hash } ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/delete_files' body_dict = { 'hashes' : [ h.hex() for h in hashes ] } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/undelete_files' body_dict = { 'hash' : hash.hex() } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, { hash } ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/undelete_files' body_dict = { 'hashes' : [ h.hex() for h in hashes ] } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, hashes ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = 
'/add_files/archive_files' body_dict = { 'hash' : hash.hex() } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, { hash } ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/archive_files' body_dict = { 'hashes' : [ h.hex() for h in hashes ] } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, hashes ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/unarchive_files' body_dict = { 'hash' : hash.hex() } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_INBOX, { hash } ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_files/unarchive_files' body_dict = { 'hashes' : [ h.hex() for h in hashes ] } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) expected_service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_INBOX, hashes ) ] } self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) def _test_add_tags( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # clean tags tags = [ " bikini ", "blue eyes", " character : samus aran ", ":)", " ", "", "10", "11", "9", "system:wew", "-flower" ] json_tags = json.dumps( tags ) path = '/add_tags/clean_tags?tags={}'.format( urllib.parse.quote( json_tags, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} clean_tags = [ "bikini", "blue eyes", 
"character:samus aran", "::)", "10", "11", "9", "wew", "flower" ] clean_tags = HydrusTags.SortNumericTags( clean_tags ) expected_answer[ 'tags' ] = clean_tags self.assertEqual( d, expected_answer ) # add tags headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } hash = os.urandom( 32 ) hash_hex = hash.hex() hash2 = os.urandom( 32 ) hash2_hex = hash2.hex() # missing hashes path = '/add_tags/add_tags' body_dict = { 'service_names_to_tags' : { 'my tags' : [ 'test' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 400 ) # invalid service key path = '/add_tags/add_tags' body_dict = { 'hash' : hash_hex, 'service_names_to_tags' : { 'bad tag service' : [ 'test' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 400 ) # add tags to local HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'hash' : hash_hex, 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test2', set( [ hash ] ) ) ) ] [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # add tags to local complex HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'hash' : hash_hex, 'service_names_to_actions_to_tags' : { 'my tags' : { str( HC.CONTENT_UPDATE_ADD ) : [ 'test_add', 'test_add2' ], str( HC.CONTENT_UPDATE_DELETE ) : [ 'test_delete', 'test_delete2' ] } } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test_add', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test_add2', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test_delete', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test_delete2', set( [ hash ] ) ) ) ] [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # pend tags to repo HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'hash' : hash_hex, 
'service_names_to_tags' : { 'example tag repo' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ HG.test_controller.example_tag_repo_service_key ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'test', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'test2', set( [ hash ] ) ) ) ] [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # pend tags to repo complex HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'hash' : hash_hex, 'service_names_to_actions_to_tags' : { 'example tag repo' : { str( HC.CONTENT_UPDATE_PEND ) : [ 'test_add', 'test_add2' ], str( HC.CONTENT_UPDATE_PETITION ) : [ [ 'test_delete', 'muh reason' ], 'test_delete2' ] } } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ HG.test_controller.example_tag_repo_service_key ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'test_add', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'test_add2', set( [ hash ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PETITION, ( 'test_delete', set( [ hash ] ) ), reason = 'muh reason' ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PETITION, ( 'test_delete2', set( [ hash ] ) ), reason = 'Petitioned from API' ) ] [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) # add to multiple files HG.test_controller.ClearWrites( 'content_updates' ) path = '/add_tags/add_tags' body_dict = { 'hashes' : [ hash_hex, hash2_hex ], 'service_names_to_tags' : { 'my tags' : [ 'test', 'test2' ] } } body = json.dumps( body_dict ) connection.request( 'POST', path, body = body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test', set( [ hash, hash2 ] ) ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test2', set( [ hash, hash2 ] ) ) ) ] [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' ) self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates ) def _test_add_urls( self, connection, set_up_permissions ): # get url files api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 
'Hydrus-Client-API-Access-Key' : access_key_hex } # none url = 'https://muhsite.wew/help_compute' HG.test_controller.SetRead( 'url_statuses', [] ) path = '/add_urls/get_url_files?url={}'.format( urllib.parse.quote( url, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} expected_answer[ 'normalised_url' ] = url expected_answer[ 'url_file_statuses' ] = [] self.assertEqual( d, expected_answer ) # some url = 'http://safebooru.org/index.php?s=view&page=post&id=2753608' normalised_url = 'https://safebooru.org/index.php?id=2753608&page=post&s=view' hash = os.urandom( 32 ) url_file_statuses = [ ClientImportFiles.FileImportStatus( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, hash, note = 'muh import phrase' ) ] json_url_file_statuses = [ { 'status' : CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, 'hash' : hash.hex(), 'note' : 'muh import phrase' } ] HG.test_controller.SetRead( 'url_statuses', url_file_statuses ) path = '/add_urls/get_url_files?url={}'.format( urllib.parse.quote( url, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} expected_answer[ 'normalised_url' ] = normalised_url expected_answer[ 'url_file_statuses' ] = json_url_file_statuses self.assertEqual( d, expected_answer ) # get url info api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # unknown url = 'https://muhsite.wew/help_compute' path = '/add_urls/get_url_info?url={}'.format( urllib.parse.quote( url, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} expected_answer[ 'normalised_url' ] = url expected_answer[ 'url_type' ] = HC.URL_TYPE_UNKNOWN expected_answer[ 'url_type_string' ] = 'unknown url' expected_answer[ 'match_name' ] = 'unknown url' expected_answer[ 'can_parse' ] = False expected_answer[ 'cannot_parse_reason' ] = 'unknown url class' self.assertEqual( d, expected_answer ) # known url = 'http://8ch.net/tv/res/1846574.html' normalised_url = 'https://8ch.net/tv/res/1846574.html' # http so we can test normalised is https path = '/add_urls/get_url_info?url={}'.format( urllib.parse.quote( url, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} expected_answer[ 'normalised_url' ] = normalised_url expected_answer[ 'url_type' ] = HC.URL_TYPE_WATCHABLE expected_answer[ 'url_type_string' ] = 'watchable url' expected_answer[ 'match_name' ] = '8chan thread' expected_answer[ 'can_parse' ] = True self.assertEqual( d, expected_answer ) # known post url url = 'http://safebooru.org/index.php?page=post&s=view&id=2753608' normalised_url = 'https://safebooru.org/index.php?id=2753608&page=post&s=view' hash = os.urandom( 32 ) path = '/add_urls/get_url_info?url={}'.format( urllib.parse.quote( url, safe = '' ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = 
response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = {} expected_answer[ 'normalised_url' ] = normalised_url expected_answer[ 'url_type' ] = HC.URL_TYPE_POST expected_answer[ 'url_type_string' ] = 'post url' expected_answer[ 'match_name' ] = 'safebooru file page' expected_answer[ 'can_parse' ] = True self.assertEqual( d, expected_answer ) # add url HG.test_controller.ClearWrites( 'import_url_test' ) headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } url = 'http://8ch.net/tv/res/1846574.html' request_dict = { 'url' : url } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/add_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'human_result_text' ], '"https://8ch.net/tv/res/1846574.html" URL added successfully.' ) self.assertEqual( response_json[ 'normalised_url' ], 'https://8ch.net/tv/res/1846574.html' ) self.assertEqual( HG.test_controller.GetWrite( 'import_url_test' ), [ ( ( url, set(), ClientTags.ServiceKeysToTags(), None, None, False ), {} ) ] ) # with name HG.test_controller.ClearWrites( 'import_url_test' ) request_dict = { 'url' : url, 'destination_page_name' : 'muh /tv/' } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/add_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'human_result_text' ], '"https://8ch.net/tv/res/1846574.html" URL added successfully.' ) self.assertEqual( response_json[ 'normalised_url' ], 'https://8ch.net/tv/res/1846574.html' ) self.assertEqual( HG.test_controller.GetWrite( 'import_url_test' ), [ ( ( url, set(), ClientTags.ServiceKeysToTags(), 'muh /tv/', None, False ), {} ) ] ) # with page_key HG.test_controller.ClearWrites( 'import_url_test' ) page_key = os.urandom( 32 ) page_key_hex = page_key.hex() request_dict = { 'url' : url, 'destination_page_key' : page_key_hex } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/add_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'human_result_text' ], '"https://8ch.net/tv/res/1846574.html" URL added successfully.' 
) self.assertEqual( response_json[ 'normalised_url' ], 'https://8ch.net/tv/res/1846574.html' ) self.assertEqual( HG.test_controller.GetWrite( 'import_url_test' ), [ ( ( url, set(), ClientTags.ServiceKeysToTags(), None, page_key, False ), {} ) ] ) # add tags and name, and show destination page HG.test_controller.ClearWrites( 'import_url_test' ) request_dict = { 'url' : url, 'destination_page_name' : 'muh /tv/', 'show_destination_page' : True, 'filterable_tags' : [ 'filename:yo' ], 'service_names_to_additional_tags' : { 'my tags' : [ '/tv/ thread' ] } } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/add_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'human_result_text' ], '"https://8ch.net/tv/res/1846574.html" URL added successfully.' ) self.assertEqual( response_json[ 'normalised_url' ], 'https://8ch.net/tv/res/1846574.html' ) filterable_tags = [ 'filename:yo' ] additional_service_keys_to_tags = ClientTags.ServiceKeysToTags( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : set( [ '/tv/ thread' ] ) } ) self.assertEqual( HG.test_controller.GetWrite( 'import_url_test' ), [ ( ( url, set( filterable_tags ), additional_service_keys_to_tags, 'muh /tv/', None, True ), {} ) ] ) # add tags with service key and name, and show destination page HG.test_controller.ClearWrites( 'import_url_test' ) request_dict = { 'url' : url, 'destination_page_name' : 'muh /tv/', 'show_destination_page' : True, 'filterable_tags' : [ 'filename:yo' ], 'service_keys_to_additional_tags' : { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY.hex() : [ '/tv/ thread' ] } } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/add_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) response_json = json.loads( text ) self.assertEqual( response_json[ 'human_result_text' ], '"https://8ch.net/tv/res/1846574.html" URL added successfully.' 
) self.assertEqual( response_json[ 'normalised_url' ], 'https://8ch.net/tv/res/1846574.html' ) filterable_tags = [ 'filename:yo' ] additional_service_keys_to_tags = ClientTags.ServiceKeysToTags( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : set( [ '/tv/ thread' ] ) } ) self.assertEqual( HG.test_controller.GetWrite( 'import_url_test' ), [ ( ( url, set( filterable_tags ), additional_service_keys_to_tags, 'muh /tv/', None, True ), {} ) ] ) # associate url HG.test_controller.ClearWrites( 'content_updates' ) hash = bytes.fromhex( '3b820114f658d768550e4e3d4f1dced3ff8db77443472b5ad93700647ad2d3ba' ) url = 'https://rule34.xxx/index.php?id=2588418&page=post&s=view' request_dict = { 'url_to_add' : url, 'hash' : hash.hex() } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/associate_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( [ url ], [ hash ] ) ) ] expected_result = [ ( ( expected_service_keys_to_content_updates, ), {} ) ] result = HG.test_controller.GetWrite( 'content_updates' ) self.assertEqual( result, expected_result ) # HG.test_controller.ClearWrites( 'content_updates' ) hash = bytes.fromhex( '3b820114f658d768550e4e3d4f1dced3ff8db77443472b5ad93700647ad2d3ba' ) url = 'https://rule34.xxx/index.php?id=2588418&page=post&s=view' request_dict = { 'urls_to_add' : [ url ], 'hashes' : [ hash.hex() ] } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/associate_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( [ url ], [ hash ] ) ) ] expected_result = [ ( ( expected_service_keys_to_content_updates, ), {} ) ] result = HG.test_controller.GetWrite( 'content_updates' ) self.assertEqual( result, expected_result ) # HG.test_controller.ClearWrites( 'content_updates' ) hash = bytes.fromhex( '3b820114f658d768550e4e3d4f1dced3ff8db77443472b5ad93700647ad2d3ba' ) url = 'http://rule34.xxx/index.php?id=2588418&page=post&s=view' request_dict = { 'url_to_delete' : url, 'hash' : hash.hex() } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/associate_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_DELETE, ( [ url ], [ hash ] ) ) ] expected_result = [ ( ( expected_service_keys_to_content_updates, ), {} ) ] result = HG.test_controller.GetWrite( 'content_updates' ) self.assertEqual( result, expected_result ) # HG.test_controller.ClearWrites( 'content_updates' ) hash = bytes.fromhex( '3b820114f658d768550e4e3d4f1dced3ff8db77443472b5ad93700647ad2d3ba' ) url = 'http://rule34.xxx/index.php?id=2588418&page=post&s=view' request_dict = { 'urls_to_delete' : [ url ], 'hashes' : [ 
hash.hex() ] } request_body = json.dumps( request_dict ) connection.request( 'POST', '/add_urls/associate_url', body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) expected_service_keys_to_content_updates = collections.defaultdict( list ) expected_service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_DELETE, ( [ url ], [ hash ] ) ) ] expected_result = [ ( ( expected_service_keys_to_content_updates, ), {} ) ] result = HG.test_controller.GetWrite( 'content_updates' ) self.assertEqual( result, expected_result ) def _test_manage_cookies( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'manage_cookies' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # path = '/manage_cookies/get_cookies?domain=somesite.com' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) cookies = d[ 'cookies' ] self.assertEqual( cookies, [] ) # headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } path = '/manage_cookies/set_cookies' cookies = [] cookies.append( [ 'one', '1', '.somesite.com', '/', HydrusData.GetNow() + 86400 ] ) cookies.append( [ 'two', '2', 'somesite.com', '/', HydrusData.GetNow() + 86400 ] ) cookies.append( [ 'three', '3', 'wew.somesite.com', '/', HydrusData.GetNow() + 86400 ] ) cookies.append( [ 'four', '4', '.somesite.com', '/', None ] ) request_dict = { 'cookies' : cookies } request_body = json.dumps( request_dict ) connection.request( 'POST', path, body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) path = '/manage_cookies/get_cookies?domain=somesite.com' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) result_cookies = d[ 'cookies' ] frozen_result_cookies = { tuple( row ) for row in result_cookies } frozen_expected_cookies = { tuple( row ) for row in cookies } self.assertEqual( frozen_result_cookies, frozen_expected_cookies ) # headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } path = '/manage_cookies/set_cookies' cookies = [] cookies.append( [ 'one', None, '.somesite.com', '/', None ] ) request_dict = { 'cookies' : cookies } request_body = json.dumps( request_dict ) connection.request( 'POST', path, body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) path = '/manage_cookies/get_cookies?domain=somesite.com' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) result_cookies = d[ 'cookies' ] expected_cookies = [] expected_cookies.append( [ 'two', '2', 'somesite.com', '/', HydrusData.GetNow() + 86400 ] ) expected_cookies.append( [ 'three', '3', 'wew.somesite.com', '/', HydrusData.GetNow() + 86400 ] ) expected_cookies.append( [ 'four', '4', 
'.somesite.com', '/', None ] ) frozen_result_cookies = { tuple( row ) for row in result_cookies } frozen_expected_cookies = { tuple( row ) for row in expected_cookies } self.assertEqual( frozen_result_cookies, frozen_expected_cookies ) # headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } path = '/manage_headers/set_user_agent' new_user_agent = 'muh user agent' request_dict = { 'user-agent' : new_user_agent } request_body = json.dumps( request_dict ) connection.request( 'POST', path, body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) current_headers = HG.test_controller.network_engine.domain_manager.GetHeaders( [ ClientNetworkingContexts.GLOBAL_NETWORK_CONTEXT ] ) self.assertEqual( current_headers[ 'User-Agent' ], new_user_agent ) # request_dict = { 'user-agent' : '' } request_body = json.dumps( request_dict ) connection.request( 'POST', path, body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) current_headers = HG.test_controller.network_engine.domain_manager.GetHeaders( [ ClientNetworkingContexts.GLOBAL_NETWORK_CONTEXT ] ) from hydrus.client import ClientDefaults self.assertEqual( current_headers[ 'User-Agent' ], ClientDefaults.DEFAULT_USER_AGENT ) def _test_manage_database( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # self.assertFalse( HG.client_busy.locked() ) path = '/manage_database/lock_on' connection.request( 'POST', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertTrue( HG.client_busy.locked() ) # path = '/manage_pages/get_pages' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 503 ) # path = '/manage_database/lock_off' connection.request( 'POST', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertFalse( HG.client_busy.locked() ) # expected_data = { 'hell forever' : 666 } HG.test_controller.SetRead( 'boned_stats', expected_data ) path = '/manage_database/mr_bones' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) boned_stats = d[ 'boned_stats' ] self.assertEqual( boned_stats, dict( expected_data ) ) def _test_manage_pages( self, connection, set_up_permissions ): api_permissions = set_up_permissions[ 'manage_pages' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # path = '/manage_pages/get_pages' connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) pages = d[ 'pages' ] self.assertEqual( pages[ 'name' ], 'top pages notebook' ) # headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] } path = '/manage_pages/focus_page' page_key = os.urandom( 32 ) 
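# --- hedged aside, not part of the original test suite ---
# The lock_on/lock_off round trip tested above implies a guard pattern for external
# maintenance scripts: take the lock, do the work while other endpoints answer 503,
# then release. A sketch reusing a connection and headers like those in these tests;
# do_work is a hypothetical placeholder.
def example_locked_maintenance( connection, headers, do_work ):
    
    connection.request( 'POST', '/manage_database/lock_on', headers = headers )
    connection.getresponse().read()
    
    try:
        
        do_work()
        
    finally:
        
        # always release, or the client stays busy and keeps answering 503
        connection.request( 'POST', '/manage_database/lock_off', headers = headers )
        connection.getresponse().read()
        
    
# --- end aside ---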
request_dict = { 'page_key' : page_key.hex() } request_body = json.dumps( request_dict ) connection.request( 'POST', path, body = request_body, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) result = HG.test_controller.GetWrite( 'show_page' ) expected_result = [ ( ( page_key, ), {} ) ] self.assertEqual( result, expected_result ) def _test_search_files( self, connection, set_up_permissions ): hash_ids = [ 1, 2, 3, 4, 5, 10, 15, 16, 17, 18, 19, 20, 21, 25, 100, 101, 150 ] # search files failed tag permission api_permissions = set_up_permissions[ 'search_green_files' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [] path = '/get_files/search_files?tags={}'.format( urllib.parse.quote( json.dumps( tags ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino' ] path = '/get_files/search_files?tags={}'.format( urllib.parse.quote( json.dumps( tags ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # search files HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}'.format( urllib.parse.quote( json.dumps( tags ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_answer = { 'file_ids' : list( sample_hash_ids ) } self.assertEqual( d, expected_answer ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.LOCAL_FILE_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_IMPORT_TIME ) ) self.assertEqual( sort_by.sort_order, CC.SORT_DESC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) # search files and get hashes HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) hash_ids_to_hashes = { hash_id : os.urandom( 32 ) for hash_id in sample_hash_ids } HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) HG.test_controller.SetRead( 'hash_ids_to_hashes', hash_ids_to_hashes ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&return_hashes=true'.format( urllib.parse.quote( json.dumps( tags ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) 
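# --- hedged aside, not part of the original test suite ---
# How a client builds the search path exercised above: 'tags' is a JSON list
# URL-quoted into the query string, and return_hashes=true asks for hashes
# instead of file ids. The helper name is illustrative only.
def example_search_path( tags, return_hashes = False ):
    
    import json
    import urllib.parse
    
    path = '/get_files/search_files?tags={}'.format( urllib.parse.quote( json.dumps( tags ) ) )
    
    if return_hashes:
        
        path += '&return_hashes=true'
        
    
    return path
    
# e.g. example_search_path( [ 'kino', 'green' ], return_hashes = True )
# --- end aside ---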
self.assertEqual( response.status, 200 ) d = json.loads( text ) expected_hashes_set = { hash.hex() for hash in hash_ids_to_hashes.values() } self.assertEqual( set( d[ 'hashes' ] ), expected_hashes_set ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.LOCAL_FILE_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_IMPORT_TIME ) ) self.assertEqual( sort_by.sort_order, CC.SORT_DESC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'hash_ids_to_hashes' ) hash_ids = kwargs[ 'hash_ids' ] self.assertEqual( set( hash_ids ), sample_hash_ids ) # sort # this just tests if it parses, we don't have a full test for read params yet HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&file_sort_type={}'.format( urllib.parse.quote( json.dumps( tags ) ), CC.SORT_FILES_BY_FRAMERATE ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.LOCAL_FILE_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_FRAMERATE ) ) self.assertEqual( sort_by.sort_order, CC.SORT_DESC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) # sort HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&file_sort_type={}&file_sort_asc={}'.format( urllib.parse.quote( json.dumps( tags ) ), CC.SORT_FILES_BY_FRAMERATE, 'true' ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.LOCAL_FILE_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 
'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_FRAMERATE ) ) self.assertEqual( sort_by.sort_order, CC.SORT_ASC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) # file domain HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&file_sort_type={}&file_sort_asc={}&file_service_name={}'.format( urllib.parse.quote( json.dumps( tags ) ), CC.SORT_FILES_BY_FRAMERATE, 'true', 'trash' ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.TRASH_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_FRAMERATE ) ) self.assertEqual( sort_by.sort_order, CC.SORT_ASC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) # file and tag domain HG.test_controller.ClearReads( 'file_query_ids' ) sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&file_sort_type={}&file_sort_asc={}&file_service_key={}&tag_service_name={}'.format( urllib.parse.quote( json.dumps( tags ) ), CC.SORT_FILES_BY_FRAMERATE, 'true', CC.TRASH_SERVICE_KEY.hex(), 'all%20known%20tags' ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) [ ( args, kwargs ) ] = HG.test_controller.GetRead( 'file_query_ids' ) ( file_search_context, ) = args self.assertEqual( file_search_context.GetLocationContext().current_service_keys, { CC.TRASH_SERVICE_KEY } ) self.assertEqual( file_search_context.GetTagSearchContext().service_key, CC.COMBINED_TAG_SERVICE_KEY ) self.assertEqual( set( file_search_context.GetPredicates() ), { ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, tag ) for tag in tags } ) self.assertIn( 'sort_by', kwargs ) sort_by = kwargs[ 'sort_by' ] self.assertEqual( sort_by.sort_type, ( 'system', CC.SORT_FILES_BY_FRAMERATE ) ) self.assertEqual( sort_by.sort_order, CC.SORT_ASC ) self.assertIn( 'apply_implicit_limit', kwargs ) self.assertEqual( kwargs[ 'apply_implicit_limit' ], False ) # file and tag domain # this just tests if it parses, we don't have a full test for read params yet sample_hash_ids = set( random.sample( hash_ids, 3 ) ) HG.test_controller.SetRead( 'file_query_ids', set( sample_hash_ids ) ) tags = [ 'kino', 'green' ] path = '/get_files/search_files?tags={}&file_sort_type={}&file_sort_asc={}&file_service_key={}&tag_service_key={}'.format( urllib.parse.quote( json.dumps( tags ) ), CC.SORT_FILES_BY_FRAMERATE, 'true', 
CC.COMBINED_FILE_SERVICE_KEY.hex(), CC.COMBINED_TAG_SERVICE_KEY.hex() ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 400 ) def _test_search_files_predicate_parsing( self, connection, set_up_permissions ): # some file search param parsing class PretendRequest( object ): pass pretend_request = PretendRequest() pretend_request.parsed_request_args = {} pretend_request.client_api_permissions = set_up_permissions[ 'everything' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) self.assertEqual( predicates, [] ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'system_inbox' : True } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] with self.assertRaises( HydrusExceptions.InsufficientCredentialsException ): ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ '-green' ] } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] with self.assertRaises( HydrusExceptions.InsufficientCredentialsException ): ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ 'green', '-kino' ] } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) expected_predicates = [] expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'green' ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'kino', inclusive = False ) ) self.assertEqual( set( predicates ), set( expected_predicates ) ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ 'green' ], 'system_inbox' : True } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) expected_predicates = [] expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'green' ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_SYSTEM_INBOX ) ) self.assertEqual( set( predicates ), set( expected_predicates ) ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ 'green' ], 'system_archive' : True } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) expected_predicates = [] expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'green' ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_SYSTEM_ARCHIVE ) ) self.assertEqual( set( predicates ), set( expected_predicates ) ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ 'green', 'system:archive' ] } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) 
expected_predicates = [] expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'green' ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_SYSTEM_ARCHIVE ) ) self.assertEqual( set( predicates ), set( expected_predicates ) ) # pretend_request = PretendRequest() pretend_request.parsed_request_args = { 'tags' : [ 'green', [ 'red', 'blue' ], 'system:archive' ] } pretend_request.client_api_permissions = set_up_permissions[ 'search_green_files' ] predicates = ClientLocalServerResources.ParseClientAPISearchPredicates( pretend_request ) expected_predicates = [] expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'green' ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_OR_CONTAINER, value = [ ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'red' ), ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_TAG, value = 'blue' ) ] ) ) expected_predicates.append( ClientSearch.Predicate( predicate_type = ClientSearch.PREDICATE_TYPE_SYSTEM_ARCHIVE ) ) self.assertEqual( { pred for pred in predicates if pred.GetType() != ClientSearch.PREDICATE_TYPE_OR_CONTAINER }, { pred for pred in expected_predicates if pred.GetType() != ClientSearch.PREDICATE_TYPE_OR_CONTAINER } ) self.assertEqual( { frozenset( pred.GetValue() ) for pred in predicates if pred.GetType() == ClientSearch.PREDICATE_TYPE_OR_CONTAINER }, { frozenset( pred.GetValue() ) for pred in expected_predicates if pred.GetType() == ClientSearch.PREDICATE_TYPE_OR_CONTAINER } ) def _test_file_metadata( self, connection, set_up_permissions ): # test file metadata for hide_service_names_tags in ( False, True ): api_permissions = set_up_permissions[ 'search_green_files' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } file_ids_to_hashes = { 1 : bytes.fromhex( 'a' * 64 ), 2 : bytes.fromhex( 'b' * 64 ), 3 : bytes.fromhex( 'c' * 64 ) } metadata = [] for ( file_id, hash ) in file_ids_to_hashes.items(): metadata_row = { 'file_id' : file_id, 'hash' : hash.hex() } metadata.append( metadata_row ) expected_identifier_result = { 'metadata' : metadata } media_results = [] urls = { "https://gelbooru.com/index.php?page=post&s=view&id=4841557", "https://img2.gelbooru.com//images/80/c8/80c8646b4a49395fb36c805f316c49a9.jpg" } sorted_urls = sorted( urls ) random_file_service_hex_current = HydrusData.GenerateKey() random_file_service_hex_deleted = HydrusData.GenerateKey() current_import_timestamp = 500 deleted_import_timestamp = 300 deleted_deleted_timestamp = 450 file_modified_timestamp = 20 for ( file_id, hash ) in file_ids_to_hashes.items(): size = random.randint( 8192, 20 * 1048576 ) mime = random.choice( [ HC.IMAGE_JPEG, HC.VIDEO_WEBM, HC.APPLICATION_PDF ] ) width = random.randint( 200, 4096 ) height = random.randint( 200, 4096 ) duration = random.choice( [ 220, 16.66667, None ] ) has_audio = random.choice( [ True, False ] ) file_info_manager = ClientMediaManagers.FileInfoManager( file_id, hash, size = size, mime = mime, width = width, height = height, duration = duration, has_audio = has_audio ) service_keys_to_statuses_to_tags = { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : [ 'blue_eyes', 'blonde_hair' ], HC.CONTENT_STATUS_PENDING : [ 'bodysuit' ] } } service_keys_to_statuses_to_display_tags = { 
CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : [ 'blue eyes', 'blonde hair' ], HC.CONTENT_STATUS_PENDING : [ 'bodysuit', 'clothing' ] } } tags_manager = ClientMediaManagers.TagsManager( service_keys_to_statuses_to_tags, service_keys_to_statuses_to_display_tags ) locations_manager = ClientMediaManagers.LocationsManager( { random_file_service_hex_current : current_import_timestamp }, { random_file_service_hex_deleted : ( deleted_deleted_timestamp, deleted_import_timestamp ) }, set(), set(), inbox = False, urls = urls, file_modified_timestamp = file_modified_timestamp ) ratings_manager = ClientMediaManagers.RatingsManager( {} ) notes_manager = ClientMediaManagers.NotesManager( {} ) file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager() media_result = ClientMediaResult.MediaResult( file_info_manager, tags_manager, locations_manager, ratings_manager, notes_manager, file_viewing_stats_manager ) media_results.append( media_result ) hide_service_names_tags_metadata = [] metadata = [] detailed_known_urls_metadata = [] services_manager = HG.client_controller.services_manager service_keys_to_names = {} for media_result in media_results: file_info_manager = media_result.GetFileInfoManager() metadata_row = { 'file_id' : file_info_manager.hash_id, 'hash' : file_info_manager.hash.hex(), 'size' : file_info_manager.size, 'mime' : HC.mime_mimetype_string_lookup[ file_info_manager.mime ], 'ext' : HC.mime_ext_lookup[ file_info_manager.mime ], 'width' : file_info_manager.width, 'height' : file_info_manager.height, 'duration' : file_info_manager.duration, 'has_audio' : file_info_manager.has_audio, 'num_frames' : file_info_manager.num_frames, 'num_words' : file_info_manager.num_words, 'file_services' : { 'current' : { random_file_service_hex_current.hex() : { 'time_imported' : current_import_timestamp } }, 'deleted' : { random_file_service_hex_deleted.hex() : { 'time_deleted' : deleted_deleted_timestamp, 'time_imported' : deleted_import_timestamp } } }, 'time_modified' : file_modified_timestamp, 'is_inbox' : False, 'is_local' : False, 'is_trashed' : False, 'known_urls' : list( sorted_urls ) } tags_manager = media_result.GetTagsManager() service_names_to_statuses_to_tags = {} api_service_keys_to_statuses_to_tags = {} service_keys_to_statuses_to_tags = tags_manager.GetServiceKeysToStatusesToTags( ClientTags.TAG_DISPLAY_STORAGE ) for ( service_key, statuses_to_tags ) in service_keys_to_statuses_to_tags.items(): if service_key not in service_keys_to_names: service_keys_to_names[ service_key ] = services_manager.GetName( service_key ) s = { str( status ) : sorted( tags, key = HydrusTags.ConvertTagToSortable ) for ( status, tags ) in statuses_to_tags.items() if len( tags ) > 0 } if len( s ) > 0: service_name = service_keys_to_names[ service_key ] service_names_to_statuses_to_tags[ service_name ] = s api_service_keys_to_statuses_to_tags[ service_key.hex() ] = s metadata_row[ 'service_keys_to_statuses_to_tags' ] = api_service_keys_to_statuses_to_tags service_names_to_statuses_to_display_tags = {} service_keys_to_statuses_to_display_tags = {} service_keys_to_statuses_to_tags = tags_manager.GetServiceKeysToStatusesToTags( ClientTags.TAG_DISPLAY_ACTUAL ) for ( service_key, statuses_to_tags ) in service_keys_to_statuses_to_tags.items(): if service_key not in service_keys_to_names: service_keys_to_names[ service_key ] = services_manager.GetName( service_key ) s = { str( status ) : sorted( tags, key = HydrusTags.ConvertTagToSortable ) for ( status, tags ) in 
statuses_to_tags.items() if len( tags ) > 0 } if len( s ) > 0: service_name = service_keys_to_names[ service_key ] service_names_to_statuses_to_display_tags[ service_name ] = s service_keys_to_statuses_to_display_tags[ service_key.hex() ] = s metadata_row[ 'service_keys_to_statuses_to_display_tags' ] = service_keys_to_statuses_to_display_tags hide_service_names_tags_metadata.append( metadata_row ) metadata_row = dict( metadata_row ) metadata_row[ 'service_names_to_statuses_to_tags' ] = service_names_to_statuses_to_tags metadata_row[ 'service_names_to_statuses_to_display_tags' ] = service_names_to_statuses_to_display_tags metadata.append( metadata_row ) detailed_known_urls_metadata_row = dict( metadata_row ) detailed_known_urls_metadata_row[ 'detailed_known_urls' ] = [ {'normalised_url': 'https://gelbooru.com/index.php?id=4841557&page=post&s=view', 'url_type': 0, 'url_type_string': 'post url', 'match_name': 'gelbooru file page', 'can_parse': True}, {'normalised_url': 'https://img2.gelbooru.com//images/80/c8/80c8646b4a49395fb36c805f316c49a9.jpg', 'url_type': 5, 'url_type_string': 'unknown url', 'match_name': 'unknown url', 'can_parse': False, 'cannot_parse_reason' : 'unknown url class'} ] detailed_known_urls_metadata.append( detailed_known_urls_metadata_row ) expected_hide_service_names_tags_metadata_result = { 'metadata' : hide_service_names_tags_metadata } expected_metadata_result = { 'metadata' : metadata } expected_detailed_known_urls_metadata_result = { 'metadata' : detailed_known_urls_metadata } HG.test_controller.SetRead( 'hash_ids_to_hashes', file_ids_to_hashes ) HG.test_controller.SetRead( 'media_results', media_results ) HG.test_controller.SetRead( 'media_results_from_ids', media_results ) api_permissions.SetLastSearchResults( [ 1, 2, 3, 4, 5, 6 ] ) # fail on non-permitted files path = '/get_files/file_metadata?file_ids={}&only_return_identifiers=true'.format( urllib.parse.quote( json.dumps( [ 1, 2, 3, 7 ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # fails on hashes even if the hashes are 'good' path = '/get_files/file_metadata?hashes={}&only_return_identifiers=true'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # identifiers from file_ids path = '/get_files/file_metadata?file_ids={}&only_return_identifiers=true'.format( urllib.parse.quote( json.dumps( [ 1, 2, 3 ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_identifier_result ) # metadata from file_ids path = '/get_files/file_metadata?file_ids={}'.format( urllib.parse.quote( json.dumps( [ 1, 2, 3 ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_metadata_result ) # now from hashes api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # identifiers from hashes path = 
'/get_files/file_metadata?hashes={}&only_return_identifiers=true'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_identifier_result ) # metadata from hashes path = '/get_files/file_metadata?hashes={}'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_metadata_result ) # fails on borked hashes path = '/get_files/file_metadata?hashes={}'.format( urllib.parse.quote( json.dumps( [ 'deadbeef' ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 400 ) # hide service names to tags path = '/get_files/file_metadata?hashes={}&hide_service_names_tags=true'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_hide_service_names_tags_metadata_result ) # metadata from hashes with detailed url info path = '/get_files/file_metadata?hashes={}&detailed_url_information=true'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 200 ) d = json.loads( text ) self.assertEqual( d, expected_detailed_known_urls_metadata_result ) # failure on missing file_ids HG.test_controller.SetRead( 'media_results_from_ids', HydrusExceptions.DataMissing( 'test missing' ) ) api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } path = '/get_files/file_metadata?file_ids={}'.format( urllib.parse.quote( json.dumps( [ 123456 ] ) ) ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() text = str( data, 'utf-8' ) self.assertEqual( response.status, 404 ) self.assertIn( 'test missing', text ) def _test_get_files( self, connection, set_up_permissions ): # files and thumbs file_id = 1 hash = b'\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3' hash_hex = hash.hex() size = 100 mime = HC.IMAGE_PNG width = 20 height = 20 duration = None file_info_manager = ClientMediaManagers.FileInfoManager( file_id, hash, size = size, mime = mime, width = width, height = height, duration = duration ) service_keys_to_statuses_to_tags = { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : [ 'blue_eyes', 'blonde_hair' ], HC.CONTENT_STATUS_PENDING : [ 'bodysuit' ] } } service_keys_to_statuses_to_display_tags = { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : [ 'blue eyes', 'blonde hair' ], HC.CONTENT_STATUS_PENDING : [ 'bodysuit', 'clothing' ] } } 
tags_manager = ClientMediaManagers.TagsManager( service_keys_to_statuses_to_tags, service_keys_to_statuses_to_display_tags ) locations_manager = ClientMediaManagers.LocationsManager( dict(), dict(), set(), set() ) ratings_manager = ClientMediaManagers.RatingsManager( {} ) notes_manager = ClientMediaManagers.NotesManager( {} ) file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager() media_result = ClientMediaResult.MediaResult( file_info_manager, tags_manager, locations_manager, ratings_manager, notes_manager, file_viewing_stats_manager ) HG.test_controller.SetRead( 'media_result', media_result ) HG.test_controller.SetRead( 'media_results_from_ids', ( media_result, ) ) path = os.path.join( HC.STATIC_DIR, 'hydrus.png' ) file_path = HG.test_controller.client_files_manager.GetFilePath( hash, HC.IMAGE_PNG, check_file_exists = False ) shutil.copy2( path, file_path ) thumb_hash = b'\x17\xde\xd6\xee\x1b\xfa\x002\xbdj\xc0w\x92\xce5\xf0\x12~\xfe\x915\xb3\xb3tA\xac\x90F\x95\xc2T\xc5' path = os.path.join( HC.STATIC_DIR, 'hydrus_small.png' ) thumb_path = HG.test_controller.client_files_manager._GenerateExpectedThumbnailPath( hash ) shutil.copy2( path, thumb_path ) api_permissions = set_up_permissions[ 'search_green_files' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # let's fail first path = '/get_files/file?file_id={}'.format( 10 ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # path = '/get_files/thumbnail?file_id={}'.format( 10 ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # path = '/get_files/file?hash={}'.format( hash_hex ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # path = '/get_files/thumbnail?hash={}'.format( hash_hex ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 403 ) # now succeed path = '/get_files/file?file_id={}'.format( 1 ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( hashlib.sha256( data ).digest(), hash ) # range request path = '/get_files/file?file_id={}'.format( 1 ) partial_headers = dict( headers ) partial_headers[ 'Range' ] = 'bytes=100-199' connection.request( 'GET', path, headers = partial_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 206 ) with open( file_path, 'rb' ) as f: f.seek( 100 ) actual_data = f.read( 100 ) self.assertEqual( data, actual_data ) # n onwards range request path = '/get_files/file?file_id={}'.format( 1 ) partial_headers = dict( headers ) partial_headers[ 'Range' ] = 'bytes=100-' connection.request( 'GET', path, headers = partial_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 206 ) with open( file_path, 'rb' ) as f: f.seek( 100 ) actual_data = f.read() self.assertEqual( data, actual_data ) # last n onwards range request path = '/get_files/file?file_id={}'.format( 1 ) partial_headers = dict( headers ) partial_headers[ 'Range' ] = 'bytes=-100' connection.request( 'GET', path, headers = 
partial_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 206 ) with open( file_path, 'rb' ) as f: actual_data = f.read()[-100:] self.assertEqual( data, actual_data ) # invalid range request path = '/get_files/file?file_id={}'.format( 1 ) partial_headers = dict( headers ) partial_headers[ 'Range' ] = 'bytes=200-199' connection.request( 'GET', path, headers = partial_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 416 ) # multi range request, not currently supported path = '/get_files/file?file_id={}'.format( 1 ) partial_headers = dict( headers ) partial_headers[ 'Range' ] = 'bytes=100-199,300-399' connection.request( 'GET', path, headers = partial_headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 416 ) # path = '/get_files/thumbnail?file_id={}'.format( 1 ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( hashlib.sha256( data ).digest(), thumb_hash ) # api_permissions = set_up_permissions[ 'everything' ] access_key_hex = api_permissions.GetAccessKey().hex() headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } # path = '/get_files/file?hash={}'.format( hash_hex ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( hashlib.sha256( data ).digest(), hash ) # path = '/get_files/thumbnail?hash={}'.format( hash_hex ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( hashlib.sha256( data ).digest(), thumb_hash ) # with "sha256:"" on the front path = '/get_files/thumbnail?hash={}{}'.format( 'sha256:', hash_hex ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 200 ) self.assertEqual( hashlib.sha256( data ).digest(), thumb_hash ) # now 404 hash_404 = os.urandom( 32 ) file_info_manager = ClientMediaManagers.FileInfoManager( 123456, hash_404, size = size, mime = mime, width = width, height = height, duration = duration ) media_result = ClientMediaResult.MediaResult( file_info_manager, tags_manager, locations_manager, ratings_manager, notes_manager, file_viewing_stats_manager ) HG.test_controller.SetRead( 'media_result', media_result ) HG.test_controller.SetRead( 'media_results_from_ids', ( media_result, ) ) # path = '/get_files/file?hash={}'.format( hash_404.hex() ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 404 ) # path = '/get_files/thumbnail?hash={}'.format( hash_404.hex() ) connection.request( 'GET', path, headers = headers ) response = connection.getresponse() data = response.read() self.assertEqual( response.status, 404 ) # os.unlink( file_path ) os.unlink( thumb_path ) def _test_permission_failures( self, connection, set_up_permissions ): pass # failed permission tests def test_client_api( self ): host = '127.0.0.1' port = 45869 connection = http.client.HTTPConnection( host, port, timeout = 10 ) self._test_basics( connection ) set_up_permissions = self._test_client_api_basics( connection ) self._test_get_services( connection, set_up_permissions ) 
self._test_manage_database( connection, set_up_permissions ) self._test_add_files_add_file( connection, set_up_permissions ) self._test_add_files_other_actions( connection, set_up_permissions ) self._test_add_tags( connection, set_up_permissions ) self._test_add_urls( connection, set_up_permissions ) self._test_manage_cookies( connection, set_up_permissions ) self._test_manage_pages( connection, set_up_permissions ) self._test_search_files( connection, set_up_permissions ) self._test_search_files_predicate_parsing( connection, set_up_permissions ) self._test_file_metadata( connection, set_up_permissions ) self._test_get_files( connection, set_up_permissions ) self._test_permission_failures( connection, set_up_permissions ) self._test_cors_fails( connection ) connection.close() # port = 45899 connection = http.client.HTTPConnection( host, port, timeout = 10 ) self._test_cors_succeeds( connection )
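# A minimal sketch of the /get_files/search_files call the tests above exercise,
# assuming a hydrus client with the Client API enabled on the default
# 127.0.0.1:45869 and a hypothetical access key. The response body is JSON;
# the assertions above check keys such as 'hashes' in the parsed result.
import http.client
import json
import urllib.parse

access_key_hex = '0' * 64  # hypothetical; use a real key issued by the client

tags = [ 'kino', 'green' ]
path = '/get_files/search_files?tags={}'.format( urllib.parse.quote( json.dumps( tags ) ) )

connection = http.client.HTTPConnection( '127.0.0.1', 45869, timeout = 10 )
connection.request( 'GET', path, headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex } )
response = connection.getresponse()
print( json.loads( response.read().decode( 'utf-8' ) ) )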
from django.contrib import admin


class InputFilter(admin.SimpleListFilter):
    template = 'admin/input_filter.html'

    def lookups(self, request, model_admin):
        # Dummy, required to show the filter.
        return ((),)

    def choices(self, changelist):
        # Grab only the "all" option.
        all_choice = next(super(InputFilter, self).choices(changelist))
        all_choice['query_parts'] = (
            (k, v)
            for k, v in changelist.get_filters_params().items()
            if k != self.parameter_name
        )
        yield all_choice
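# A hedged usage sketch for InputFilter: subclass it, set parameter_name, and
# implement queryset(). UIDFilter and the Person model are hypothetical, and
# admin/input_filter.html is assumed to render a text <input> named after
# parameter_name plus the query_parts pairs as hidden inputs.
from django.utils.translation import gettext_lazy as _


class UIDFilter(InputFilter):
    parameter_name = 'uid'
    title = _('UID')

    def queryset(self, request, queryset):
        value = self.value()
        if value is not None:
            return queryset.filter(uid=value)
        return queryset


# @admin.register(Person)  # hypothetical model
# class PersonAdmin(admin.ModelAdmin):
#     list_filter = (UIDFilter,)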
"use strict"; const settings = require("../settings.json"); if (settings.api.client.oauth2.link.slice(-1) == "/") settings.api.client.oauth2.link = settings.api.client.oauth2.link.slice(0, -1); if (settings.api.client.oauth2.callbackpath.slice(0, 1) !== "/") settings.api.client.oauth2.callbackpath = "/" + settings.api.client.oauth2.callbackpath; if (settings.pterodactyl.domain.slice(-1) == "/") settings.pterodactyl.domain = settings.pterodactyl.domain.slice(0, -1); const fetch = require('node-fetch'); const indexjs = require("../index.js"); const arciotext = (require("./arcio.js")).text; const fs = require("fs"); module.exports.load = async function(app, db) { app.get("/login", async (req, res) => { if (req.query.redirect) req.session.redirect = "/" + req.query.redirect; let newsettings = JSON.parse(fs.readFileSync("./settings.json")); res.redirect(`https://discord.com/api/oauth2/authorize?client_id=${settings.api.client.oauth2.id}&redirect_uri=${encodeURIComponent(settings.api.client.oauth2.link + settings.api.client.oauth2.callbackpath)}&response_type=code&scope=identify%20email${newsettings.api.client.bot.joinguild.enabled == true ? "%20guilds.join" : ""}${settings.api.client.oauth2.prompt == false ? "&prompt=none" : (req.query.prompt ? (req.query.prompt == "none" ? "&prompt=none" : "") : "")}`); }); app.get("/logout", (req, res) => { let theme = indexjs.get(req); req.session.destroy(() => { return res.redirect(theme.settings.redirect.logout || "/"); }); }); app.get(settings.api.client.oauth2.callbackpath, async (req, res) => { let theme = indexjs.get(req); let customredirect = req.session.redirect; delete req.session.redirect; let failedcallback = theme.settings.redirect.failedcallback || "/"; if (!req.query.code) return res.redirect(failedcallback + "?err=MISSINGCODE"); let json = await fetch( 'https://discord.com/api/oauth2/token', { method: "post", body: "client_id=" + settings.api.client.oauth2.id + "&client_secret=" + settings.api.client.oauth2.secret + "&grant_type=authorization_code&code=" + encodeURIComponent(req.query.code) + "&redirect_uri=" + encodeURIComponent(settings.api.client.oauth2.link + settings.api.client.oauth2.callbackpath), headers: { 'Content-Type': 'application/x-www-form-urlencoded' } } ); if (json.ok == true) { let codeinfo = JSON.parse(await json.text()); let scopes = codeinfo.scope; let missingscopes = []; let newsettings = JSON.parse(fs.readFileSync("./settings.json")); if (scopes.replace(/identify/g, "") == scopes) missingscopes.push("identify"); if (scopes.replace(/email/g, "") == scopes) missingscopes.push("email"); if (newsettings.api.client.bot.joinguild.enabled == true) if (scopes.replace(/guilds.join/g, "") == scopes) missingscopes.push("guilds.join"); if (missingscopes.length !== 0) return res.redirect(failedcallback + "?err=MISSINGSCOPES&scopes=" + missingscopes.join("%20")); let userjson = await fetch( 'https://discord.com/api/users/@me', { method: "get", headers: { "Authorization": `Bearer ${codeinfo.access_token}` } } ); let userinfo = JSON.parse(await userjson.text()); if (userinfo.verified == true) { let ip = (newsettings.api.client.oauth2.ip["trust x-forwarded-for"] == true ? (req.headers['x-forwarded-for'] || req.connection.remoteAddress) : req.connection.remoteAddress); ip = (ip ? 
ip : "::1").replace(/::1/g, "::ffff:127.0.0.1").replace(/^.*:/, ''); if (newsettings.api.client.oauth2.ip.block.includes(ip)) return res.redirect(failedcallback + "?err=IPBLOCKED") if (newsettings.api.client.oauth2.ip["duplicate check"] == true) { let allips = await db.get("ips") || []; let mainip = await db.get("ip-" + userinfo.id); if (mainip) { if (mainip !== ip) { allips = allips.filter(ip2 => ip2 !== mainip); if (allips.includes(ip)) { return res.redirect(failedcallback + "?err=ANTIALT") } allips.push(ip); await db.set("ips", allips); await db.set("ip-" + userinfo.id, ip); } } else { if (allips.includes(ip)) { return res.redirect(failedcallback + "?err=ANTIALT") } allips.push(ip); await db.set("ips", allips); await db.set("ip-" + userinfo.id, ip); } } if (newsettings.api.client.oauth2.ip["cookie alt check"]) { let accountid = getCookie(req, "accountid"); if (accountid) { if (accountid !== userinfo.id) { return res.redirect(failedcallback + "?err=ANTIALT"); } } res.cookie('accountid', userinfo.id); } if (newsettings.api.client.bot.joinguild.enabled == true) { if (typeof newsettings.api.client.bot.joinguild.guildid == "string") { await fetch( `https://discord.com/api/guilds/${newsettings.api.client.bot.joinguild.guildid}/members/${userinfo.id}`, { method: "put", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` }, body: JSON.stringify({ access_token: codeinfo.access_token }) } ); let checkmemberexist = await fetch( `https://discord.com/api/guilds/${guild}/members/${userinfo.id}`, { method: "get", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` } } ); let checkmemberexistjson = checkmemberexist.json(); if (checkmemberexistjson.message && checkmemberexistjson.message === "Unknown Member" && settings.api.client.bot.joinguild.forcejoin) return res.redirect(failedcallback + "?err=DISCORD"); if (newsettings.api.client.bot.joinguild.registeredrole) { await fetch( `https://discord.com/api/guilds/${guild}/members/${userinfo.id}/roles/${newsettings.api.client.bot.joinguild.registeredrole}`, { method: "put", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` } } ); } } else if (typeof newsettings.api.client.bot.joinguild.guildid == "object") { if (Array.isArray(newsettings.api.client.bot.joinguild.guildid)) { for (let guild of newsettings.api.client.bot.joinguild.guildid) { await fetch( `https://discord.com/api/guilds/${guild}/members/${userinfo.id}`, { method: "put", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` }, body: JSON.stringify({ access_token: codeinfo.access_token }) } ); let checkmemberexist = await fetch( `https://discord.com/api/guilds/${guild}/members/${userinfo.id}`, { method: "get", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` } } ); let checkmemberexistjson = checkmemberexist.json(); if (checkmemberexistjson.message && checkmemberexistjson.message === "Unknown Member" && settings.api.client.bot.joinguild.forcejoin) return res.redirect(failedcallback + "?err=DISCORD"); if (newsettings.api.client.bot.joinguild.registeredrole) { await fetch( `https://discord.com/api/guilds/${guild}/members/${userinfo.id}/roles/${newsettings.api.client.bot.joinguild.registeredrole}`, { method: "put", headers: { 'Content-Type': 'application/json', "Authorization": `Bot ${newsettings.api.client.bot.token}` } } ); } } } 
else { return res.send("Tell an administrator there is an error settings.json: api.client.bot.joinguild.guildid is not an array nor a string."); } } else { return res.send("Tell an administrator there is an error settings.json: api.client.bot.joinguild.guildid is not an array nor a string."); } } if (!await db.get("users-" + userinfo.id)) { if (newsettings.api.client.allow.newusers == true) { let genpassword = null; if (newsettings.api.client.passwordgenerator.signup == true) genpassword = makeid(newsettings.api.client.passwordgenerator["length"]); let accountjson = await fetch( settings.pterodactyl.domain + "/api/application/users", { method: "post", headers: { 'Content-Type': 'application/json', "Authorization": `Bearer ${settings.pterodactyl.key}` }, body: JSON.stringify({ username: userinfo.id, email: userinfo.email, first_name: userinfo.username, last_name: "#" + userinfo.discriminator, password: genpassword }) } ); if (await accountjson.status == 201) { let accountinfo = JSON.parse(await accountjson.text()); let userids = await db.get("users") || []; userids.push(accountinfo.attributes.id); await db.set("users", userids); await db.set("users-" + userinfo.id, accountinfo.attributes.id); req.session.newaccount = true; req.session.password = genpassword; } else { let accountlistjson = await fetch( settings.pterodactyl.domain + "/api/application/users?include=servers&filter[email]=" + encodeURIComponent(userinfo.email), { method: "get", headers: { 'Content-Type': 'application/json', "Authorization": `Bearer ${settings.pterodactyl.key}` } } ); let accountlist = await accountlistjson.json(); let user = accountlist.data.filter(acc => acc.attributes.email.toLowerCase() == userinfo.email.toLowerCase()); if (user.length == 1) { let userid = user[0].attributes.id; let userids = await db.get("users") || []; if (userids.filter(id => id == userid).length == 0) { userids.push(userid); await db.set("users", userids); await db.set("users-" + userinfo.id, userid); req.session.pterodactyl = user[0].attributes; } else { return res.redirect(failedcallback + "?err=ANOTHERACCOUNT"); } } else { return res.redirect(failedcallback + "?err=UNKNOWN"); }; }; } else { return res.redirect(failedcallback + "?err=DISABLED") } }; let cacheaccount = await fetch( settings.pterodactyl.domain + "/api/application/users/" + (await db.get("users-" + userinfo.id)) + "?include=servers", { method: "get", headers: { 'Content-Type': 'application/json', "Authorization": `Bearer ${settings.pterodactyl.key}` } } ); if (await cacheaccount.statusText == "Not Found") return res.redirect(failedcallback + "?err=CANNOTGETINFO"); let cacheaccountinfo = JSON.parse(await cacheaccount.text()); req.session.pterodactyl = cacheaccountinfo.attributes; req.session.userinfo = userinfo; if(newsettings.api.client.webhook.auditlogs.enabled && !newsettings.api.client.webhook.auditlogs.disabled.includes("LOGIN")) { let params = JSON.stringify({ embeds: [ { title: "Login", description: `**__User:__** ${req.session.userinfo.username}#${req.session.userinfo.discriminator} (${req.session.userinfo.id}) \n**IP:** ${newsettings.api.client.oauth2.ip["duplicate check"] == true ? 
await db.get("ip-" + req.session.userinfo.id) : "IP Checking is off"}`, color: hexToDecimal("#ffff00") } ] }) fetch(`${newsettings.api.client.webhook.webhook_url}`, { method: "POST", headers: { 'Content-type': 'application/json', }, body: params }).catch(e => console.warn("[WEBSITE] There was an error sending to the webhook: " + e)); } if (customredirect) return res.redirect(customredirect); return res.redirect(theme.settings.redirect.callback || "/"); }; res.redirect(failedcallback + "?err=UNVERIFIED"); } else { res.redirect(failedcallback + "?err=INVALIDCODE"); }; }); }; function makeid(length) { let result = ''; let characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; let charactersLength = characters.length; for (let i = 0; i < length; i++) { result += characters.charAt(Math.floor(Math.random() * charactersLength)); } return result; } function hexToDecimal(hex) { return parseInt(hex.replace("#",""), 16) } // Get a cookie. function getCookie(req, cname) { let cookies = req.headers.cookie; if (!cookies) return null; let name = cname + "="; let ca = cookies.split(';'); for (let i = 0; i < ca.length; i++) { let c = ca[i]; while (c.charAt(0) == ' ') { c = c.substring(1); } if (c.indexOf(name) == 0) { return decodeURIComponent(c.substring(name.length, c.length)); } } return ""; }
#include <stdlib.h>
#include <stdbool.h>

#include "mergesort.h"

bool needsSorting(int rangeSize)
{
    return rangeSize >= 2;
}

void mergeRanges(int values[], int startIndex, int midPoint, int endIndex)
{
    const int rangeSize = endIndex - startIndex;
    int *destination = (int*) calloc(rangeSize, sizeof(int));

    int firstIndex = startIndex;
    int secondIndex = midPoint;
    int copyIndex = 0;

    /* Merge the two sorted halves into the scratch buffer. */
    while (firstIndex < midPoint && secondIndex < endIndex)
    {
        if (values[firstIndex] < values[secondIndex])
        {
            destination[copyIndex] = values[firstIndex];
            firstIndex++;
        }
        else
        {
            destination[copyIndex] = values[secondIndex];
            secondIndex++;
        }
        copyIndex++;
    }

    /* Copy whatever remains of either half. */
    while (firstIndex < midPoint)
    {
        destination[copyIndex] = values[firstIndex];
        copyIndex++;
        firstIndex++;
    }

    while (secondIndex < endIndex)
    {
        destination[copyIndex] = values[secondIndex];
        ++copyIndex;
        ++secondIndex;
    }

    /* Copy the merged range back into place. */
    for (int i = 0; i < rangeSize; i++)
    {
        values[i + startIndex] = destination[i];
    }

    free(destination);
}

void mergesortRange(int values[], int startIndex, int endIndex)
{
    int rangeSize = endIndex - startIndex;

    if (needsSorting(rangeSize))
    {
        /* Overflow-safe midpoint of the half-open range [startIndex, endIndex). */
        int midPoint = startIndex + (endIndex - startIndex) / 2;

        mergesortRange(values, startIndex, midPoint);
        mergesortRange(values, midPoint, endIndex);
        mergeRanges(values, startIndex, midPoint, endIndex);
    }
}

void mergesort(int size, int values[])
{
    mergesortRange(values, 0, size);
}
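# The same top-down scheme sketched in Python for reference: half-open ranges
# [start, end), merge the two sorted halves into a scratch list, copy back.
# This mirrors the C version above; it is an illustration, not part of it.
def mergesort_py(values):
    def sort_range(start, end):
        if end - start < 2:
            return
        mid = start + (end - start) // 2
        sort_range(start, mid)
        sort_range(mid, end)
        merged = []
        i, j = start, mid
        # two-pointer merge of the sorted halves
        while i < mid and j < end:
            if values[i] < values[j]:
                merged.append(values[i])
                i += 1
            else:
                merged.append(values[j])
                j += 1
        merged.extend(values[i:mid])
        merged.extend(values[j:end])
        values[start:end] = merged
    sort_range(0, len(values))


if __name__ == '__main__':
    data = [5, 1, 4, 2, 3]
    mergesort_py(data)
    assert data == [1, 2, 3, 4, 5]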
game.HeroDeathManager = Object.extend({
    init: function(x, y, settings){
        this.alwaysUpdate = true;
    },

    update: function(){
        if(game.data.player.dead){
            //INCREMENT TIMER AND CHECK IF TIMER > DEATH ANIMATION TIME
            //this.body.vel.x -= this.body.accel.x * me.timer.tick;
            me.game.world.removeChild(game.data.player);
            me.game.world.removeChild(game.data.miniPlayer);

            //resetting the player
            me.state.current().resetPlayer(10, 0);
        }
        return true;
    }
});
import os import threading import sys import json import traceback if sys.version_info[0] < 3: import Queue as queue else: import queue __read_thread = None __input_queue = None win = sys.platform.startswith('win') if win: __input_queue = queue.Queue() def read_input_loop(): global __input_queue while True: line = sys.stdin.readline() __input_queue.put(line) if line == '': break def __readWhileAvailable(): if win: # An ugly solution - just open a blocking thread to handle input global __input_queue global __read_thread if not __read_thread: __read_thread = threading.Thread(target=read_input_loop) __read_thread.daemon = True __read_thread.start() buff = '' # Now, read from the queue. First read we block and wait and then wait for timeout. buff += __input_queue.get() return buff else: # Wait for the first char from stdin buff = sys.stdin.readline() # While available, read all the other chars return buff """Demisto instance for scripts only""" template_code = ''' from __future__ import print_function import json import uuid import sys class Demisto: """Wrapper class to interface with the Demisto server via stdin, stdout""" def __init__(self, context): self.callingContext = context args = self.args() if 'demisto_machine_learning_magic_key' in args: import os os.environ['DEMISTO_MACHINE_LEARNING_MAGIC_KEY'] = args['demisto_machine_learning_magic_key'] def log(self, msg): json.dump({'type': 'entryLog', 'args': {'message': msg}}, sys.stdout) sys.stdout.write('\\n') sys.stdout.flush() def investigation(self): return self.callingContext[u'context'][u'Inv'] def incidents(self): return self.callingContext[u'context'][u'Incidents'] def parentEntry(self): return self.callingContext[u'context'][u'ParentEntry'] def context(self): return self.callingContext[u'context'][u'ExecutionContext'] def args(self): return self.callingContext.get(u'args', {}) def uniqueFile(self): return str(uuid.uuid4()) def getFilePath(self, id): return self.__do({'type': 'getFileByEntryID', 'command': 'getFilePath', 'args': {'id': id}}) def getLicenseID(self): return self.__do({'type': 'executeCommand', 'command': 'getLicenseID', 'args': {}})['id'] def get(self, obj, field): """ Get the field from the given dict using dot notation """ parts = field.split('.') for part in parts: if obj and part in obj: obj = obj[part] else: return None return obj def gets(self, obj, field): return str(self.get(obj, field)) def getArg(self, arg): return self.get(self.callingContext, 'args.' 
+ arg) def execute(self, module, command, args): return self.__do({'type': 'execute', 'module': module, 'command': command.strip(), 'args': args}) def executeCommand(self, command, args): return self.__do({'type': 'executeCommand', 'command': command.strip(), 'args': args}) def demistoUrls(self): return self.__do({'type': 'demistoUrls'}) def info(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'info', 'args': argsObj}) def error(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'error', 'args': argsObj}) def exception(self, ex): return self.__do({'type': 'exception', 'command': 'exception', 'args': ex}) def debug(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'debug', 'args': argsObj}) def getAllSupportedCommands(self): return self.__do({'type': 'getAllModulesSupportedCmds'}) def getModules(self): return self.__do({'type': 'getAllModules'}) def setContext(self, name, value): return self.__do({'type': 'setContext', 'name': name, 'value': value}) def dt(self, data, q): return self.__do({'type': 'dt', 'name': q, 'value': data})['result'] def __do(self, cmd): # Watch out there is another defintion like this # prepare command to send to server json.dump(cmd, sys.stdout) sys.stdout.write('\\n') # send command to Demisto server sys.stdout.flush() # wait to receive response from Demisto server data = globals()['__readWhileAvailable']() if data.find('$$##') > -1: raise ValueError(data[4:]) return json.loads(data) def convert(self, results): """ Convert whatever result into entry """ if type(results) is dict: if 'Contents' in results and 'ContentsFormat' in results: return results else: return {'Type': 1, 'Contents': json.dumps(results), 'ContentsFormat': 'json'} if type(results) is list: res = [] for r in results: res.append(self.convert(r)) return res if sys.version_info.major >= 3 and type(results) is bytes: return {'Type': 1, 'Contents': results.decode('utf-8'), 'ContentsFormat': 'text'} return {'Type': 1, 'Contents': str(results), 'ContentsFormat': 'text'} def results(self, results): res = [] converted = self.convert(results) if type(converted) is list: res = converted else: res.append(converted) json.dump({'type': 'result', 'results': res}, sys.stdout) sys.stdout.write('\\n') sys.stdout.flush() demisto = Demisto(context) try: import __builtin__ from StringIO import StringIO except ImportError: # Python 3 import builtins as __builtin__ from io import StringIO def demisto_print(*args): global demisto output = StringIO() __builtin__.print(*args, file=output) result = output.getvalue().strip() demisto.log(result) print = demisto_print ###CODE_HERE### ''' """Demisto instance for integrations only""" integ_template_code = ''' from __future__ import print_function import json import uuid import sys class Demisto: """Wrapper class to interface with the Demisto server via stdin, stdout""" def __init__(self, context): self.callingContext = context args = self.args() if 'demisto_machine_learning_magic_key' in args: import os os.environ['DEMISTO_MACHINE_LEARNING_MAGIC_KEY'] = args['demisto_machine_learning_magic_key'] def log(self, msg): json.dump({'type': 'entryLog', 'args': {'message': 'Integration log: ' + msg}}, sys.stdout) sys.stdout.write('\\n') sys.stdout.flush() def investigation(self): return self.callingContext[u'context'][u'Inv'] def incidents(self): return self.callingContext[u'context'][u'Incidents'] def parentEntry(self): return 
self.callingContext[u'context'][u'ParentEntry'] def context(self): return self.callingContext[u'context'][u'ExecutionContext'] def integrationInstance(self): return self.callingContext[u'context'][u'IntegrationInstance'] def args(self): return self.callingContext.get(u'args', {}) def uniqueFile(self): return str(uuid.uuid4()) def getFilePath(self, id): return self.__do({'type': 'getFileByEntryID', 'command': 'getFilePath', 'args': {'id': id}}) def getLastRun(self): return self.__do({'type': 'executeCommand', 'command': 'getLastRun', 'args': {}}) def setLastRun(self, value): return self.__do({'type': 'executeCommand', 'command': 'setLastRun', 'args': {'value': value}}) def getIntegrationContext(self): return self.__do({'type': 'executeCommand', 'command': 'getIntegrationContext', 'args': {}}) def setIntegrationContext(self, value): return self.__do({'type': 'executeCommand', 'command': 'setIntegrationContext', 'args': {'value': value}}) def getLicenseID(self): return self.__do({'type': 'executeCommand', 'command': 'getLicenseID', 'args': {}})['id'] def params(self): return self.callingContext.get(u'params', {}) def command(self): return self.callingContext.get(u'command', '') def get(self, obj, field): """ Get the field from the given dict using dot notation """ parts = field.split('.') for part in parts: if obj and part in obj: obj = obj[part] else: return None return obj def demistoUrls(self): return self.__do({'type': 'demistoUrls'}) def info(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'info', 'args': argsObj}) def error(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'error', 'args': argsObj}) def debug(self, *args): argsObj = {} argsObj["args"] = list(args) self.__do({'type': 'log', 'command': 'debug', 'args': argsObj}) def gets(self, obj, field): return str(self.get(obj, field)) def getArg(self, arg): return self.get(self.callingContext, 'args.' + arg) def getParam(self, p): return self.get(self.callingContext, 'params.' 
+ p) def dt(self, data, q): return self.__do({'type': 'dt', 'name': q, 'value': data})['result'] def __do(self, cmd): # Watch out there is another definition like this json.dump(cmd, sys.stdout) sys.stdout.write('\\n') sys.stdout.flush() data = globals()['__readWhileAvailable']() if data.find('$$##') > -1: raise ValueError(data[4:]) return json.loads(data) def __convert(self, results): """ Convert whatever result into entry """ if type(results) is dict: if 'Contents' in results and 'ContentsFormat' in results: return results else: return {'Type': 1, 'Contents': json.dumps(results), 'ContentsFormat': 'json'} if type(results) is list: res = [] for r in results: res.append(self.__convert(r)) return res if sys.version_info.major >= 3 and type(results) is bytes: return {'Type': 1, 'Contents': results.decode('utf-8'), 'ContentsFormat': 'text'} return {'Type': 1, 'Contents': str(results), 'ContentsFormat': 'text'} def results(self, results): res = [] converted = self.__convert(results) if type(converted) is list: res = converted else: res.append(converted) json.dump({'type': 'result', 'results': res}, sys.stdout) sys.stdout.write('\\n') sys.stdout.flush() def incidents(self, incidents): self.results({'Type': 1, 'Contents': json.dumps(incidents), 'ContentsFormat': 'json'}) def credentials(self, credentials): self.results({'Type': 1, 'Contents': json.dumps(credentials), 'ContentsFormat': 'json'}) demisto = Demisto(context) try: import __builtin__ from StringIO import StringIO except ImportError: # Python 3 import builtins as __builtin__ from io import StringIO def demisto_print(*args): global demisto output = StringIO() __builtin__.print(*args, file=output) result = output.getvalue().strip() demisto.log(result) print = demisto_print ###CODE_HERE### ''' # rollback file system to its previous state # delete home dir and tmp dir # notifies demisto server that the current executed script is completed # and the process is ready to execute the next script def send_script_completed(): json.dump({'type': 'completed'}, sys.stdout) sys.stdout.write('\n') sys.stdout.flush() def send_script_exception(exc_type, exc_value, exc_traceback): ex_string = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)) if not ex_string.strip(): ex_string = str(exc_value) json.dump({'type': 'exception', 'args': {'exception': ex_string}}, sys.stdout) sys.stdout.write('\n') sys.stdout.flush() def send_pong(): json.dump({'type': 'pong'}, sys.stdout) sys.stdout.write('\n') sys.stdout.flush() # receives ping and sends back pong until we get something else # then the function stops and returns the received string def do_ping_pong(): while True: ping = __readWhileAvailable() if ping == 'ping\n': send_pong() # return pong to server to indicate that everything is fine else: return ping backup_env_vars = {} for key in os.environ.keys(): backup_env_vars[key] = os.environ[key] def rollback_system(): os.environ = {} for key in backup_env_vars.keys(): os.environ[key] = backup_env_vars[key] while True: contextString = do_ping_pong() if contextString == '': # finish executing python break contextJSON = json.loads(contextString) code_string = contextJSON['script'] contextJSON.pop('script', None) is_integ_script = contextJSON['integration'] complete_code = '' if is_integ_script: complete_code = integ_template_code.replace('###CODE_HERE###', code_string) else: complete_code = template_code.replace('###CODE_HERE###', code_string) try: code = compile(complete_code, '<string>', 'exec') sub_globals = { '__readWhileAvailable':
__readWhileAvailable, 'context': contextJSON, 'win': win } exec(code, sub_globals, sub_globals) # guardrails-disable-line except Exception as ex: exc_type, exc_value, exc_traceback = sys.exc_info() send_script_exception(exc_type, exc_value, exc_traceback) except SystemExit: # print 'Will not stop on sys.exit(0)' pass rollback_system() # ping back to Demisto server that script is completed send_script_completed() # if the script is running on native python, terminate the process after the script finishes is_python_native = contextJSON['native'] if is_python_native: break if __read_thread: __read_thread.join(timeout=1)
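# A hedged sketch of the other side of this line-delimited JSON protocol,
# driving the loop above as a subprocess. 'runner.py' is a hypothetical file
# name for the module above; the context fields are inferred from the loop:
# it answers 'ping' with {"type": "pong"}, then expects a JSON context
# carrying 'script', 'integration' and 'native' keys.
import json
import subprocess

proc = subprocess.Popen(
    ['python', 'runner.py'],  # hypothetical entry point for the loop above
    stdin=subprocess.PIPE, stdout=subprocess.PIPE, text=True)

proc.stdin.write('ping\n')
proc.stdin.flush()
print(proc.stdout.readline())  # expected: {"type": "pong"}

context = {
    'script': 'demisto.results("hello")',
    'integration': False,
    'native': True,
    'args': {},
    'context': {},
}
proc.stdin.write(json.dumps(context) + '\n')
proc.stdin.flush()
print(proc.stdout.readline())  # expected: the {"type": "result", ...} entry
print(proc.stdout.readline())  # expected: {"type": "completed"}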
// Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.

/*---
es5id: 15.4.4.22-9-c-i-18
description: >
    Array.prototype.reduceRight - element to be retrieved is own
    accessor property without a get function on an Array
includes: [runTestCase.js]
---*/

function testcase() {
    var testResult = false;
    function callbackfn(prevVal, curVal, idx, obj) {
        if (idx === 1) {
            testResult = (typeof curVal === "undefined");
        }
    }

    var arr = [0, , 2];
    Object.defineProperty(arr, "1", {
        set: function () { },
        configurable: true
    });

    arr.reduceRight(callbackfn, "initialValue");
    return testResult;
}
runTestCase(testcase);
#! /usr/bin/env python3
import timeit


def insertion_sort(list_to_sort, field=lambda l: l[0]):
    """ Insertion sort, in place. """
    # go through the unsorted part of the list
    for i in range(1, len(list_to_sort)):
        j = i - 1
        key = list_to_sort[i]
        # shift items right while they compare greater than the key
        while j >= 0 and field(list_to_sort[j]) > field(key):
            list_to_sort[j + 1] = list_to_sort[j]
            j -= 1
        list_to_sort[j + 1] = key


print("_" * 40)
db = [("j", "g"), ("a", "u"), ("k", "l"), ("o", "i"), ("b", "s"), ("@", "."), ("p", "s"), ("o", "e")]
print("Before insertion sort")
print(db)
insertion_sort(db, lambda e: e[0])
print("After insertion sort")
print(db)

#db1 = [1,5,8,2,4,9,10]
#insertion_sort(db1, lambda e: e)
# print(db1)


def quick_sort(list_to_sort, field=lambda l: l[0]):
    # three empty lists for the partition
    less, equal, greater = [], [], []
    # if we have something to sort
    if len(list_to_sort) > 1:
        # take the middle item as the pivot (any item works)
        pivot = list_to_sort[len(list_to_sort) // 2]
        # put each item into the partition it belongs to,
        # comparing it against the pivot
        for obj in list_to_sort:
            if field(obj) < field(pivot):
                less.append(obj)
            elif field(obj) == field(pivot):
                equal.append(obj)
            elif field(obj) > field(pivot):
                greater.append(obj)
        return quick_sort(less, field) + equal + quick_sort(greater, field)
    else:
        return list_to_sort


print("_" * 40)
db2 = [1, 5, 6, 7, 3, 2, 1, 5, 6, 3, 2, 6, 9, 8]
print("Before quick sort")
print(db2)
db2 = quick_sort(db2, lambda l: l)
print("After quick sort")
print(db2)
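# Putting the timeit import above to work: a rough timing comparison of the
# two sorts on a random list. Input size and repeat count are arbitrary.
import random

sample = [random.randint(0, 10000) for _ in range(1000)]

t_insertion = timeit.timeit(lambda: insertion_sort(list(sample), lambda e: e), number=10)
t_quick = timeit.timeit(lambda: quick_sort(list(sample), lambda l: l), number=10)

print("_" * 40)
print("insertion sort: {:.4f}s, quick sort: {:.4f}s".format(t_insertion, t_quick))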
from dataclasses import dataclass, field

from apischema import alias
from apischema.objects import object_fields


@alias(lambda s: f"prefixed_{s}")
@dataclass
class Data:
    not_aliased: int = field(metadata=alias(override=False))
    not_prefixed: int = field(metadata=alias("not_overridden", override=False))
    prefixed: int
    prefixed_alias: str = field(metadata=alias("alias"))


def test_alias():
    assert {name: field.alias for name, field in object_fields(Data).items()} == {
        "not_aliased": "not_aliased",
        "not_prefixed": "not_overridden",
        "prefixed": "prefixed_prefixed",
        "prefixed_alias": "prefixed_alias",
    }
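# A hedged serialization sketch to complement the field-alias test above.
# It assumes apischema's serialize(type, obj) form (the signature of recent
# apischema releases; older versions took only the object), so the output
# keys below follow the same aliasing the test asserts on.
from apischema import serialize


def test_alias_serialization():
    data = Data(not_aliased=1, not_prefixed=2, prefixed=3, prefixed_alias="x")
    assert serialize(Data, data) == {
        "not_aliased": 1,
        "not_overridden": 2,
        "prefixed_prefixed": 3,
        "prefixed_alias": "x",
    }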
# -*- Mode: Python -*- # $Id: filesys.py,v 1.9 2003/12/24 16:10:56 akuchling Exp $ # Author: Sam Rushing <rushing@nightmare.com> # # Generic filesystem interface. # # We want to provide a complete wrapper around any and all # filesystem operations. # this class is really just for documentation, # identifying the API for a filesystem object. # opening files for reading, and listing directories, should # return a producer. class abstract_filesystem: def __init__ (self): pass def current_directory (self): "Return a string representing the current directory." pass def listdir (self, path, long=0): """Return a listing of the directory at 'path' The empty string indicates the current directory. If 'long' is set, instead return a list of (name, stat_info) tuples """ pass def open (self, path, mode): "Return an open file object" pass def stat (self, path): "Return the equivalent of os.stat() on the given path." pass def isdir (self, path): "Does the path represent a directory?" pass def isfile (self, path): "Does the path represent a plain file?" pass def cwd (self, path): "Change the working directory." pass def cdup (self): "Change to the parent of the current directory." pass def longify (self, path): """Return a 'long' representation of the filename [for the output of the LIST command]""" pass # standard wrapper around a unix-like filesystem, with a 'false root' # capability. # security considerations: can symbolic links be used to 'escape' the # root? should we allow it? if not, then we could scan the # filesystem on startup, but that would not help if they were added # later. We will probably need to check for symlinks in the cwd method. # what to do if wd is an invalid directory? import os import stat import re import string def safe_stat (path): try: return (path, os.stat (path)) except: return None import glob class os_filesystem: path_module = os.path # set this to zero if you want to disable pathname globbing. # [we currently don't glob, anyway] do_globbing = 1 def __init__ (self, root, wd='/'): self.root = root self.wd = wd def current_directory (self): return self.wd def isfile (self, path): p = self.normalize (self.path_module.join (self.wd, path)) return self.path_module.isfile (self.translate(p)) def isdir (self, path): p = self.normalize (self.path_module.join (self.wd, path)) return self.path_module.isdir (self.translate(p)) def cwd (self, path): p = self.normalize (self.path_module.join (self.wd, path)) translated_path = self.translate(p) if not self.path_module.isdir (translated_path): return 0 else: old_dir = os.getcwd() # temporarily change to that directory, in order # to see if we have permission to do so. try: can = 0 try: os.chdir (translated_path) can = 1 self.wd = p except: pass finally: if can: os.chdir (old_dir) return can def cdup (self): return self.cwd ('..') def listdir (self, path, long=0): p = self.translate (path) # I think we should glob, but limit it to the current # directory only. ld = os.listdir (p) if not long: return list_producer (ld, None) else: old_dir = os.getcwd() try: os.chdir (p) # if os.stat fails we ignore that file. 
result = filter (None, map (safe_stat, ld)) finally: os.chdir (old_dir) return list_producer (result, self.longify) # TODO: implement a cache w/timeout for stat() def stat (self, path): p = self.translate (path) return os.stat (p) def open (self, path, mode): p = self.translate (path) return open (p, mode) def unlink (self, path): p = self.translate (path) return os.unlink (p) def mkdir (self, path): p = self.translate (path) return os.mkdir (p) def rmdir (self, path): p = self.translate (path) return os.rmdir (p) def rename(self, src, dst): return os.rename(self.translate(src),self.translate(dst)) # utility methods def normalize (self, path): # watch for the ever-sneaky '/+' path element path = re.sub('/+', '/', path) p = self.path_module.normpath (path) # remove 'dangling' cdup's. if len(p) > 2 and p[:3] == '/..': p = '/' return p def translate (self, path): # we need to join together three separate # path components, and do it safely. # <real_root>/<current_directory>/<path> # use the operating system's path separator. path = string.join (string.split (path, '/'), os.sep) p = self.normalize (self.path_module.join (self.wd, path)) p = self.normalize (self.path_module.join (self.root, p[1:])) return p def longify (self, (path, stat_info)): return unix_longify (path, stat_info) def __repr__ (self): return '<unix-style fs root:%s wd:%s>' % ( self.root, self.wd ) if os.name == 'posix': class unix_filesystem (os_filesystem): pass class schizophrenic_unix_filesystem (os_filesystem): PROCESS_UID = os.getuid() PROCESS_EUID = os.geteuid() PROCESS_GID = os.getgid() PROCESS_EGID = os.getegid() def __init__ (self, root, wd='/', persona=(None, None)): os_filesystem.__init__ (self, root, wd) self.persona = persona def become_persona (self): if self.persona is not (None, None): uid, gid = self.persona # the order of these is important! os.setegid (gid) os.seteuid (uid) def become_nobody (self): if self.persona is not (None, None): os.seteuid (self.PROCESS_UID) os.setegid (self.PROCESS_GID) # cwd, cdup, open, listdir def cwd (self, path): try: self.become_persona() return os_filesystem.cwd (self, path) finally: self.become_nobody() def cdup (self, path): try: self.become_persona() return os_filesystem.cdup (self) finally: self.become_nobody() def open (self, filename, mode): try: self.become_persona() return os_filesystem.open (self, filename, mode) finally: self.become_nobody() def listdir (self, path, long=0): try: self.become_persona() return os_filesystem.listdir (self, path, long) finally: self.become_nobody() # For the 'real' root, we could obtain a list of drives, and then # use that. Doesn't win32 provide such a 'real' filesystem? # [yes, I think something like this "\\.\c\windows"] class msdos_filesystem (os_filesystem): def longify (self, (path, stat_info)): return msdos_longify (path, stat_info) # A merged filesystem will let you plug other filesystems together. # We really need the equivalent of a 'mount' capability - this seems # to be the most general idea. So you'd use a 'mount' method to place # another filesystem somewhere in the hierarchy. # Note: this is most likely how I will handle ~user directories # with the http server. class merged_filesystem: def __init__ (self, *fsys): pass # this matches the output of NT's ftp server (when in # MSDOS mode) exactly. 
def msdos_longify (file, stat_info): if stat.S_ISDIR (stat_info[stat.ST_MODE]): dir = '<DIR>' else: dir = ' ' date = msdos_date (stat_info[stat.ST_MTIME]) return '%s %s %8d %s' % ( date, dir, stat_info[stat.ST_SIZE], file ) def msdos_date (t): try: info = time.gmtime (t) except: info = time.gmtime (0) # year, month, day, hour, minute, second, ... hour = info[3] if hour > 11: merid = 'PM' hour = hour - 12 else: merid = 'AM' return '%02d-%02d-%02d %02d:%02d%s' % ( info[1], info[2], info[0]%100, hour, info[4], merid ) months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] mode_table = { '0':'---', '1':'--x', '2':'-w-', '3':'-wx', '4':'r--', '5':'r-x', '6':'rw-', '7':'rwx' } import time def unix_longify (file, stat_info): # for now, only pay attention to the lower bits mode = ('%o' % stat_info[stat.ST_MODE])[-3:] mode = string.join (map (lambda x: mode_table[x], mode), '') if stat.S_ISDIR (stat_info[stat.ST_MODE]): dirchar = 'd' else: dirchar = '-' date = ls_date (long(time.time()), stat_info[stat.ST_MTIME]) return '%s%s %3d %-8d %-8d %8d %s %s' % ( dirchar, mode, stat_info[stat.ST_NLINK], stat_info[stat.ST_UID], stat_info[stat.ST_GID], stat_info[stat.ST_SIZE], date, file ) # Emulate the unix 'ls' command's date field. # it has two formats - if the date is more than 180 # days in the past, then it's like this: # Oct 19 1995 # otherwise, it looks like this: # Oct 19 17:33 def ls_date (now, t): try: info = time.gmtime (t) except: info = time.gmtime (0) # 15,600,000 == 86,400 * 180 if (now - t) > 15600000: return '%s %2d %d' % ( months[info[1]-1], info[2], info[0] ) else: return '%s %2d %02d:%02d' % ( months[info[1]-1], info[2], info[3], info[4] ) # =========================================================================== # Producers # =========================================================================== class list_producer: def __init__ (self, list, func=None): self.list = list self.func = func # this should do a pushd/popd def more (self): if not self.list: return '' else: # do a few at a time bunch = self.list[:50] if self.func is not None: bunch = map (self.func, bunch) self.list = self.list[50:] return string.joinfields (bunch, '\r\n') + '\r\n'
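A brief usage sketch of os_filesystem's false-root translation (the paths are hypothetical, and cwd() requires the target directory to really exist, since it performs a trial os.chdir()):

# Illustration only: how normalize()/translate() confine paths to the root.
fs = os_filesystem('/home/ftp')     # '/home/ftp' acts as the false root
fs.cwd('pub')                       # wd -> '/pub' if the trial chdir succeeds
fs.current_directory()              # -> '/pub'
fs.translate('docs/readme.txt')     # -> '/home/ftp/pub/docs/readme.txt'
fs.translate('/docs/readme.txt')    # absolute paths ignore wd -> '/home/ftp/docs/readme.txt'
fs.translate('../../etc/passwd')    # dangling '..' collapsed -> stays under '/home/ftp'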
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #ifndef KUDU_FS_FILE_BLOCK_MANAGER_H #define KUDU_FS_FILE_BLOCK_MANAGER_H #include <map> #include <memory> #include <string> #include <unordered_set> #include <vector> #include "kudu/fs/block_id.h" #include "kudu/fs/block_manager.h" #include "kudu/util/atomic.h" #include "kudu/util/locks.h" #include "kudu/util/random.h" namespace kudu { class Env; class MemTracker; class MetricEntity; class WritableFile; namespace fs { class PathInstanceMetadataFile; namespace internal { class FileBlockLocation; class FileReadableBlock; class FileWritableBlock; struct BlockManagerMetrics; } // namespace internal // A file-backed block storage implementation. // // This is a naive block implementation which maps each block to its own // file on disk. To prevent the block directory from becoming too large, // blocks are aggregated into a 3-level directory hierarchy. // // The block manager can take advantage of multiple filesystem paths. A block // written to a given path will be assigned an ID that includes enough // information to uniquely identify the path's underlying disk. The ID is // resolved back into a filesystem path when the block is opened for reading. // The structure of this ID limits the block manager to at most 65,536 disks. // // When creating blocks, the block manager will round robin through the // available filesystem paths. // // TODO: Support path-based block placement hints. // The file-backed block manager. class FileBlockManager : public BlockManager { public: // Creates a new in-memory instance of a FileBlockManager. // // 'env' should remain alive for the lifetime of the block manager. FileBlockManager(Env* env, const BlockManagerOptions& opts); virtual ~FileBlockManager(); virtual Status Create() OVERRIDE; virtual Status Open() OVERRIDE; virtual Status CreateBlock(const CreateBlockOptions& opts, gscoped_ptr<WritableBlock>* block) OVERRIDE; virtual Status CreateBlock(gscoped_ptr<WritableBlock>* block) OVERRIDE; virtual Status OpenBlock(const BlockId& block_id, gscoped_ptr<ReadableBlock>* block) OVERRIDE; virtual Status DeleteBlock(const BlockId& block_id) OVERRIDE; virtual Status CloseBlocks(const std::vector<WritableBlock*>& blocks) OVERRIDE; private: friend class internal::FileBlockLocation; friend class internal::FileReadableBlock; friend class internal::FileWritableBlock; // Synchronizes the metadata for a block with the given id. Status SyncMetadata(const internal::FileBlockLocation& block_id); // Looks up the path of the file backing a particular block ID. // // On success, overwrites 'path' with the file's path. bool FindBlockPath(const BlockId& block_id, std::string* root_path) const; Env* env() const { return env_; } // For manipulating files. 
  Env* env_;

  // If true, only read operations are allowed.
  const bool read_only_;

  // Filesystem paths where all block directories are found.
  const std::vector<std::string> root_paths_;

  // Maps path indices to their instance files.
  //
  // There's no need to synchronize access to the map as it is only written
  // to during Create() and Open(); all subsequent accesses are reads.
  typedef std::map<uint16_t, PathInstanceMetadataFile*> PathMap;
  PathMap root_paths_by_idx_;

  // For generating block IDs.
  ThreadSafeRandom rand_;
  AtomicInt<int64_t> next_block_id_;

  // Protects 'dirty_dirs_' and 'next_root_path_'.
  mutable simple_spinlock lock_;

  // Tracks the block directories which are dirty from block creation. This
  // lets us perform some simple coalescing when synchronizing metadata.
  std::unordered_set<std::string> dirty_dirs_;

  // Points to the filesystem path to be used when creating the next block.
  PathMap::iterator next_root_path_;

  // Metric container for the block manager.
  // May be null if instantiated without metrics.
  gscoped_ptr<internal::BlockManagerMetrics> metrics_;

  // Tracks memory consumption of any allocations numerous enough to be
  // interesting.
  std::shared_ptr<MemTracker> mem_tracker_;

  DISALLOW_COPY_AND_ASSIGN(FileBlockManager);
};

} // namespace fs
} // namespace kudu

#endif
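The ID-to-path scheme described in the header comment can be illustrated with a small Python sketch; block_path below is hypothetical and only mirrors the idea (a 16-bit path index plus a shallow directory fan-out), not Kudu's actual on-disk layout:

# Hypothetical sketch: a 16-bit path index picks the disk (hence the 65,536
# limit), and a 3-level fan-out keeps any one directory from growing too large.
def block_path(root_paths, block_id):
    path_idx = (block_id >> 48) & 0xFFFF                     # which filesystem root
    low_bits = block_id & ((1 << 48) - 1)
    levels = [(low_bits >> s) & 0xFF for s in (40, 32, 24)]  # 3-level hierarchy
    parts = [root_paths[path_idx]] + ["%02x" % l for l in levels]
    return "/".join(parts + ["%016x" % block_id])

print(block_path(["/data0", "/data1"], (1 << 48) | 0xDEADBEEF))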
/** @file
Header file for Elf32 Convert solution

Copyright (c) 2010 - 2014, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials are licensed and made available
under the terms and conditions of the BSD License which accompanies this
distribution. The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/

#ifndef _ELF_32_CONVERT_
#define _ELF_32_CONVERT_

BOOLEAN
InitializeElf32 (
  UINT8               *FileBuffer,
  ELF_FUNCTION_TABLE  *ElfFunctions
  );

#endif
# app/notifications/__init__.py
from flask import Blueprint, request, current_app, session, redirect, url_for

notifications = Blueprint('notifications', __name__, template_folder='templates')

from app.notifications import routes
from app.auth.models import User


@notifications.before_request
def before_request():
    # Anonymous sessions: reject POSTs, bounce everything else to the home page.
    if not session.get('_id'):
        if request.method == 'POST':
            return "error"
        else:
            return redirect(url_for('main.home'))
    if session.get('user_id'):
        if request.path == '/get_update':
            # Validate the page doing the polling (the referrer), not
            # '/get_update' itself: polling from the root page or from any
            # pre-login page is rejected.
            refer_path = request.referrer[len(request.url_root) - 1:]
            no_login = ['/activate', '/forgot', '/register', '/reset', '/setup']
            if refer_path == '/':
                return "error"
            for url in no_login:
                if refer_path.find(url) == 0:
                    return "error"
            return None
        user = User.query.get(session['user_id'])
        if request.method == 'GET':
            session['notif_count'] = str(user.update_count())
        if not user.validate_user():
            return redirect(url_for('authentication.set_up_profile'))
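A hedged sketch of how this blueprint would be registered in an application factory (create_app and the secret key are assumptions for illustration, not taken from this app):

# Hypothetical factory: where the notifications blueprint gets wired in.
from flask import Flask

def create_app():
    app = Flask(__name__)
    app.secret_key = 'change-me'            # sessions require a secret key
    from app.notifications import notifications
    app.register_blueprint(notifications)   # before_request now guards its routes
    return app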
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from rest_framework.permissions import IsAuthenticated

from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions


class HelloApiView(APIView):
    """Test API View"""
    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Returns a list of APIView features"""
        an_apiview = [
            'Uses HTTP methods as functions (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your application logic',
            'Is mapped manually to URLs',
        ]
        return Response({'message': 'Hello!', 'an_apiview': an_apiview})

    def post(self, request):
        """Creates a hello message with our name"""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            message = f'Hello {name}'
            return Response({'message': message})
        else:
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST
            )

    def put(self, request, pk=None):
        """Handles updating an object"""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handles a partial update of an object"""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Deletes an object"""
        return Response({'method': 'DELETE'})


class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet"""
    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Returns a Hello message"""
        a_viewset = [
            'Uses actions (list, create, retrieve, update, partial_update, destroy)',
            'Automatically maps to URLs using Routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': a_viewset})

    def create(self, request):
        """Create a new hello message"""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            message = f'Hello {name}!'
            return Response({'message': message})
        else:
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST
            )

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID"""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object"""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle updating part of an object"""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object"""
        return Response({'http_method': 'DELETE'})


class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating profiles"""
    serializer_class = serializers.UserProfileSerializer
    queryset = models.UserProfile.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnProfile,)
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)


class UserLoginApiView(ObtainAuthToken):
    """Handle creating user authentication tokens"""
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES


class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handles creating, reading, updating profile feed items"""
    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    permission_classes = (permissions.UpdateOwnStatus, IsAuthenticated,)

    def perform_create(self, serializer):
        """Sets the user profile to the logged in user"""
        serializer.save(user_profile=self.request.user)
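For completeness, a sketch of how these views are commonly routed; the module path profiles_api/urls.py, the URL prefixes, and the basenames are assumptions (basename requires DRF 3.9+, older versions spell it base_name):

# Hypothetical profiles_api/urls.py wiring the APIView and the ViewSets.
from django.urls import path, include
from rest_framework.routers import DefaultRouter

from profiles_api import views

router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, basename='hello-viewset')
router.register('profile', views.UserProfileViewSet)
router.register('feed', views.UserProfileFeedViewSet)

urlpatterns = [
    path('hello-view/', views.HelloApiView.as_view()),
    path('login/', views.UserLoginApiView.as_view()),
    path('', include(router.urls)),
]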
from contextlib import contextmanager from rllab_maml.core.serializable import Serializable from rllab_maml.misc.tensor_utils import flatten_tensors, unflatten_tensors import tensorflow as tf load_params = True @contextmanager def suppress_params_loading(): global load_params load_params = False yield load_params = True class Parameterized(object): def __init__(self): self._cached_params = {} self._cached_param_dtypes = {} self._cached_param_shapes = {} self._cached_assign_ops = {} self._cached_assign_placeholders = {} def get_params_internal(self, **tags): """ Internal method to be implemented which does not perform caching """ raise NotImplementedError def get_params(self, **tags): """ Get the list of parameters, filtered by the provided tags. Some common tags include 'regularizable' and 'trainable' """ tag_tuple = tuple(sorted(list(tags.items()), key=lambda x: x[0])) if tag_tuple not in self._cached_params: self._cached_params[tag_tuple] = self.get_params_internal(**tags) return self._cached_params[tag_tuple] def get_param_dtypes(self, **tags): tag_tuple = tuple(sorted(list(tags.items()), key=lambda x: x[0])) if tag_tuple not in self._cached_param_dtypes: params = self.get_params(**tags) param_values = tf.get_default_session().run(params) self._cached_param_dtypes[tag_tuple] = [val.dtype for val in param_values] return self._cached_param_dtypes[tag_tuple] def get_param_shapes(self, **tags): tag_tuple = tuple(sorted(list(tags.items()), key=lambda x: x[0])) if tag_tuple not in self._cached_param_shapes: params = self.get_params(**tags) param_values = tf.get_default_session().run(params) self._cached_param_shapes[tag_tuple] = [val.shape for val in param_values] return self._cached_param_shapes[tag_tuple] def get_param_values(self, **tags): params = self.get_params(**tags) param_values = tf.get_default_session().run(params) return flatten_tensors(param_values) def set_param_values(self, flattened_params, **tags): debug = tags.pop("debug", False) param_values = unflatten_tensors( flattened_params, self.get_param_shapes(**tags)) ops = [] feed_dict = dict() for param, dtype, value in zip( self.get_params(**tags), self.get_param_dtypes(**tags), param_values): if param not in self._cached_assign_ops: assign_placeholder = tf.placeholder(dtype=param.dtype.base_dtype) assign_op = tf.assign(param, assign_placeholder) self._cached_assign_ops[param] = assign_op self._cached_assign_placeholders[param] = assign_placeholder ops.append(self._cached_assign_ops[param]) feed_dict[self._cached_assign_placeholders[param]] = value.astype(dtype) if debug: print("setting value of %s" % param.name) tf.get_default_session().run(ops, feed_dict=feed_dict) def flat_to_params(self, flattened_params, **tags): return unflatten_tensors(flattened_params, self.get_param_shapes(**tags)) def __getstate__(self): d = Serializable.__getstate__(self) global load_params if load_params: d["params"] = self.get_param_values() return d def __setstate__(self, d): Serializable.__setstate__(self, d) global load_params if load_params: tf.get_default_session().run(tf.variables_initializer(self.get_params())) self.set_param_values(d["params"]) class JointParameterized(Parameterized): def __init__(self, components): super(JointParameterized, self).__init__() self.components = components def get_params_internal(self, **tags): params = [param for comp in self.components for param in comp.get_params_internal(**tags)] # only return unique parameters return sorted(set(params), key=hash)
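A hedged round trip through the flatten/assign machinery above; SomePolicy is a placeholder for any concrete Parameterized subclass, and a TF1-style default session is assumed, consistent with the rest of the file:

# Hypothetical usage: perturb all trainable parameters in flat-vector space.
import numpy as np

with tf.Session().as_default():
    policy = SomePolicy(...)   # placeholder: any concrete Parameterized subclass
    tf.get_default_session().run(tf.global_variables_initializer())

    theta = policy.get_param_values(trainable=True)       # flat 1-D numpy vector
    noisy = theta + 0.01 * np.random.randn(*theta.shape)
    policy.set_param_values(noisy, trainable=True)        # reuses cached assign ops

The cached placeholder/assign-op pairs mean repeated set_param_values calls reuse existing graph nodes instead of adding a new tf.assign each time.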
/* * Copyright (c) 2007, Laminar Research. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * */ #include <stdio.h> #include <errno.h> #include <stdlib.h> #include <string.h> /* * GenTerrain is compiled with * * gcc GenTerrain.c -o GenTerrain * * GenTerrain is a generator for object spreadasheets. Basically the format of the file describing the objects to x-plane is long and * annoying, since it cross-multiplies a ton of terrain types (made more by variants) with the same set of buildings over and over and over. * So this function generates that pre-multiply. * * Usually it is used like this: * * ./GenTerrain 1 > /rendering_code/config/obj_properties_us.txt * */ /**************************************************************************************************************************************** * * UTILITIES * ****************************************************************************************************************************************/ int gCountry; int gOnce; const char * gCountryNames[2] = { "WORLD", "US" }; const char * gCountryPrefix[2] = { "/lib/global8/", "/lib/global8/us/" }; typedef struct { int width; int depth; int height1; int height2; int road; int fill; const char * feature; } ObjSpec_t; const char * gObjs[10000]; int gCtr = 0; void PrintObj(const char * feature, const char * terrain, int variant, int width1, int width2, int depth1, int depth2, int height1, int height2, int road, int fill, const char * name) { int n; for (n = 0; n < gCtr; ++n) { if (strcmp(gObjs[n], name)==0) { if (gOnce) return; break; } } if (n == gCtr) { gObjs[gCtr++] = strdup(name); } char tbuf[256]; if (terrain && variant && variant == 5) sprintf(tbuf, "terrain/%s_av", terrain); else if (terrain && variant) sprintf(tbuf, "terrain/%s%d", terrain, variant); else if (terrain) sprintf(tbuf, "terrain/%s", terrain); else strcpy(tbuf, "NO_VALUE"); if (width2 != 0) { printf("FAC_PROP %-20s %-30s %5d %5d %5d %5d %5d %5d %3d %3d %s\n", feature, tbuf, width1, width2, depth1, depth2, height1, height2, road, fill, name); } else if (height2 != 0) { printf("OBS_PROP %-20s %-30s %5d %5s %5d %5s %5d %5d %3d %3d %s\n", feature, tbuf, width1, " ", depth1, " ", height1, height2, road, fill, name); } else { printf("OBJ_PROP %-20s %-30s %5d %5s %5d %5s %5d %5s %3d %3d %s\n", feature, tbuf, width1, " ", depth1, " ", height1, " ", road, fill, name); } } void PrintHeader(const char * label, const char * comment) { if (gOnce) return; printf("# ---------------------- %s %s\n", label ? label : "Obstacles",comment ? 
comment : ""); printf("#COMMAND %-20s %-30s %5s %5s %5s %5s %5s %5s %3s %3s %s\n", "FEATURE", "TERRAIN", "WID1", "WID2", "DEP1", "DEP2", "HGT1", "HGT2", "RD", "FIL", "OBJ"); } void PrintSpec(const char * suite, const char * terrain, int variant, const ObjSpec_t spec[]) { int n; int j; const char * sterrain; int j1 = 0, j2 = 0; if (variant) { j1 = 1; j2 = 5; } for (j = j1; j <= j2; ++j) { n = 0; while (spec[n].width != 0) { char buf[256]; int o = 0; if (terrain == NULL) { printf("ERROR: NULL TERRAIN\n"); exit(1); } if (suite != NULL && suite[0] != 0) o += sprintf(buf+o, "%s_", suite); if (spec[n].feature) o += sprintf(buf+o, "%s_", spec[n].feature); o += sprintf(buf+o,"%d_%d", spec[n].width,spec[n].depth); if (spec[n].height2 != 0) o += sprintf(buf+o,"_%d", spec[n].height2); else if (spec[n].height1 != 0) o += sprintf(buf+o,"_%d", spec[n].height1); if (spec[n].fill && spec[n].road) strcat(buf, "a"); else if (spec[n].fill) strcat(buf, "f"); else strcat(buf, "r"); PrintObj(spec[n].feature ? spec[n].feature : "NO_VALUE", terrain, j, spec[n].width,0,spec[n].depth,0,spec[n].height1, spec[n].height2, spec[n].road, spec[n].fill, buf); ++n; } } } /**************************************************************************************************************************************** * * City specification Types * * These define a profile of objects to be used for a given city. * ****************************************************************************************************************************************/ enum { spec_TownSq, // Town - the outlay transition between city and farm/natural spec_OutSq, // Outer-town area - basically residential spec_InSq, // City inner - non-single-unit buildings + sky scrapers limited by height restrictions spec_IndSq, // Industrial spec_HillSq, // Like out, but with hill patterns spec_TownIrr, // Town - the outlay transition between city and farm/natural spec_OutIrr, // Outer-town area - basically residential spec_InIrr, // City inner - non-single-unit buildings + sky scrapers limited by height restrictions spec_IndIrr, // Industrial spec_HillIrr, // Like out, but with hill patterns spec_Park, // Parkland and grass/forset areas spec_Max }; typedef void (* TerrainFunc_t)(const char * name, int vari); void FuncTownSq(const char *, int); void FuncOutSq(const char *, int); void FuncInSq(const char *, int); void FuncIndSq(const char *, int); void FuncHillSq(const char *, int); void FuncTownIrr(const char *, int); void FuncOutIrr(const char *, int); void FuncInIrr(const char *, int); void FuncIndIrr(const char *, int); void FuncHillIrr(const char *, int); void FuncPark(const char *, int); void FuncObjs(const char *, const char *, int); TerrainFunc_t gFuncs[spec_Max] = { FuncTownSq, FuncOutSq, FuncInSq, FuncIndSq, FuncHillSq, FuncTownIrr, FuncOutIrr, FuncInIrr, FuncIndIrr, FuncHillIrr, FuncPark, }; typedef struct { const char * name; int kind; int vari; } TerrainItem_t; TerrainItem_t terrains[] = { //------------------------ ICE CITY {"ice_city", spec_OutIrr, 1}, {"ice_city", spec_OutIrr, 1}, //------------------------NORTH2 CITY {"north2_city_irr_ind", spec_IndIrr, 1}, {"north2_city_irr_in", spec_InIrr, 1}, {"north2_city_irr_out", spec_OutIrr, 1}, {"north2_city_irr_twn", spec_TownIrr, 1}, //------------------------ NORTH CITY {"north_city_irr_ind", spec_IndIrr, 1}, {"north_city_irr_in", spec_InIrr, 1}, {"north_city_irr_out", spec_OutIrr, 1}, {"north_city_irr_twn", spec_TownIrr, 1}, //------------------------ WET CITY {"wet_city_irr_ind", spec_IndIrr, 1}, 
{"wet_city_irr_in", spec_InIrr, 1}, {"wet_city_irr_out", spec_OutIrr, 1}, {"wet_city_irr_twn", spec_TownIrr, 1}, //------------------------ TEMPERATE CITY {"temp_city_park", spec_Park, 0}, {"temp_city_golf", spec_Park, 0}, {"temp_city_irr_ind", spec_IndIrr, 1}, {"temp_city_irr_in", spec_InIrr, 1}, {"temp_city_irr_out", spec_OutIrr, 1}, {"temp_city_irr_twn", spec_TownIrr, 1}, {"temp_city_sq_ind", spec_IndSq, 1}, {"temp_city_sq_in", spec_InSq, 1}, {"temp_city_sq_out", spec_OutSq, 1}, {"temp_city_sq_twn", spec_TownSq, 1}, {"temp_cityhill_irr", spec_HillIrr, 1}, {"temp_cityhill_sq", spec_HillSq, 1}, //------------------------ DRY CITY {"dry_city_park", spec_Park, 0}, {"dry_city_golf", spec_Park, 0}, {"dry_city_irr_ind", spec_IndIrr, 1}, {"dry_city_irr_in", spec_InIrr, 1}, {"dry_city_irr_out", spec_OutIrr, 1}, {"dry_city_irr_twn", spec_TownIrr, 1}, {"dry_city_sq_ind", spec_IndSq, 1}, {"dry_city_sq_in", spec_InSq, 1}, {"dry_city_sq_out", spec_OutSq, 1}, {"dry_city_sq_twn", spec_TownSq, 1}, {"dry_cityhill_irr", spec_HillIrr, 1}, {"dry_cityhill_sq", spec_HillSq, 1}, //------------------------ DESERT CITY {"des_city_park", spec_Park, 0}, {"des_city_golf", spec_Park, 0}, {"des_city_irr_ind", spec_IndIrr, 1}, {"des_city_irr_in", spec_InIrr, 1}, {"des_city_irr_out", spec_OutIrr, 1}, {"des_city_irr_twn", spec_TownIrr, 1}, {"des_city_sq_ind", spec_IndSq, 1}, {"des_city_sq_in", spec_InSq, 1}, {"des_city_sq_out", spec_OutSq, 1}, {"des_city_sq_twn", spec_TownSq, 1}, {"des_cityhill_irr", spec_HillIrr, 1}, {"des_cityhill_sq", spec_HillSq, 1}, {NULL, spec_Max} }; /**************************************************************************************************************************************** * * CITY SPEC BUILDERS * * These functions list the actual objects we must build * ****************************************************************************************************************************************/ void FuncTownSq(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. Widen smaller pieces to create { 90, 40, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(Town-type square)"); PrintSpec("town_sq", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncOutSq(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 40, 20, 40, 0, 1, 0, NULL }, // B2-B { 20, 20, 40, 0, 1, 0, NULL }, { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. 
Widen smaller pieces to create { 90, 30, 0, 0, 1, 0, NULL }, // sparseness { 90, 20, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 60, 20, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 30, 20, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(OuterCity-type irregular)"); PrintSpec("out_irr", t, vari, gCountry ? the_spec_us : the_spec_world); PrintHeader(t, "(OuterCity-type square)"); PrintSpec("out_sq", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncInSq(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 45, 30, 150, 0, 1, 0, NULL }, // S1-H group { 65, 25, 100, 0, 1, 0, NULL }, // S2-A group { 45, 20, 100, 0, 1, 0, NULL }, // S2-A group { 60, 30, 80, 0, 1, 0, NULL }, // S2-B group { 25, 25, 80, 0, 1, 0, NULL }, { 60, 30, 65, 0, 1, 0, NULL }, // B2-E { 30, 25, 65, 0, 1, 0, NULL }, { 60, 30, 55, 0, 1, 0, NULL }, // B2-D { 30, 25, 55, 0, 1, 0, NULL }, { 80, 30, 45, 0, 1, 0, NULL }, // B2-C { 30, 20, 45, 0, 1, 0, NULL }, { 40, 20, 40, 0, 1, 0, NULL }, // B2-B { 20, 20, 40, 0, 1, 0, NULL }, { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. High granularity for precise placement. { 120, 15, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 90, 15, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 60, 15, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 30, 15, 0, 0, 1, 0, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(InnerCity-type - square)"); PrintSpec("in_sq",t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncIndSq(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 200, 200, 0, 0, 1, 0, NULL }, { 200, 100, 0, 0, 1, 0, NULL }, { 100, 60, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(Industrial-type square)"); PrintSpec("ind_sq", t, vari, gCountry ? 
the_spec_us : the_spec_world); } void FuncHillSq(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 120, 30, 0, 0, 1, 0, NULL }, // Smaller buildings for hill-friendliness { 90, 30, 0, 0, 1, 0, NULL }, // Smaller buildings for hill-friendliness { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 1, NULL }, // Small objs backin Europe for hill! { 50, 50, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(Hill-type square)"); PrintSpec("hill_sq",t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncTownIrr(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. Widen smaller pieces to create { 90, 40, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(OuterCity-type irregular)"); PrintSpec("out_irr", t, vari, gCountry ? the_spec_us : the_spec_world); PrintHeader(t, "(Town-type Irrgular)"); PrintSpec("town_irr", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncOutIrr(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 40, 20, 40, 0, 1, 0, NULL }, // B2-B { 20, 20, 40, 0, 1, 0, NULL }, { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. Widen smaller pieces to create { 90, 30, 0, 0, 1, 0, NULL }, // sparseness { 90, 20, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 60, 20, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 30, 20, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(OuterCity-type irregular)"); PrintSpec("out_irr", t, vari, gCountry ? 
the_spec_us : the_spec_world); } void FuncInIrr(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 45, 30, 150, 0, 1, 0, NULL }, // S1-H group { 65, 25, 100, 0, 1, 0, NULL }, // S2-A group { 45, 20, 100, 0, 1, 0, NULL }, // S2-A group { 60, 30, 80, 0, 1, 0, NULL }, // S2-B group { 25, 25, 80, 0, 1, 0, NULL }, { 60, 30, 65, 0, 1, 0, NULL }, // B2-E { 30, 25, 65, 0, 1, 0, NULL }, { 60, 30, 55, 0, 1, 0, NULL }, // B2-D { 30, 25, 55, 0, 1, 0, NULL }, { 80, 30, 45, 0, 1, 0, NULL }, // B2-C { 30, 20, 45, 0, 1, 0, NULL }, { 40, 20, 40, 0, 1, 0, NULL }, // B2-B { 20, 20, 40, 0, 1, 0, NULL }, { 200, 200, 0, 0, 1, 0, NULL }, // Splat a huge block - use for big-box or other meta-areas { 120, 30, 0, 0, 1, 0, NULL }, // Now build long-thin pieces. High granularity for precise placement. { 120, 15, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 90, 15, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 60, 15, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 30, 15, 0, 0, 1, 0, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(InnerCity-type irregular)"); PrintSpec("in_irr", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncIndIrr(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 200, 200, 0, 0, 1, 0, NULL }, { 200, 100, 0, 0, 1, 0, NULL }, { 100, 60, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 1, 0, NULL }, { 90, 30, 0, 0, 1, 0, NULL }, { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 500, 500, 0, 0, 1, 1, NULL }, // Europe - big blocks to try to make a pseudo radial grid wthin highways. { 500, 250, 0, 0, 1, 1, NULL }, { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 0, NULL }, // Hack: do NOT use a smallest block on the inside..makes it look too atomized. // { 60, 60, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(Industrial-type Irregular)"); PrintSpec("ind_irr", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncHillIrr(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 120, 30, 0, 0, 1, 0, NULL }, // Smaller buildings for hill-friendliness { 90, 30, 0, 0, 1, 0, NULL }, // Smaller buildings for hill-friendliness { 60, 30, 0, 0, 1, 0, NULL }, { 30, 30, 0, 0, 1, 0, NULL }, { 60, 60, 0, 0, 0, 1, NULL }, // Vegetation { 60, 30, 0, 0, 0, 1, NULL }, { 30, 30, 0, 0, 0, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; static ObjSpec_t the_spec_world[] = { { 250, 250, 0, 0, 1, 1, NULL }, { 250, 120, 0, 0, 1, 1, NULL }, { 120, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 1, NULL }, // Small objs backin Europe for hill! { 50, 50, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; PrintHeader(t, "(Hill-type Irregular)"); PrintSpec("hill_irr", t, vari, gCountry ? 
the_spec_us : the_spec_world); } void FuncPark(const char * t, int vari) { static ObjSpec_t the_spec_us[] = { { 500, 250, 0, 0, 1, 1, NULL }, // Park-like vevgtation and stuff { 250, 120, 0, 0, 1, 1, NULL }, { 120, 60, 0, 0, 1, 1, NULL }, { 90, 30, 0, 0, 1, 1, NULL }, { 60, 30, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 1, 1, NULL } }; static ObjSpec_t the_spec_world[] = { { 250, 250, 0, 0, 1, 1, NULL }, // Simplified european splat { 250, 120, 0, 0, 1, 1, NULL }, { 120, 50, 0, 0, 1, 1, NULL }, { 0, 0, 0, 0, 0, 0, NULL } }; PrintHeader(t, "(Park-type)"); PrintSpec("park", t, vari, gCountry ? the_spec_us : the_spec_world); } void FuncObjs(const char * s, const char * t, int vari) { static ObjSpec_t the_spec[] = { { 10, 10, 20, 650,1,0, "feat_RadioTower" }, { 5, 5, 20, 650,1,0, "feat_RadioTower" }, // 0-628 { 40, 40, 20, 200,1,0, "feat_Crane" }, // 9-192 { 50, 40, 20, 600,1,0, "feat_Building" }, // 2-527 { 50, 40, 20, 140,1,0, "feat_Windmill" }, // 11-123 { 100, 100, 20, 140,1,0, "feat_Refinery" }, // 49-113 { 100, 100, 20, 350,1,0, "feat_Tank" }, // 0-334 { 100, 100, 20, 400,1,0, "feat_Smokestack" }, // 7-381 { 100, 100, 20, 400,1,0, "feat_Smokestacks" }, // 12-366 { 100, 100, 20, 300,1,0, "feat_Plant" }, // 7-251 { 50, 50, 20, 350,1,0, "feat_CoolingTower"}, // 41-306 { 20, 20, 20, 400,1,0, "feat_Monument" }, // 13-350 /* feat_Dam 9 227 feat_Tramway 9 259 feat_Pole 0 164 feat_Elevator 10 101 feat_Arch 52 192 feat_Spire 17 137 feat_Dome 18 93 feat_Sign 3 62 feat_RadarASR 0 102 feat_RadarARSR 0 0 feat_Building 2 527 */ { 0, 0, 0, 0,0,0, NULL } }; PrintSpec(s, t, vari, the_spec); } int main(int argc, char ** argv) { gOnce = 0; if (argc < 2) { fprintf(stderr, "PLEASE ENTER A COUNTRY CODE 0 = world 1 = us\n"); exit(1); } int ctr = 1; if (strcmp(argv[ctr], "-once")==0) { gOnce = 1; ++ctr; } if (ctr >= argc) { fprintf(stderr, "PLEASE ENTER A COUNTRY CODE 0 = world 1 = us\n"); exit(1); } gCountry = atoi(argv[ctr]); printf("## OBJECTS FOR COUNTRY %s\n",gCountryNames[gCountry]); printf("OBJ_PREFIX %s\n", gCountryPrefix[gCountry]); int t = 0; while (terrains[t].kind != spec_Max) { FuncObjs("", terrains[t].name, terrains[t].vari); gFuncs[terrains[t].kind](terrains[t].name, terrains[t].vari); ++t; } printf("\n# Requires %d unique objects.\n", gCtr); return 0; }
/* * Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana * University Research and Technology * Corporation. All rights reserved. * Copyright (c) 2004-2014 The University of Tennessee and The University * of Tennessee Research Foundation. All rights * reserved. * Copyright (c) 2004-2009 High Performance Computing Center Stuttgart, * University of Stuttgart. All rights reserved. * Copyright (c) 2004-2005 The Regents of the University of California. * All rights reserved. * $COPYRIGHT$ * * Additional copyrights may follow * * $HEADER$ */ #include "opal_config.h" #include "opal/util/output.h" #include "opal/win32/opal_inet.h" /* * convert from presentation format (which usually means ASCII printable) * to network format (which is usually some kind of binary format). * * return: * 1 if the address was valid for the specified address family * 0 if the address wasn't valid (`dst' is untouched in this case) * -1 if some other error occurred (`dst' is untouched in this case, too) */ int opal_inet_pton(int af, const char *src, void *dst) { int addr_len; struct sockaddr sa; struct sockaddr_in *sin = (struct sockaddr_in *) &sa; struct sockaddr_in6 *sin6 = (struct sockaddr_in6 *) &sa; memset(&sa, 0, sizeof(struct sockaddr)); switch (af) { case AF_INET: addr_len = sizeof(struct sockaddr_in); break; case AF_INET6: addr_len = sizeof(struct sockaddr_in6); break; default: return -1; } if (0 == WSAStringToAddress((LPTSTR) src, af, NULL, (LPSOCKADDR) &sa, &addr_len)) { switch (af) { case AF_INET: memcpy(dst, &sin->sin_addr, sizeof(struct in_addr)); break; case AF_INET6: memcpy(dst, &sin6->sin6_addr, sizeof(struct in6_addr)); break; } return 1; } else { opal_output(0, "WSAStringToAddress failed %s:%d. Error code: %d", __FILE__, __LINE__, GetLastError()); return 0; } } /* * convert a network format address to presentation format. * * return: * pointer to presentation format address (`dst'), or NULL. */ const char *opal_inet_ntop(int af, const void *src, char *dst, size_t size) { int addr_len; struct sockaddr sa; DWORD str_len = size; struct sockaddr_in *sin = (struct sockaddr_in *) &sa; struct sockaddr_in6 *sin6 = (struct sockaddr_in6 *) &sa; memset(&sa, 0, sizeof(struct sockaddr)); switch (af) { case AF_INET: addr_len = sizeof(struct sockaddr_in); sin->sin_family = af; memcpy(&sin->sin_addr, src, sizeof(struct in_addr)); break; case AF_INET6: addr_len = sizeof(struct sockaddr_in6); sin6->sin6_family = af; memcpy(&sin6->sin6_addr, src, sizeof(struct in6_addr)); break; default: return NULL; } if (0 == WSAAddressToString((LPSOCKADDR) &sa, addr_len, NULL, dst, &str_len)) { return dst; } else { opal_output(0, "WSAAddressToString failed %s:%d. Error code: %d", __FILE__, __LINE__, GetLastError()); return NULL; } }
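These wrappers give the Windows build the same presentation/network conversions that POSIX exposes as inet_pton(3)/inet_ntop(3); for intuition, the equivalent round trip via Python's socket module:

# Same conversion pair, shown with Python's socket module for intuition.
import socket

packed = socket.inet_pton(socket.AF_INET6, "2001:db8::1")  # presentation -> network
assert len(packed) == 16                                   # raw struct in6_addr
text = socket.inet_ntop(socket.AF_INET6, packed)           # network -> presentation
assert text == "2001:db8::1"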
/* * Copyright (C) 2009 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef YarrParser_h #define YarrParser_h #include <runtime/UString.h> #include "Yarr.h" #include <wtf/ASCIICType.h> #include <wtf/unicode/Unicode.h> namespace JSC { namespace Yarr { #define REGEXP_ERROR_PREFIX "Invalid regular expression: " enum BuiltInCharacterClassID { DigitClassID, SpaceClassID, WordClassID, NewlineClassID, }; // The Parser class should not be used directly - only via the Yarr::parse() method. template<class Delegate> class Parser { private: template<class FriendDelegate> friend const char* parse(FriendDelegate& delegate, const UString& pattern, unsigned backReferenceLimit); enum ErrorCode { NoError, PatternTooLarge, QuantifierOutOfOrder, QuantifierWithoutAtom, MissingParentheses, ParenthesesUnmatched, ParenthesesTypeInvalid, CharacterClassUnmatched, CharacterClassOutOfOrder, EscapeUnterminated, NumberOfErrorCodes }; /* * CharacterClassParserDelegate: * * The class CharacterClassParserDelegate is used in the parsing of character * classes. This class handles detection of character ranges. This class * implements enough of the delegate interface such that it can be passed to * parseEscape() as an EscapeDelegate. This allows parseEscape() to be reused * to perform the parsing of escape characters in character sets. */ class CharacterClassParserDelegate { public: CharacterClassParserDelegate(Delegate& delegate, ErrorCode& err) : m_delegate(delegate) , m_err(err) , m_state(Empty) , m_character(0) { } /* * begin(): * * Called at beginning of construction. */ void begin(bool invert) { m_delegate.atomCharacterClassBegin(invert); } /* * atomPatternCharacter(): * * This method is called either from parseCharacterClass() (for an unescaped * character in a character class), or from parseEscape(). In the former case * the value true will be passed for the argument 'hyphenIsRange', and in this * mode we will allow a hypen to be treated as indicating a range (i.e. /[a-z]/ * is different to /[a\-z]/). */ void atomPatternCharacter(UChar ch, bool hyphenIsRange = false) { switch (m_state) { case AfterCharacterClass: // Following a builtin character class we need look out for a hyphen. // We're looking for invalid ranges, such as /[\d-x]/ or /[\d-\d]/. 
// If we see a hyphen following a charater class then unlike usual // we'll report it to the delegate immediately, and put ourself into // a poisoned state. Any following calls to add another character or // character class will result in an error. (A hypen following a // character-class is itself valid, but only at the end of a regex). if (hyphenIsRange && ch == '-') { m_delegate.atomCharacterClassAtom('-'); m_state = AfterCharacterClassHyphen; return; } // Otherwise just fall through - cached character so treat this as Empty. case Empty: m_character = ch; m_state = CachedCharacter; return; case CachedCharacter: if (hyphenIsRange && ch == '-') m_state = CachedCharacterHyphen; else { m_delegate.atomCharacterClassAtom(m_character); m_character = ch; } return; case CachedCharacterHyphen: if (ch < m_character) { m_err = CharacterClassOutOfOrder; return; } m_delegate.atomCharacterClassRange(m_character, ch); m_state = Empty; return; // See coment in atomBuiltInCharacterClass below. // This too is technically an error, per ECMA-262, and again we // we chose to allow this. Note a subtlely here that while we // diverge from the spec's definition of CharacterRange we do // remain in compliance with the grammar. For example, consider // the expression /[\d-a-z]/. We comply with the grammar in // this case by not allowing a-z to be matched as a range. case AfterCharacterClassHyphen: m_delegate.atomCharacterClassAtom(ch); m_state = Empty; return; } } /* * atomBuiltInCharacterClass(): * * Adds a built-in character class, called by parseEscape(). */ void atomBuiltInCharacterClass(BuiltInCharacterClassID classID, bool invert) { switch (m_state) { case CachedCharacter: // Flush the currently cached character, then fall through. m_delegate.atomCharacterClassAtom(m_character); case Empty: case AfterCharacterClass: m_state = AfterCharacterClass; m_delegate.atomCharacterClassBuiltIn(classID, invert); return; // If we hit either of these cases, we have an invalid range that // looks something like /[x-\d]/ or /[\d-\d]/. // According to ECMA-262 this should be a syntax error, but // empirical testing shows this to break teh webz. Instead we // comply with to the ECMA-262 grammar, and assume the grammar to // have matched the range correctly, but tweak our interpretation // of CharacterRange. Effectively we implicitly handle the hyphen // as if it were escaped, e.g. /[\w-_]/ is treated as /[\w\-_]/. case CachedCharacterHyphen: m_delegate.atomCharacterClassAtom(m_character); m_delegate.atomCharacterClassAtom('-'); // fall through case AfterCharacterClassHyphen: m_delegate.atomCharacterClassBuiltIn(classID, invert); m_state = Empty; return; } } /* * end(): * * Called at end of construction. */ void end() { if (m_state == CachedCharacter) m_delegate.atomCharacterClassAtom(m_character); else if (m_state == CachedCharacterHyphen) { m_delegate.atomCharacterClassAtom(m_character); m_delegate.atomCharacterClassAtom('-'); } m_delegate.atomCharacterClassEnd(); } // parseEscape() should never call these delegate methods when // invoked with inCharacterClass set. 
void assertionWordBoundary(bool) { ASSERT_NOT_REACHED(); } void atomBackReference(unsigned) { ASSERT_NOT_REACHED(); } private: Delegate& m_delegate; ErrorCode& m_err; enum CharacterClassConstructionState { Empty, CachedCharacter, CachedCharacterHyphen, AfterCharacterClass, AfterCharacterClassHyphen, } m_state; UChar m_character; }; Parser(Delegate& delegate, const UString& pattern, unsigned backReferenceLimit) : m_delegate(delegate) , m_backReferenceLimit(backReferenceLimit) , m_err(NoError) , m_data(pattern.characters()) , m_size(pattern.length()) , m_index(0) , m_parenthesesNestingDepth(0) { } /* * parseEscape(): * * Helper for parseTokens() AND parseCharacterClass(). * Unlike the other parser methods, this function does not report tokens * directly to the member delegate (m_delegate), instead tokens are * emitted to the delegate provided as an argument. In the case of atom * escapes, parseTokens() will call parseEscape() passing m_delegate as * an argument, and as such the escape will be reported to the delegate. * * However this method may also be used by parseCharacterClass(), in which * case a CharacterClassParserDelegate will be passed as the delegate that * tokens should be added to. A boolean flag is also provided to indicate * whether that an escape in a CharacterClass is being parsed (some parsing * rules change in this context). * * The boolean value returned by this method indicates whether the token * parsed was an atom (outside of a characted class \b and \B will be * interpreted as assertions). */ template<bool inCharacterClass, class EscapeDelegate> bool parseEscape(EscapeDelegate& delegate) { ASSERT(!m_err); ASSERT(peek() == '\\'); consume(); if (atEndOfPattern()) { m_err = EscapeUnterminated; return false; } switch (peek()) { // Assertions case 'b': consume(); if (inCharacterClass) delegate.atomPatternCharacter('\b'); else { delegate.assertionWordBoundary(false); return false; } break; case 'B': consume(); if (inCharacterClass) delegate.atomPatternCharacter('B'); else { delegate.assertionWordBoundary(true); return false; } break; // CharacterClassEscape case 'd': consume(); delegate.atomBuiltInCharacterClass(DigitClassID, false); break; case 's': consume(); delegate.atomBuiltInCharacterClass(SpaceClassID, false); break; case 'w': consume(); delegate.atomBuiltInCharacterClass(WordClassID, false); break; case 'D': consume(); delegate.atomBuiltInCharacterClass(DigitClassID, true); break; case 'S': consume(); delegate.atomBuiltInCharacterClass(SpaceClassID, true); break; case 'W': consume(); delegate.atomBuiltInCharacterClass(WordClassID, true); break; // DecimalEscape case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { // To match Firefox, we parse an invalid backreference in the range [1-7] as an octal escape. // First, try to parse this as backreference. if (!inCharacterClass) { ParseState state = saveState(); unsigned backReference = consumeNumber(); if (backReference <= m_backReferenceLimit) { delegate.atomBackReference(backReference); break; } restoreState(state); } // Not a backreference, and not octal. if (peek() >= '8') { delegate.atomPatternCharacter('\\'); break; } // Fall-through to handle this as an octal escape. 
} // Octal escape case '0': delegate.atomPatternCharacter(consumeOctal()); break; // ControlEscape case 'f': consume(); delegate.atomPatternCharacter('\f'); break; case 'n': consume(); delegate.atomPatternCharacter('\n'); break; case 'r': consume(); delegate.atomPatternCharacter('\r'); break; case 't': consume(); delegate.atomPatternCharacter('\t'); break; case 'v': consume(); delegate.atomPatternCharacter('\v'); break; // ControlLetter case 'c': { ParseState state = saveState(); consume(); if (!atEndOfPattern()) { int control = consume(); // To match Firefox, inside a character class, we also accept numbers and '_' as control characters. if (inCharacterClass ? WTF::isASCIIAlphanumeric(control) || (control == '_') : WTF::isASCIIAlpha(control)) { delegate.atomPatternCharacter(control & 0x1f); break; } } restoreState(state); delegate.atomPatternCharacter('\\'); break; } // HexEscape case 'x': { consume(); int x = tryConsumeHex(2); if (x == -1) delegate.atomPatternCharacter('x'); else delegate.atomPatternCharacter(x); break; } // UnicodeEscape case 'u': { consume(); int u = tryConsumeHex(4); if (u == -1) delegate.atomPatternCharacter('u'); else delegate.atomPatternCharacter(u); break; } // IdentityEscape default: delegate.atomPatternCharacter(consume()); } return true; } /* * parseAtomEscape(), parseCharacterClassEscape(): * * These methods alias to parseEscape(). */ bool parseAtomEscape() { return parseEscape<false>(m_delegate); } void parseCharacterClassEscape(CharacterClassParserDelegate& delegate) { parseEscape<true>(delegate); } /* * parseCharacterClass(): * * Helper for parseTokens(); calls dirctly and indirectly (via parseCharacterClassEscape) * to an instance of CharacterClassParserDelegate, to describe the character class to the * delegate. */ void parseCharacterClass() { ASSERT(!m_err); ASSERT(peek() == '['); consume(); CharacterClassParserDelegate characterClassConstructor(m_delegate, m_err); characterClassConstructor.begin(tryConsume('^')); while (!atEndOfPattern()) { switch (peek()) { case ']': consume(); characterClassConstructor.end(); return; case '\\': parseCharacterClassEscape(characterClassConstructor); break; default: characterClassConstructor.atomPatternCharacter(consume(), true); } if (m_err) return; } m_err = CharacterClassUnmatched; } /* * parseParenthesesBegin(): * * Helper for parseTokens(); checks for parentheses types other than regular capturing subpatterns. */ void parseParenthesesBegin() { ASSERT(!m_err); ASSERT(peek() == '('); consume(); if (tryConsume('?')) { if (atEndOfPattern()) { m_err = ParenthesesTypeInvalid; return; } switch (consume()) { case ':': m_delegate.atomParenthesesSubpatternBegin(false); break; case '=': m_delegate.atomParentheticalAssertionBegin(); break; case '!': m_delegate.atomParentheticalAssertionBegin(true); break; default: m_err = ParenthesesTypeInvalid; } } else m_delegate.atomParenthesesSubpatternBegin(); ++m_parenthesesNestingDepth; } /* * parseParenthesesEnd(): * * Helper for parseTokens(); checks for parse errors (due to unmatched parentheses). */ void parseParenthesesEnd() { ASSERT(!m_err); ASSERT(peek() == ')'); consume(); if (m_parenthesesNestingDepth > 0) m_delegate.atomParenthesesEnd(); else m_err = ParenthesesUnmatched; --m_parenthesesNestingDepth; } /* * parseQuantifier(): * * Helper for parseTokens(); checks for parse errors and non-greedy quantifiers. 
 */
void parseQuantifier(bool lastTokenWasAnAtom, unsigned min, unsigned max)
{
    ASSERT(!m_err);
    ASSERT(min <= max);

    if (lastTokenWasAnAtom)
        m_delegate.quantifyAtom(min, max, !tryConsume('?'));
    else
        m_err = QuantifierWithoutAtom;
}

/*
 * parseTokens():
 *
 * This method loops over the input pattern reporting tokens to the delegate.
 * The method returns when a parse error is detected, or the end of the pattern
 * is reached. One piece of state is tracked around the loop, which is whether
 * the last token passed to the delegate was an atom (this is necessary to detect
 * a parse error when a quantifier is provided without an atom to quantify).
 */
void parseTokens()
{
    bool lastTokenWasAnAtom = false;

    while (!atEndOfPattern()) {
        switch (peek()) {
        case '|':
            consume();
            m_delegate.disjunction();
            lastTokenWasAnAtom = false;
            break;

        case '(':
            parseParenthesesBegin();
            lastTokenWasAnAtom = false;
            break;

        case ')':
            parseParenthesesEnd();
            lastTokenWasAnAtom = true;
            break;

        case '^':
            consume();
            m_delegate.assertionBOL();
            lastTokenWasAnAtom = false;
            break;

        case '$':
            consume();
            m_delegate.assertionEOL();
            lastTokenWasAnAtom = false;
            break;

        case '.':
            consume();
            m_delegate.atomBuiltInCharacterClass(NewlineClassID, true);
            lastTokenWasAnAtom = true;
            break;

        case '[':
            parseCharacterClass();
            lastTokenWasAnAtom = true;
            break;

        case '\\':
            lastTokenWasAnAtom = parseAtomEscape();
            break;

        case '*':
            consume();
            parseQuantifier(lastTokenWasAnAtom, 0, quantifyInfinite);
            lastTokenWasAnAtom = false;
            break;

        case '+':
            consume();
            parseQuantifier(lastTokenWasAnAtom, 1, quantifyInfinite);
            lastTokenWasAnAtom = false;
            break;

        case '?':
            consume();
            parseQuantifier(lastTokenWasAnAtom, 0, 1);
            lastTokenWasAnAtom = false;
            break;

        case '{': {
            ParseState state = saveState();
            consume();

            if (peekIsDigit()) {
                unsigned min = consumeNumber();
                unsigned max = min;

                if (tryConsume(','))
                    max = peekIsDigit() ? consumeNumber() : quantifyInfinite;

                if (tryConsume('}')) {
                    if (min <= max)
                        parseQuantifier(lastTokenWasAnAtom, min, max);
                    else
                        m_err = QuantifierOutOfOrder;
                    lastTokenWasAnAtom = false;
                    break;
                }
            }

            restoreState(state);
        } // if we did not find a complete quantifier, fall through to the default case.

        default:
            m_delegate.atomPatternCharacter(consume());
            lastTokenWasAnAtom = true;
        }

        if (m_err)
            return;
    }

    if (m_parenthesesNestingDepth > 0)
        m_err = MissingParentheses;
}

/*
 * parse():
 *
 * This method calls parseTokens() to parse over the input and converts any
 * error code to a const char* for a result.
 */
const char* parse()
{
    if (m_size > MAX_PATTERN_SIZE)
        m_err = PatternTooLarge;
    else
        parseTokens();
    ASSERT(atEndOfPattern() || m_err);

    // The order of this array must match the ErrorCode enum.
    static const char* errorMessages[NumberOfErrorCodes] = {
        0, // NoError
        REGEXP_ERROR_PREFIX "regular expression too large",
        REGEXP_ERROR_PREFIX "numbers out of order in {} quantifier",
        REGEXP_ERROR_PREFIX "nothing to repeat",
        REGEXP_ERROR_PREFIX "missing )",
        REGEXP_ERROR_PREFIX "unmatched parentheses",
        REGEXP_ERROR_PREFIX "unrecognized character after (?",
        REGEXP_ERROR_PREFIX "missing terminating ] for character class",
        REGEXP_ERROR_PREFIX "range out of order in character class",
        REGEXP_ERROR_PREFIX "\\ at end of pattern"
    };

    return errorMessages[m_err];
}

// Misc helper functions:

typedef unsigned ParseState;

ParseState saveState()
{
    return m_index;
}

void restoreState(ParseState state)
{
    m_index = state;
}

bool atEndOfPattern()
{
    ASSERT(m_index <= m_size);
    return m_index == m_size;
}

int peek()
{
    ASSERT(m_index < m_size);
    return m_data[m_index];
}

bool peekIsDigit()
{
    return !atEndOfPattern() && WTF::isASCIIDigit(peek());
}

unsigned peekDigit()
{
    ASSERT(peekIsDigit());
    return peek() - '0';
}

int consume()
{
    ASSERT(m_index < m_size);
    return m_data[m_index++];
}

unsigned consumeDigit()
{
    ASSERT(peekIsDigit());
    return consume() - '0';
}

unsigned consumeNumber()
{
    unsigned n = consumeDigit();
    // check for overflow.
    for (unsigned newValue; peekIsDigit() && ((newValue = n * 10 + peekDigit()) >= n); ) {
        n = newValue;
        consume();
    }
    return n;
}

unsigned consumeOctal()
{
    ASSERT(WTF::isASCIIOctalDigit(peek()));

    unsigned n = consumeDigit();
    while (n < 32 && !atEndOfPattern() && WTF::isASCIIOctalDigit(peek()))
        n = n * 8 + consumeDigit();
    return n;
}

bool tryConsume(UChar ch)
{
    if (atEndOfPattern() || (m_data[m_index] != ch))
        return false;
    ++m_index;
    return true;
}

int tryConsumeHex(int count)
{
    ParseState state = saveState();

    int n = 0;
    while (count--) {
        if (atEndOfPattern() || !WTF::isASCIIHexDigit(peek())) {
            restoreState(state);
            return -1;
        }
        n = (n << 4) | WTF::toASCIIHexValue(consume());
    }
    return n;
}

Delegate& m_delegate;
unsigned m_backReferenceLimit;
ErrorCode m_err;
const UChar* m_data;
unsigned m_size;
unsigned m_index;
unsigned m_parenthesesNestingDepth;

// Derived by empirical testing of compile time in PCRE and WREC.
static const unsigned MAX_PATTERN_SIZE = 1024 * 1024;
};

/*
 * Yarr::parse():
 *
 * The parse method is passed a pattern to be parsed and a delegate upon which
 * callbacks will be made to record the parsed tokens forming the regex.
 * Yarr::parse() returns null on success, or a const C string providing an error
 * message where a parse error occurs.
 *
 * The Delegate must implement the following interface:
 *
 *    void assertionBOL();
 *    void assertionEOL();
 *    void assertionWordBoundary(bool invert);
 *
 *    void atomPatternCharacter(UChar ch);
 *    void atomBuiltInCharacterClass(BuiltInCharacterClassID classID, bool invert);
 *    void atomCharacterClassBegin(bool invert);
 *    void atomCharacterClassAtom(UChar ch);
 *    void atomCharacterClassRange(UChar begin, UChar end);
 *    void atomCharacterClassBuiltIn(BuiltInCharacterClassID classID, bool invert);
 *    void atomCharacterClassEnd();
 *    void atomParenthesesSubpatternBegin(bool capture = true);
 *    void atomParentheticalAssertionBegin(bool invert = false);
 *    void atomParenthesesEnd();
 *    void atomBackReference(unsigned subpatternId);
 *
 *    void quantifyAtom(unsigned min, unsigned max, bool greedy);
 *
 *    void disjunction();
 *
 * The regular expression is described by a sequence of assertion*() and atom*()
 * callbacks to the delegate, describing the terms in the regular expression.
 * Following an atom a quantifyAtom() call may occur to indicate that the previous
 * atom should be quantified. In the case of atoms described across multiple
 * calls (parentheses and character classes) the call to quantifyAtom() will come
 * after the call to the atom*End() method, never after atom*Begin().
 *
 * Character classes may either be described by a single call to
 * atomBuiltInCharacterClass(), or by a sequence of atomCharacterClass*() calls.
 * In the latter case, ...Begin() will be called, followed by a sequence of
 * calls to ...Atom(), ...Range(), and ...BuiltIn(), followed by a call to ...End().
 *
 * Sequences of atoms and assertions are broken into alternatives via calls to
 * disjunction(). Assertions, atoms, and disjunctions emitted between calls to
 * atomParenthesesBegin() and atomParenthesesEnd() form the body of a subpattern.
 * atomParenthesesBegin() is passed a subpatternId. In the case of a regular
 * capturing subpattern, this will be the subpatternId associated with these
 * parentheses, and will also by definition be the lowest subpatternId of these
 * parentheses and of any nested parentheses. The atomParenthesesEnd() method
 * is passed the subpatternId of the last capturing subexpression nested within
 * these parentheses. In the case of a capturing subpattern with no nested
 * capturing subpatterns, the same subpatternId will be passed to the begin and
 * end functions. In the case of non-capturing subpatterns the subpatternId
 * passed to the begin method is also the first possible subpatternId that might
 * be nested within these parentheses. If a set of non-capturing parentheses does
 * not contain any capturing subpatterns, then the subpatternId passed to begin
 * will be greater than the subpatternId passed to end.
 */

template<class Delegate>
const char* parse(Delegate& delegate, const UString& pattern, unsigned backReferenceLimit = quantifyInfinite)
{
    return Parser<Delegate>(delegate, pattern, backReferenceLimit).parse();
}

} } // namespace JSC::Yarr

#endif // YarrParser_h
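To make the delegate protocol documented above concrete, here is a minimal sketch (not part of YarrParser.h) of a conforming delegate that simply logs every callback; the class name LoggingDelegate, the printf-based logging, and the commented-out driver code are illustrative assumptions, not the library's API.

#include <stdio.h>

// Minimal sketch of a conforming Delegate: it implements exactly the member
// functions listed in the Yarr::parse() documentation above and prints each
// callback, so the token stream the parser produces for a given pattern can
// be inspected. (LoggingDelegate is a hypothetical name.)
class LoggingDelegate {
public:
    void assertionBOL() { puts("assertionBOL"); }
    void assertionEOL() { puts("assertionEOL"); }
    void assertionWordBoundary(bool invert) { printf("assertionWordBoundary(invert=%d)\n", invert); }

    void atomPatternCharacter(UChar ch) { printf("atomPatternCharacter('%c')\n", static_cast<char>(ch)); }
    void atomBuiltInCharacterClass(BuiltInCharacterClassID classID, bool invert) { printf("atomBuiltInCharacterClass(%d, invert=%d)\n", static_cast<int>(classID), invert); }
    void atomCharacterClassBegin(bool invert = false) { printf("atomCharacterClassBegin(invert=%d)\n", invert); }
    void atomCharacterClassAtom(UChar ch) { printf("atomCharacterClassAtom('%c')\n", static_cast<char>(ch)); }
    void atomCharacterClassRange(UChar begin, UChar end) { printf("atomCharacterClassRange('%c'-'%c')\n", static_cast<char>(begin), static_cast<char>(end)); }
    void atomCharacterClassBuiltIn(BuiltInCharacterClassID classID, bool invert) { printf("atomCharacterClassBuiltIn(%d, invert=%d)\n", static_cast<int>(classID), invert); }
    void atomCharacterClassEnd() { puts("atomCharacterClassEnd"); }
    void atomParenthesesSubpatternBegin(bool capture = true) { printf("atomParenthesesSubpatternBegin(capture=%d)\n", capture); }
    void atomParentheticalAssertionBegin(bool invert = false) { printf("atomParentheticalAssertionBegin(invert=%d)\n", invert); }
    void atomParenthesesEnd() { puts("atomParenthesesEnd"); }
    void atomBackReference(unsigned subpatternId) { printf("atomBackReference(%u)\n", subpatternId); }

    void quantifyAtom(unsigned min, unsigned max, bool greedy) { printf("quantifyAtom(%u, %u, greedy=%d)\n", min, max, greedy); }

    void disjunction() { puts("disjunction"); }
};

// For the pattern a(b|c)* the parser would drive the delegate as:
//   atomPatternCharacter('a')
//   atomParenthesesSubpatternBegin(capture=1)
//   atomPatternCharacter('b')
//   disjunction()
//   atomPatternCharacter('c')
//   atomParenthesesEnd()
//   quantifyAtom(0, quantifyInfinite, greedy=1)
//
// LoggingDelegate delegate;
// if (const char* error = JSC::Yarr::parse(delegate, pattern)) // pattern: a UString
//     fprintf(stderr, "parse failed: %s\n", error);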
import React from "react"
import { FaHtml5, FaCss3, FaReact, FaJsSquare, FaNode } from "react-icons/fa"

const Skills = () => (
  <div className="row work">
    <div className="three columns header-col">
      <h1>
        <span>Skills</span>
      </h1>
    </div>
    <div className="nine columns main-col skills-icons">
      <FaHtml5 size={32} />
      <FaCss3 size={32} />
      <FaJsSquare size={32} />
      <FaReact size={32} />
      <FaNode size={32} />
    </div>
  </div>
)

export default Skills
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t(require("vue-i18n"),require("vue"),require("element-ui"),require("lodash"),require("axios"),require("vuex"),require("vue-router"));else if("function"==typeof define&&define.amd)define(["vue-i18n","vue","element-ui","lodash","axios","vuex","vue-router"],t);else{var n,r="object"==typeof exports?t(require("vue-i18n"),require("vue"),require("element-ui"),require("lodash"),require("axios"),require("vuex"),require("vue-router")):t(e["vue-i18n"],e.vue,e["element-ui"],e.lodash,e.axios,e.vuex,e["vue-router"]);for(n in r)("object"==typeof exports?exports:e)[n]=r[n]}}(window,function(n,r,o,a,i,u,s){return f={},l.m=c={0:function(e,t,n){e.exports=n("56d7")},"0259":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,r.default)(e),0<=e.indexOf((0,o.default)(t))};var r=a(n("d076")),o=a(n("6071"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},"0353":function(e,t,n){"use strict";var r,i=n("6bf8"),u=RegExp.prototype.exec,s=String.prototype.replace,o=u,l="lastIndex",c=(r=/a/,n=/b*/g,u.call(r,"a"),u.call(n,"a"),0!==r[l]||0!==n[l]),f=void 0!==/()??/.exec("")[1];(c||f)&&(o=function(e){var t,n,r,o,a=this;return f&&(n=new RegExp("^"+a.source+"$(?!\\s)",i.call(a))),c&&(t=a[l]),r=u.call(a,e),c&&r&&(a[l]=a.global?r.index+r[0].length:t),f&&r&&1<r.length&&s.call(r[0],n,function(){for(o=1;o<arguments.length-2;o++)void 0===arguments[o]&&(r[o]=void 0)}),r}),e.exports=o},"05fd":function(e,t,n){e.exports=n("baa7")("native-function-to-string",Function.toString)},"065d":function(e,t,n){var r=n("bb8b"),o=n("5edc");e.exports=n("26df")?function(e,t,n){return r.f(e,t,o(1,n))}:function(e,t,n){return e[t]=n,e}},"065e":function(e,t){e.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},"0677":function(e,t){e.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},"06e2":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),s.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/([01][0-9]|2[0-3])/,i=/[0-5][0-9]/,u=new RegExp("[-+]".concat(a.source,":").concat(i.source)),u=new RegExp("([zZ]|".concat(u.source,")")),a=new RegExp("".concat(a.source,":").concat(i.source,":").concat(/([0-5][0-9]|60)/.source).concat(/(\.[0-9]+)?/.source)),i=new RegExp("".concat(/[0-9]{4}/.source,"-").concat(/(0[1-9]|1[0-2])/.source,"-").concat(/([12]\d|0[1-9]|3[01])/.source)),u=new RegExp("".concat(a.source).concat(u.source)),s=new RegExp("".concat(i.source,"[ tT]").concat(u.source));e.exports=t.default,e.exports.default=t.default},"0926":function(e,t){e.exports=function(e){try{return!!e()}catch(e){return!0}}},"09f1":function(e,t,n){!function(d){e.exports=function(){"use strict";var e=function e(t){var n=t.id;var r=t.viewBox;var o=t.content;this.id=n;this.viewBox=r;this.content=o};e.prototype.stringify=function e(){return this.content},e.prototype.toString=function e(){return this.stringify()},e.prototype.destroy=function e(){var t=this;["id","viewBox","content"].forEach(function(e){return delete t[e]})};var r=function(e){var t=!!document.importNode;var n=(new DOMParser).parseFromString(e,"image/svg+xml").documentElement;if(t)return document.importNode(n,true);return n},t=typeof window!=="undefined"?window:typeof d!=="undefined"?d:typeof self!=="undefined"?self:{};function n(e,t){return 
t={exports:{}},e(t,t.exports),t.exports}var o=n(function(n,e){(function(e,t){if(false);else n.exports=t()})(t,function(){function a(e){var t=e&&typeof e==="object";return t&&Object.prototype.toString.call(e)!=="[object RegExp]"&&Object.prototype.toString.call(e)!=="[object Date]"}function r(e){return Array.isArray(e)?[]:{}}function i(e,t){var n=t&&t.clone===true;return n&&a(e)?l(r(e),e,t):e}function u(n,e,r){var o=n.slice();e.forEach(function(e,t){if(typeof o[t]==="undefined")o[t]=i(e,r);else if(a(e))o[t]=l(n[t],e,r);else if(n.indexOf(e)===-1)o.push(i(e,r))});return o}function s(t,n,r){var o={};if(a(t))Object.keys(t).forEach(function(e){o[e]=i(t[e],r)});Object.keys(n).forEach(function(e){if(!a(n[e])||!t[e])o[e]=i(n[e],r);else o[e]=l(t[e],n[e],r)});return o}function l(e,t,n){var r=Array.isArray(t);var o=n||{arrayMerge:u};var a=o.arrayMerge||u;if(r)return Array.isArray(e)?a(e,t,n):i(t,n);else return s(e,t,n)}l.all=function e(t,n){if(!Array.isArray(t)||t.length<2)throw new Error("first argument should be an array with at least two elements");return t.reduce(function(e,t){return l(e,t,n)})};return l})}),a=n(function(e,t){var n={svg:{name:"xmlns",uri:"http://www.w3.org/2000/svg"},xlink:{name:"xmlns:xlink",uri:"http://www.w3.org/1999/xlink"}};t.default=n;e.exports=t.default}),i=function(n){return Object.keys(n).map(function(e){var t=n[e].toString().replace(/"/g,"&quot;");return e+'="'+t+'"'}).join(" ")},u=a.svg,s=a.xlink,l={};l[u.name]=u.uri,l[s.name]=s.uri;var c=function(e,t){if(e===void 0)e="";var n=o(l,t||{});var r=i(n);return"<svg "+r+">"+e+"</svg>"},f;return function(t){function n(){t.apply(this,arguments)}if(t)n.__proto__=t;n.prototype=Object.create(t&&t.prototype);n.prototype.constructor=n;var e={isMounted:{}};e.isMounted.get=function(){return!!this.node};n.createFromExistingNode=function e(t){return new n({id:t.getAttribute("id"),viewBox:t.getAttribute("viewBox"),content:t.outerHTML})};n.prototype.destroy=function e(){if(this.isMounted)this.unmount();t.prototype.destroy.call(this)};n.prototype.mount=function e(t){if(this.isMounted)return this.node;var n=typeof t==="string"?document.querySelector(t):t;var r=this.render();this.node=r;n.appendChild(r);return r};n.prototype.render=function e(){var t=this.stringify();return r(c(t)).childNodes[0]};n.prototype.unmount=function e(){this.node.parentNode.removeChild(this.node)};Object.defineProperties(n.prototype,e);return n}(e)}()}.call(this,n("2409"))},"0b34":function(e,t){e=e.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},"0c29":function(e,t){t.f=Object.getOwnPropertySymbols},"0cad":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),e.replace(/&/g,"&amp;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/\//g,"&#x2F;").replace(/\\/g,"&#x5C;").replace(/`/g,"&#96;")};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"0cb2":function(e,t,n){var r=n("597a"),o=n("d48a");e.exports=n("5e9e")?function(e,t,n){return r.f(e,t,o(1,n))}:function(e,t,n){return e[t]=n,e}},1103:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,r.default)(e);t=t?"\\x00-\\x09\\x0B\\x0C\\x0E-\\x1F\\x7F":"\\x00-\\x1F\\x7F";return(0,o.default)(e,t)};var r=a(n("d076")),o=a(n("4b34"));function a(e){return 
e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},"11d8":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,o.default)(e);t=t?new RegExp("[".concat(t.replace(/[.*+?^${}()|[\]\\]/g,"\\$&"),"]+$"),"g"):/\s+$/g;return e.replace(t,"")};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"120f":function(e,t,n){"use strict";function b(){return this}var y=n("3d8a"),_=n("e99b"),x=n("84e8"),M=n("065d"),w=n("953d"),O=n("3460"),S=n("bac3"),A=n("addc"),E=n("839a")("iterator"),P=!([].keys&&"next"in[].keys()),$="values";e.exports=function(e,t,n,r,o,a,i){O(n,t,r);function u(e){if(!P&&e in m)return m[e];switch(e){case"keys":case $:return function(){return new n(this,e)}}return function(){return new n(this,e)}}var s,l,c,f=t+" Iterator",d=o==$,p=!1,m=e.prototype,v=m[E]||m["@@iterator"]||o&&m[o],g=v||u(o),h=o?d?u("entries"):g:void 0,r="Array"==t&&m.entries||v;if(r&&(c=A(r.call(new e)))!==Object.prototype&&c.next&&(S(c,f,!0),y||"function"==typeof c[E]||M(c,E,b)),d&&v&&v.name!==$&&(p=!0,g=function(){return v.call(this)}),y&&!i||!P&&!p&&m[E]||M(m,E,g),w[t]=g,w[f]=b,o)if(s={values:d?g:u($),keys:a?g:u("keys"),entries:h},i)for(l in s)l in m||x(m,l,s[l]);else _(_.P+_.F*(P||p),t,s);return s}},1230:function(e,t,n){"use strict";t.__esModule=!0,t.default={el:{colorpicker:{confirm:"OK",clear:"クリア"},datepicker:{now:"現在",today:"今日",cancel:"キャンセル",clear:"クリア",confirm:"OK",selectDate:"日付を選択",selectTime:"時間を選択",startDate:"開始日",startTime:"開始時間",endDate:"終了日",endTime:"終了時間",prevYear:"前年",nextYear:"翌年",prevMonth:"前月",nextMonth:"翌月",year:"年",month1:"1月",month2:"2月",month3:"3月",month4:"4月",month5:"5月",month6:"6月",month7:"7月",month8:"8月",month9:"9月",month10:"10月",month11:"11月",month12:"12月",weeks:{sun:"日",mon:"月",tue:"火",wed:"水",thu:"木",fri:"金",sat:"土"},months:{jan:"1月",feb:"2月",mar:"3月",apr:"4月",may:"5月",jun:"6月",jul:"7月",aug:"8月",sep:"9月",oct:"10月",nov:"11月",dec:"12月"}},select:{loading:"ロード中",noMatch:"データなし",noData:"データなし",placeholder:"選択してください"},cascader:{noMatch:"データなし",loading:"ロード中",placeholder:"選択してください",noData:"データなし"},pagination:{goto:"",pagesize:"件/ページ",total:"総計 {total} 件",pageClassifier:"ページ目へ"},messagebox:{title:"メッセージ",confirm:"OK",cancel:"キャンセル",error:"正しくない入力"},upload:{deleteTip:"Delキーを押して削除する",delete:"削除する",preview:"プレビュー",continue:"続行する"},table:{emptyText:"データなし",confirmFilter:"確認",resetFilter:"初期化",clearFilter:"すべて",sumText:"合計"},tree:{emptyText:"データなし"},transfer:{noMatch:"データなし",noData:"データなし",titles:["リスト 1","リスト 2"],filterPlaceholder:"キーワードを入力",noCheckedFormat:"総計 {total} 件",hasCheckedFormat:"{checked}/{total} を選択した"},image:{error:"FAILED"},pageHeader:{title:"Back"},popconfirm:{confirmButtonText:"Yes",cancelButtonText:"No"}}}},1374:function(e,t,n){"use strict";var r=n("bb8b"),o=n("5edc");e.exports=function(e,t,n){t in e?r.f(e,t,o(0,n)):e[t]=n}},"13d8":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),function(e){var n="\\d{".concat(e.digits_after_decimal[0],"}");e.digits_after_decimal.forEach(function(e,t){0!==t&&(n="".concat(n,"|\\d{").concat(e,"}"))});var 
t="(\\".concat(e.symbol.replace(/\./g,"\\."),")").concat(e.require_symbol?"":"?"),r="[1-9]\\d{0,2}(\\".concat(e.thousands_separator,"\\d{3})*"),o="(".concat(["0","[1-9]\\d*",r].join("|"),")?"),r="(\\".concat(e.decimal_separator,"(").concat(n,"))").concat(e.require_decimal?"":"?"),r=o+(e.allow_decimal||e.require_decimal?r:"");e.allow_negatives&&!e.parens_for_negatives&&(e.negative_sign_after_digits?r+="-?":e.negative_sign_before_digits&&(r="-?"+r));e.allow_negative_sign_placeholder?r="( (?!\\-))?".concat(r):e.allow_space_after_symbol?r=" ?".concat(r):e.allow_space_after_digits&&(r+="( (?!$))?");e.symbol_after_digits?r+=t:r=t+r;e.allow_negatives&&(e.parens_for_negatives?r="(\\(".concat(r,"\\)|").concat(r,")"):e.negative_sign_before_digits||e.negative_sign_after_digits||(r="-?"+r));return new RegExp("^(?!-? )(?=.*\\d)".concat(r,"$"))}(t=(0,r.default)(t,i)).test(e)};var r=a(n("6d97")),o=a(n("d076"));function a(e){return e&&e.__esModule?e:{default:e}}var i={symbol:"$",require_symbol:!1,allow_space_after_symbol:!1,symbol_after_digits:!1,allow_negatives:!0,parens_for_negatives:!1,negative_sign_before_digits:!1,negative_sign_after_digits:!1,allow_negative_sign_placeholder:!1,thousands_separator:",",decimal_separator:".",allow_decimal:!0,require_decimal:!1,digits_after_decimal:[2],allow_space_after_digits:!1};e.exports=t.default,e.exports.default=t.default},1558:function(e,t,n){"use strict";t.__esModule=!0,t.default={el:{colorpicker:{confirm:"确定",clear:"清空"},datepicker:{now:"此刻",today:"今天",cancel:"取消",clear:"清空",confirm:"确定",selectDate:"选择日期",selectTime:"选择时间",startDate:"开始日期",startTime:"开始时间",endDate:"结束日期",endTime:"结束时间",prevYear:"前一年",nextYear:"后一年",prevMonth:"上个月",nextMonth:"下个月",year:"年",month1:"1 月",month2:"2 月",month3:"3 月",month4:"4 月",month5:"5 月",month6:"6 月",month7:"7 月",month8:"8 月",month9:"9 月",month10:"10 月",month11:"11 月",month12:"12 月",weeks:{sun:"日",mon:"一",tue:"二",wed:"三",thu:"四",fri:"五",sat:"六"},months:{jan:"一月",feb:"二月",mar:"三月",apr:"四月",may:"五月",jun:"六月",jul:"七月",aug:"八月",sep:"九月",oct:"十月",nov:"十一月",dec:"十二月"}},select:{loading:"加载中",noMatch:"无匹配数据",noData:"无数据",placeholder:"请选择"},cascader:{noMatch:"无匹配数据",loading:"加载中",placeholder:"请选择",noData:"暂无数据"},pagination:{goto:"前往",pagesize:"条/页",total:"共 {total} 条",pageClassifier:"页"},messagebox:{title:"提示",confirm:"确定",cancel:"取消",error:"输入的数据不合法!"},upload:{deleteTip:"按 delete 键可删除",delete:"删除",preview:"查看图片",continue:"继续上传"},table:{emptyText:"暂无数据",confirmFilter:"筛选",resetFilter:"重置",clearFilter:"全部",sumText:"合计"},tree:{emptyText:"暂无数据"},transfer:{noMatch:"无匹配数据",noData:"无数据",titles:["列表 1","列表 2"],filterPlaceholder:"请输入搜索内容",noCheckedFormat:"共 {total} 项",hasCheckedFormat:"已选 {checked}/{total} 项"},image:{error:"加载失败"},pageHeader:{title:"返回"},popconfirm:{confirmButtonText:"确定",cancelButtonText:"取消"}}}},1663:function(e,t,n){var i=n("212e"),u=n("3ab0");e.exports=function(a){return function(e,t){var n,r=String(u(e)),o=i(t),e=r.length;return o<0||e<=o?a?"":void 0:(t=r.charCodeAt(o))<55296||56319<t||o+1===e||(n=r.charCodeAt(o+1))<56320||57343<n?a?r.charAt(o):t:a?r.slice(o,o+2):n-56320+(t-55296<<10)+65536}}},1685:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,o.default)(e);t=t?new RegExp("^[".concat(t.replace(/[.*+?^${}()|[\]\\]/g,"\\$&"),"]+"),"g"):/^\s+/g;return e.replace(t,"")};var 
r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"16b7":function(e,t,n){(e.exports=n("690e")(!1)).push([e.i,".svg-icon[data-v-f9f7fefc]{width:1em;height:1em;vertical-align:-.15em;fill:currentColor;overflow:hidden}.svg-external-icon[data-v-f9f7fefc]{background-color:currentColor;-webkit-mask-size:cover!important;mask-size:cover!important;display:inline-block}",""])},"1a9a":function(e,t,n){var a=n("839a")("iterator"),i=!1;try{var r=[7][a]();r.return=function(){i=!0},Array.from(r,function(){throw 2})}catch(e){}e.exports=function(e,t){if(!t&&!i)return!1;var n=!1;try{var r=[7],o=r[a]();o.next=function(){return{done:n=!0}},r[a]=function(){return o},e(r)}catch(e){}return n}},"1b0b":function(e,t,n){var r=n("a86f"),o=n("3250"),a=n("839a")("species");e.exports=function(e,t){var n,e=r(e).constructor;return void 0===e||null==(n=r(e)[a])?t:o(n)}},"1b96":function(e,t,n){var r=n("cea2");e.exports=Object("z").propertyIsEnumerable(0)?Object:function(e){return"String"==r(e)?e.split(""):Object(e)}},"1bc7":function(e,t,n){for(var r=n("25ba"),o=n("93ca"),a=n("84e8"),i=n("0b34"),u=n("065d"),s=n("953d"),n=n("839a"),l=n("iterator"),c=n("toStringTag"),f=s.Array,d={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=o(d),m=0;m<p.length;m++){var v,g=p[m],h=d[g],b=i[g],y=b&&b.prototype;if(y&&(y[l]||u(y,l,f),y[c]||u(y,c,g),s[g]=f,h))for(v in r)y[v]||a(y,v,r[v],!0)}},"1e4d":function(e,t,n){var a=n("3250");e.exports=function(r,o,e){if(a(r),void 0===o)return r;switch(e){case 1:return function(e){return r.call(o,e)};case 2:return function(e,t){return r.call(o,e,t)};case 3:return function(e,t,n){return r.call(o,e,t,n)}}return function(){return r.apply(o,arguments)}}},"1ff3":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,o.default)(e);var n=a.test(e);return t&&n&&t.strict?i(e):n};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/,i=function(e){var t=e.match(/^(\d{4})-?(\d{3})([ T]{1}\.*|$)/);if(t){var n=Number(t[1]),r=Number(t[2]);return n%4==0&&n%100!=0||n%400==0?r<=366:r<=365}var o=e.match(/(\d{4})-?(\d{0,2})-?(\d*)/).map(Number),t=o[1],n=o[2],r=o[3],e=n&&"0".concat(n).slice(-2),o=r&&"0".concat(r).slice(-2),o=new Date("".concat(t,"-").concat(e||"01","-").concat(o||"01"));return!n||!r||o.getUTCFullYear()===t&&o.getUTCMonth()+1===n&&o.getUTCDate()===r};e.exports=t.default,e.exports.default=t.default},"201c":function(e,t,n){var r=n("212e"),o=Math.min;e.exports=function(e){return 0<e?o(r(e),9007199254740991):0}},"212a":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,c.default)(e),(t=(0,f.default)(t,v)).require_display_name||t.allow_display_name){var n=e.match(g);if(n){var r=function(e,t){return 
function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=[],r=!0,o=!1,a=void 0;try{for(var i,u=e[Symbol.iterator]();!(r=(i=u.next()).done)&&(n.push(i.value),!t||n.length!==t);r=!0);}catch(e){o=!0,a=e}finally{try{r||null==u.return||u.return()}finally{if(o)throw a}}return n}(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance")}()}(n,3);if(o=r[1],e=r[2],!function(e){var t=e.match(/^"(.+)"$/i),e=t?t[1]:e;if(!e.trim())return!1;if(/[\.";<>]/.test(e)){if(!t)return!1;if(!(e.split('"').length===e.split('\\"').length))return!1}return!0}(o=o.endsWith(" ")?o.substr(0,o.length-1):o))return!1}else if(t.require_display_name)return!1}if(!t.ignore_max_length&&e.length>M)return!1;var r=e.split("@"),o=r.pop(),e=r.join("@"),r=o.toLowerCase();if(t.domain_specific_validation&&("gmail.com"===r||"googlemail.com"===r)){r=(e=e.toLowerCase()).split("+")[0];if(!(0,d.default)(r.replace(".",""),{min:6,max:30}))return!1;for(var a=r.split("."),i=0;i<a.length;i++)if(!b.test(a[i]))return!1}if(!(0,d.default)(e,{max:64})||!(0,d.default)(o,{max:254}))return!1;if(!(0,p.default)(o,{require_tld:t.require_tld})){if(!t.allow_ip_domain)return!1;if(!(0,m.default)(o)){if(!o.startsWith("[")||!o.endsWith("]"))return!1;o=o.substr(1,o.length-2);if(0===o.length||!(0,m.default)(o))return!1}}if('"'===e[0])return e=e.slice(1,e.length-1),(t.allow_utf8_local_part?x:y).test(e);for(var u=t.allow_utf8_local_part?_:h,s=e.split("."),l=0;l<s.length;l++)if(!u.test(s[l]))return!1;return!0};var c=r(n("d076")),f=r(n("6d97")),d=r(n("b067")),p=r(n("7771")),m=r(n("69e5"));function r(e){return e&&e.__esModule?e:{default:e}}var v={allow_display_name:!1,require_display_name:!1,allow_utf8_local_part:!0,require_tld:!0},g=/^([^\x00-\x1F\x7F-\x9F\cX]+)<(.+)>$/i,h=/^[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~]+$/i,b=/^[a-z\d]+$/,y=/^([\s\x01-\x08\x0b\x0c\x0e-\x1f\x7f\x21\x23-\x5b\x5d-\x7e]|(\\[\x01-\x09\x0b\x0c\x0d-\x7f]))*$/i,_=/^[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+$/i,x=/^([\s\x01-\x08\x0b\x0c\x0e-\x1f\x7f\x21\x23-\x5b\x5d-\x7e\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]|(\\[\x01-\x09\x0b\x0c\x0d-\x7f\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))*$/i,M=254;e.exports=t.default,e.exports.default=t.default},"212e":function(e,t){var n=Math.ceil,r=Math.floor;e.exports=function(e){return isNaN(e=+e)?0:(0<e?r:n)(e)}},"21c4":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function e(t){var n=1<arguments.length&&void 0!==arguments[1]?arguments[1]:"";(0,i.default)(t);n=String(n);if(!n)return e(t,10)||e(t,13);var r=t.replace(/[\s-]+/g,"");var o=0;var a;if("10"===n){if(!u.test(r))return!1;for(a=0;a<9;a++)o+=(a+1)*r.charAt(a);if("X"===r.charAt(9)?o+=100:o+=10*r.charAt(9),o%11==0)return!!r}else if("13"===n){if(!s.test(r))return!1;for(a=0;a<12;a++)o+=l[a%2]*r.charAt(a);if(r.charAt(12)-(10-o%10)%10==0)return!!r}return!1};var r,i=(r=n("d076"))&&r.__esModule?r:{default:r};var u=/^(?:[0-9]{9}X|[0-9]{10})$/,s=/^(?:[0-9]{13})$/,l=[1,3];e.exports=t.default,e.exports.default=t.default},"21d9":function(e,t,n){var r=n("3a4c"),o=n("065e").concat("length","prototype");t.f=Object.getOwnPropertyNames||function(e){return r(e,o)}},2409:function(e,t){var n=function(){return this}();try{n=n||new Function("return this")()}catch(e){"object"==typeof window&&(n=window)}e.exports=n},"25ba":function(e,t,n){"use strict";var r=n("87b2"),o=n("6fef"),a=n("953d"),i=n("3471");e.exports=n("120f")(Array,"Array",function(e,t){this._t=i(e),this._i=0,this._k=t},function(){var 
e=this._t,t=this._k,n=this._i++;return!e||n>=e.length?(this._t=void 0,o(1)):o(0,"keys"==t?n:"values"==t?e[n]:[n,e[n]])},"values"),a.Arguments=a.Array,r("keys"),r("values"),r("entries")},"25dc":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.commaDecimal=t.dotDecimal=t.arabicLocales=t.englishLocales=t.decimal=t.alphanumeric=t.alpha=void 0;var r={"en-US":/^[A-Z]+$/i,"bg-BG":/^[А-Я]+$/i,"cs-CZ":/^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i,"da-DK":/^[A-ZÆØÅ]+$/i,"de-DE":/^[A-ZÄÖÜß]+$/i,"el-GR":/^[Α-ω]+$/i,"es-ES":/^[A-ZÁÉÍÑÓÚÜ]+$/i,"fr-FR":/^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i,"it-IT":/^[A-ZÀÉÈÌÎÓÒÙ]+$/i,"nb-NO":/^[A-ZÆØÅ]+$/i,"nl-NL":/^[A-ZÁÉËÏÓÖÜÚ]+$/i,"nn-NO":/^[A-ZÆØÅ]+$/i,"hu-HU":/^[A-ZÁÉÍÓÖŐÚÜŰ]+$/i,"pl-PL":/^[A-ZĄĆĘŚŁŃÓŻŹ]+$/i,"pt-PT":/^[A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ]+$/i,"ru-RU":/^[А-ЯЁ]+$/i,"sl-SI":/^[A-ZČĆĐŠŽ]+$/i,"sk-SK":/^[A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i,"sr-RS@latin":/^[A-ZČĆŽŠĐ]+$/i,"sr-RS":/^[А-ЯЂЈЉЊЋЏ]+$/i,"sv-SE":/^[A-ZÅÄÖ]+$/i,"tr-TR":/^[A-ZÇĞİıÖŞÜ]+$/i,"uk-UA":/^[А-ЩЬЮЯЄIЇҐі]+$/i,"ku-IQ":/^[ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i,ar:/^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/};t.alpha=r;var o={"en-US":/^[0-9A-Z]+$/i,"bg-BG":/^[0-9А-Я]+$/i,"cs-CZ":/^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i,"da-DK":/^[0-9A-ZÆØÅ]+$/i,"de-DE":/^[0-9A-ZÄÖÜß]+$/i,"el-GR":/^[0-9Α-ω]+$/i,"es-ES":/^[0-9A-ZÁÉÍÑÓÚÜ]+$/i,"fr-FR":/^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i,"it-IT":/^[0-9A-ZÀÉÈÌÎÓÒÙ]+$/i,"hu-HU":/^[0-9A-ZÁÉÍÓÖŐÚÜŰ]+$/i,"nb-NO":/^[0-9A-ZÆØÅ]+$/i,"nl-NL":/^[0-9A-ZÁÉËÏÓÖÜÚ]+$/i,"nn-NO":/^[0-9A-ZÆØÅ]+$/i,"pl-PL":/^[0-9A-ZĄĆĘŚŁŃÓŻŹ]+$/i,"pt-PT":/^[0-9A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ]+$/i,"ru-RU":/^[0-9А-ЯЁ]+$/i,"sl-SI":/^[0-9A-ZČĆĐŠŽ]+$/i,"sk-SK":/^[0-9A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i,"sr-RS@latin":/^[0-9A-ZČĆŽŠĐ]+$/i,"sr-RS":/^[0-9А-ЯЂЈЉЊЋЏ]+$/i,"sv-SE":/^[0-9A-ZÅÄÖ]+$/i,"tr-TR":/^[0-9A-ZÇĞİıÖŞÜ]+$/i,"uk-UA":/^[0-9А-ЩЬЮЯЄIЇҐі]+$/i,"ku-IQ":/^[٠١٢٣٤٥٦٧٨٩0-9ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i,ar:/^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/};t.alphanumeric=o;var a={"en-US":".",ar:"٫"};t.decimal=a;var i=["AU","GB","HK","IN","NZ","ZA","ZM"];t.englishLocales=i;for(var u,s=0;s<i.length;s++)r[u="en-".concat(i[s])]=r["en-US"],o[u]=o["en-US"],a[u]=a["en-US"];var l=["AE","BH","DZ","EG","IQ","JO","KW","LB","LY","MA","QM","QA","SA","SD","SY","TN","YE"];t.arabicLocales=l;for(var c,f=0;f<l.length;f++)r[c="ar-".concat(l[f])]=r.ar,o[c]=o.ar,a[c]=a.ar;var d=["ar-EG","ar-LB","ar-LY"];t.dotDecimal=d;var p=["bg-BG","cs-CZ","da-DK","de-DE","el-GR","en-ZM","es-ES","fr-FR","it-IT","ku-IQ","hu-HU","nb-NO","nn-NO","nl-NL","pl-PL","pt-PT","ru-RU","sl-SI","sr-RS@latin","sr-RS","sv-SE","tr-TR","uk-UA"];t.commaDecimal=p;for(var m=0;m<d.length;m++)a[d[m]]=a["en-US"];for(var v=0;v<p.length;v++)a[p[v]]=",";r["pt-BR"]=r["pt-PT"],o["pt-BR"]=o["pt-PT"],a["pt-BR"]=a["pt-PT"],r["pl-Pl"]=r["pl-PL"],o["pl-Pl"]=o["pl-PL"],a["pl-Pl"]=a["pl-PL"]},"26df":function(e,t,n){e.exports=!n("0926")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},2743:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,u.default)(e),!e||2083<=e.length||/[\s<>]/.test(e))return!1;if(0===e.indexOf("mailto:"))return!1;var 
n,r,o,a;if(t=(0,c.default)(t,f),1<(a=(e=(a=(e=(a=e.split("#")).shift()).split("?")).shift()).split("://")).length){if(o=a.shift().toLowerCase(),t.require_valid_protocol&&-1===t.protocols.indexOf(o))return!1}else{if(t.require_protocol)return!1;if("//"===e.substr(0,2)){if(!t.allow_protocol_relative_urls)return!1;a[0]=e.substr(2)}}if(""===(e=a.join("://")))return!1;if(""===(e=(a=e.split("/")).shift())&&!t.require_host)return!0;if(1<(a=e.split("@")).length){if(t.disallow_auth)return!1;if(0<=(i=a.shift()).indexOf(":")&&2<i.split(":").length)return!1}r=a.join("@"),e=o=null;var i=r.match(d);i?(n="",e=i[1],o=i[2]||null):(a=r.split(":"),n=a.shift(),a.length&&(o=a.join(":")));if(null!==o&&(a=parseInt(o,10),!/^[0-9]+$/.test(o)||a<=0||65535<a))return!1;if(!((0,l.default)(n)||(0,s.default)(n,t)||e&&(0,l.default)(e,6)))return!1;if(n=n||e,t.host_whitelist&&!p(n,t.host_whitelist))return!1;if(t.host_blacklist&&p(n,t.host_blacklist))return!1;return!0};var u=r(n("d076")),s=r(n("7771")),l=r(n("69e5")),c=r(n("6d97"));function r(e){return e&&e.__esModule?e:{default:e}}var f={protocols:["http","https","ftp"],require_tld:!0,require_protocol:!1,require_host:!0,require_valid_protocol:!0,allow_underscores:!1,allow_trailing_dot:!1,allow_protocol_relative_urls:!1},d=/^\[([^\]]+)\](?::([0-9]+))?$/;function p(e,t){for(var n,r=0;r<t.length;r++){var o=t[r];if(e===o||(n=o,"[object RegExp]"===Object.prototype.toString.call(n)&&o.test(e)))return 1}}e.exports=t.default,e.exports.default=t.default},"285b":function(e,t,n){var r=n("35d4"),o=n("5edc"),a=n("3471"),i=n("5d10"),u=n("4fd4"),s=n("83d3"),l=Object.getOwnPropertyDescriptor;t.f=n("26df")?l:function(e,t){if(e=a(e),t=i(t,!0),s)try{return l(e,t)}catch(e){}if(u(e,t))return o(!r.f.call(e,t),e[t])}},"2a5d":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),parseInt(e,t||10)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"2b37":function(e,t,n){var f=n("1e4d"),d=n("b1d4"),p=n("dcea"),m=n("a86f"),v=n("201c"),g=n("e3bb"),h={},b={};(t=e.exports=function(e,t,n,r,o){var a,i,u,s,o=o?function(){return e}:g(e),l=f(n,r,t?2:1),c=0;if("function"!=typeof o)throw TypeError(e+" is not iterable!");if(p(o)){for(a=v(e.length);c<a;c++)if((s=t?l(m(i=e[c])[0],i[1]):l(e[c]))===h||s===b)return s}else for(u=o.call(e);!(i=u.next()).done;)if((s=d(u,l,i.value,t))===h||s===b)return s}).BREAK=h,t.RETURN=b},"2d39":function(e,t,n){var u=n("0b34"),s=n("edec").set,l=u.MutationObserver||u.WebKitMutationObserver,c=u.process,f=u.Promise,d="process"==n("cea2")(c);e.exports=function(){function e(){var e,t;for(d&&(e=c.domain)&&e.exit();n;){t=n.fn,n=n.next;try{t()}catch(e){throw n?o():r=void 0,e}}r=void 0,e&&e.enter()}var n,r,t,o,a,i;return o=d?function(){c.nextTick(e)}:!l||u.navigator&&u.navigator.standalone?f&&f.resolve?(t=f.resolve(void 0),function(){t.then(e)}):function(){s.call(u,e)}:(a=!0,i=document.createTextNode(""),new l(e).observe(i,{characterData:!0}),function(){i.data=a=!a}),function(e){e={fn:e,next:void 0};r&&(r.next=e),n||(n=e,o()),r=e}}},"2f06":function(e,t,n){var r=n("8fcc");(r="string"==typeof(r=r.__esModule?r.default:r)?[[e.i,r,""]]:r).locals&&(e.exports=r.locals);(0,n("5925").default)("5ca3b974",r,!0,{sourceMap:!1,shadowMode:!1})},3074:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var 
a=/^[a-f0-9]{32}$/;e.exports=t.default,e.exports.default=t.default},3250:function(e,t){e.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},"32ea":function(e,t,n){var r=n("8078"),o=n("93ca");n("b2be")("keys",function(){return function(e){return o(r(e))}})},3410:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^#?([0-9A-F]{3}|[0-9A-F]{6})$/i;e.exports=t.default,e.exports.default=t.default},3460:function(e,t,n){"use strict";var r=n("7ee3"),o=n("5edc"),a=n("bac3"),i={};n("065d")(i,n("839a")("iterator"),function(){return this}),e.exports=function(e,t,n){e.prototype=r(i,{next:o(1,n)}),a(e,t+" Iterator")}},3471:function(e,t,n){var r=n("1b96"),o=n("3ab0");e.exports=function(e){return r(o(e))}},"35d4":function(e,t){t.f={}.propertyIsEnumerable},"3a0d":function(e,t,n){var r=n("baa7")("keys"),o=n("d8b3");e.exports=function(e){return r[e]||(r[e]=o(e))}},"3a4c":function(e,t,n){var i=n("4fd4"),u=n("3471"),s=n("52a4")(!1),l=n("3a0d")("IE_PROTO");e.exports=function(e,t){var n,r=u(e),o=0,a=[];for(n in r)n!=l&&i(r,n)&&a.push(n);for(;t.length>o;)i(r,n=t[o++])&&(~s(a,n)||a.push(n));return a}},"3ab0":function(e,t){e.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},"3b2c":function(e,t,n){var r=n("16b7");(r="string"==typeof(r=r.__esModule?r.default:r)?[[e.i,r,""]]:r).locals&&(e.exports=r.locals);(0,n("5925").default)("7690991c",r,!0,{sourceMap:!1,shadowMode:!1})},"3d8a":function(e,t){e.exports=!1},"3f9e":function(e,t,n){var i=n("bb8b"),u=n("a86f"),s=n("93ca");e.exports=n("26df")?Object.defineProperties:function(e,t){u(e);for(var n,r=s(t),o=r.length,a=0;a<o;)i.f(e,n=r[a++],t[n]);return e}},"40e7":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t,n){(0,o.default)(e),"[object RegExp]"!==Object.prototype.toString.call(t)&&(t=new RegExp(t,n));return t.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"417b":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)},t.fullWidth=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/;t.fullWidth=a},"41ae":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,o.default)(e),t=t||{};var n=new RegExp("^(?:[-+])?(?:[0-9]+)?(?:\\".concat(t.locale?a.decimal[t.locale]:".","[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"));if(""===e||"."===e||"-"===e||"+"===e)return!1;var r=parseFloat(e.replace(",","."));return n.test(e)&&(!t.hasOwnProperty("min")||r>=t.min)&&(!t.hasOwnProperty("max")||r<=t.max)&&(!t.hasOwnProperty("lt")||r<t.lt)&&(!t.hasOwnProperty("gt")||r>t.gt)},t.locales=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r},a=n("25dc");var i=Object.keys(a.decimal);t.locales=i},"43ec":function(e,t,n){"use strict";var r=n("1663")(!0);e.exports=function(e,t,n){return t+(n?r(e,t).length:1)}},"473c":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:"en-US";if((0,o.default)(e),t in a.alphanumeric)return a.alphanumeric[t].test(e);throw new Error("Invalid locale '".concat(t,"'"))},t.locales=void 0;var 
r,o=(r=n("d076"))&&r.__esModule?r:{default:r},a=n("25dc");var i=Object.keys(a.alphanumeric);t.locales=i},4760:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,o.default)(e),t&&t.no_colons)return i.test(e);return a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^([0-9a-fA-F][0-9a-fA-F]:){5}([0-9a-fA-F][0-9a-fA-F])$/,i=/^([0-9a-fA-F]){12}$/;e.exports=t.default,e.exports.default=t.default},4836:function(e,t,n){var r=n("a86f"),o=n("9cff"),a=n("d4c9");e.exports=function(e,t){if(r(e),o(t)&&t.constructor===e)return t;e=a.f(e);return(0,e.resolve)(t),e.promise}},"497f":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:"en-US";if((0,o.default)(e),t in a.alpha)return a.alpha[t].test(e);throw new Error("Invalid locale '".concat(t,"'"))},t.locales=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r},a=n("25dc");var i=Object.keys(a.alpha);t.locales=i},"4a92":function(e,t,n){e.exports=!n("5e9e")&&!n("99fe")(function(){return 7!=Object.defineProperty(n("e7e0")("div"),"a",{get:function(){return 7}}).a})},"4b34":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),e.replace(new RegExp("[".concat(t,"]+"),"g"),"")};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"4ef4":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)},t.halfWidth=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/;t.halfWidth=a},"4fd4":function(e,t){var n={}.hasOwnProperty;e.exports=function(e,t){return n.call(e,t)}},5114:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,o.default)(e),t&&t.no_symbols)return i.test(e);return a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^[+-]?([0-9]*[.])?[0-9]+$/,i=/^[0-9]+$/;e.exports=t.default,e.exports.default=t.default},5131:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),e===e.toLowerCase()};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"51ff":function(e,t,n){var r={"./add.svg":"c2a3"};function o(e){e=a(e);return n(e)}function a(e){if(n.o(r,e))return r[e];e=new Error("Cannot find module '"+e+"'");throw e.code="MODULE_NOT_FOUND",e}o.keys=function(){return Object.keys(r)},o.resolve=a,(e.exports=o).id="51ff"},"52a4":function(e,t,n){var s=n("3471"),l=n("201c"),c=n("732b");e.exports=function(u){return function(e,t,n){var r,o=s(e),a=l(o.length),i=c(n,a);if(u&&t!=t){for(;i<a;)if((r=o[i++])!=r)return!0}else for(;i<a;i++)if((u||i in o)&&o[i]===t)return u||i||0;return!u&&-1}}},"54ac":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,o.default)(e);var t=e.length;if(0<t&&t%8==0&&a.test(e))return!0;return!1};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^[A-Z2-7]+=*$/;e.exports=t.default,e.exports.default=t.default},"56d7":function(e,t,n){"use strict";n.r(t),n.d(t,"bootstrap",function(){return K}),n.d(t,"mount",function(){return V}),n.d(t,"unmount",function(){return q});var r={};n.r(r),n.d(r,"cutWords",function(){return 
Z});n("32ea"),n("1bc7"),n("25ba"),n("5f1c"),n("6ba0"),n("b47f"),n("6e69");var o=n("8bbf"),a=n.n(o),i=n("e04f"),u=n.n(i),s=(n("9f35"),n("5f72")),l=n.n(s);n("b20f");function c(e,t,n,r,o,a,i,u){var s,l,c="function"==typeof e?e.options:e;return t&&(c.render=t,c.staticRenderFns=n,c._compiled=!0),r&&(c.functional=!0),a&&(c._scopeId="data-v-"+a),i?(s=function(e){(e=e||this.$vnode&&this.$vnode.ssrContext||this.parent&&this.parent.$vnode&&this.parent.$vnode.ssrContext)||"undefined"==typeof __VUE_SSR_CONTEXT__||(e=__VUE_SSR_CONTEXT__),o&&o.call(this,e),e&&e._registeredComponents&&e._registeredComponents.add(i)},c._ssrRegister=s):o&&(s=u?function(){o.call(this,(c.functional?this.parent:this).$root.$options.shadowRoot)}:o),s&&(c.functional?(c._injectStyles=s,l=c.render,c.render=function(e,t){return s.call(t),l(e,t)}):(u=c.beforeCreate,c.beforeCreate=u?[].concat(u,s):[s])),{exports:e,options:c}}var f=c({name:"VersionManage",components:{},data:function(){return{leftTabs:[]}},computed:{keepState:function(){return 0<=this.leftTabs.indexOf(this.$route.path)}},mounted:function(){var t=this;this.$root.eventBus.$on("globalTabsChange",function(e){e&&(t.leftTabs=e)})}},function(){var e=this,t=e.$createElement,t=e._self._c||t;return t("div",{attrs:{id:"versionManage-app"}},[t("keep-alive",[e.keepState?t("router-view"):e._e()],1),e.keepState?e._e():t("router-view")],1)},[],!1,null,null,null).exports,d=n("6389"),p=n.n(d),m=n("d04c"),v=n.n(m),g=(n("ac67"),n("f7bd")),h=n.n(g);function b(e,t,n){return t in e?h()(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}n("a450");var y=n("60bb"),x=n.n(y),t=n("cebe"),o=n.n(t),i=n("5880"),d=n.n(i),m={versionManageFormState:function(e){return e.versionManage.versionManage.formState},versionManage:function(e){return e.versionManage.versionManage},versionManageIos:function(e){return e.versionManage.versionManageIos}},M="VERSIONMANAGE_FORMSTATE",w="VERSIONMANAGEIOS_FORMSTATE";function O(e){return j({url:"/manage/versionManage/getList",method:"get",params:e})}t={namespaced:!0,state:{versionManage:{configs:{title:"",description:"",version:"",versionName:"",forcibly:!1,url:""}},versionManageIos:{configs:{title:"",description:"",version:"",versionName:"",forcibly:!1,url:""}}},mutations:(b(g={},M,function(e,t){e.versionManage.configs=Object.assign({title:"",description:"",version:"",versionName:"",forcibly:!1,url:""},t)}),b(g,w,function(e,t){e.versionManageIos.configs=Object.assign({title:"",description:"",version:"",versionName:"",forcibly:!1,url:""},t)}),g),actions:{getVersionInfo:function(e){var t=e.commit;O(1<arguments.length&&void 0!==arguments[1]?arguments[1]:{client:"0"}).then(function(e){e=e.data&&e.data.docs?e.data.docs[0]:{};t(M,e)})},getIosVersionInfo:function(e){var t=e.commit;O(1<arguments.length&&void 0!==arguments[1]?arguments[1]:{client:"1"}).then(function(e){e=e.data&&e.data.docs?e.data.docs[0]:{};t(w,e)})}}};a.a.use(d.a);var S,A=new d.a.Store({modules:{versionManage:t},getters:m}),E=0,P={};function $(){0!==E||P.alwaysShow||S.close()}function T(e){e=0<arguments.length&&void 0!==e?e:{};P=x.a.isEmpty(e)?{}:e,0===E&&(S=s.Loading.service({lock:!0,text:P.str||"数据加载中...",spinner:"el-icon-loading",background:"rgba(0, 0, 0, 0.7)"})),E++}function C(){E<=0||0===--E&&x.a.debounce($,300)()}g=o.a.create({baseURL:Object({NODE_ENV:"production",BASE_URL:"/"}).VUE_APP_BASE_API,timeout:12e4});g.interceptors.request.use(function(e){return T(_.isEmpty(e.params)||_.isEmpty(e.params.loadingConfig)?{}:e.params.loadingConfig),e},function(e){return 
Promise.reject(e)}),g.interceptors.response.use(function(e){C();e=e.data;return 200!==e.status?(Object(s.Message)({message:e.message||"Error",type:"error",duration:5e3}),50008!==e.status&&50012!==e.status&&50014!==e.status||s.MessageBox.confirm("You have been logged out, you can cancel to stay on this page, or log in again","Confirm logout",{confirmButtonText:"Re-Login",cancelButtonText:"Cancel",type:"warning"}).then(function(){A.dispatch("user/resetToken").then(function(){location.reload()})}),Promise.reject(new Error(e.message||"Error"))):e},function(e){C();var t=e.message;return"Network Error"==(t=e.response&&e.response.data&&e.response.data.message?e.response.data.message:t)||"Request failed with status code 502"==t?{status:500,message:"Network Error"}:(Object(s.Message)({message:t,type:"error",duration:5e3}),Promise.reject(e))});var j=g;function N(e){var t=u.a.get("sidebarStatus");e.sidebarOpened="1"==t,e.sidebar&&(e.sidebar.opened=e.sidebarOpened)}function k(t,e){var n,r=Object.keys(t);return Object.getOwnPropertySymbols&&(n=Object.getOwnPropertySymbols(t),e&&(n=n.filter(function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable})),r.push.apply(r,n)),r}function F(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?k(Object(n),!0).forEach(function(e){b(t,e,n[e])}):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):k(Object(n)).forEach(function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))})}return t}var d={name:"versionManage",data:function(){return{sidebarOpened:!0,device:"desktop",fileList:[],appUrl:"",rules:{title:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.title")}),trigger:"blur"}],description:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.description")}),trigger:"blur"}],version:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.version")}),trigger:"blur"}],versionName:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.versionName")}),trigger:"blur"}]},rules1:{title:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.title")}),trigger:"blur"}],description:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.description")}),trigger:"blur"}],version:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.version")}),trigger:"blur"}],versionName:[{required:!0,message:this.$t("validate.selectNull",{label:this.$t("versionManage.versionName")}),trigger:"blur"}]}}},components:{},methods:{handleRemove:function(e,t){},handlePreview:function(e){},handleExceed:function(e,t){this.$message.warning("当前限制选择 1 个文件,本次选择了 ".concat(e.length," 个文件,共选择了 ").concat(e.length+t.length," 个文件"))},beforeRemove:function(e,t){return this.$confirm("确定移除 ".concat(e.name,"?"))},uploadSuccess:function(e,t){this.appUrl=e.data?e.data.path:"",this.$message({message:"文件上传成功!",type:"success"})},submitForm:function(t){var 
n=this;this.$refs[t].validate(function(e){if(!e)return!1;e=n.versionManage.configs;"ruleIosForm"==t&&(e=n.versionManageIos.configs),n.appUrl&&(e=Object.assign({},e,{url:n.appUrl})),j({url:"/manage/versionManage/updateOne",method:"post",data:e}).then(function(e){200===e.status?(n.$store.dispatch("versionManage/getVersionInfo"),n.$message({message:n.$t("main.updateSuccess"),type:"success"})):n.$message.error(e.message)})})},resetForm:function(e){this.$refs[e].resetFields()}},computed:F(F({},Object(i.mapGetters)(["versionManage","versionManageIos"])),{},{classObj:function(){return{hideSidebar:!this.sidebarOpened,openSidebar:this.sidebarOpened,withoutAnimation:"false",mobile:"mobile"===this.device}}}),mounted:function(){var t=this;!function(t){var n=t.$root;setTimeout(function(){N(t)},500),n&&n.eventBus&&(n.eventBus.$on("toggleSideBar",function(e){setTimeout(function(){N(t)},500)}),n.eventBus.$on("toggleDevice",function(e){t.device=e}),n.eventBus.$on("handleTabInfo",function(e){Object(y.isEmpty)(e)||n.eventBus.$emit("globalTabsChange",e)}));var e=document.body.getBoundingClientRect();t.device=e.width-1<992?"mobile":"desktop",j({url:"/api/systemConfig/getConfig",params:{},method:"get"}).then(function(e){200===e.status&&(t.appConfig=e.data)})}(this),this.$store.dispatch("versionManage/getVersionInfo"),this.$store.dispatch("versionManage/getIosVersionInfo"),setTimeout(function(){var e=[];e.push({name:t.versionManage.configs.url,url:t.versionManage.configs.url}),t.fileList=e,t.appUrl=t.versionManage.configs.url},1e3)}},I=(n("a64a"),c(d,function(){var t=this,e=t.$createElement,e=t._self._c||e;return e("div",{staticClass:"adminVersionConfig",class:t.classObj},[e("div",{staticClass:"main-container"},[e("el-row",{staticClass:"dr-datatable"},[e("h2",{staticClass:"line-gate"},[t._v("Android")]),e("el-col",[e("el-form",{ref:"ruleForm",staticClass:"demo-ruleForm",attrs:{model:t.versionManage.configs,rules:t.rules,"label-width":"120px"}},[e("el-form-item",{attrs:{label:t.$t("versionManage.title"),prop:"title"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManage.configs.title,callback:function(e){t.$set(t.versionManage.configs,"title",e)},expression:"versionManage.configs.title"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.description"),prop:"description"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManage.configs.description,callback:function(e){t.$set(t.versionManage.configs,"description",e)},expression:"versionManage.configs.description"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.version"),prop:"version"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManage.configs.version,callback:function(e){t.$set(t.versionManage.configs,"version",e)},expression:"versionManage.configs.version"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.versionName"),prop:"versionName"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManage.configs.versionName,callback:function(e){t.$set(t.versionManage.configs,"versionName",e)},expression:"versionManage.configs.versionName"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.forcibly"),prop:"forcibly"}},[e("el-switch",{attrs:{"on-text":t.$t("main.radioOn"),"off-text":t.$t("main.radioOff")},model:{value:t.versionManage.configs.forcibly,callback:function(e){t.$set(t.versionManage.configs,"forcibly",e)},expression:"versionManage.configs.forcibly"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.url"),prop:"url"}},[e("el-upload",{staticClass:"upload-demo",attrs:{action:"/api/uploa
d/files","on-preview":t.handlePreview,"on-remove":t.handleRemove,"before-remove":t.beforeRemove,"on-success":t.uploadSuccess,multiple:"",limit:1,accept:".apk","on-exceed":t.handleExceed,"file-list":t.fileList,data:{action:"uploadfile"}}},[e("el-button",{attrs:{size:"small",type:"primary"}},[t._v("点击上传")]),e("div",{staticClass:"el-upload__tip",attrs:{slot:"tip"},slot:"tip"},[t._v("只能上传apk文件,且不超过20M")])],1)],1),e("el-form-item",[e("el-button",{attrs:{size:"medium",type:"primary"},on:{click:function(e){return t.submitForm("ruleForm")}}},[t._v(t._s(t.$t("main.form_btnText_save")))]),e("el-button",{attrs:{size:"medium"},on:{click:function(e){return t.resetForm("ruleForm")}}},[t._v(t._s(t.$t("main.reSetBtnText")))])],1)],1)],1),e("h2",{staticClass:"line-gate"},[t._v("IOS")]),e("el-col",[e("el-form",{ref:"ruleIosForm",staticClass:"demo-ruleIosForm",attrs:{model:t.versionManage.configs,rules:t.rules1,"label-width":"120px"}},[e("el-form-item",{attrs:{label:t.$t("versionManage.title"),prop:"title"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManageIos.configs.title,callback:function(e){t.$set(t.versionManageIos.configs,"title",e)},expression:"versionManageIos.configs.title"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.description"),prop:"description"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManageIos.configs.description,callback:function(e){t.$set(t.versionManageIos.configs,"description",e)},expression:"versionManageIos.configs.description"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.version"),prop:"version"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManageIos.configs.version,callback:function(e){t.$set(t.versionManageIos.configs,"version",e)},expression:"versionManageIos.configs.version"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.versionName"),prop:"versionName"}},[e("el-input",{attrs:{size:"small"},model:{value:t.versionManageIos.configs.versionName,callback:function(e){t.$set(t.versionManageIos.configs,"versionName",e)},expression:"versionManageIos.configs.versionName"}})],1),e("el-form-item",{attrs:{label:t.$t("versionManage.forcibly"),prop:"forcibly"}},[e("el-switch",{attrs:{"on-text":t.$t("main.radioOn"),"off-text":t.$t("main.radioOff")},model:{value:t.versionManageIos.configs.forcibly,callback:function(e){t.$set(t.versionManageIos.configs,"forcibly",e)},expression:"versionManageIos.configs.forcibly"}})],1),e("el-form-item",[e("el-button",{attrs:{size:"medium",type:"primary"},on:{click:function(e){return t.submitForm("ruleIosForm")}}},[t._v(t._s(t.$t("main.form_btnText_save")))]),e("el-button",{attrs:{size:"medium"},on:{click:function(e){return t.resetForm("ruleIosForm")}}},[t._v(t._s(t.$t("main.reSetBtnText")))])],1)],1)],1)],1)],1)])},[],!1,null,null,null).exports);a.a.use(p.a);var R=function(){return new p.a({mode:"history",base:"/",scrollBehavior:function(){return{y:0}},routes:[{path:v.a.admin_base_path+"/versionManage",name:"versionManage",component:I}]})},L=R();t=L,m=n("7289"),o=n.n(m),n("fad4");g={name:"SvgIcon",props:{iconClass:{type:String,required:!0},className:{type:String,default:""}},computed:{isExternal:function(){return/^(https?:|mailto:|tel:)/.test(this.iconClass)},iconName:function(){return"#icon-".concat(this.iconClass)},svgClass:function(){return this.className?"svg-icon "+this.className:"svg-icon"},styleExternalIcon:function(){return{mask:"url(".concat(this.iconClass,") no-repeat 50% 50%"),"-webkit-mask":"url(".concat(this.iconClass,") no-repeat 50% 
50%")}}}},n("68fa"),i=c(g,function(){var e=this,t=e.$createElement,t=e._self._c||t;return e.isExternal?t("div",e._g({staticClass:"svg-external-icon svg-icon",style:e.styleExternalIcon},e.$listeners)):t("svg",e._g({class:e.svgClass,attrs:{"aria-hidden":"true"}},e.$listeners),[t("use",{attrs:{"xlink:href":e.iconName}})])},[],!1,null,"f9f7fefc",null).exports;a.a.component("svg-icon",i);var B,U=n("51ff");(B=U).keys().map(B);d=n("85b3"),m=n.n(d),g=n("97c5"),i=n.n(g),d=n("1558"),g=n.n(d),d=n("1230"),d=n.n(d);function D(t,e){var n,r=Object.keys(t);return Object.getOwnPropertySymbols&&(n=Object.getOwnPropertySymbols(t),e&&(n=n.filter(function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable})),r.push.apply(r,n)),r}function G(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?D(Object(n),!0).forEach(function(e){b(t,e,n[e])}):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):D(Object(n)).forEach(function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))})}return t}a.a.use(m.a);var d={en:G(G(G({},{main:{name:"name",myMessage:"Notifiction",settings:"Setting",logOut:"Log Out",lastLoginTime:"Last Login Time",lastLoginIp:"Last Login IP",myPower:"My Right",seeDetails:"Check The Details",adminUserTotalNum:"AdminUsers",regUserTotalNum:"Registers",contentsTotalNum:"Contents",messagesTotalNum:"Comments",shortcutOption:"Short Cuts",addAdminUser:"Add New Admin",addContents:"Add New Contents",sourceManage:"Resource Management",systemConfigs:"System systemConfigs",databak:"Data BackUp",nearMessages:"Recent Comments",messageIn:"At",messageSaid:"Said",messageReply:"Reply",noMessages:"No Data, Right Now!",nearNewUsers:"New Rigistered Users",confirmBtnText:"Yes",cancelBtnText:"Cancel",reSetBtnText:"Reset",scr_modal_title:"Hints",scr_modal_del_succes_info:"Delete Succeeded",scr_modal_del_error_info:"Cancelled Delete",form_btnText_update:"Update",form_btnText_save:"Save",radioOn:"Yes",radioOff:"No",updateSuccess:"Update Succeeded",addSuccess:"Add Succeeded",dataTableOptions:"Operate",del_notice:"Do you want delete the records?",just_del_notice:"You'll delete this records forever, continue??",install_notice:"Are you sure you want to install the plug-in?",uninstall_notice:"Are you sure you want to uninstall the plug-in?",update_notice:"Are you sure you want to update the plug-in?",comments_label:"Note",sort_label:"Sort",ask_select_label:"Please Choose",target_Item:"Specify The Target",confirm_logout:"Are you usre you want to exit?",login_timeout:"Your login has time out",server_error_notice:"Server connection exception, please try again later.",re_login:"Re-Login",addNew:"Add",modify:"Edit",back:"Return",post:"Publish",nopage:"The page you're trying visit not exist or you don't have the right",close_modal:"Close",askForReInputContent:"Found that you have unsaved documents, do you load them?",cancelReInputContent:"Load has been cancelled and data has been cleared.",noModifyPasswordTips:"Leave it blank if you don't change your password"},validate:{inputNull:"Please Type {label}",inputCorrect:"Please Entery the Right {label}",selectNull:"Please Choose {label}",rangelength:"Length between {min} to {max} ",ranglengthandnormal:"{min} to {max} character, only letter, Number and underline available!",maxlength:"The max character {max} you can Entery",passwordnotmatching:"The password and the confirmation you typed do not match",limitUploadImgType:"Only JPG,PNG,GIF format pics are available",limitUploadImgSize:"The pics size 
can't exceed {size} MB",limitUploadFileType:"上传文件的格式不正确",limitUploadFileSize:"上传文件大小不能超过 {size} MB",error_params:"Illegal Parameter, please try again."}}),{versionManage:{title:"title",description:"Description",version:"Version",versionName:"Version Name",forcibly:"Force Update",url:"Url"}}),i.a),zh:G(G(G({},{main:{name:"名称",myMessage:"我的消息",settings:"设置",logOut:"退出登录",lastLoginTime:"上次登录时间",lastLoginIp:"上次登录IP",myPower:"我的权限",seeDetails:"查看",adminUserTotalNum:"管理员总数",regUserTotalNum:"注册用户",contentsTotalNum:"文档总数",messagesTotalNum:"留言总数",shortcutOption:"快捷操作",addAdminUser:"添加管理员",addContents:"添加文档",sourceManage:"资源管理",systemConfigs:"系统配置",databak:"数据备份",nearMessages:"近期评论",messageIn:"在",messageSaid:"说",messageReply:"回复",noMessages:"暂无数据",nearNewUsers:"新注册用户",confirmBtnText:"确定",cancelBtnText:"取消",reSetBtnText:"重置",scr_modal_title:"提示",scr_modal_del_succes_info:"删除成功",scr_modal_del_error_info:"已取消删除",form_btnText_update:"更新",form_btnText_save:"保存",radioOn:"是",radioOff:"否",updateSuccess:"更新成功",addSuccess:"添加成功",dataTableOptions:"操作",del_notice:"您确认要删除吗?",just_del_notice:"此操作将永久删除该条记录, 是否继续?",install_notice:"您确认要安装该插件吗?",uninstall_notice:"卸载插件会影响到您当前系统相关功能的使用,您确认要执行该操作吗?",update_notice:"您确认要升级该插件吗?",comments_label:"备注",sort_label:"排序",ask_select_label:"请选择",target_Item:"指定目标",confirm_logout:"确认退出吗?",login_timeout:"您的登录已超时!",server_error_notice:"服务异常,请稍后再试",re_login:"重新登录",addNew:"添加",modify:"编辑",back:"返回",post:"发布",nopage:"您访问的页面不存在或者您没有权限访问该模块",close_modal:"关闭",askForReInputContent:"发现您有未保存的文档,是否载入?",cancelReInputContent:"已取消载入并清除数据",noModifyPasswordTips:"不修改密码请留空"},validate:{inputNull:"请输入 {label}",inputCorrect:"请输入正确的 {label}",selectNull:"请选择 {label}",rangelength:"输入长度在 {min} 到 {max} 之间",ranglengthandnormal:"{min} 到 {max} 位,只能包含字母、数字和下划线!",maxlength:"最多可以输入 {max} 个字符",passwordnotmatching:"两次输入密码不一致",limitUploadImgType:"上传图片只能是 JPG,PNG,GIF 格式",limitUploadImgSize:"上传图片大小不能超过 {size} MB",limitUploadFileType:"上传文件的格式不正确",limitUploadFileSize:"上传文件大小不能超过 {size} MB",error_params:"参数非法,请重新操作"}}),{versionManage:{title:"标题",description:"描述",version:"版本号",versionName:"版本名称",forcibly:"是否强制更新",url:"更新地址"}}),g.a),ja:G(G(G({},{main:{name:"名称",myMessage:"我的消息",settings:"设置",logOut:"退出登录",lastLoginTime:"上次登录时间",lastLoginIp:"上次登录IP",myPower:"我的权限",seeDetails:"查看",adminUserTotalNum:"管理员总数",regUserTotalNum:"注册用户",contentsTotalNum:"文档总数",messagesTotalNum:"留言总数",shortcutOption:"快捷操作",addAdminUser:"添加管理员",addContents:"添加文档",sourceManage:"资源管理",systemConfigs:"系统配置",databak:"数据备份",nearMessages:"近期评论",messageIn:"在",messageSaid:"说",messageReply:"回复",noMessages:"暂无数据",nearNewUsers:"新注册用户",confirmBtnText:"确定",cancelBtnText:"取消",reSetBtnText:"重置",scr_modal_title:"提示",scr_modal_del_succes_info:"删除成功",scr_modal_del_error_info:"已取消删除",form_btnText_update:"更新",form_btnText_save:"保存",radioOn:"是",radioOff:"否",updateSuccess:"更新成功",addSuccess:"添加成功",dataTableOptions:"操作",del_notice:"您确认要删除吗?",just_del_notice:"此操作将永久删除该条记录, 是否继续?",install_notice:"您确认要安装该插件吗?",uninstall_notice:"卸载插件会影响到您当前系统相关功能的使用,您确认要执行该操作吗?",update_notice:"您确认要升级该插件吗?",comments_label:"备注",sort_label:"排序",ask_select_label:"请选择",target_Item:"指定目标",confirm_logout:"确认退出吗?",login_timeout:"您的登录已超时!",server_error_notice:"服务异常,请稍后再试",re_login:"重新登录",addNew:"添加",modify:"编辑",back:"返回",post:"发布",nopage:"您访问的页面不存在或者您没有权限访问该模块",close_modal:"关闭",askForReInputContent:"发现您有未保存的文档,是否载入?",cancelReInputContent:"已取消载入并清除数据",noModifyPasswordTips:"不修改密码请留空"},validate:{inputNull:"请输入 {label}",inputCorrect:"请输入正确的 {label}",selectNull:"请选择 {label}",rangelength:"输入长度在 {min} 到 {max} 
之间",ranglengthandnormal:"{min} 到 {max} 位,只能包含字母、数字和下划线!",maxlength:"最多可以输入 {max} 个字符",passwordnotmatching:"两次输入密码不一致",limitUploadImgType:"上传图片只能是 JPG,PNG,GIF 格式",limitUploadImgSize:"上传图片大小不能超过 {size} MB",limitUploadFileType:"上传文件的格式不正确",limitUploadFileSize:"上传文件大小不能超过 {size} MB",error_params:"参数非法,请重新操作"}}),{versionManage:{title:"标题",description:"描述",version:"版本号",versionName:"版本名称",forcibly:"是否强制更新",url:"更新地址"}}),d.a)},z=new m.a({locale:u.a.get("language")||"zh",messages:d});n("8dee");function Z(e,t){return e?e.replace(/[\u0391-\uFFE5]/g,"aa").length>t?e.substring(0,t)+"...":e:""}a.a.config.productionTip=!1,a.a.use(l.a,{size:u.a.get("size")||"medium",i18n:function(e,t){return z.t(e,t)}}),Object.keys(r).forEach(function(e){a.a.filter(e,r[e])});var H=o()({Vue:a.a,appOptions:{render:function(e){return e(f)},router:t,store:A,i18n:z}}),K=[H.bootstrap];function V(e){return H.mount(e)}var q=[H.unmount]},5880:function(e,t){e.exports=u},5925:function(e,t,n){"use strict";function s(e,t){for(var n=[],r={},o=0;o<t.length;o++){var a=t[o],i=a[0],a={id:e+":"+o,css:a[1],media:a[2],sourceMap:a[3]};r[i]?r[i].parts.push(a):n.push(r[i]={id:i,parts:[a]})}return n}n.r(t),n.d(t,"default",function(){return o});t="undefined"!=typeof document;if("undefined"!=typeof DEBUG&&DEBUG&&!t)throw new Error("vue-style-loader cannot be used in a non-browser environment. Use { target: 'node' } in your Webpack config to indicate a server-rendering environment.");var l={},r=t&&(document.head||document.getElementsByTagName("head")[0]),a=null,i=0,c=!1,u=function(){},f=null,d="data-vue-ssr-id",p="undefined"!=typeof navigator&&/msie [6-9]\b/.test(navigator.userAgent.toLowerCase());function o(i,e,t,n){c=t,f=n||{};var u=s(i,e);return m(u),function(e){for(var t=[],n=0;n<u.length;n++){var r=u[n];(o=l[r.id]).refs--,t.push(o)}e?m(u=s(i,e)):u=[];for(var o,n=0;n<t.length;n++)if(0===(o=t[n]).refs){for(var a=0;a<o.parts.length;a++)o.parts[a]();delete l[o.id]}}}function m(e){for(var t=0;t<e.length;t++){var n=e[t],r=l[n.id];if(r){r.refs++;for(var o=0;o<r.parts.length;o++)r.parts[o](n.parts[o]);for(;o<n.parts.length;o++)r.parts.push(g(n.parts[o]));r.parts.length>n.parts.length&&(r.parts.length=n.parts.length)}else{for(var a=[],o=0;o<n.parts.length;o++)a.push(g(n.parts[o]));l[n.id]={id:n.id,refs:1,parts:a}}}}function v(){var e=document.createElement("style");return e.type="text/css",r.appendChild(e),e}function g(t){var e,n,r,o=document.querySelector("style["+d+'~="'+t.id+'"]');if(o){if(c)return u;o.parentNode.removeChild(o)}return r=p?(e=i++,o=a=a||v(),n=y.bind(null,o,e,!1),y.bind(null,o,e,!0)):(o=v(),n=function(e,t){var n=t.css,r=t.media,o=t.sourceMap;r&&e.setAttribute("media",r);f.ssrId&&e.setAttribute(d,t.id);o&&(n+="\n/*# sourceURL="+o.sources[0]+" */",n+="\n/*# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(o))))+" */");if(e.styleSheet)e.styleSheet.cssText=n;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(n))}}.bind(null,o),function(){o.parentNode.removeChild(o)}),n(t),function(e){e?e.css===t.css&&e.media===t.media&&e.sourceMap===t.sourceMap||n(t=e):r()}}var h,b=(h=[],function(e,t){return h[e]=t,h.filter(Boolean).join("\n")});function y(e,t,n,r){n=n?"":r.css;e.styleSheet?e.styleSheet.cssText=b(t,n):(r=document.createTextNode(n),(n=e.childNodes)[t]&&e.removeChild(n[t]),n.length?e.insertBefore(r,n[t]):e.appendChild(r))}},"597a":function(e,t,n){var 
r=n("970b"),o=n("4a92"),a=n("5d61"),i=Object.defineProperty;t.f=n("5e9e")?Object.defineProperty:function(e,t,n){if(r(e),t=a(t,!0),r(n),o)try{return i(e,t,n)}catch(e){}if("get"in n||"set"in n)throw TypeError("Accessors not supported!");return"value"in n&&(e[t]=n.value),e}},"5d10":function(e,t,n){var o=n("9cff");e.exports=function(e,t){if(!o(e))return e;var n,r;if(t&&"function"==typeof(n=e.toString)&&!o(r=n.call(e)))return r;if("function"==typeof(n=e.valueOf)&&!o(r=n.call(e)))return r;if(!t&&"function"==typeof(n=e.toString)&&!o(r=n.call(e)))return r;throw TypeError("Can't convert object to primitive value")}},"5d25":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,r.default)(e),(0,o.default)(i,e.toUpperCase())};var r=a(n("d076")),o=a(n("eec2"));function a(e){return e&&e.__esModule?e:{default:e}}var i=["AD","AE","AF","AG","AI","AL","AM","AO","AQ","AR","AS","AT","AU","AW","AX","AZ","BA","BB","BD","BE","BF","BG","BH","BI","BJ","BL","BM","BN","BO","BQ","BR","BS","BT","BV","BW","BY","BZ","CA","CC","CD","CF","CG","CH","CI","CK","CL","CM","CN","CO","CR","CU","CV","CW","CX","CY","CZ","DE","DJ","DK","DM","DO","DZ","EC","EE","EG","EH","ER","ES","ET","FI","FJ","FK","FM","FO","FR","GA","GB","GD","GE","GF","GG","GH","GI","GL","GM","GN","GP","GQ","GR","GS","GT","GU","GW","GY","HK","HM","HN","HR","HT","HU","ID","IE","IL","IM","IN","IO","IQ","IR","IS","IT","JE","JM","JO","JP","KE","KG","KH","KI","KM","KN","KP","KR","KW","KY","KZ","LA","LB","LC","LI","LK","LR","LS","LT","LU","LV","LY","MA","MC","MD","ME","MF","MG","MH","MK","ML","MM","MN","MO","MP","MQ","MR","MS","MT","MU","MV","MW","MX","MY","MZ","NA","NC","NE","NF","NG","NI","NL","NO","NP","NR","NU","NZ","OM","PA","PE","PF","PG","PH","PK","PL","PM","PN","PR","PS","PT","PW","PY","QA","RE","RO","RS","RU","RW","SA","SB","SC","SD","SE","SG","SH","SI","SJ","SK","SL","SM","SN","SO","SR","SS","ST","SV","SX","SY","SZ","TC","TD","TF","TG","TH","TJ","TK","TL","TM","TN","TO","TR","TT","TV","TW","TZ","UA","UG","UM","US","UY","UZ","VA","VC","VE","VG","VI","VN","VU","WF","WS","YE","YT","ZA","ZM","ZW"];e.exports=t.default,e.exports.default=t.default},"5d61":function(e,t,n){var o=n("0677");e.exports=function(e,t){if(!o(e))return e;var n,r;if(t&&"function"==typeof(n=e.toString)&&!o(r=n.call(e)))return r;if("function"==typeof(n=e.valueOf)&&!o(r=n.call(e)))return r;if(!t&&"function"==typeof(n=e.toString)&&!o(r=n.call(e)))return r;throw TypeError("Can't convert object to primitive value")}},"5e9e":function(e,t,n){e.exports=!n("99fe")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},"5edc":function(e,t){e.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},"5f1c":function(e,t,n){"use strict";function r(){}function f(e){var t;return!(!g(e)||"function"!=typeof(t=e.then))&&t}function o(c,t){var n;c._n||(c._n=!0,n=c._c,M(function(){for(var s=c._v,l=1==c._s,e=0;n.length>e;)!function(e){var t,n,r,o=l?e.ok:e.fail,a=e.resolve,i=e.reject,u=e.domain;try{o?(l||(2==c._h&&R(c),c._h=1),!0===o?t=s:(u&&u.enter(),t=o(s),u&&(u.exit(),r=!0)),t===e.promise?i(P("Promise-chain cycle")):(n=f(t))?n.call(t,a,i):a(t)):i(s)}catch(e){u&&!r&&u.exit(),i(e)}}(n[e++]);c._c=[],c._n=!1,t&&!c._h&&F(c)}))}function a(e){var t=this;t._d||(t._d=!0,(t=t._w||t)._v=e,t._s=2,t._a||(t._a=t._c.slice()),o(t,!0))}var 
i,u,s,l,c=n("3d8a"),d=n("0b34"),p=n("1e4d"),m=n("d445"),v=n("e99b"),g=n("9cff"),h=n("3250"),b=n("8b5a"),y=n("2b37"),_=n("1b0b"),x=n("edec").set,M=n("2d39")(),w=n("d4c9"),O=n("fb49"),S=n("aeb8"),A=n("4836"),E="Promise",P=d.TypeError,$=d.process,T=$&&$.versions,C=T&&T.v8||"",j=d[E],N="process"==m($),k=u=w.f,m=!!function(){try{var e=j.resolve(1),t=(e.constructor={})[n("839a")("species")]=function(e){e(r,r)};return(N||"function"==typeof PromiseRejectionEvent)&&e.then(r)instanceof t&&0!==C.indexOf("6.6")&&-1===S.indexOf("Chrome/66")}catch(e){}}(),F=function(o){x.call(d,function(){var e,t,n=o._v,r=I(o);if(r&&(e=O(function(){N?$.emit("unhandledRejection",n,o):(t=d.onunhandledrejection)?t({promise:o,reason:n}):(t=d.console)&&t.error&&t.error("Unhandled promise rejection",n)}),o._h=N||I(o)?2:1),o._a=void 0,r&&e.e)throw e.v})},I=function(e){return 1!==e._h&&0===(e._a||e._c).length},R=function(t){x.call(d,function(){var e;N?$.emit("rejectionHandled",t):(e=d.onrejectionhandled)&&e({promise:t,reason:t._v})})},L=function(e){var n,r=this;if(!r._d){r._d=!0,r=r._w||r;try{if(r===e)throw P("Promise can't be resolved itself");(n=f(e))?M(function(){var t={_w:r,_d:!1};try{n.call(e,p(L,t,1),p(a,t,1))}catch(e){a.call(t,e)}}):(r._v=e,r._s=1,o(r,!1))}catch(e){a.call({_w:r,_d:!1},e)}}};m||(j=function(e){b(this,j,E,"_h"),h(e),i.call(this);try{e(p(L,this,1),p(a,this,1))}catch(e){a.call(this,e)}},(i=function(e){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1}).prototype=n("6f45")(j.prototype,{then:function(e,t){var n=k(_(this,j));return n.ok="function"!=typeof e||e,n.fail="function"==typeof t&&t,n.domain=N?$.domain:void 0,this._c.push(n),this._a&&this._a.push(n),this._s&&o(this,!1),n.promise},catch:function(e){return this.then(void 0,e)}}),s=function(){var e=new i;this.promise=e,this.resolve=p(L,e,1),this.reject=p(a,e,1)},w.f=k=function(e){return e===j||e===l?new s:u(e)}),v(v.G+v.W+v.F*!m,{Promise:j}),n("bac3")(j,E),n("f966")(E),l=n("76e3")[E],v(v.S+v.F*!m,E,{reject:function(e){var t=k(this);return(0,t.reject)(e),t.promise}}),v(v.S+v.F*(c||!m),E,{resolve:function(e){return A(c&&this===l?j:this,e)}}),v(v.S+v.F*!(m&&n("1a9a")(function(e){j.all(e).catch(r)})),E,{all:function(e){var i=this,t=k(i),u=t.resolve,s=t.reject,n=O(function(){var r=[],o=0,a=1;y(e,!1,function(e){var t=o++,n=!1;r.push(void 0),a++,i.resolve(e).then(function(e){n||(n=!0,r[t]=e,--a||u(r))},s)}),--a||u(r)});return n.e&&s(n.v),t.promise},race:function(e){var t=this,n=k(t),r=n.reject,o=O(function(){y(e,!1,function(e){t.resolve(e).then(n.resolve,r)})});return o.e&&r(o.v),n.promise}})},"5f72":function(e,t){e.exports=o},6071:function(e,t,n){"use strict";function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){"object"===r(e)&&null!==e?e="function"==typeof e.toString?e.toString():"[object Object]":(null==e||isNaN(e)&&!e.length)&&(e="");return String(e)},e.exports=t.default,e.exports.default=t.default},"60bb":function(e,t){e.exports=a},6389:function(e,t){e.exports=s},6698:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),new RegExp("^[a-f0-9]{".concat(a[t],"}$")).test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var 
a={md5:32,md4:32,sha1:40,sha256:64,sha384:96,sha512:128,ripemd128:32,ripemd160:40,tiger128:32,tiger160:40,tiger192:48,crc32:8,crc32b:8};e.exports=t.default,e.exports.default=t.default},6894:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/[^\x00-\x7F]/;e.exports=t.default,e.exports.default=t.default},"68fa":function(e,t,n){"use strict";n("3b2c")},"690e":function(e,t){e.exports=function(n){var i=[];return i.toString=function(){return this.map(function(e){var t=function(e,t){var n=e[1]||"",r=e[3];if(!r)return n;if(t&&"function"==typeof btoa){e=function(e){return"/*# sourceMappingURL=data:application/json;charset=utf-8;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(e))))+" */"}(r),t=r.sources.map(function(e){return"/*# sourceURL="+r.sourceRoot+e+" */"});return[n].concat(t).concat([e]).join("\n")}return[n].join("\n")}(e,n);return e[2]?"@media "+e[2]+"{"+t+"}":t}).join("")},i.i=function(e,t){"string"==typeof e&&(e=[[null,e,""]]);for(var n={},r=0;r<this.length;r++){var o=this[r][0];"number"==typeof o&&(n[o]=!0)}for(r=0;r<e.length;r++){var a=e[r];"number"==typeof a[0]&&n[a[0]]||(t&&!a[2]?a[2]=t:t&&(a[2]="("+a[2]+") and ("+t+")"),i.push(a))}},i}},"69e5":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function e(t){var n=1<arguments.length&&void 0!==arguments[1]?arguments[1]:"";(0,s.default)(t);n=String(n);{if(!n)return e(t,4)||e(t,6);if("4"===n){if(!l.test(t))return!1;var r=t.split(".").sort(function(e,t){return e-t});return r[3]<=255}if("6"===n){var o=t.split(":"),a=!1,i=e(o[o.length-1],4),n=i?7:8;if(o.length>n)return!1;if("::"===t)return!0;"::"===t.substr(0,2)?(o.shift(),o.shift(),a=!0):"::"===t.substr(t.length-2)&&(o.pop(),o.pop(),a=!0);for(var u=0;u<o.length;++u)if(""===o[u]&&0<u&&u<o.length-1){if(a)return!1;a=!0}else if(!(i&&u===o.length-1||c.test(o[u])))return!1;return a?1<=o.length:o.length===n}}return!1};var r,s=(r=n("d076"))&&r.__esModule?r:{default:r};var l=/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/,c=/^[0-9A-F]{1,4}$/i;e.exports=t.default,e.exports.default=t.default},"6ab3":function(e,t,n){(t=e.exports=n("690e")(!1)).push([e.i,'.fade-enter-active,.fade-leave-active{-webkit-transition:opacity .28s;transition:opacity .28s}.fade-enter,.fade-leave-active{opacity:0}.fade-transform-enter-active,.fade-transform-leave-active{-webkit-transition:all .5s;transition:all .5s}.fade-transform-enter{opacity:0;-webkit-transform:translateX(-30px);transform:translateX(-30px)}.fade-transform-leave-to{opacity:0;-webkit-transform:translateX(30px);transform:translateX(30px)}.breadcrumb-enter-active,.breadcrumb-leave-active{-webkit-transition:all .5s;transition:all .5s}.breadcrumb-enter,.breadcrumb-leave-active{opacity:0;-webkit-transform:translateX(20px);transform:translateX(20px)}.breadcrumb-move{-webkit-transition:all .5s;transition:all .5s}.breadcrumb-leave-active{position:absolute}.el-breadcrumb__inner,.el-breadcrumb__inner a{font-weight:400!important}.el-upload input[type=file]{display:none!important}.el-upload__input{display:none}.el-dialog{-webkit-transform:none;transform:none;left:0;position:relative;margin:0 auto}.upload-container .el-upload{width:100%}.upload-container .el-upload .el-upload-dragger{width:100%;height:200px}.el-dropdown-menu a{display:block}.dr-toolbar{margin:10px auto;height:30px;background-color:#fff}.dr-toolbar .option-button{text-align:left}.dr-datatable{background-color:#fff;margin:0 
15px 15px}.dr-searchInput{min-width:180px!important;margin-right:10px}.dr-searchInput,.dr-select-box{display:inline-block}.dr-toolbar-right{width:100%;display:block;text-align:right}.el-button--small{padding:7px 7px!important}.el-button--mini{padding:7px!important}.el-input-number--small{line-height:32px!important}.el-table a:link,.el-table a:visited{color:#5a5e66;text-decoration:none}.el-card__header{padding:10px 10px}.dr-datatable{padding:15px}.dash-box{background:#fff;-webkit-box-shadow:4px 4px 40px rgba(0,0,0,.05);box-shadow:4px 4px 40px rgba(0,0,0,.05);border-color:rgba(0,0,0,.05)}.dash-box .dash-title{font-size:16px;color:rgba(0,0,0,.45);margin:0;padding:15px;font-weight:400;border-bottom:1px solid #eee;background:rgba(0,0,0,.003);-webkit-box-shadow:inset 0 -2px 1px rgba(0,0,0,.03);box-shadow:inset 0 -2px 1px rgba(0,0,0,.03)}.dash-box .dash-content{padding:15px}@media screen and (max-width:768px){.el-dialog{width:90%!important}.el-message-box{width:80%!important}}#versionManage-app .main-container{min-height:100%;-webkit-transition:margin-left .28s;transition:margin-left .28s;margin-left:210px;position:relative}#versionManage-app .hideSidebar .main-container{margin-left:54px}#versionManage-app .mobile .main-container{margin-left:0}#versionManage-app .withoutAnimation .main-container,#versionManage-app .withoutAnimation .sidebar-container{-webkit-transition:none;transition:none}body{height:100%;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;text-rendering:optimizeLegibility;font-family:Helvetica Neue,Helvetica,PingFang SC,Hiragino Sans GB,Microsoft YaHei,Arial,sans-serif}label{font-weight:700}html{-webkit-box-sizing:border-box;box-sizing:border-box}#navbar-app,html{height:100%}*,:after,:before{-webkit-box-sizing:inherit;box-sizing:inherit}a:active,a:focus{outline:none}a,a:focus,a:hover{cursor:pointer;color:inherit;text-decoration:none}div:focus{outline:none}.clearfix:after{visibility:hidden;display:block;font-size:0;content:" ";clear:both;height:0}.app-container{padding:20px}',""]),t.locals={menuText:"#bfcbd9",menuActiveText:"#409eff",subMenuActiveText:"#f4f4f5",menuBg:"#304156",menuHover:"#263445",subMenuBg:"#1f2d3d",subMenuHover:"#001528",sideBarWidth:"210px"}},"6ba0":function(e,t,n){var r=n("e99b");r(r.S+r.F,"Object",{assign:n("9f15")})},"6bac":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){if((0,i.default)(e),!u.test(e))return!1;for(var t,n=e.replace(/[A-Z]/g,function(e){return parseInt(e,36)}),r=0,o=!0,a=n.length-2;0<=a;a--)t=n.substring(a,a+1),t=parseInt(t,10),r+=o&&10<=(t*=2)?t+1:t,o=!o;return parseInt(e.substr(e.length-1),10)===(1e4-r)%10};var r,i=(r=n("d076"))&&r.__esModule?r:{default:r};var u=/^[A-Z]{2}[0-9A-Z]{9}[0-9]$/;e.exports=t.default,e.exports.default=t.default},"6bf8":function(e,t,n){"use strict";var r=n("a86f");e.exports=function(){var e=r(this),t="";return e.global&&(t+="g"),e.ignoreCase&&(t+="i"),e.multiline&&(t+="m"),e.unicode&&(t+="u"),e.sticky&&(t+="y"),t}},"6d46":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),parseFloat(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"6d97":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(){var e,t=0<arguments.length&&void 0!==arguments[0]?arguments[0]:{},n=1<arguments.length?arguments[1]:void 0;for(e in n)void 0===t[e]&&(t[e]=n[e]);return 
t},e.exports=t.default,e.exports.default=t.default},"6e69":function(e,t,n){n.p=window.getPublicPath("versionManage")},"6f45":function(e,t,n){var o=n("84e8");e.exports=function(e,t,n){for(var r in t)o(e,r,t[r],n);return e}},"6fef":function(e,t){e.exports=function(e,t){return{value:t,done:!!e}}},7289:function(t,e,n){!function(e){t.exports=function(){"use strict";function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter(function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable})),n.push.apply(n,r)}return n}function s(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?a(Object(n),!0).forEach(function(e){o(t,e,n[e])}):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):a(Object(n)).forEach(function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))})}return t}var i="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof e?e:"undefined"!=typeof self?self:{};!function(e,t){e(t={exports:{}},t.exports)}(function(e,t){var n;n=i,e.exports=function(e){if(e.CSS&&e.CSS.escape)return e.CSS.escape;var t=function(e){if(0==arguments.length)throw new TypeError("`CSS.escape` requires an argument.");for(var t,n=String(e),r=n.length,o=-1,a="",i=n.charCodeAt(0);++o<r;)0!=(t=n.charCodeAt(o))?a+=t>=1&&t<=31||127==t||0==o&&t>=48&&t<=57||1==o&&t>=48&&t<=57&&45==i?"\\"+t.toString(16)+" ":0==o&&1==r&&45==t||!(t>=128||45==t||95==t||t>=48&&t<=57||t>=65&&t<=90||t>=97&&t<=122)?"\\"+n.charAt(o):n.charAt(o):a+="�";return a};return e.CSS||(e.CSS={}),e.CSS.escape=t,t}(n)});var u={appOptions:null,template:null,Vue:null,createApp:null,handleInstance:null};function l(t){return t.loadRootComponent?t.loadRootComponent().then(function(e){return t.rootComponent=e}):Promise.resolve()}function c(o,a,i){var u=a[i.name];return Promise.resolve().then(function(){if(u)u.vueInstance.$el.style.display="block";else{u={};var e,t=s({},o.appOptions);if(i.domElement&&!t.el&&(t.el=i.domElement),t.el)if("string"==typeof t.el){if(!(e=document.querySelector(t.el)))throw Error("If appOptions.el is provided to single-spa-vue, the dom element must\n exist in the dom. 
Was provided as ".concat(t.el))}else e=t.el;else{var n="single-spa-application:".concat(i.name);t.el="#".concat(CSS.escape(n)),(e=document.getElementById(n))||((e=document.createElement("div")).id=n,document.body.appendChild(e))}if(t.el=t.el+" .single-spa-container",!e.querySelector(".single-spa-container")){var r=document.createElement("div");r.className="single-spa-container",e.appendChild(r)}u.domEl=e,t.render||t.template||!o.rootComponent||(t.render=function(e){return e(o.rootComponent)}),t.data||(t.data={}),t.data=s(s({},t.data),i),u.vueInstance=new o.Vue(t),u.vueInstance.bind&&(u.vueInstance=u.vueInstance.bind(u.vueInstance)),a[i.name]=u}return u.vueInstance})}function f(r,o,a){return Promise.resolve().then(function(){var e=o[a.name],t=s(s({},r.appOptions.data||{}),a);for(var n in t)e.vueInstance[n]=t[n]})}function d(e,t,n){return Promise.resolve().then(function(){t[n.name].vueInstance.$el.style.display="none"})}return function(e){if("object"!==r(e))throw new Error("single-spa-vue requires a configuration object");var t=s(s({},u),e);if(!t.Vue&&!t.createApp)throw Error("single-spa-vue must be passed opts.Vue or opts.createApp");if(!t.appOptions)throw Error("single-spa-vue must be passed opts.appOptions");if(t.appOptions.el&&"string"!=typeof t.appOptions.el&&!(t.appOptions.el instanceof HTMLElement))throw Error("single-spa-vue: appOptions.el must be a string CSS selector, an HTMLElement, or not provided at all. Was given ".concat(r(t.appOptions.el)));t.createApp=t.createApp||t.Vue&&t.Vue.createApp;var n={};return{bootstrap:l.bind(null,t,n),mount:c.bind(null,t,n),unmount:d.bind(null,t,n),update:f.bind(null,t,n)}}}()}.call(this,n("2409"))},"728a":function(e,t,n){var a=n("96d8");e.exports=function(r,o,e){if(a(r),void 0===o)return r;switch(e){case 1:return function(e){return r.call(o,e)};case 2:return function(e,t){return r.call(o,e,t)};case 3:return function(e,t,n){return r.call(o,e,t,n)}}return function(){return r.apply(o,arguments)}}},"732b":function(e,t,n){var r=n("212e"),o=Math.max,a=Math.min;e.exports=function(e,t){return(e=r(e))<0?o(e+t,0):a(e,t)}},"761c":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,r.default)(e),(0,o.default)(e)&&24===e.length};var r=a(n("d076")),o=a(n("bf02"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},"76e3":function(e,t){e=e.exports={version:"2.6.12"};"number"==typeof __e&&(__e=e)},7771:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,i.default)(e),(t=(0,u.default)(t,s)).allow_trailing_dot&&"."===e[e.length-1]&&(e=e.substring(0,e.length-1));for(var n=e.split("."),r=0;r<n.length;r++)if(63<n[r].length)return!1;if(t.require_tld){e=n.pop();if(!n.length||!/^([a-z\u00a1-\uffff]{2,}|xn[a-z0-9-]{2,})$/i.test(e))return!1;if(/[\s\u2002-\u200B\u202F\u205F\u3000\uFEFF\uDB40\uDC20]/.test(e))return!1}for(var o,a=0;a<n.length;a++){if(o=n[a],t.allow_underscores&&(o=o.replace(/_/g,"")),!/^[a-z\u00a1-\uffff0-9-]+$/i.test(o))return!1;if(/[\uff01-\uff5e]/.test(o))return!1;if("-"===o[0]||"-"===o[o.length-1])return!1}return!0};var i=r(n("d076")),u=r(n("6d97"));function r(e){return e&&e.__esModule?e:{default:e}}var s={require_tld:!0,allow_underscores:!1,allow_trailing_dot:!1};e.exports=t.default,e.exports.default=t.default},"7a47":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){{if((0,o.default)(e),"[object Array]"===Object.prototype.toString.call(t)){var 
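// validator.js isIn (module "7a47"): array candidates are normalized entry-by-entry
// to strings before an indexOf lookup, plain objects are checked with hasOwnProperty,
// and anything else exposing indexOf (e.g. a string) is searched directly.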
n=[];for(var r in t)!{}.hasOwnProperty.call(t,r)||(n[r]=(0,a.default)(t[r]));return 0<=n.indexOf(e)}if("object"===i(t))return t.hasOwnProperty(e);if(t&&"function"==typeof t.indexOf)return 0<=t.indexOf(e)}return!1};var o=r(n("d076")),a=r(n("6071"));function r(e){return e&&e.__esModule?e:{default:e}}function i(e){return(i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}e.exports=t.default,e.exports.default=t.default},"7bb0":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^[\x00-\x7F]+$/;e.exports=t.default,e.exports.default=t.default},"7c2b":function(e,t,n){var v=n("a4cf"),g=n("ce99"),h=n("728a"),b=n("0cb2"),y=n("dce3"),_="prototype",x=function(e,t,n){var r,o,a,i=e&x.F,u=e&x.G,s=e&x.S,l=e&x.P,c=e&x.B,f=e&x.W,d=u?g:g[t]||(g[t]={}),p=d[_],m=u?v:s?v[t]:(v[t]||{})[_];for(r in n=u?t:n)(o=!i&&m&&void 0!==m[r])&&y(d,r)||(a=(o?m:n)[r],d[r]=u&&"function"!=typeof m[r]?n[r]:c&&o?h(a,v):f&&m[r]==a?function(r){function e(e,t,n){if(this instanceof r){switch(arguments.length){case 0:return new r;case 1:return new r(e);case 2:return new r(e,t)}return new r(e,t,n)}return r.apply(this,arguments)}return e[_]=r[_],e}(a):l&&"function"==typeof a?h(Function.call,a):a,l&&((d.virtual||(d.virtual={}))[r]=a,e&x.R&&p&&!p[r]&&b(p,r,a)))};x.F=1,x.G=2,x.S=4,x.P=8,x.B=16,x.W=32,x.U=64,x.R=128,e.exports=x},"7ee3":function(e,t,n){function r(){}var o=n("a86f"),a=n("3f9e"),i=n("065e"),u=n("3a0d")("IE_PROTO"),s="prototype",l=function(){var e=n("e8d7")("iframe"),t=i.length;for(e.style.display="none",n("bbcc").appendChild(e),e.src="javascript:",(e=e.contentWindow.document).open(),e.write("<script>document.F=Object<\/script>"),e.close(),l=e.F;t--;)delete l[s][i[t]];return l()};e.exports=Object.create||function(e,t){var n;return null!==e?(r[s]=o(e),n=new r,r[s]=null,n[u]=e):n=l(),void 0===t?n:a(n,t)}},8078:function(e,t,n){var r=n("3ab0");e.exports=function(e){return Object(r(e))}},"81e7":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),e.replace(new RegExp("[^".concat(t,"]+"),"g"),"")};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"839a":function(e,t,n){var r=n("baa7")("wks"),o=n("d8b3"),a=n("0b34").Symbol,i="function"==typeof a;(e.exports=function(e){return r[e]||(r[e]=i&&a[e]||(i?a:o)("Symbol."+e))}).store=r},"83d3":function(e,t,n){e.exports=!n("26df")&&!n("0926")(function(){return 7!=Object.defineProperty(n("e8d7")("div"),"a",{get:function(){return 7}}).a})},"84e8":function(e,t,n){var a=n("0b34"),i=n("065d"),u=n("4fd4"),s=n("d8b3")("src"),r=n("05fd"),o="toString",l=(""+r).split(o);n("76e3").inspectSource=function(e){return r.call(e)},(e.exports=function(e,t,n,r){var o="function"==typeof n;o&&(u(n,"name")||i(n,"name",t)),e[t]!==n&&(o&&(u(n,s)||i(n,s,e[t]?""+e[t]:l.join(String(t)))),e===a?e[t]=n:r?e[t]?e[t]=n:i(e,t,n):(delete e[t],i(e,t,n)))})(Function.prototype,o,function(){return"function"==typeof this&&this[s]||r.call(this)})},8569:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){var n,r;(0,o.default)(e),r="object"===a(t)?(n=t.min||0,t.max):(n=arguments[1],arguments[2]);t=e.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g)||[],t=e.length-t.length;return 
n<=t&&(void 0===r||t<=r)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};function a(e){return(a="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}e.exports=t.default,e.exports.default=t.default},8598:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(t,e,n){if((0,o.default)(t),n&&n.strictMode&&!t.startsWith("+"))return!1;{if(Array.isArray(e))return e.some(function(e){if(a.hasOwnProperty(e)&&a[e].test(t))return!0;return!1});if(e in a)return a[e].test(t);if(!e||"any"===e){for(var r in a)if(a.hasOwnProperty(r))if(a[r].test(t))return!0;return!1}}throw new Error("Invalid locale '".concat(e,"'"))},t.locales=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a={"ar-AE":/^((\+?971)|0)?5[024568]\d{7}$/,"ar-BH":/^(\+?973)?(3|6)\d{7}$/,"ar-DZ":/^(\+?213|0)(5|6|7)\d{8}$/,"ar-EG":/^((\+?20)|0)?1[0125]\d{8}$/,"ar-IQ":/^(\+?964|0)?7[0-9]\d{8}$/,"ar-JO":/^(\+?962|0)?7[789]\d{7}$/,"ar-KW":/^(\+?965)[569]\d{7}$/,"ar-SA":/^(!?(\+?966)|0)?5\d{8}$/,"ar-SY":/^(!?(\+?963)|0)?9\d{8}$/,"ar-TN":/^(\+?216)?[2459]\d{7}$/,"be-BY":/^(\+?375)?(24|25|29|33|44)\d{7}$/,"bg-BG":/^(\+?359|0)?8[789]\d{7}$/,"bn-BD":/^(\+?880|0)1[1356789][0-9]{8}$/,"cs-CZ":/^(\+?420)? ?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/,"da-DK":/^(\+?45)?\s?\d{2}\s?\d{2}\s?\d{2}\s?\d{2}$/,"de-DE":/^(\+49)?0?1(5[0-25-9]\d|6([23]|0\d?)|7([0-57-9]|6\d))\d{7}$/,"el-GR":/^(\+?30|0)?(69\d{8})$/,"en-AU":/^(\+?61|0)4\d{8}$/,"en-GB":/^(\+?44|0)7\d{9}$/,"en-GH":/^(\+233|0)(20|50|24|54|27|57|26|56|23|28)\d{7}$/,"en-HK":/^(\+?852\-?)?[456789]\d{3}\-?\d{4}$/,"en-IE":/^(\+?353|0)8[356789]\d{7}$/,"en-IN":/^(\+?91|0)?[6789]\d{9}$/,"en-KE":/^(\+?254|0)(7|1)\d{8}$/,"en-MT":/^(\+?356|0)?(99|79|77|21|27|22|25)[0-9]{6}$/,"en-MU":/^(\+?230|0)?\d{8}$/,"en-NG":/^(\+?234|0)?[789]\d{9}$/,"en-NZ":/^(\+?64|0)[28]\d{7,9}$/,"en-PK":/^((\+92)|(0092))-{0,1}\d{3}-{0,1}\d{7}$|^\d{11}$|^\d{4}-\d{7}$/,"en-RW":/^(\+?250|0)?[7]\d{8}$/,"en-SG":/^(\+65)?[89]\d{7}$/,"en-TZ":/^(\+?255|0)?[67]\d{8}$/,"en-UG":/^(\+?256|0)?[7]\d{8}$/,"en-US":/^((\+1|1)?( |-)?)?(\([2-9][0-9]{2}\)|[2-9][0-9]{2})( |-)?([2-9][0-9]{2}( |-)?[0-9]{4})$/,"en-ZA":/^(\+?27|0)\d{9}$/,"en-ZM":/^(\+?26)?09[567]\d{7}$/,"es-CL":/^(\+?56|0)[2-9]\d{1}\d{7}$/,"es-ES":/^(\+?34)?(6\d{1}|7[1234])\d{7}$/,"es-MX":/^(\+?52)?(1|01)?\d{10,11}$/,"es-PY":/^(\+?595|0)9[9876]\d{7}$/,"es-UY":/^(\+598|0)9[1-9][\d]{6}$/,"et-EE":/^(\+?372)?\s?(5|8[1-4])\s?([0-9]\s?){6,7}$/,"fa-IR":/^(\+?98[\-\s]?|0)9[0-39]\d[\-\s]?\d{3}[\-\s]?\d{4}$/,"fi-FI":/^(\+?358|0)\s?(4(0|1|2|4|5|6)?|50)\s?(\d\s?){4,8}\d$/,"fj-FJ":/^(\+?679)?\s?\d{3}\s?\d{4}$/,"fo-FO":/^(\+?298)?\s?\d{2}\s?\d{2}\s?\d{2}$/,"fr-FR":/^(\+?33|0)[67]\d{8}$/,"he-IL":/^(\+972|0)([23489]|5[012345689]|77)[1-9]\d{6}$/,"hu-HU":/^(\+?36)(20|30|70)\d{7}$/,"id-ID":/^(\+?62|0)8(1[123456789]|2[1238]|3[1238]|5[12356789]|7[78]|9[56789]|8[123456789])([\s?|\d]{5,11})$/,"it-IT":/^(\+?39)?\s?3\d{2} ?\d{6,7}$/,"ja-JP":/^(\+?81|0)[789]0[ \-]?[1-9]\d{2}[ \-]?\d{5}$/,"kk-KZ":/^(\+?7|8)?7\d{9}$/,"kl-GL":/^(\+?299)?\s?\d{2}\s?\d{2}\s?\d{2}$/,"ko-KR":/^((\+?82)[ \-]?)?0?1([0|1|6|7|8|9]{1})[ \-]?\d{3,4}[ \-]?\d{4}$/,"lt-LT":/^(\+370|8)\d{8}$/,"ms-MY":/^(\+?6?01){1}(([0145]{1}(\-|\s)?\d{7,8})|([236789]{1}(\s|\-)?\d{7}))$/,"nb-NO":/^(\+?47)?[49]\d{7}$/,"nl-BE":/^(\+?32|0)4?\d{8}$/,"nl-NL":/^(\+?31|0)6?\d{8}$/,"nn-NO":/^(\+?47)?[49]\d{7}$/,"pl-PL":/^(\+?48)? 
?[5-8]\d ?\d{3} ?\d{2} ?\d{2}$/,"pt-BR":/(?=^(\+?5{2}\-?|0)[1-9]{2}\-?\d{4}\-?\d{4}$)(^(\+?5{2}\-?|0)[1-9]{2}\-?[6-9]{1}\d{3}\-?\d{4}$)|(^(\+?5{2}\-?|0)[1-9]{2}\-?9[6-9]{1}\d{3}\-?\d{4}$)/,"pt-PT":/^(\+?351)?9[1236]\d{7}$/,"ro-RO":/^(\+?4?0)\s?7\d{2}(\/|\s|\.|\-)?\d{3}(\s|\.|\-)?\d{3}$/,"ru-RU":/^(\+?7|8)?9\d{9}$/,"sl-SI":/^(\+386\s?|0)(\d{1}\s?\d{3}\s?\d{2}\s?\d{2}|\d{2}\s?\d{3}\s?\d{3})$/,"sk-SK":/^(\+?421)? ?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/,"sr-RS":/^(\+3816|06)[- \d]{5,9}$/,"sv-SE":/^(\+?46|0)[\s\-]?7[\s\-]?[02369]([\s\-]?\d){7}$/,"th-TH":/^(\+66|66|0)\d{9}$/,"tr-TR":/^(\+?90|0)?5\d{9}$/,"uk-UA":/^(\+?38|8)?0\d{9}$/,"vi-VN":/^(\+?84|0)((3([2-9]))|(5([2689]))|(7([0|6-9]))|(8([1-6|89]))|(9([0-9])))([0-9]{7})$/,"zh-CN":/^((\+|00)86)?1([358][0-9]|4[579]|6[67]|7[0135678]|9[189])[0-9]{8}$/,"zh-TW":/^(\+?886\-?|0)?9\d{8}$/};a["en-CA"]=a["en-US"],a["fr-BE"]=a["nl-BE"],a["zh-HK"]=a["en-HK"];var i=Object.keys(a);t.locales=i},"85b3":function(e,t){e.exports=n},"87b2":function(e,t,n){var r=n("839a")("unscopables"),o=Array.prototype;null==o[r]&&n("065d")(o,r,{}),e.exports=function(e){o[r][e]=!0}},"8b5a":function(e,t){e.exports=function(e,t,n,r){if(!(e instanceof t)||void 0!==r&&r in e)throw TypeError(n+": incorrect invocation!");return e}},"8bbf":function(e,t){e.exports=r},"8c18":function(e,t,n){(e.exports=n("690e")(!1)).push([e.i,"/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:.67em 0}hr{-webkit-box-sizing:content-box;box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:.35em .75em .625em}legend{-webkit-box-sizing:border-box;box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{-webkit-box-sizing:border-box;box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}[hidden],template{display:none}",""])},"8dee":function(e,t,n){"use strict";var M=n("a86f"),w=n("8078"),O=n("201c"),S=n("212e"),A=n("43ec"),E=n("f417"),P=Math.max,$=Math.min,T=Math.floor,C=/\$([$&`']|\d\d?|<[^>]*>)/g,j=/\$([$&`']|\d\d?)/g;n("c46f")("replace",2,function(o,a,_,x){return[function(e,t){var 
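// core-js es6.regexp.replace (module "8dee"): patches String.prototype.replace to
// follow the Symbol.replace protocol. It collects every match for global regexps,
// advances past empty matches in a unicode-aware way, and expands $-substitutions
// ($&, $`, $', $1..$99 and named groups via $<name>).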
n=o(this),r=null==e?void 0:e[a];return void 0!==r?r.call(e,n,t):_.call(String(n),e,t)},function(e,t){var n=x(_,e,this,t);if(n.done)return n.value;var r=M(e),o=String(this),a="function"==typeof t;a||(t=String(t));var i,u=r.global;u&&(i=r.unicode,r.lastIndex=0);for(var s=[];;){var l=E(r,o);if(null===l)break;if(s.push(l),!u)break;""===String(l[0])&&(r.lastIndex=A(o,O(r.lastIndex),i))}for(var c,f="",d=0,p=0;p<s.length;p++){for(var l=s[p],m=String(l[0]),v=P($(S(l.index),o.length),0),g=[],h=1;h<l.length;h++)g.push(void 0===(c=l[h])?c:String(c));var b,y=l.groups,y=a?(b=[m].concat(g,v,o),void 0!==y&&b.push(y),String(t.apply(void 0,b))):function(a,i,u,s,l,e){var c=u+a.length,f=s.length,t=j;void 0!==l&&(l=w(l),t=C);return _.call(e,t,function(e,t){var n;switch(t.charAt(0)){case"$":return"$";case"&":return a;case"`":return i.slice(0,u);case"'":return i.slice(c);case"<":n=l[t.slice(1,-1)];break;default:var r=+t;if(0==r)return e;if(f<r){var o=T(r/10);return 0===o?e:o<=f?void 0===s[o-1]?t.charAt(1):s[o-1]+t.charAt(1):e}n=s[r-1]}return void 0===n?"":n})}(m,o,v,g,y,t);d<=v&&(f+=o.slice(d,v)+y,d=v+m.length)}return f+o.slice(d)}]})},"8fcc":function(e,t,n){(e.exports=n("690e")(!1)).push([e.i,".adminVersionConfig{margin-top:25px}.adminVersionConfig .line-gate{overflow:hidden;color:#606266;-webkit-transition:height .2s;transition:height .2s;font-size:16px;padding:10px 0;width:100%;border-bottom:1px solid #eee}",""])},"93ca":function(e,t,n){var r=n("3a4c"),o=n("065e");e.exports=Object.keys||function(e){return r(e,o)}},"953d":function(e,t){e.exports={}},"96d8":function(e,t){e.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},"970b":function(e,t,n){var r=n("0677");e.exports=function(e){if(!r(e))throw TypeError(e+" is not an object!");return e}},9758:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),e=Date.parse(e),isNaN(e)?null:new Date(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},"97c5":function(e,t,n){"use strict";t.__esModule=!0,t.default={el:{colorpicker:{confirm:"OK",clear:"Clear"},datepicker:{now:"Now",today:"Today",cancel:"Cancel",clear:"Clear",confirm:"OK",selectDate:"Select date",selectTime:"Select time",startDate:"Start Date",startTime:"Start Time",endDate:"End Date",endTime:"End Time",prevYear:"Previous Year",nextYear:"Next Year",prevMonth:"Previous Month",nextMonth:"Next Month",year:"",month1:"January",month2:"February",month3:"March",month4:"April",month5:"May",month6:"June",month7:"July",month8:"August",month9:"September",month10:"October",month11:"November",month12:"December",week:"week",weeks:{sun:"Sun",mon:"Mon",tue:"Tue",wed:"Wed",thu:"Thu",fri:"Fri",sat:"Sat"},months:{jan:"Jan",feb:"Feb",mar:"Mar",apr:"Apr",may:"May",jun:"Jun",jul:"Jul",aug:"Aug",sep:"Sep",oct:"Oct",nov:"Nov",dec:"Dec"}},select:{loading:"Loading",noMatch:"No matching data",noData:"No data",placeholder:"Select"},cascader:{noMatch:"No matching data",loading:"Loading",placeholder:"Select",noData:"No data"},pagination:{goto:"Go to",pagesize:"/page",total:"Total {total}",pageClassifier:""},messagebox:{title:"Message",confirm:"OK",cancel:"Cancel",error:"Illegal input"},upload:{deleteTip:"press delete to remove",delete:"Delete",preview:"Preview",continue:"Continue"},table:{emptyText:"No Data",confirmFilter:"Confirm",resetFilter:"Reset",clearFilter:"All",sumText:"Sum"},tree:{emptyText:"No Data"},transfer:{noMatch:"No matching data",noData:"No data",titles:["List 
1","List 2"],filterPlaceholder:"Enter keyword",noCheckedFormat:"{total} items",hasCheckedFormat:"{checked}/{total} checked"},image:{error:"FAILED"},pageHeader:{title:"Back"},popconfirm:{confirmButtonText:"Yes",cancelButtonText:"No"}}}},"99fe":function(e,t){e.exports=function(e){try{return!!e()}catch(e){return!0}}},"9cff":function(e,t){e.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},"9dba":function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:{};(0,u.default)(e);var n=s;if(n=t.require_hyphen?n.replace("?",""):n,!(n=t.case_sensitive?new RegExp(n):new RegExp(n,"i")).test(e))return!1;for(var r=e.replace("-","").toUpperCase(),o=0,a=0;a<r.length;a++){var i=r[a];o+=("X"===i?10:+i)*(8-a)}return o%11==0};var r,u=(r=n("d076"))&&r.__esModule?r:{default:r};var s="^\\d{4}-?\\d{3}[\\dX]$";e.exports=t.default,e.exports.default=t.default},"9f15":function(e,t,n){"use strict";var d=n("26df"),p=n("93ca"),m=n("0c29"),v=n("35d4"),g=n("8078"),h=n("1b96"),o=Object.assign;e.exports=!o||n("0926")(function(){var e={},t={},n=Symbol(),r="abcdefghijklmnopqrst";return e[n]=7,r.split("").forEach(function(e){t[e]=e}),7!=o({},e)[n]||Object.keys(o({},t)).join("")!=r})?function(e,t){for(var n=g(e),r=arguments.length,o=1,a=m.f,i=v.f;o<r;)for(var u,s=h(arguments[o++]),l=a?p(s).concat(a(s)):p(s),c=l.length,f=0;f<c;)u=l[f++],d&&!i.call(s,u)||(n[u]=s[u]);return n}:o},"9f35":function(e,t,n){var r=n("8c18");(r="string"==typeof(r=r.__esModule?r.default:r)?[[e.i,r,""]]:r).locals&&(e.exports=r.locals);(0,n("5925").default)("68069466",r,!0,{sourceMap:!1,shadowMode:!1})},a079:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,r.default)(e),0===((t=(0,o.default)(t,i)).ignore_whitespace?e.trim():e).length};var r=a(n("d076")),o=a(n("6d97"));function a(e){return e&&e.__esModule?e:{default:e}}var i={ignore_whitespace:!1};e.exports=t.default,e.exports.default=t.default},a080:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){t=(0,o.default)(t,a);var n=e.split("@"),e=n.pop(),n=[n.join("@"),e];if(n[1]=n[1].toLowerCase(),"gmail.com"===n[1]||"googlemail.com"===n[1]){if(t.gmail_remove_subaddress&&(n[0]=n[0].split("+")[0]),t.gmail_remove_dots&&(n[0]=n[0].replace(/\.+/g,c)),!n[0].length)return!1;(t.all_lowercase||t.gmail_lowercase)&&(n[0]=n[0].toLowerCase()),n[1]=t.gmail_convert_googlemaildotcom?"gmail.com":n[1]}else if(0<=i.indexOf(n[1])){if(t.icloud_remove_subaddress&&(n[0]=n[0].split("+")[0]),!n[0].length)return!1;(t.all_lowercase||t.icloud_lowercase)&&(n[0]=n[0].toLowerCase())}else if(0<=u.indexOf(n[1])){if(t.outlookdotcom_remove_subaddress&&(n[0]=n[0].split("+")[0]),!n[0].length)return!1;(t.all_lowercase||t.outlookdotcom_lowercase)&&(n[0]=n[0].toLowerCase())}else if(0<=s.indexOf(n[1])){if(t.yahoo_remove_subaddress&&(e=n[0].split("-"),n[0]=1<e.length?e.slice(0,-1).join("-"):e[0]),!n[0].length)return!1;(t.all_lowercase||t.yahoo_lowercase)&&(n[0]=n[0].toLowerCase())}else 0<=l.indexOf(n[1])?((t.all_lowercase||t.yandex_lowercase)&&(n[0]=n[0].toLowerCase()),n[1]="yandex.ru"):t.all_lowercase&&(n[0]=n[0].toLowerCase());return n.join("@")};var r,o=(r=n("6d97"))&&r.__esModule?r:{default:r};var 
a={all_lowercase:!0,gmail_lowercase:!0,gmail_remove_dots:!0,gmail_remove_subaddress:!0,gmail_convert_googlemaildotcom:!0,outlookdotcom_lowercase:!0,outlookdotcom_remove_subaddress:!0,yahoo_lowercase:!0,yahoo_remove_subaddress:!0,yandex_lowercase:!0,icloud_lowercase:!0,icloud_remove_subaddress:!0},i=["icloud.com","me.com"],u=["hotmail.at","hotmail.be","hotmail.ca","hotmail.cl","hotmail.co.il","hotmail.co.nz","hotmail.co.th","hotmail.co.uk","hotmail.com","hotmail.com.ar","hotmail.com.au","hotmail.com.br","hotmail.com.gr","hotmail.com.mx","hotmail.com.pe","hotmail.com.tr","hotmail.com.vn","hotmail.cz","hotmail.de","hotmail.dk","hotmail.es","hotmail.fr","hotmail.hu","hotmail.id","hotmail.ie","hotmail.in","hotmail.it","hotmail.jp","hotmail.kr","hotmail.lv","hotmail.my","hotmail.ph","hotmail.pt","hotmail.sa","hotmail.sg","hotmail.sk","live.be","live.co.uk","live.com","live.com.ar","live.com.mx","live.de","live.es","live.eu","live.fr","live.it","live.nl","msn.com","outlook.at","outlook.be","outlook.cl","outlook.co.il","outlook.co.nz","outlook.co.th","outlook.com","outlook.com.ar","outlook.com.au","outlook.com.br","outlook.com.gr","outlook.com.pe","outlook.com.tr","outlook.com.vn","outlook.cz","outlook.de","outlook.dk","outlook.es","outlook.fr","outlook.hu","outlook.id","outlook.ie","outlook.in","outlook.it","outlook.jp","outlook.kr","outlook.lv","outlook.my","outlook.ph","outlook.pt","outlook.sa","outlook.sg","outlook.sk","passport.com"],s=["rocketmail.com","yahoo.ca","yahoo.co.uk","yahoo.com","yahoo.de","yahoo.fr","yahoo.in","yahoo.it","ymail.com"],l=["yandex.ru","yandex.ua","yandex.kz","yandex.com","yandex.by","ya.ru"];function c(e){return 1<e.length?e:""}e.exports=t.default,e.exports.default=t.default},a215:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,o.default)(e),t)return"1"===e||"true"===e;return"0"!==e&&"false"!==e&&""!==e};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},a334:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:String(new Date);(0,r.default)(e);t=(0,o.default)(t),e=(0,o.default)(e);return!!(e&&t&&e<t)};var r=a(n("d076")),o=a(n("9758"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},a450:function(e,t,n){var r=n("bb8b").f,o=Function.prototype,a=/^\s*function ([^ (]*)/;"name"in o||n("26df")&&r(o,"name",{configurable:!0,get:function(){try{return(""+this).match(a)[1]}catch(e){return""}}})},a4cf:function(e,t){e=e.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},a618:function(e,t){e.exports=function(e,t,n){var r=void 0===n;switch(t.length){case 0:return r?e():e.call(n);case 1:return r?e(t[0]):e.call(n,t[0]);case 2:return r?e(t[0],t[1]):e.call(n,t[0],t[1]);case 3:return r?e(t[0],t[1],t[2]):e.call(n,t[0],t[1],t[2]);case 4:return r?e(t[0],t[1],t[2],t[3]):e.call(n,t[0],t[1],t[2],t[3])}return e.apply(n,t)}},a64a:function(e,t,n){"use strict";n("2f06")},a77f:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),e===e.toUpperCase()};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},a86f:function(e,t,n){var r=n("9cff");e.exports=function(e){if(!r(e))throw TypeError(e+" is not an 
object!");return e}},ab3a:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,o.default)(e);try{var t=JSON.parse(e);return!!t&&"object"===a(t)}catch(e){}return!1};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};function a(e){return(a="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}e.exports=t.default,e.exports.default=t.default},ac67:function(e,t,n){var r=n("e99b"),s=n("e7c8"),l=n("3471"),c=n("285b"),f=n("1374");r(r.S,"Object",{getOwnPropertyDescriptors:function(e){for(var t,n,r=l(e),o=c.f,a=s(r),i={},u=0;a.length>u;)void 0!==(n=o(r,t=a[u++]))&&f(i,t,n);return i}})},addc:function(e,t,n){var r=n("4fd4"),o=n("8078"),a=n("3a0d")("IE_PROTO"),i=Object.prototype;e.exports=Object.getPrototypeOf||function(e){return e=o(e),r(e,a)?e[a]:"function"==typeof e.constructor&&e instanceof e.constructor?e.constructor.prototype:e instanceof Object?i:null}},ae75:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e,{min:0,max:65535})};var r,o=(r=n("c454"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},aeb8:function(e,t,n){n=n("0b34").navigator;e.exports=n&&n.userAgent||""},b067:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){var n;(0,o.default)(e),t="object"===a(t)?(n=t.min||0,t.max):(n=arguments[1],arguments[2]);e=encodeURI(e).split(/%..|./).length-1;return n<=e&&(void 0===t||e<=t)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};function a(e){return(a="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}e.exports=t.default,e.exports.default=t.default},b1d4:function(e,t,n){var a=n("a86f");e.exports=function(t,e,n,r){try{return r?e(a(n)[0],n[1]):e(n)}catch(e){var o=t.return;throw void 0!==o&&a(o.call(t)),e}}},b20f:function(e,t,n){var r=n("6ab3");(r="string"==typeof(r=r.__esModule?r.default:r)?[[e.i,r,""]]:r).locals&&(e.exports=r.locals);(0,n("5925").default)("1053eb3b",r,!0,{sourceMap:!1,shadowMode:!1})},b2be:function(e,t,n){var o=n("e99b"),a=n("76e3"),i=n("0926");e.exports=function(e,t){var n=(a.Object||{})[e]||Object[e],r={};r[e]=t(n),o(o.S+o.F*i(function(){n(1)}),"Object",r)}},b47f:function(e,t,n){"use strict";var r=n("e99b"),o=n("76e3"),a=n("0b34"),i=n("1b0b"),u=n("4836");r(r.P+r.R,"Promise",{finally:function(t){var n=i(this,o.Promise||a.Promise),e="function"==typeof t;return this.then(e?function(e){return u(n,t()).then(function(){return e})}:t,e?function(e){return u(n,t()).then(function(){throw e})}:t)}})},b953:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,r.default)(e),(0,o.default)(e)%parseInt(t,10)==0};var r=a(n("d076")),o=a(n("6d46"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},baa7:function(e,t,n){var r=n("76e3"),o=n("0b34"),a="__core-js_shared__",i=o[a]||(o[a]={});(e.exports=function(e,t){return i[e]||(i[e]=void 0!==t?t:{})})("versions",[]).push({version:r.version,mode:n("3d8a")?"pure":"global",copyright:"© 2020 Denis Pushkarev (zloirock.ru)"})},bac3:function(e,t,n){var 
r=n("bb8b").f,o=n("4fd4"),a=n("839a")("toStringTag");e.exports=function(e,t,n){e&&!o(e=n?e:e.prototype,a)&&r(e,a,{configurable:!0,value:t})}},bb8b:function(e,t,n){var r=n("a86f"),o=n("83d3"),a=n("5d10"),i=Object.defineProperty;t.f=n("26df")?Object.defineProperty:function(e,t,n){if(r(e),t=a(t,!0),r(n),o)try{return i(e,t,n)}catch(e){}if("get"in n||"set"in n)throw TypeError("Accessors not supported!");return"value"in n&&(e[t]=n.value),e}},bbcc:function(e,t,n){n=n("0b34").document;e.exports=n&&n.documentElement},bf02:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^[0-9A-F]+$/i;e.exports=t.default,e.exports.default=t.default},bf73:function(e,t,n){"use strict";var r=n("0353");n("e99b")({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},c28e:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:String(new Date);(0,r.default)(e);t=(0,o.default)(t),e=(0,o.default)(e);return!!(e&&t&&t<e)};var r=a(n("d076")),o=a(n("9758"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},c2a3:function(e,t,n){"use strict";n.r(t);var r=n("09f1"),o=n.n(r),r=n("dfc6"),r=n.n(r),o=new o.a({id:"icon-add",use:"icon-add-usage",viewBox:"0 0 1024 1024",content:'<symbol class="icon" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" id="icon-add"><defs><style type="text/css"></style></defs><path d="M512 1024a512 512 0 1 1 512-512 512.576 512.576 0 0 1-512 512z m0-960a448 448 0 1 0 448 448A448.512 448.512 0 0 0 512 64z m192 480h-160v160a32 32 0 0 1-64 0v-160h-160a32 32 0 0 1 0-64h160v-160a32 32 0 0 1 64 0v160h160a32 32 0 0 1 0 64z" p-id="9510" /></symbol>'});r.a.add(o);t.default=o},c454:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,i.default)(e);var n=(t=t||{}).hasOwnProperty("allow_leading_zeroes")&&!t.allow_leading_zeroes?u:s,r=!t.hasOwnProperty("min")||e>=t.min,o=!t.hasOwnProperty("max")||e<=t.max,a=!t.hasOwnProperty("lt")||e<t.lt,t=!t.hasOwnProperty("gt")||e>t.gt;return n.test(e)&&r&&o&&a&&t};var r,i=(r=n("d076"))&&r.__esModule?r:{default:r};var u=/^(?:[-+]?(?:0|[1-9][0-9]*))$/,s=/^[-+]?[0-9]+$/;e.exports=t.default,e.exports.default=t.default},c46f:function(e,t,n){"use strict";n("bf73");var s=n("84e8"),l=n("065d"),c=n("0926"),f=n("3ab0"),d=n("839a"),p=n("0353"),m=d("species"),v=!c(function(){var e=/./;return e.exec=function(){var e=[];return e.groups={a:"7"},e},"7"!=="".replace(e,"$<a>")}),g=function(){var e=/(?:)/,t=e.exec;e.exec=function(){return t.apply(this,arguments)};e="ab".split(e);return 2===e.length&&"a"===e[0]&&"b"===e[1]}();e.exports=function(n,e,t){var a,r,o=d(n),i=!c(function(){var e={};return e[o]=function(){return 7},7!=""[n](e)}),u=i?!c(function(){var e=!1,t=/a/;return t.exec=function(){return e=!0,null},"split"===n&&(t.constructor={},t.constructor[m]=function(){return t}),t[o](""),!e}):void 0;i&&u&&("replace"!==n||v)&&("split"!==n||g)||(a=/./[o],t=(u=t(f,o,""[n],function(e,t,n,r,o){return t.exec===p?i&&!o?{done:!0,value:a.call(t,n,r)}:{done:!0,value:e.call(n,t,r)}:{done:!1}}))[0],r=u[1],s(String.prototype,n,t),l(RegExp.prototype,o,2==e?function(e,t){return r.call(e,this,t)}:function(e){return r.call(e,this)}))}},c535:function(e,t,n){"use 
strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^[A-Z]{2}[0-9A-Z]{3}\d{2}\d{5}$/;e.exports=t.default,e.exports.default=t.default},c865:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),e.replace(/&amp;/g,"&").replace(/&quot;/g,'"').replace(/&#x27;/g,"'").replace(/&lt;/g,"<").replace(/&gt;/g,">").replace(/&#x2F;/g,"/").replace(/&#x5C;/g,"\\").replace(/&#96;/g,"`")};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},ca9d:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){(0,o.default)(e);for(var n=e.length-1;0<=n;n--)if(-1===t.indexOf(e[n]))return!1;return!0};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},cc7c:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,i.default)(e);var t=e.replace(/[- ]+/g,"");if(!u.test(t))return!1;for(var n,r,o=0,a=t.length-1;0<=a;a--)n=t.substring(a,a+1),n=parseInt(n,10),o+=r&&10<=(n*=2)?n%10+1:n,r=!r;return!(o%10!=0||!t)};var r,i=(r=n("d076"))&&r.__esModule?r:{default:r};var u=/^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|6[27][0-9]{14})$/;e.exports=t.default,e.exports.default=t.default},cc96:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,o.default)(e);var t=e.length;if(!t||t%4!=0||a.test(e))return!1;var n=e.indexOf("=");return-1===n||n===t-1||n===t-2&&"="===e[t-1]};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/[^A-Z0-9+\/=]/i;e.exports=t.default,e.exports.default=t.default},ce99:function(e,t){e=e.exports={version:"2.6.12"};"number"==typeof __e&&(__e=e)},cea2:function(e,t){var n={}.toString;e.exports=function(e){return n.call(e).slice(8,-1)}},cebe:function(e,t){e.exports=i},d04c:function(e,t){e.exports={title:"DoraCMS Admin",fixedHeader:!0,sidebarLogo:!0,server_api:"",token_key:"admin_doracms",admin_token_key:"admin_doracmsapi",admin_base_path:"/admin",host_project_path:"/Users/dora/Documents/dora/coding.net/egg-cms",qiniuStaticPath:"cms/plugins/static/sadmin/"}},d076:function(e,t,n){"use strict";function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t;if(!("string"==typeof e||e instanceof String))throw t=null===e?"null":"object"===(t=r(e))&&e.constructor&&e.constructor.hasOwnProperty("name")?e.constructor.name:"a ".concat(t),new TypeError("Expected string but received ".concat(t,"."))},e.exports=t.default,e.exports.default=t.default},d24d:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){{if((0,o.default)(e),t in s)return s[t].test(e);if("any"===t){for(var n in s)if(s.hasOwnProperty(n))if(s[n].test(e))return!0;return!1}}throw new Error("Invalid locale '".concat(t,"'"))},t.locales=void 0;var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var 
a=/^\d{4}$/,i=/^\d{5}$/,u=/^\d{6}$/,s={AD:/^AD\d{3}$/,AT:a,AU:a,BE:a,BG:a,BR:/^\d{5}-\d{3}$/,CA:/^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJ-NPRSTV-Z][\s\-]?\d[ABCEGHJ-NPRSTV-Z]\d$/i,CH:a,CZ:/^\d{3}\s?\d{2}$/,DE:i,DK:a,DZ:i,EE:i,ES:i,FI:i,FR:/^\d{2}\s?\d{3}$/,GB:/^(gir\s?0aa|[a-z]{1,2}\d[\da-z]?\s?(\d[a-z]{2})?)$/i,GR:/^\d{3}\s?\d{2}$/,HR:/^([1-5]\d{4}$)/,HU:a,ID:i,IL:i,IN:u,IS:/^\d{3}$/,IT:i,JP:/^\d{3}\-\d{4}$/,KE:i,LI:/^(948[5-9]|949[0-7])$/,LT:/^LT\-\d{5}$/,LU:a,LV:/^LV\-\d{4}$/,MX:i,MT:/^[A-Za-z]{3}\s{0,1}\d{4}$/,NL:/^\d{4}\s?[a-z]{2}$/i,NO:a,NZ:a,PL:/^\d{2}\-\d{3}$/,PR:/^00[679]\d{2}([ -]\d{4})?$/,PT:/^\d{4}\-\d{3}?$/,RO:u,RU:u,SA:i,SE:/^\d{3}\s?\d{2}$/,SI:a,SK:/^\d{3}\s?\d{2}$/,TN:a,TW:/^\d{3}(\d{2})?$/,UA:i,US:/^\d{5}(-\d{4})?$/,ZA:a,ZM:i},i=Object.keys(s);t.locales=i},d25d:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/[\uD800-\uDBFF][\uDC00-\uDFFF]/;e.exports=t.default,e.exports.default=t.default},d445:function(e,t,n){var r=n("cea2"),o=n("839a")("toStringTag"),a="Arguments"==r(function(){return arguments}());e.exports=function(e){var t;return void 0===e?"Undefined":null===e?"Null":"string"==typeof(e=function(e,t){try{return e[t]}catch(e){}}(t=Object(e),o))?e:a?r(t):"Object"==(e=r(t))&&"function"==typeof t.callee?"Arguments":e}},d48a:function(e,t){e.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},d4c9:function(e,t,n){"use strict";var o=n("3250");function r(e){var n,r;this.promise=new e(function(e,t){if(void 0!==n||void 0!==r)throw TypeError("Bad Promise constructor");n=e,r=t}),this.resolve=o(n),this.reject=o(r)}e.exports.f=function(e){return new r(e)}},d592:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,a.default)(e);var t=e.split(",");if(t.length<2)return!1;var n=t.shift().trim().split(";"),e=n.shift();if("data:"!==e.substr(0,5))return!1;e=e.substr(5);if(""!==e&&!i.test(e))return!1;for(var r=0;r<n.length;r++)if((r!==n.length-1||"base64"!==n[r].toLowerCase())&&!u.test(n[r]))return!1;for(var o=0;o<t.length;o++)if(!s.test(t[o]))return!1;return!0};var r,a=(r=n("d076"))&&r.__esModule?r:{default:r};var i=/^[a-z]+\/[a-z0-9\-\+]+$/i,u=/^[a-z\-]+=[a-z0-9\-]+$/i,s=/^[a-z0-9!\$&'\(\)\*\+,;=\-\._~:@\/\?%\s]*$/i;e.exports=t.default,e.exports.default=t.default},d8b3:function(e,t){var n=0,r=Math.random();e.exports=function(e){return"Symbol(".concat(void 0===e?"":e,")_",(++n+r).toString(36))}},dce3:function(e,t){var n={}.hasOwnProperty;e.exports=function(e,t){return n.call(e,t)}},dcea:function(e,t,n){var r=n("953d"),o=n("839a")("iterator"),a=Array.prototype;e.exports=function(e){return void 0!==e&&(r.Array===e||a[o]===e)}},dd23:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){if((0,o.default)(e),!e.includes(","))return!1;e=e.split(",");return a.test(e[0])&&i.test(e[1])};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^\(?[+-]?(90(\.0+)?|[1-8]?\d(\.\d+)?)$/,i=/^\s?[+-]?(180(\.0+)?|1[0-7]\d(\.\d+)?|\d{1,2}(\.\d+)?)\)?$/;e.exports=t.default,e.exports.default=t.default},dfae:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){var t=1<arguments.length&&void 0!==arguments[1]?arguments[1]:"all";(0,o.default)(e);t=a[t];return t&&t.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var 
a={3:/^[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i,4:/^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i,5:/^[0-9A-F]{8}-[0-9A-F]{4}-5[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i,all:/^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i};e.exports=t.default,e.exports.default=t.default},dfc6:function(e,t,n){!function(K){e.exports=function(){"use strict";var t=typeof window!=="undefined"?window:typeof K!=="undefined"?K:typeof self!=="undefined"?self:{};function e(e,t){return t={exports:{}},e(t,t.exports),t.exports}var s=e(function(n,e){(function(e,t){if(false);else n.exports=t()})(t,function(){function a(e){var t=e&&typeof e==="object";return t&&Object.prototype.toString.call(e)!=="[object RegExp]"&&Object.prototype.toString.call(e)!=="[object Date]"}function r(e){return Array.isArray(e)?[]:{}}function i(e,t){var n=t&&t.clone===true;return n&&a(e)?l(r(e),e,t):e}function u(n,e,r){var o=n.slice();e.forEach(function(e,t){if(typeof o[t]==="undefined")o[t]=i(e,r);else if(a(e))o[t]=l(n[t],e,r);else if(n.indexOf(e)===-1)o.push(i(e,r))});return o}function s(t,n,r){var o={};if(a(t))Object.keys(t).forEach(function(e){o[e]=i(t[e],r)});Object.keys(n).forEach(function(e){if(!a(n[e])||!t[e])o[e]=i(n[e],r);else o[e]=l(t[e],n[e],r)});return o}function l(e,t,n){var r=Array.isArray(t);var o=n||{arrayMerge:u};var a=o.arrayMerge||u;if(r)return Array.isArray(e)?a(e,t,n):i(t,n);else return s(e,t,n)}l.all=function e(t,n){if(!Array.isArray(t)||t.length<2)throw new Error("first argument should be an array with at least two elements");return t.reduce(function(e,t){return l(e,t,n)})};return l})});function l(r){r=r||Object.create(null);return{on:function e(t,n){(r[t]||(r[t]=[])).push(n)},off:function e(t,n){if(r[t])r[t].splice(r[t].indexOf(n)>>>0,1)},emit:function e(t,n){(r[t]||[]).map(function(e){e(n)});(r["*"]||[]).map(function(e){e(t,n)})}}}var n=e(function(e,t){var n={svg:{name:"xmlns",uri:"http://www.w3.org/2000/svg"},xlink:{name:"xmlns:xlink",uri:"http://www.w3.org/1999/xlink"}};t.default=n;e.exports=t.default}),o=function(n){return Object.keys(n).map(function(e){var t=n[e].toString().replace(/"/g,"&quot;");return e+'="'+t+'"'}).join(" ")},r=n.svg,a=n.xlink,i={};i[r.name]=r.uri,i[a.name]=a.uri;var u=function(e,t){if(e===void 0)e="";var n=s(i,t||{});var r=o(n);return"<svg "+r+">"+e+"</svg>"},c=n.svg,f=n.xlink,d={attrs:((p={style:["position: absolute","width: 0","height: 0"].join("; ")})[c.name]=c.uri,p[f.name]=f.uri,p)},p,m=function e(t){this.config=s(d,t||{});this.symbols=[]};m.prototype.add=function e(t){var n=this;var r=n.symbols;var o=this.find(t.id);if(o){r[r.indexOf(o)]=t;return false}r.push(t);return true},m.prototype.remove=function e(t){var n=this;var r=n.symbols;var o=this.find(t);if(o){r.splice(r.indexOf(o),1);o.destroy();return true}return false},m.prototype.find=function e(t){return this.symbols.filter(function(e){return e.id===t})[0]||null},m.prototype.has=function e(t){return this.find(t)!==null},m.prototype.stringify=function e(){var t=this.config;var n=t.attrs;var r=this.symbols.map(function(e){return e.stringify()}).join("");return u(r,n)},m.prototype.toString=function e(){return this.stringify()},m.prototype.destroy=function e(){this.symbols.forEach(function(e){return e.destroy()})};var v=function e(t){var n=t.id;var r=t.viewBox;var o=t.content;this.id=n;this.viewBox=r;this.content=o};v.prototype.stringify=function e(){return this.content},v.prototype.toString=function e(){return this.stringify()},v.prototype.destroy=function e(){var 
t=this;["id","viewBox","content"].forEach(function(e){return delete t[e]})};var g=function(e){var t=!!document.importNode;var n=(new DOMParser).parseFromString(e,"image/svg+xml").documentElement;if(t)return document.importNode(n,true);return n},h=function(t){function n(){t.apply(this,arguments)}if(t)n.__proto__=t;n.prototype=Object.create(t&&t.prototype);n.prototype.constructor=n;var e={isMounted:{}};e.isMounted.get=function(){return!!this.node};n.createFromExistingNode=function e(t){return new n({id:t.getAttribute("id"),viewBox:t.getAttribute("viewBox"),content:t.outerHTML})};n.prototype.destroy=function e(){if(this.isMounted)this.unmount();t.prototype.destroy.call(this)};n.prototype.mount=function e(t){if(this.isMounted)return this.node;var n=typeof t==="string"?document.querySelector(t):t;var r=this.render();this.node=r;n.appendChild(r);return r};n.prototype.render=function e(){var t=this.stringify();return g(u(t)).childNodes[0]};n.prototype.unmount=function e(){this.node.parentNode.removeChild(this.node)};Object.defineProperties(n.prototype,e);return n}(v),b={autoConfigure:true,mountTo:"body",syncUrlsWithBaseTag:false,listenLocationChangeEvent:true,locationChangeEvent:"locationChange",locationChangeAngularEmitter:false,usagesToUpdate:"use[*|href]",moveGradientsOutsideSymbol:false},y=function(e){return Array.prototype.slice.call(e,0)},_=navigator.userAgent,x={isChrome:/chrome/i.test(_),isFirefox:/firefox/i.test(_),isIE:/msie/i.test(_)||/trident/i.test(_),isEdge:/edge/i.test(_)},M=function(e,t){var n=document.createEvent("CustomEvent");n.initCustomEvent(e,false,false,t);window.dispatchEvent(n)},w=function(e){var t=[];y(e.querySelectorAll("style")).forEach(function(e){e.textContent+="";t.push(e)});return t},O=function(e){return(e||window.location.href).split("#")[0]},S=function(r){angular.module("ng").run(["$rootScope",function(e){e.$on("$locationChangeSuccess",function(e,t,n){M(r,{oldUrl:n,newUrl:t})})}])},A="linearGradient, radialGradient, pattern",E=function(e,n){if(n===void 0)n=A;y(e.querySelectorAll("symbol")).forEach(function(t){y(t.querySelectorAll(n)).forEach(function(e){t.parentNode.insertBefore(e,t)})});return e};function P(e,o){var t=y(e).reduce(function(e,t){if(!t.attributes)return e;var n=y(t.attributes);var r=o?n.filter(o):n;return e.concat(r)},[]);return t}var $=n.xlink.uri,T="xlink:href",C=/[{}|\\\^\[\]`"<>]/g;function j(e){return e.replace(C,function(e){return"%"+e[0].charCodeAt(0).toString(16).toUpperCase()})}function N(e,r,o){y(e).forEach(function(e){var t=e.getAttribute(T);if(t&&t.indexOf(r)===0){var n=t.replace(r,o);e.setAttributeNS($,T,n)}});return e}var k=["clipPath","colorProfile","src","cursor","fill","filter","marker","markerStart","markerMid","markerEnd","mask","stroke","style"],F=k.map(function(e){return"["+e+"]"}).join(","),I=function(e,t,n,r){var o=j(n);var a=j(r);var i=e.querySelectorAll(F);var u=P(i,function(e){var t=e.localName;var n=e.value;return k.indexOf(t)!==-1&&n.indexOf("url("+o)!==-1});u.forEach(function(e){return e.value=e.value.replace(o,a)});N(t,o,a)},R={MOUNT:"mount",SYMBOL_MOUNT:"symbol_mount"},L=function(u){function e(e){var t=this;if(e===void 0)e={};u.call(this,s(b,e));var n=l();this._emitter=n;this.node=null;var r=this;var o=r.config;if(o.autoConfigure)this._autoConfigure(e);if(o.syncUrlsWithBaseTag){var a=document.getElementsByTagName("base")[0].getAttribute("href");n.on(R.MOUNT,function(){return t.updateUrls("#",a)})}var 
i=this._handleLocationChange.bind(this);this._handleLocationChange=i;if(o.listenLocationChangeEvent)window.addEventListener(o.locationChangeEvent,i);if(o.locationChangeAngularEmitter)S(o.locationChangeEvent);n.on(R.MOUNT,function(e){if(o.moveGradientsOutsideSymbol)E(e)});n.on(R.SYMBOL_MOUNT,function(e){if(o.moveGradientsOutsideSymbol)E(e.parentNode);if(x.isIE||x.isEdge)w(e)})}if(u)e.__proto__=u;e.prototype=Object.create(u&&u.prototype);e.prototype.constructor=e;var t={isMounted:{}};t.isMounted.get=function(){return!!this.node};e.prototype._autoConfigure=function e(t){var n=this;var r=n.config;if(typeof t.syncUrlsWithBaseTag==="undefined")r.syncUrlsWithBaseTag=typeof document.getElementsByTagName("base")[0]!=="undefined";if(typeof t.locationChangeAngularEmitter==="undefined")r.locationChangeAngularEmitter="angular"in window;if(typeof t.moveGradientsOutsideSymbol==="undefined")r.moveGradientsOutsideSymbol=x.isFirefox};e.prototype._handleLocationChange=function e(t){var n=t.detail;var r=n.oldUrl;var o=n.newUrl;this.updateUrls(r,o)};e.prototype.add=function e(t){var n=this;var r=u.prototype.add.call(this,t);if(this.isMounted&&r){t.mount(n.node);this._emitter.emit(R.SYMBOL_MOUNT,t.node)}return r};e.prototype.attach=function e(t){var n=this;var r=this;if(r.isMounted)return r.node;var o=typeof t==="string"?document.querySelector(t):t;r.node=o;this.symbols.forEach(function(e){e.mount(r.node);n._emitter.emit(R.SYMBOL_MOUNT,e.node)});y(o.querySelectorAll("symbol")).forEach(function(e){var t=h.createFromExistingNode(e);t.node=e;r.add(t)});this._emitter.emit(R.MOUNT,o);return o};e.prototype.destroy=function e(){var t=this;var n=t.config;var r=t.symbols;var o=t._emitter;r.forEach(function(e){return e.destroy()});o.off("*");window.removeEventListener(n.locationChangeEvent,this._handleLocationChange);if(this.isMounted)this.unmount()};e.prototype.mount=function e(t,n){if(t===void 0)t=this.config.mountTo;if(n===void 0)n=false;var r=this;if(r.isMounted)return r.node;var o=typeof t==="string"?document.querySelector(t):t;var a=r.render();this.node=a;if(n&&o.childNodes[0])o.insertBefore(a,o.childNodes[0]);else o.appendChild(a);this._emitter.emit(R.MOUNT,a);return a};e.prototype.render=function e(){return g(this.stringify())};e.prototype.unmount=function e(){this.node.parentNode.removeChild(this.node)};e.prototype.updateUrls=function e(t,n){if(!this.isMounted)return false;var r=document.querySelectorAll(this.config.usagesToUpdate);I(this.node,r,O(t)+"#",O(n)+"#");return true};Object.defineProperties(e.prototype,t);return e}(m),B=e(function(n){ /*! * domready (c) Dustin Diaz 2014 - License MIT */ !function(e,t){n.exports=t()}("domready",function(){var t=[],e,n=document,r=n.documentElement.doScroll,o="DOMContentLoaded",a=(r?/^loaded|^c/:/^loaded|^i|^c/).test(n.readyState);if(!a)n.addEventListener(o,e=function(){n.removeEventListener(o,e);a=1;while(e=t.shift())e()});return function(e){a?setTimeout(e,0):t.push(e)}})}),U="__SVG_SPRITE_NODE__",D="__SVG_SPRITE__",G,z;if(!!window[D])z=window[D];else{z=new L({attrs:{id:U}});window[D]=z}var Z=function(){var e=document.getElementById(U);if(e)z.attach(e);else z.mount(document.body,true)},H;if(document.body)Z();else B(Z);return z}()}.call(this,n("2409"))},e04f:function(e,t,n){var r,o; /*! 
* JavaScript Cookie v2.2.1 * https://github.com/js-cookie/js-cookie * * Copyright 2006, 2015 Klaus Hartl & Fagner Brack * Released under the MIT license */void 0===(r="function"==typeof(r=o=function(){function u(){for(var e=0,t={};e<arguments.length;e++){var n,r=arguments[e];for(n in r)t[n]=r[n]}return t}function l(e){return e.replace(/(%[0-9A-Z]{2})+/g,decodeURIComponent)}return function e(s){function i(){}function n(e,t,n){if("undefined"!=typeof document){"number"==typeof(n=u({path:"/"},i.defaults,n)).expires&&(n.expires=new Date(+new Date+864e5*n.expires)),n.expires=n.expires?n.expires.toUTCString():"";try{var r=JSON.stringify(t);/^[\{\[]/.test(r)&&(t=r)}catch(e){}t=s.write?s.write(t,e):encodeURIComponent(String(t)).replace(/%(23|24|26|2B|3A|3C|3E|3D|2F|3F|40|5B|5D|5E|60|7B|7D|7C)/g,decodeURIComponent),e=encodeURIComponent(String(e)).replace(/%(23|24|26|2B|5E|60|7C)/g,decodeURIComponent).replace(/[\(\)]/g,escape);var o,a="";for(o in n)n[o]&&(a+="; "+o,!0!==n[o]&&(a+="="+n[o].split(";")[0]));return document.cookie=e+"="+t+a}}function t(e,t){if("undefined"!=typeof document){for(var n={},r=document.cookie?document.cookie.split("; "):[],o=0;o<r.length;o++){var a=r[o].split("="),i=a.slice(1).join("=");t||'"'!==i.charAt(0)||(i=i.slice(1,-1));try{var u=l(a[0]),i=(s.read||s)(i,u)||l(i);if(t)try{i=JSON.parse(i)}catch(e){}if(n[u]=i,e===u)break}catch(e){}}return e?n[e]:n}}return i.set=n,i.get=function(e){return t(e,!1)},i.getJSON=function(e){return t(e,!0)},i.remove=function(e,t){n(e,"",u(t,{expires:-1}))},i.defaults={},i.withConverter=e,i}(function(){})})?r.call(t,n,t,e):r)||(e.exports=r),e.exports=o()},e247:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),0<=["true","false","1","0"].indexOf(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},e2a1:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){if((0,o.default)(e),(t=(0,r.default)(t,s)).locale in i.decimal)return!(0,a.default)(l,e.replace(/ /g,""))&&function(e){return new RegExp("^[-+]?([0-9]+)?(\\".concat(i.decimal[e.locale],"[0-9]{").concat(e.decimal_digits,"})").concat(e.force_decimal?"":"?","$"))}(t).test(e);throw new Error("Invalid locale '".concat(t.locale,"'"))};var r=u(n("6d97")),o=u(n("d076")),a=u(n("eec2")),i=n("25dc");function u(e){return e&&e.__esModule?e:{default:e}}var s={force_decimal:!1,decimal_digits:"1,",locale:"en-US"},l=["","-","+"];e.exports=t.default,e.exports.default=t.default},e3bb:function(e,t,n){var r=n("d445"),o=n("839a")("iterator"),a=n("953d");e.exports=n("76e3").getIteratorMethod=function(e){if(null!=e)return e[o]||e["@@iterator"]||a[r(e)]}},e654:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,o.default)(e),e===t};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};e.exports=t.default,e.exports.default=t.default},e74f:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){{if((0,o.default)(e),t in a)return a[t](e);if("any"===t){for(var n in a)if(a.hasOwnProperty(n))if((0,a[n])(e))return!0;return!1}}throw new Error("Invalid locale '".concat(t,"'"))};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a={ES:function(e){(0,o.default)(e);var t={X:0,Y:1,Z:2},n=e.trim().toUpperCase();if(!/^[0-9X-Z][0-9]{7}[TRWAGMYFPDXBNJZSQVHLCKE]$/.test(n))return!1;e=n.slice(0,-1).replace(/[X,Y,Z]/g,function(e){return t[e]});return 
n.endsWith(["T","R","W","A","G","M","Y","F","P","D","X","B","N","J","Z","S","Q","V","H","L","C","K","E"][e%23])},"he-IL":function(e){e=e.trim();if(!/^\d{9}$/.test(e))return!1;for(var t,n=e,r=0,o=0;o<n.length;o++)r+=9<(t=Number(n[o])*(o%2+1))?t-9:t;return r%10==0},"zh-TW":function(e){var r={A:10,B:11,C:12,D:13,E:14,F:15,G:16,H:17,I:34,J:18,K:19,L:20,M:21,N:22,O:35,P:23,Q:24,R:25,S:26,T:27,U:28,V:29,W:32,X:30,Y:31,Z:33},e=e.trim().toUpperCase();return!!/^[A-Z][0-9]{9}$/.test(e)&&Array.from(e).reduce(function(e,t,n){if(0!==n)return 9===n?(10-e%10-Number(t))%10==0:e+Number(t)*(9-n);t=r[t];return t%10*9+Math.floor(t/10)},0)}};e.exports=t.default,e.exports.default=t.default},e7c8:function(e,t,n){var r=n("21d9"),o=n("0c29"),a=n("a86f"),n=n("0b34").Reflect;e.exports=n&&n.ownKeys||function(e){var t=r.f(a(e)),n=o.f;return n?t.concat(n(e)):t}},e7e0:function(e,t,n){var r=n("0677"),o=n("a4cf").document,a=r(o)&&r(o.createElement);e.exports=function(e){return a?o.createElement(e):{}}},e8d7:function(e,t,n){var r=n("9cff"),o=n("0b34").document,a=r(o)&&r(o.createElement);e.exports=function(e){return a?o.createElement(e):{}}},e99b:function(e,t,n){var m=n("0b34"),v=n("76e3"),g=n("065d"),h=n("84e8"),b=n("1e4d"),y="prototype",_=function(e,t,n){var r,o,a,i=e&_.F,u=e&_.G,s=e&_.S,l=e&_.P,c=e&_.B,f=u?m:s?m[t]||(m[t]={}):(m[t]||{})[y],d=u?v:v[t]||(v[t]={}),p=d[y]||(d[y]={});for(r in n=u?t:n)o=((a=!i&&f&&void 0!==f[r])?f:n)[r],a=c&&a?b(o,m):l&&"function"==typeof o?b(Function.call,o):o,f&&h(f,r,o,e&_.U),d[r]!=o&&g(d,r,a),l&&p[r]!=o&&(p[r]=o)};m.core=v,_.F=1,_.G=2,_.S=4,_.P=8,_.B=16,_.W=32,_.U=64,_.R=128,e.exports=_},e9a8:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^([A-Za-z0-9\-_~+\/]+[=]{0,2})\.([A-Za-z0-9\-_~+\/]+[=]{0,2})(?:\.([A-Za-z0-9\-_~+\/]+[=]{0,2}))?$/;e.exports=t.default,e.exports.default=t.default},edec:function(e,t,n){function r(){var e,t=+this;h.hasOwnProperty(t)&&(e=h[t],delete h[t],e())}function o(e){r.call(e.data)}var a,i=n("1e4d"),u=n("a618"),s=n("bbcc"),l=n("e8d7"),c=n("0b34"),f=c.process,d=c.setImmediate,p=c.clearImmediate,m=c.MessageChannel,v=c.Dispatch,g=0,h={},b="onreadystatechange";d&&p||(d=function(e){for(var t=[],n=1;n<arguments.length;)t.push(arguments[n++]);return h[++g]=function(){u("function"==typeof e?e:Function(e),t)},a(g),g},p=function(e){delete h[e]},"process"==n("cea2")(f)?a=function(e){f.nextTick(i(r,e,1))}:v&&v.now?a=function(e){v.now(i(r,e,1))}:m?(m=(n=new m).port2,n.port1.onmessage=o,a=i(m.postMessage,m,1)):c.addEventListener&&"function"==typeof postMessage&&!c.importScripts?(a=function(e){c.postMessage(e+"","*")},c.addEventListener("message",o,!1)):a=b in l("script")?function(e){s.appendChild(l("script"))[b]=function(){s.removeChild(this),r.call(e)}}:function(e){setTimeout(i(r,e,1),0)}),e.exports={set:d,clear:p}},ee68:function(e,t,n){var r=n("7c2b");r(r.S+r.F*!n("5e9e"),"Object",{defineProperty:n("597a").f})},eec2:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;function r(e,t){return e.some(function(e){return t===e})}t.default=r,e.exports=t.default,e.exports.default=t.default},f03a:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e.trim())};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var 
a=/^magnet:\?xt=urn:[a-z0-9]+:[a-z0-9]{32,40}&dn=.+&tr=.+$/i;e.exports=t.default,e.exports.default=t.default},f08d:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.test(e)||i.test(e)||u.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r};var a=/^(application|audio|font|image|message|model|multipart|text|video)\/[a-zA-Z0-9\.\-\+]{1,100}$/i,i=/^text\/[a-zA-Z0-9\.\-\+]{1,100};\s?charset=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?$/i,u=/^multipart\/[a-zA-Z0-9\.\-\+]{1,100}(;\s?(boundary|charset)=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?){0,2}$/i;e.exports=t.default,e.exports.default=t.default},f263:function(e,t,n){n("ee68");var r=n("ce99").Object;e.exports=function(e,t,n){return r.defineProperty(e,t,n)}},f417:function(e,t,n){"use strict";var r=n("d445"),o=RegExp.prototype.exec;e.exports=function(e,t){var n=e.exec;if("function"==typeof n){n=n.call(e,t);if("object"!=typeof n)throw new TypeError("RegExp exec method returned something other than an Object or null");return n}if("RegExp"!==r(e))throw new TypeError("RegExp#exec called on incompatible receiver");return o.call(e,t)}},f476:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){(0,r.default)(e);e=e.split("/");if(2!==e.length)return!1;if(!i.test(e[1]))return!1;if(1<e[1].length&&e[1].startsWith("0"))return!1;return(0,o.default)(e[0],4)&&e[1]<=32&&0<=e[1]};var r=a(n("d076")),o=a(n("69e5"));function a(e){return e&&e.__esModule?e:{default:e}}var i=/^\d{1,2}$/;e.exports=t.default,e.exports.default=t.default},f5f8:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t){return(0,r.default)((0,o.default)(e,t),t)};var r=a(n("11d8")),o=a(n("1685"));function a(e){return e&&e.__esModule?e:{default:e}}e.exports=t.default,e.exports.default=t.default},f63a:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,r.default)(e),(0,o.default)(i,e.toUpperCase())};var r=a(n("d076")),o=a(n("eec2"));function a(e){return e&&e.__esModule?e:{default:e}}var 
i=["AFG","ALA","ALB","DZA","ASM","AND","AGO","AIA","ATA","ATG","ARG","ARM","ABW","AUS","AUT","AZE","BHS","BHR","BGD","BRB","BLR","BEL","BLZ","BEN","BMU","BTN","BOL","BES","BIH","BWA","BVT","BRA","IOT","BRN","BGR","BFA","BDI","KHM","CMR","CAN","CPV","CYM","CAF","TCD","CHL","CHN","CXR","CCK","COL","COM","COG","COD","COK","CRI","CIV","HRV","CUB","CUW","CYP","CZE","DNK","DJI","DMA","DOM","ECU","EGY","SLV","GNQ","ERI","EST","ETH","FLK","FRO","FJI","FIN","FRA","GUF","PYF","ATF","GAB","GMB","GEO","DEU","GHA","GIB","GRC","GRL","GRD","GLP","GUM","GTM","GGY","GIN","GNB","GUY","HTI","HMD","VAT","HND","HKG","HUN","ISL","IND","IDN","IRN","IRQ","IRL","IMN","ISR","ITA","JAM","JPN","JEY","JOR","KAZ","KEN","KIR","PRK","KOR","KWT","KGZ","LAO","LVA","LBN","LSO","LBR","LBY","LIE","LTU","LUX","MAC","MKD","MDG","MWI","MYS","MDV","MLI","MLT","MHL","MTQ","MRT","MUS","MYT","MEX","FSM","MDA","MCO","MNG","MNE","MSR","MAR","MOZ","MMR","NAM","NRU","NPL","NLD","NCL","NZL","NIC","NER","NGA","NIU","NFK","MNP","NOR","OMN","PAK","PLW","PSE","PAN","PNG","PRY","PER","PHL","PCN","POL","PRT","PRI","QAT","REU","ROU","RUS","RWA","BLM","SHN","KNA","LCA","MAF","SPM","VCT","WSM","SMR","STP","SAU","SEN","SRB","SYC","SLE","SGP","SXM","SVK","SVN","SLB","SOM","ZAF","SGS","SSD","ESP","LKA","SDN","SUR","SJM","SWZ","SWE","CHE","SYR","TWN","TJK","TZA","THA","TLS","TGO","TKL","TON","TTO","TUN","TUR","TKM","TCA","TUV","UGA","UKR","ARE","GBR","USA","UMI","URY","UZB","VUT","VEN","VNM","VGB","VIR","WLF","ESH","YEM","ZMB","ZWE"];e.exports=t.default,e.exports.default=t.default},f7bd:function(e,t,n){e.exports=n("f263")},f94c:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return(0,o.default)(e),a.fullWidth.test(e)&&i.halfWidth.test(e)};var r,o=(r=n("d076"))&&r.__esModule?r:{default:r},a=n("417b"),i=n("4ef4");e.exports=t.default,e.exports.default=t.default},f966:function(e,t,n){"use strict";var r=n("0b34"),o=n("bb8b"),a=n("26df"),i=n("839a")("species");e.exports=function(e){e=r[e];a&&e&&!e[i]&&o.f(e,i,{configurable:!0,get:function(){return this}})}},fad4:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var r=Me(n("9758")),o=Me(n("6d46")),a=Me(n("2a5d")),i=Me(n("a215")),u=Me(n("e654")),s=Me(n("0259")),l=Me(n("40e7")),c=Me(n("212a")),f=Me(n("2743")),d=Me(n("4760")),p=Me(n("69e5")),m=Me(n("f476")),v=Me(n("7771")),g=Me(n("e247")),h=xe(n("497f")),b=xe(n("473c")),y=Me(n("5114")),_=Me(n("ae75")),x=Me(n("5131")),M=Me(n("a77f")),w=Me(n("7bb0")),O=Me(n("417b")),S=Me(n("4ef4")),A=Me(n("f94c")),E=Me(n("6894")),P=Me(n("d25d")),$=Me(n("c454")),T=xe(n("41ae")),C=Me(n("e2a1")),j=Me(n("bf02")),N=Me(n("b953")),k=Me(n("3410")),F=Me(n("c535")),I=Me(n("3074")),R=Me(n("6698")),L=Me(n("e9a8")),B=Me(n("ab3a")),U=Me(n("a079")),D=Me(n("8569")),G=Me(n("b067")),z=Me(n("dfae")),Z=Me(n("761c")),H=Me(n("c28e")),K=Me(n("a334")),V=Me(n("7a47")),q=Me(n("cc7c")),W=Me(n("e74f")),Y=Me(n("6bac")),J=Me(n("21c4")),X=Me(n("9dba")),Q=xe(n("8598")),ee=Me(n("13d8")),te=Me(n("1ff3")),ne=Me(n("06e2")),re=Me(n("5d25")),oe=Me(n("f63a")),ae=Me(n("54ac")),ie=Me(n("cc96")),ue=Me(n("d592")),se=Me(n("f03a")),le=Me(n("f08d")),ce=Me(n("dd23")),fe=xe(n("d24d")),de=Me(n("1685")),pe=Me(n("11d8")),me=Me(n("f5f8")),ve=Me(n("0cad")),ge=Me(n("c865")),he=Me(n("1103")),be=Me(n("81e7")),ye=Me(n("4b34")),_e=Me(n("ca9d")),n=Me(n("a080"));function xe(e){if(e&&e.__esModule)return e;var t,n={};if(null!=e)for(var r in 
e)Object.prototype.hasOwnProperty.call(e,r)&&((t=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,r):{}).get||t.set?Object.defineProperty(n,r,t):n[r]=e[r]);return n.default=e,n}function Me(e){return e&&e.__esModule?e:{default:e}}n={version:"11.1.0",toDate:r.default,toFloat:o.default,toInt:a.default,toBoolean:i.default,equals:u.default,contains:s.default,matches:l.default,isEmail:c.default,isURL:f.default,isMACAddress:d.default,isIP:p.default,isIPRange:m.default,isFQDN:v.default,isBoolean:g.default,isAlpha:h.default,isAlphaLocales:h.locales,isAlphanumeric:b.default,isAlphanumericLocales:b.locales,isNumeric:y.default,isPort:_.default,isLowercase:x.default,isUppercase:M.default,isAscii:w.default,isFullWidth:O.default,isHalfWidth:S.default,isVariableWidth:A.default,isMultibyte:E.default,isSurrogatePair:P.default,isInt:$.default,isFloat:T.default,isFloatLocales:T.locales,isDecimal:C.default,isHexadecimal:j.default,isDivisibleBy:N.default,isHexColor:k.default,isISRC:F.default,isMD5:I.default,isHash:R.default,isJWT:L.default,isJSON:B.default,isEmpty:U.default,isLength:D.default,isByteLength:G.default,isUUID:z.default,isMongoId:Z.default,isAfter:H.default,isBefore:K.default,isIn:V.default,isCreditCard:q.default,isIdentityCard:W.default,isISIN:Y.default,isISBN:J.default,isISSN:X.default,isMobilePhone:Q.default,isMobilePhoneLocales:Q.locales,isPostalCode:fe.default,isPostalCodeLocales:fe.locales,isCurrency:ee.default,isISO8601:te.default,isRFC3339:ne.default,isISO31661Alpha2:re.default,isISO31661Alpha3:oe.default,isBase32:ae.default,isBase64:ie.default,isDataURI:ue.default,isMagnetURI:se.default,isMimeType:le.default,isLatLong:ce.default,ltrim:de.default,rtrim:pe.default,trim:me.default,escape:ve.default,unescape:ge.default,stripLow:he.default,whitelist:be.default,blacklist:ye.default,isWhitelisted:_e.default,normalizeEmail:n.default,toString:toString};t.default=n,e.exports=t.default,e.exports.default=t.default},fb49:function(e,t){e.exports=function(e){try{return{e:!1,v:e()}}catch(e){return{e:!0,v:e}}}}},l.c=f,l.d=function(e,t,n){l.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(t,e){if(1&e&&(t=l(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var n=Object.create(null);if(l.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var r in t)l.d(n,r,function(e){return t[e]}.bind(null,r));return n},l.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(t,"a",t),t},l.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},l.p="/",l(l.s=0);function l(e){if(f[e])return f[e].exports;var t=f[e]={i:e,l:!1,exports:{}};return c[e].call(t.exports,t,t.exports,l),t.l=!0,t.exports}var c,f});
from .serializer import CLASS_PATH_KEY, INSTANCE_OR_CALLABLE, MODULE_X_NAME_DELIMITER, Serializer
/* YUI 3.15.0 (build 834026e) Copyright 2014 Yahoo! Inc. All rights reserved. Licensed under the BSD License. http://yuilibrary.com/license/ */ YUI.add("widget-position-constrain",function(e,t){function m(e){}var n="constrain",r="constrain|xyChange",i="constrainChange",s="preventOverlap",o="align",u="",a="bindUI",f="xy",l="x",c="y",h=e.Node,p="viewportRegion",d="region",v;m.ATTRS={constrain:{value:null,setter:"_setConstrain"},preventOverlap:{value:!1}},v=m._PREVENT_OVERLAP={x:{tltr:1,blbr:1,brbl:1,trtl:1},y:{trbr:1,tlbl:1,bltl:1,brtr:1}},m.prototype={initializer:function(){this._posNode||e.error("WidgetPosition needs to be added to the Widget, before WidgetPositionConstrain is added"),e.after(this._bindUIPosConstrained,this,a)},getConstrainedXY:function(e,t){t=t||this.get(n);var r=this._getRegion(t===!0?null:t),i=this._posNode.get(d);return[this._constrain(e[0],l,i,r),this._constrain(e[1],c,i,r)]},constrain:function(e,t){var r,i,s=t||this.get(n);s&&(r=e||this.get(f),i=this.getConstrainedXY(r,s),(i[0]!==r[0]||i[1]!==r[1])&&this.set(f,i,{constrained:!0}))},_setConstrain:function(e){return e===!0?e:h.one(e)},_constrain:function(e,t,n,r){if(r){this.get(s)&&(e=this._preventOverlap(e,t,n,r));var i=t==l,o=i?r.width:r.height,u=i?n.width:n.height,a=i?r.left:r.top,f=i?r.right-u:r.bottom-u;if(e<a||e>f)u<o?e<a?e=a:e>f&&(e=f):e=a}return e},_preventOverlap:function(e,t,n,r){var i=this.get(o),s=t===l,a,f,c,h,p,d;return i&&i.points&&v[t][i.points.join(u)]&&(f=this._getRegion(i.node),f&&(a=s?n.width:n.height,c=s?f.left:f.top,h=s?f.right:f.bottom,p=s?f.left-r.left:f.top-r.top,d=s?r.right-f.right:r.bottom-f.bottom),e>c?d<a&&p>a&&(e=c-a):p<a&&d>a&&(e=h)),e},_bindUIPosConstrained:function(){this.after(i,this._afterConstrainChange),this._enableConstraints(this.get(n))},_afterConstrainChange:function(e){this._enableConstraints(e.newVal)},_enableConstraints:function(e){e?(this.constrain(),this._cxyHandle=this._cxyHandle||this.on(r,this._constrainOnXYChange)):this._cxyHandle&&(this._cxyHandle.detach(),this._cxyHandle=null)},_constrainOnXYChange:function(e){e.constrained||(e.newVal=this.getConstrainedXY(e.newVal))},_getRegion:function(e){var t;return e?(e=h.one(e),e&&(t=e.get(d))):t=this._posNode.get(p),t}},e.WidgetPositionConstrain=m},"3.15.0",{requires:["widget-position"]});
from typing import Union import torch from overrides import overrides from interlens.modules.vector_similarities.vector_similarity import VectorSimilarity @VectorSimilarity.register('minus_p_distance') class MinusDistanceVectorSimilarity(VectorSimilarity): """ This `VectorSimilarity` is a `Module` that returns the negative Minkowski p-distance between every pair of rows of its two inputs, so a `(n0, dim)` and a `(n1, dim)` tensor yield an `(n0, n1)` similarity matrix. With `use_raised_distance=True` the distance is kept raised to the p-th power (no 1/p root is taken). """ def __init__(self, p: Union[float, int] = 2, use_raised_distance: bool = True,) -> None: super().__init__() self.p = p self.use_raised_distance = use_raised_distance @overrides def forward(self, vec_0: torch.FloatTensor, vec_1: torch.FloatTensor,) -> torch.FloatTensor: # Equivalent to torch.neg(torch.cdist(vec_0, vec_1, p=self.p)) when use_raised_distance is False. vec_0 = torch.unsqueeze(vec_0, 1) vec_1 = torch.unsqueeze(vec_1, 0) dist_raised = torch.sum(torch.pow(torch.abs(vec_0 - vec_1), self.p), -1) if self.use_raised_distance: return torch.neg(dist_raised) else: return torch.neg(torch.pow(dist_raised, torch.true_divide(1, self.p)))
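A minimal sketch of what `forward()` above computes, using plain PyTorch only (no interlens imports, and `torch.abs` added so odd values of p stay well defined); for p=2 with `use_raised_distance=False` it should agree with `torch.cdist`:

import torch

vec_0 = torch.randn(4, 8)   # (n0, dim)
vec_1 = torch.randn(5, 8)   # (n1, dim)
p = 2

# Broadcast to (n0, n1, dim) and sum |diff|^p over the last axis,
# mirroring MinusDistanceVectorSimilarity.forward.
diff = vec_0.unsqueeze(1) - vec_1.unsqueeze(0)
dist_raised = torch.sum(torch.abs(diff) ** p, dim=-1)
similarity = -dist_raised ** (1.0 / p)          # use_raised_distance=False

assert torch.allclose(similarity, -torch.cdist(vec_0, vec_1, p=p), atol=1e-5)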
import sys def basic(): import api critic = api.critic.startSession() alice = api.user.fetch(critic, name="alice") bob = api.user.fetch(critic, name="bob") carol = api.user.fetch(critic, name="carol") dave = api.user.fetch(critic, name="dave") erin = api.user.fetch(critic, name="erin") felix = api.user.fetch(critic, name="felix") howard = api.user.fetch(critic, name="howard") gina = api.user.fetch(critic, name="gina") iris = api.user.fetch(critic, name="iris") admin = api.user.fetch(critic, name="admin") extra = api.user.fetch(critic, name="extra") all_users = [admin, alice, bob, dave, erin, howard, carol, felix, gina, iris, extra] assert isinstance(alice, api.user.User) assert isinstance(alice.id, int) assert int(alice) == alice.id assert hash(alice) == hash(alice.id) assert alice == alice.id assert alice.id == alice assert alice.name == "alice" assert alice.fullname == "Alice von Testing" assert alice.status == "current" assert alice.email == "alice@example.org" assert alice.is_anonymous is False assert isinstance(alice.primary_emails, list) assert len(alice.primary_emails) == 1 assert isinstance(alice.primary_emails[0], api.user.User.PrimaryEmail) assert alice.primary_emails[0].address == "alice@example.org" assert alice.primary_emails[0].selected is True assert alice.primary_emails[0].verified is None assert isinstance(alice.git_emails, set) if len(alice.git_emails) == 0: assert "--unreliable-git-emails" in sys.argv else: assert len(alice.git_emails) == 2 assert "alice@example.org" in alice.git_emails assert "common@example.org" in alice.git_emails assert isinstance(alice.repository_filters, dict) assert len(alice.repository_filters) == 1 repository, filters = alice.repository_filters.items()[0] assert isinstance(repository, api.repository.Repository) assert repository.name == "critic" assert len(filters) == 1 assert isinstance(filters[0], api.filters.RepositoryFilter) assert filters[0].subject is alice assert filters[0].type == "reviewer" assert filters[0].path == "028-gitemails/" assert isinstance(filters[0].id, int) assert filters[0].repository is repository assert isinstance(filters[0].delegates, frozenset) assert all(isinstance(delegate, api.user.User) for delegate in filters[0].delegates) assert erin in filters[0].delegates assert not (alice == bob) assert alice != bob try: api.user.fetch(alice, user_id=alice.id) except AssertionError: pass else: assert False try: api.user.fetch(critic) except AssertionError: pass else: assert False try: api.user.fetch(critic, user_id=alice.id, name=alice.name) except AssertionError: pass else: assert False try: api.user.fetch(critic, user_id="foo") except ValueError: pass else: assert False try: api.user.fetch(critic, user_id=4711) except api.user.InvalidUserId as error: assert error.message == "Invalid user id: %r" % 4711 assert error.value == 4711 else: assert False try: api.user.fetch(critic, name="nobody") except api.user.InvalidUserName as error: assert error.message == "Invalid user name: %r" % "nobody" assert error.value == "nobody" else: assert False try: api.user.fetchMany(alice, user_ids=[alice.id]) except AssertionError: pass else: assert False try: api.user.fetchMany(critic, user_ids=[alice.id], names=[alice.name]) except AssertionError: pass else: assert False try: api.user.fetchMany(critic, user_ids=[4711, 4712]) except api.user.InvalidUserIds as error: assert error.message == "Invalid user ids: %r" % [4711, 4712], error.message assert error.values == [4711, 4712], repr(error.values) else: assert False try: 
api.user.fetchMany(critic, names=["nobody", "anybody"]) except api.user.InvalidUserNames as error: assert error.message == "Invalid user names: %r" % ["nobody", "anybody"], error.message assert error.values == ["nobody", "anybody"], repr(error.values) else: assert False alice_bob_and_dave = api.user.fetchMany( critic, user_ids=[alice.id, bob.id, dave.id]) assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave) assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave) alice_bob_and_dave = api.user.fetchMany( critic, names=[alice.name, bob.name, dave.name]) assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave) assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave) alice_bob_and_dave = api.user.fetchMany( critic, user_ids=set([alice.id, bob.id, dave.id])) assert isinstance(alice_bob_and_dave, set), type(alice_bob_and_dave) assert alice_bob_and_dave == set([alice, bob, dave]), repr(alice_bob_and_dave) alice_bob_and_dave = api.user.fetchMany( critic, names=set([alice.name, bob.name, dave.name])) assert isinstance(alice_bob_and_dave, set), type(alice_bob_and_dave) assert alice_bob_and_dave == set([alice, bob, dave]), repr(alice_bob_and_dave) alice_bob_and_dave = api.user.fetchMany( critic, user_ids=(user.id for user in [alice, bob, dave])) assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave) assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave) alice_bob_and_dave = api.user.fetchMany( critic, names=(user.name for user in [alice, bob, dave])) assert isinstance(alice_bob_and_dave, list), type(alice_bob_and_dave) assert alice_bob_and_dave == [alice, bob, dave], repr(alice_bob_and_dave) users = api.user.fetchAll(critic) assert isinstance(users, list) assert users == sorted(all_users, key=lambda user: user.id) users = api.user.fetchAll(critic, status="current") assert isinstance(users, list) assert users == sorted([user for user in all_users if user.status == "current"], key=lambda user: user.id) users = api.user.fetchAll(critic, status=["current", "absent"]) assert isinstance(users, list) assert users == sorted([user for user in all_users if user.status in ("current", "absent")], key=lambda user: user.id) users = api.user.fetchAll(critic, status=["retired", "absent"]) assert isinstance(users, list) assert users == sorted([user for user in all_users if user.status in ("retired", "absent")], key=lambda user: user.id) users = api.user.fetchAll(critic, status=["absent"]) assert isinstance(users, list) assert users == [] users = api.user.fetchAll( critic, status=(status for status in ["current", "absent"])) assert isinstance(users, list) assert users == sorted([user for user in all_users if user.status in ("current", "absent")], key=lambda user: user.id) assert alice.hasRole("administrator") is False assert alice.hasRole("repositories") is False assert alice.hasRole("newswriter") is False assert alice.hasRole("developer") is False assert admin.hasRole("administrator") is True assert admin.hasRole("repositories") is True if "--unreliable-admin-newswriter" in sys.argv: assert isinstance(admin.hasRole("newswriter"), bool) else: assert admin.hasRole("newswriter") is True assert admin.hasRole("developer") is True try: alice.hasRole("crazy-cat-lady") except api.user.InvalidRole as error: assert error.message == "Invalid role: %r" % "crazy-cat-lady", error.message assert error.role == "crazy-cat-lady", error.role else: assert False anonymous = api.user.anonymous(critic) assert isinstance(anonymous, 
api.user.User) assert anonymous.id is None assert anonymous.name is None assert anonymous.fullname is None assert anonymous.is_anonymous is True assert anonymous.email is None assert anonymous.primary_emails == [] assert anonymous.git_emails == set([]) assert anonymous.repository_filters == {} def preferences(): import api critic = api.critic.startSession() alice = api.user.fetch(critic, name="alice") repository = api.repository.fetch(critic, name="critic") compactMode = alice.getPreference("commit.diff.compactMode") assert isinstance(compactMode.value, bool) assert compactMode.item == "commit.diff.compactMode" assert compactMode.value is True assert compactMode.user is None assert compactMode.repository is None rulerColumn = alice.getPreference("commit.diff.rulerColumn") assert isinstance(rulerColumn.value, int) assert rulerColumn.item == "commit.diff.rulerColumn" assert rulerColumn.value == 0 assert rulerColumn.user is None assert rulerColumn.repository is None defaultGroups = alice.getPreference("dashboard.defaultGroups") assert isinstance(defaultGroups.value, str) assert defaultGroups.item == "dashboard.defaultGroups" assert defaultGroups.value == "owned,draft,active,watched" assert defaultGroups.user is None assert defaultGroups.repository is None # Read per-repository, not overridden. compactMode = alice.getPreference("commit.diff.compactMode", repository=repository) assert compactMode.item == "commit.diff.compactMode" assert compactMode.value is True assert compactMode.user is None assert compactMode.repository is None # Read per-user, overridden per user. visualTabs = alice.getPreference("commit.diff.visualTabs") assert visualTabs.value is True assert visualTabs.user is alice assert visualTabs.repository is None # Read per-repository, overridden per user. visualTabs = alice.getPreference("commit.diff.visualTabs", repository=repository) assert visualTabs.value is True assert visualTabs.user is alice assert visualTabs.repository is None # Read per-user, overridden per repository. expandAllFiles = alice.getPreference("commit.expandAllFiles") assert expandAllFiles.value is False assert expandAllFiles.user is None assert expandAllFiles.repository is None # Read per-repository, overridden per repository. expandAllFiles = alice.getPreference("commit.expandAllFiles", repository=repository) assert expandAllFiles.value is True assert expandAllFiles.user is alice assert expandAllFiles.repository is repository if __name__ == "__main__": import coverage if "basic" in sys.argv[1:]: coverage.call("unittest", basic) if "preferences" in sys.argv[1:]: coverage.call("unittest", preferences)
import tarfile import urllib.request import zipfile from os import makedirs, remove from os.path import basename, exists, join import requests from torchvision.transforms import transforms from CONFIG import * from dataset import DatasetFromFolder __all__ = [ 'download_bsd300', 'download_file_from_google_drive', 'get_confirm_token', 'save_response_content', 'download_nasa_apod', 'calculate_valid_size', 'input_transform', 'target_transform', 'get_train_set', 'get_test_set', ] def download_bsd300(dest='./dataset'): output_image_dir = join(dest, 'BSDS300/images') url = 'http://www2.eecs.berkeley.edu/Research/Projects/CS/vision/bsds/BSDS300-images.tgz' if not exists(dest): makedirs(dest) if not exists(output_image_dir): print('downloading url...', url) data = urllib.request.urlopen(url) filepath = join(dest, basename(url)) with open(filepath, 'wb') as f: f.write(data.read()) print('extracting data...') with tarfile.open(filepath) as tar: for item in tar: tar.extract(item, dest) remove(filepath) return output_image_dir def download_file_from_google_drive(id, destination): url = "https://docs.google.com/uc?export=download" session = requests.Session() response = session.get(url, params = { 'id' : id }, stream = True) token = get_confirm_token(response) if token: params = { 'id' : id, 'confirm' : token } response = session.get(url, params = params, stream = True) save_response_content(response, destination) def get_confirm_token(response): for key, value in response.cookies.items(): if key.startswith('download_warning'): return value return None def save_response_content(response, destination): chunk_size = 32768 with open(destination, "wb") as f: for chunk in response.iter_content(chunk_size): if chunk: # filter out keep-alive new chunks f.write(chunk) def download_nasa_apod(dest='./dataset'): output_image_dir = join(dest, 'NASA/images') file_id = '19a36iKIcQZRgYWyQXN7UGf3dbBx8VI9E' if not exists(dest): makedirs(dest) if not exists(output_image_dir): filepath = join(dest, 'images.zip') print('downloading file from google drive...', file_id) download_file_from_google_drive(file_id, filepath) print('extracting data...') with zipfile.ZipFile(filepath, 'r') as zip_ref: zip_ref.extractall(join(dest, 'NASA/')) remove(filepath) return output_image_dir def calculate_valid_size(size, upscale_factor): return size - (size % upscale_factor) def input_transform(h, w, upscale_factor): return transforms.Compose([ transforms.CenterCrop((h, w)), transforms.Resize((h // upscale_factor, w // upscale_factor)), transforms.ToTensor(), ]) def target_transform(h, w): return transforms.Compose([ transforms.CenterCrop((h, w)), transforms.ToTensor(), ]) def get_train_set(h=IMAGE_HEIGHT, w=IMAGE_WIDTH, download=download_bsd300, upscale_factor=None): h = calculate_valid_size(h, upscale_factor) w = calculate_valid_size(w, upscale_factor) return DatasetFromFolder( join(download(), 'train'), input_transfrom=input_transform(h, w, upscale_factor), target_transform=target_transform(h, w), ) def get_test_set(h=IMAGE_HEIGHT, w=IMAGE_WIDTH, download=download_bsd300, upscale_factor=None): h = calculate_valid_size(h, upscale_factor) w = calculate_valid_size(w, upscale_factor) return DatasetFromFolder( join(download(), 'test'), input_transfrom=input_transform(h, w, upscale_factor), target_transform=target_transform(h, w), ) if __name__ == '__main__': print('BSD300 Dataset downloaded to', download_bsd300()) print('NASA APOD Dataset downloaded to', download_nasa_apod())
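A quick self-contained check of the transform pair defined above (no CONFIG or DatasetFromFolder needed; calculate_valid_size is duplicated here so the snippet runs on its own): crop a dummy image to a valid size and confirm the input tensor is exactly upscale_factor times smaller than the target in each spatial dimension.

from PIL import Image
from torchvision.transforms import transforms

def calculate_valid_size(size, upscale_factor):
    return size - (size % upscale_factor)

factor = 3
h, w = calculate_valid_size(300, factor), calculate_valid_size(451, factor)  # 300, 450
img = Image.new('RGB', (480, 320))  # dummy image, PIL uses (width, height)

input_t = transforms.Compose([
    transforms.CenterCrop((h, w)),
    transforms.Resize((h // factor, w // factor)),
    transforms.ToTensor(),
])
target_t = transforms.Compose([
    transforms.CenterCrop((h, w)),
    transforms.ToTensor(),
])

lr, hr = input_t(img), target_t(img)  # shapes (3, 100, 150) and (3, 300, 450)
assert lr.shape[1] * factor == hr.shape[1] and lr.shape[2] * factor == hr.shape[2]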
########################################################################### # # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ########################################################################### import json #from random import choice class Colab: def __init__(self, name, version='4.0'): self.markdown_lines = [] self.code_lines = [] self.colab = { 'license': 'Apache License, Version 2.0', 'copyright': 'Copyright 2020 Google LLC', 'nbformat': int(version.split('.', 1)[0]), 'nbformat_minor': int(version.split('.', 1)[1]), 'metadata': { 'colab': { 'name': name, 'provenance': [], 'collapsed_sections': [], 'toc_visible': True }, 'kernelspec': { 'name': 'python3', 'display_name': 'Python 3' } }, 'cells': [] } def _code(self): if self.code_lines: self.colab['cells'].append({ 'cell_type': 'code', 'metadata': { #"id": ''.join([choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789') for i in range(12)]), 'colab_type': 'code' }, 'source': self.code_lines }) self.code_lines = [] def _markdown(self): if self.markdown_lines: self.colab['cells'].append({ 'cell_type': 'markdown', 'metadata': { #"id": ''.join([choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789') for i in range(12)]), 'colab_type': 'text' }, 'source': self.markdown_lines }) self.markdown_lines = [] def code(self, code): self._markdown() self.code_lines.extend(['%s\n' % c for c in code.split('\n')]) def header(self, text, level=1, indent=0): self._code() self.markdown_lines.append('%s%s %s\n' % ('>' * indent, '#' * level, text)) def paragraph(self, text, indent=0): self._code() self.markdown_lines.extend( ['%s%s\n' % ('>' * indent, t) for t in text.split('\n')]) def image(self, name, link): self._code() self.markdown_lines.append('![%s](%s)\n' % (name, link)) def list(self, items, ordered=True, indent=0): self._code() self.markdown_lines.extend([ '%s %s %s\n' % ('>' * indent, '1.' if ordered else '*', t) for t in items ]) def render(self): self._code() self._markdown() return json.dumps(self.colab, indent=2)
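A short usage sketch for the Colab builder above (the notebook name and output file name here are hypothetical); render() returns the notebook JSON, which can be written straight to an .ipynb file:

colab = Colab('Example Notebook')
colab.header('Setup', level=1)
colab.paragraph('Install dependencies before running the cells below.')
colab.code('!pip install pandas\nimport pandas as pd')
colab.list(['Run setup', 'Load data'], ordered=True)

# render() flushes any pending markdown/code into cells and serializes.
with open('example.ipynb', 'w') as f:
    f.write(colab.render())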
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-10-29 20:52 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('gluu_license', '0006_license_is_active'), ] operations = [ migrations.AlterField( model_name='license', name='account', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='licenses', to='account.Account'), ), ]
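For reference, a sketch of the field state this AlterField migration encodes (a hypothetical models.py excerpt; the real account.Account model lives in the separate account app):

from django.db import models

class License(models.Model):
    # After migration 0007: deleting an Account cascades to its licenses,
    # which are reachable as some_account.licenses via the related_name.
    account = models.ForeignKey(
        'account.Account',
        on_delete=models.CASCADE,
        related_name='licenses',
    )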
import React from "react"; const CartContext = React.createContext({ items: [], totalQuantity: 0, totalAmount: 0, addItem: (item) => {}, removeItem: (id) => {}, }); export default CartContext;
from django.db import models class ArticleModel(models.Model): author = models.CharField(verbose_name='Author', max_length=32, default='shenxgan') title = models.CharField(verbose_name='Title', max_length=128) content = models.TextField(verbose_name='Content') tags = models.CharField(verbose_name='Tags', max_length=64, help_text='Separate multiple tags with ASCII commas') view_cnt = models.IntegerField(verbose_name='View count', default=0) like_cnt = models.IntegerField(verbose_name='Like count', default=0) is_publish = models.BooleanField(verbose_name='Published', default=False) order = models.IntegerField(verbose_name='Menu order', default=99) create_at = models.DateTimeField(verbose_name='Created at', auto_now_add=True) # publish_at = models.DateTimeField(verbose_name='Published at', null=True, blank=True) update_at = models.DateTimeField(verbose_name='Updated at') class Meta: db_table = 'blog_article' verbose_name = 'Blog article' verbose_name_plural = 'Blog articles' # def save(self, *args, **kwargs): # '''Default the article's menu order to its id.''' # if not self.order: # self.order = self.id # super().save(*args, **kwargs) class ArticleCommentModel(models.Model): article_id = models.IntegerField(verbose_name='Article ID', db_index=True) username = models.CharField(verbose_name='Username', max_length=32) content = models.TextField(verbose_name='Content') create_at = models.DateTimeField(verbose_name='Created at', auto_now_add=True) class Meta: db_table = 'blog_article_comment' verbose_name = 'Blog article comment' verbose_name_plural = 'Blog article comments'
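A minimal usage sketch for the models above (run inside `python manage.py shell`); note that update_at has neither auto_now nor a default, so it must be supplied explicitly on every save:

from django.utils import timezone

article = ArticleModel.objects.create(
    title='Hello',
    content='First post',
    tags='django,blog',
    update_at=timezone.now(),  # no auto_now on this field, so set it by hand
)

article.is_publish = True
article.update_at = timezone.now()
article.save()

# Published articles in menu order.
published = ArticleModel.objects.filter(is_publish=True).order_by('order')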
import matplotlib.pyplot as plt from matplotlib.figure import Figure from matplotlib.text import Annotation # https://matplotlib.org/stable/api/text_api.html fig: Figure fig, ax = plt.subplots() ax.set_facecolor(color='white') ax.plot(1, 1, 'o', color='black', markersize=1) ax.plot(1000, 1000, 'o', color='white', linewidth=2, markersize=1) font_dict = {'family': 'serif', 'color': 'darkred', 'size': 15} ax.annotate('local max', xy=(3, 1), xycoords='data', xytext=(0.8, 0.95), textcoords='axes fraction', arrowprops=dict(facecolor='black', shrink=0.05), horizontalalignment='right', verticalalignment='top' ) txt = ax.text(200, 100, 'Text Example', fontdict=font_dict) # Constructing an Annotation directly and calling draw() needs a live renderer; # see the add_artist sketch below for a supported approach. # annotation = Annotation( # '', xy=(0.0, 50.0), xytext=(50.0, 50.0), xycoords='figure pixels', # arrowprops={ # 'facecolor': 'white', 'width': 108, 'headwidth': 10, 'shrink': 0.0}) # annotation.set_figure(fig) # annotation.draw(renderer) plt.show()
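The commented-out block above fails because Annotation.draw() requires a renderer argument that only exists during a canvas draw. A sketch of the supported alternative: construct the Annotation artist, then hand it to the Axes with ax.add_artist, which wires up the transforms and lets the canvas call draw() for you (coordinates here are illustrative):

import matplotlib.pyplot as plt
from matplotlib.text import Annotation

fig, ax = plt.subplots()
ax.set_xlim(0, 1000)
ax.set_ylim(0, 1000)

# The Axes owns the artist after add_artist; draw() is then invoked
# by the canvas with a valid renderer during plt.show().
annotation = Annotation(
    'added via add_artist', xy=(100, 100), xytext=(400, 600),
    xycoords='data', textcoords='data',
    arrowprops={'facecolor': 'gray', 'shrink': 0.05},
)
ax.add_artist(annotation)
plt.show()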
/** * @depends {nrs.js} */ var NRS = (function(NRS, $, undefined) { NRS.showConsole = function() { NRS.console = window.open("", "console", "width=750,height=400,menubar=no,scrollbars=yes,status=no,toolbar=no,resizable=yes"); $(NRS.console.document.head).html("<title>" + $.t("console") + "</title><style type='text/css'>body { background:black; color:white; font-family:courier-new,courier;font-size:14px; } pre { font-size:14px; } #console { padding-top:15px; }</style>"); $(NRS.console.document.body).html("<div style='position:fixed;top:0;left:0;right:0;padding:5px;background:#efefef;color:black;'>" + $.t("console_opened") + "<div style='float:right;text-decoration:underline;color:blue;font-weight:bold;cursor:pointer;' onclick='document.getElementById(\"console\").innerHTML=\"\"'>clear</div></div><div id='console'></div>"); } NRS.addToConsole = function(url, type, data, response, error) { if (!NRS.console) { return; } if (!NRS.console.document || !NRS.console.document.body) { NRS.console = null; return; } url = url.replace(/&random=[\.\d]+/, ""); NRS.addToConsoleBody(url + " (" + type + ") " + new Date().toString(), "url"); if (data) { if (typeof data == "string") { var d = NRS.queryStringToObject(data); NRS.addToConsoleBody(JSON.stringify(d, null, "\t"), "post"); } else { NRS.addToConsoleBody(JSON.stringify(data, null, "\t"), "post"); } } if (error) { NRS.addToConsoleBody(response, "error"); } else { NRS.addToConsoleBody(JSON.stringify(response, null, "\t"), (response.errorCode ? "error" : "")); } } NRS.addToConsoleBody = function(text, type) { var color = ""; switch (type) { case "url": color = "#29FD2F"; break; case "post": color = "lightgray"; break; case "error": color = "red"; break; } $(NRS.console.document.body).find("#console").append("<pre" + (color ? " style='color:" + color + "'" : "") + ">" + text.escapeHTML() + "</pre>"); } NRS.queryStringToObject = function(qs) { if (!qs) { return {}; } qs = qs.split("&"); var obj = {}; for (var i = 0; i < qs.length; ++i) { var p = qs[i].split('='); if (p.length != 2) { continue; } obj[p[0]] = decodeURIComponent(p[1].replace(/\+/g, " ")); } if ("secretPhrase" in obj) { obj.secretPhrase = "***"; } return obj; } return NRS; }(NRS || {}, jQuery));
#!/usr/bin/env python
# coding: utf-8

# # BME590 Unet++

# In[1]:

# Import packages
import os
import sys
import random
import warnings

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

from tqdm import tqdm
from itertools import chain
from skimage.io import imread, imshow, imread_collection, concatenate_images
from skimage.transform import resize
from skimage.morphology import label

import tensorflow as tf
from tensorflow.python.keras.models import Model, load_model
from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.layers.core import Dropout, Lambda
from tensorflow.python.keras.layers.convolutional import Conv2D, Conv2DTranspose
from tensorflow.python.keras.layers.pooling import MaxPooling2D
from tensorflow.python.keras.layers.merge import concatenate
from tensorflow.python.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import optimizers

# Resize every image to 96*96
IMG_WIDTH = 96
IMG_HEIGHT = 96
IMG_CHANNELS = 3
TRAIN_PATH = './stage1_train/'
TEST_PATH = './stage1_test/'

warnings.filterwarnings('ignore', category=UserWarning, module='skimage')
seed = 42
random.seed(seed)     # seed() is a function call, not an attribute assignment
np.random.seed(seed)


# In[2]:

# Get image IDs
train_ids = next(os.walk(TRAIN_PATH))[1]
test_ids = next(os.walk(TEST_PATH))[1]


# In[3]:

# Get train images and masks
X_train = np.zeros((len(train_ids), IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)
Y_train = np.zeros((len(train_ids), IMG_HEIGHT, IMG_WIDTH, 1), dtype=bool)
print('Getting and resizing train images and masks ... ')
sys.stdout.flush()
for n, id_ in tqdm(enumerate(train_ids), total=len(train_ids)):
    path = TRAIN_PATH + id_
    img = imread(path + '/images/' + id_ + '.png')[:, :, :IMG_CHANNELS]
    img = resize(img, (IMG_HEIGHT, IMG_WIDTH), mode='constant', preserve_range=True)
    X_train[n] = img
    # Combine the per-nucleus masks of one image into a single mask
    mask = np.zeros((IMG_HEIGHT, IMG_WIDTH, 1), dtype=bool)
    for mask_file in next(os.walk(path + '/masks/'))[2]:
        mask_ = imread(path + '/masks/' + mask_file)
        mask_ = np.expand_dims(resize(mask_, (IMG_HEIGHT, IMG_WIDTH), mode='constant',
                                      preserve_range=True), axis=-1)
        mask = np.maximum(mask, mask_)
    Y_train[n] = mask

X_test = np.zeros((len(test_ids), IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)
sizes_test = []
print('Getting and resizing test images ... ')
sys.stdout.flush()
for n, id_ in tqdm(enumerate(test_ids), total=len(test_ids)):
    path = TEST_PATH + id_
    img = imread(path + '/images/' + id_ + '.png')[:, :, :IMG_CHANNELS]
    sizes_test.append([img.shape[0], img.shape[1]])
    img = resize(img, (IMG_HEIGHT, IMG_WIDTH), mode='constant', preserve_range=True)
    X_test[n] = img


# In[4]:

# Check one train image and its mask
ix = random.randint(0, len(train_ids) - 1)  # randint is inclusive at both ends
imshow(X_train[ix])
plt.show()
imshow(np.squeeze(Y_train[ix]))
plt.show()


# In[5]:

# Hold out the first 67 training images (~10%) as a labeled test split
test_X = X_train[0:67]
test_Y = Y_train[0:67]


# In[6]:

X_train = X_train[67:]
Y_train = Y_train[67:]


# In[7]:

# Define Intersection over Union (IoU)
def get_iou_vector(A, B):
    batch_size = A.shape[0]
    metric = 0.0
    for batch in range(batch_size):
        t, p = A[batch], B[batch]
        true = np.sum(t)
        pred = np.sum(p)
        if true == 0:
            metric += (pred == 0)
            continue
        intersection = np.sum(t * p)
        union = true + pred - intersection
        iou = intersection / union
        # Competition-style rounding: no credit below IoU 0.5, then credit
        # grows in steps of 0.1 up to 1.0
        iou = np.floor(max(0, (iou - 0.45) * 20)) / 10
        metric += iou
    metric /= batch_size
    return metric


def my_iou_metric(label, pred):
    return tf.compat.v1.py_func(get_iou_vector, [label, pred > 0.5], tf.float64)


# In[8]:

# Define BCE-Dice loss function
def dice_coef(y_true, y_pred):
    y_true_f = K.flatten(K.cast(y_true, 'float32'))
    y_pred_f = K.flatten(K.cast(y_pred, 'float32'))
    intersection = K.sum(y_true_f * y_pred_f)
    return (2. * intersection + 1.) / (K.sum(y_true_f) + K.sum(y_pred_f) + 1.)


def bc_dice_loss(y_true, y_pred):
    # tf.python is not a public attribute; use the public Keras losses module
    return 0.5 * tf.keras.losses.binary_crossentropy(y_true, y_pred) - dice_coef(y_true, y_pred)


# In[9]:

# Set parameters
dropout_rate = 0.1
activation = "elu"


def conv_block(input_tensor, num_of_channels, kernel_size=3):
    x = Conv2D(num_of_channels, (kernel_size, kernel_size), activation=activation,
               kernel_initializer='he_normal', padding='same')(input_tensor)
    x = Dropout(dropout_rate)(x)
    x = Conv2D(num_of_channels, (kernel_size, kernel_size), activation=activation,
               kernel_initializer='he_normal', padding='same')(x)
    x = Dropout(dropout_rate)(x)
    return x


# In[10]:

# Build and train our neural network
inputs = Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))
s = Lambda(lambda x: x / 255)(inputs)

c1 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(s)
c1 = Dropout(0.1)(c1)
c1 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c1)
p1 = MaxPooling2D((2, 2))(c1)

c2 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p1)
c2 = Dropout(0.1)(c2)
c2 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c2)
p2 = MaxPooling2D((2, 2))(c2)

up1_2 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(c2)
conv1_2 = concatenate([up1_2, c1], axis=3)
conv1_2 = conv_block(conv1_2, num_of_channels=32)

c3 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p2)
c3 = Dropout(0.1)(c3)
c3 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c3)
p3 = MaxPooling2D((2, 2))(c3)

up2_2 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(c3)
conv2_2 = concatenate([up2_2, c2], axis=3)
conv2_2 = conv_block(conv2_2, num_of_channels=64)

up1_3 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv2_2)
conv1_3 = concatenate([up1_3, c1, conv1_2], axis=3)
conv1_3 = conv_block(conv1_3, num_of_channels=32)

c4 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p3)
c4 = Dropout(0.1)(c4)
c4 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c4)
p4 = MaxPooling2D(pool_size=(2, 2))(c4)

up3_2 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(c4)
conv3_2 = concatenate([up3_2, c3], axis=3)
conv3_2 = conv_block(conv3_2, num_of_channels=128)

up2_3 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv3_2)
conv2_3 = concatenate([up2_3, c2, conv2_2], axis=3)
conv2_3 = conv_block(conv2_3, num_of_channels=64)

up1_4 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv2_3)
conv1_4 = concatenate([up1_4, c1, conv1_2, conv1_3], axis=3)
conv1_4 = conv_block(conv1_4, num_of_channels=32)

c5 = Conv2D(512, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(p4)
c5 = Dropout(0.1)(c5)
c5 = Conv2D(512, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same')(c5)

up4_2 = Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(c5)
conv4_2 = concatenate([up4_2, c4], axis=3)
conv4_2 = conv_block(conv4_2, num_of_channels=256)

up3_3 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv4_2)
conv3_3 = concatenate([up3_3, c3, conv3_2], axis=3)
conv3_3 = conv_block(conv3_3, num_of_channels=128)

up2_4 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv3_3)
conv2_4 = concatenate([up2_4, c2, conv2_2, conv2_3], axis=3)
conv2_4 = conv_block(conv2_4, num_of_channels=64)

up1_5 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv2_4)
conv1_5 = concatenate([up1_5, c1, conv1_2, conv1_3, conv1_4], axis=3)
conv1_5 = conv_block(conv1_5, num_of_channels=32)

nestnet_output = Conv2D(1, (1, 1), activation='sigmoid',
                        kernel_initializer='he_normal', padding='same')(conv1_5)

model = Model(inputs=[inputs], outputs=[nestnet_output])
# lr is not a compile() argument; pass it to the optimizer instead
model.compile(optimizer=optimizers.Adam(lr=3e-4), loss=bc_dice_loss, metrics=[my_iou_metric])
model.summary()


# In[11]:

# Fit model
earlystopper = EarlyStopping(patience=10, verbose=1)
checkpointer = ModelCheckpoint('Unet++.h5', verbose=1, save_best_only=True)
# validation_split provides the val_loss that EarlyStopping and
# ModelCheckpoint(save_best_only=True) monitor by default
results = model.fit(X_train, Y_train, validation_split=0.1, batch_size=16, epochs=50,
                    callbacks=[earlystopper, checkpointer])


# In[12]:

# Predict on train and test
model = load_model('Unet++.h5',
                   custom_objects={'my_iou_metric': my_iou_metric, 'bc_dice_loss': bc_dice_loss})
preds_train = model.predict(X_train, verbose=1)
preds_test = model.predict(X_test, verbose=1)
preds_train_t = (preds_train > 0.5).astype(np.uint8)
preds_test_t = (preds_test > 0.5).astype(np.uint8)


# In[13]:

preds_test_in_train = model.predict(test_X, verbose=1)
preds_test_t_in_train = (preds_test_in_train > 0.5).astype(np.uint8)


# In[14]:

m = tf.keras.metrics.MeanIoU(num_classes=2)
m.update_state(test_Y, preds_test_t_in_train)  # update_state expects (y_true, y_pred)
print('Unet++ Final result on test set: ', m.result().numpy())


# In[15]:

# Check train
ix = 10
imshow(X_train[ix])
plt.show()
imshow(np.squeeze(Y_train[ix]))
plt.show()
imshow(np.squeeze(preds_train_t[ix]), cmap='gray')
plt.show()


# In[16]:

# Check test
ix = 11
imshow(X_test[ix])
plt.show()
imshow(np.squeeze(preds_test_t[ix]), cmap='gray')
plt.show()


# In[17]:

# Check test
ix = 12
imshow(X_test[ix])
plt.show()
imshow(np.squeeze(preds_test_t[ix]), cmap='gray')
plt.show()


# In[18]:

# Check test
ix = 13
imshow(X_test[ix])
plt.show()
imshow(np.squeeze(preds_test_t[ix]), cmap='gray')
plt.show()


# Since the original test set in this dataset ships without masks, IoU cannot be
# computed on it. Instead, roughly 10% of the training set (test_X/test_Y above)
# is held out to compute IoU, and the real test images are used only for the
# qualitative figures above.
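# A quick sanity check of the competition-style rounding in get_iou_vector, on
# a toy batch whose numbers avoid floating-point ambiguity (a sketch; it
# assumes get_iou_vector from the cell above is in scope):
import numpy as np

t = np.zeros((1, 4, 4, 1), dtype=bool)
p = np.zeros((1, 4, 4, 1), dtype=bool)
t[0, :2, :, 0] = True   # ground truth: 8 pixels
p[0, :3, :, 0] = True   # prediction: 12 pixels, all 8 true ones included
# raw IoU = 8 / (8 + 12 - 8) = 2/3, rounded to floor((2/3 - 0.45) * 20) / 10
print(get_iou_vector(t, p))  # 0.4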
function validateFileType(event) {
    var fileName = document.getElementById("profile_photo").value;
    var idxDot = fileName.lastIndexOf(".") + 1;
    var extFile = fileName.substr(idxDot, fileName.length).toLowerCase();
    if (extFile == "jpg" || extFile == "jpeg" || extFile == "png" || extFile == "jfif") {
        $('#profile_preview_image').attr('src', URL.createObjectURL(event.target.files[0]));
    } else {
        alert("Only jpg/jpeg, png and jfif files are allowed.");
        // jQuery objects have no .value property; clear the input via the DOM element
        document.getElementById("profile_photo").value = "";
    }
}

function validateCSVFileType(event, upload_file_id) {
    var element = document.getElementById(upload_file_id);
    var fileName = element.value;
    var idxDot = fileName.lastIndexOf(".") + 1;
    var extFile = fileName.substr(idxDot, fileName.length).toLowerCase();
    if (extFile == "csv") {
        // Reject files larger than 2MB (2097152 bytes)
        if (element.files[0].size > 2097152) {
            alert("Please upload a CSV file smaller than 2MB!");
            element.value = "";
        }
    } else {
        alert("Only CSV files are allowed.");
        element.value = "";
    }
}

// Fade out flash messages after 3 seconds
setTimeout(function() {
    $('#success-msg').fadeOut('fast');
}, 3000);

setTimeout(function() {
    $('#fail-msg').fadeOut('fast');
}, 3000);
# model settings
model = dict(
    type='CenterNet',
    pretrained='./pretrain/darknet53.pth',
    backbone=dict(
        type='DarknetV3',
        layers=[1, 2, 8, 8, 4],
        inplanes=[3, 32, 64, 128, 256, 512],
        planes=[32, 64, 128, 256, 512, 1024],
        norm_cfg=dict(type='BN'),
        out_indices=(1, 2, 3, 4),
        frozen_stages=1,
        norm_eval=False),
    neck=dict(type='None'),
    bbox_head=dict(
        type='CXTHead',
        inplanes=(128, 256, 512, 1024),
        head_conv=128,
        wh_conv=64,
        use_deconv=False,
        norm_after_upsample=False,
        hm_head_conv_num=2,
        wh_head_conv_num=2,
        ct_head_conv_num=1,
        fovea_hm=False,
        num_classes=81,
        use_exp_wh=False,
        wh_offset_base=16,
        wh_agnostic=True,
        wh_heatmap=True,
        shortcut_cfg=(1, 2, 3),
        shortcut_attention=(False, False, False),
        norm_cfg=dict(type='BN'),
        norm_wh=False,
        hm_center_ratio=0.27,
        center_ratio=0.01,
        hm_init_value=None,
        giou_weight=5.,
        merge_weight=1.,
        hm_weight=1.,
        ct_weight=1.))
cudnn_benchmark = True
# training and testing settings
train_cfg = dict(
    vis_every_n_iters=100,
    debug=False)
test_cfg = dict(
    score_thr=0.01,
    max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
    imgs_per_gpu=12,
    workers_per_gpu=4,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
# optimizer
optimizer = dict(
    type='SGD',
    lr=0.001,
    momentum=0.9,
    weight_decay=0.0004,
    paramwise_options=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 5,
    step=[9, 11])
checkpoint_config = dict(save_every_n_steps=200, max_to_keep=1, keep_every_n_epochs=9)
bbox_head_hist_config = dict(
    model_type=['ConvModule', 'DeformConvPack'],
    sub_modules=['bbox_head'],
    save_every_n_steps=200)
# yapf:disable
log_config = dict(interval=20)
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'ttf53_whh_beta001_1lr_log_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
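# Configs in this mmdetection style are plain Python modules, normally loaded
# through mmcv. A minimal sketch of reading and overriding one; the filename is
# illustrative, mmcv must be installed, and the train_pipeline/test_pipeline
# lists referenced above must be defined before the file will load:
from mmcv import Config

cfg = Config.fromfile('configs/ttf53_whh_beta001_1x.py')  # hypothetical path
print(cfg.model.bbox_head.num_classes)  # 81
print(cfg.optimizer.lr)                 # 0.001
cfg.optimizer.lr = 0.0005               # fields can be overridden before training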
# -*- coding: utf-8 -*-
import numpy as np
from .measure import *


class Reporter:
    def __init__(self, solver):
        self.solver = solver
        self.std = solver.B * solver.Dx * solver.staAfter * solver.B.T

    def get_points(self):
        sb = []
        sb.append('Point list:')
        sb.append('%10s%15s%15s%15s%10s' % ('Name', 'X', 'Y', 'Z', 'isKnown'))
        sb.append('-' * 68)
        for p in self.solver.points.values():
            sb.append('%10s%15.5f%15.5f%15.5f%10s' % (p.name, p.point[0],
                                                      p.point[1], p.point[2], p.isknown))
        return '\n'.join(sb)

    def get_result(self):
        sb, solver = [], self.solver
        titles, Dx, staAfter = solver.titles, solver.Dx, solver.staAfter
        sb.append('Results and precision:')
        sb.append('%10s%15s%15s%15s%15s%15s%15s' % ('Name', 'X', 'Y', 'Z', 'Dx', 'Dy', 'Dz'))
        sb.append('-' * 105)
        for p in self.solver.points.values():
            items = [p.name + '-' + i for i in 'XYZ']
            idx = [titles.index(i) if i in titles else -1 for i in items]
            sta = ['%15.4f' % (np.sqrt(Dx[i, i] * staAfter)) if i > -1 else '-' for i in idx]
            sb.append('%10s%15.5f%15.5f%15.5f%15s%15s%15s' % (p.name, p.point[0],
                                                              p.point[1], p.point[2],
                                                              sta[0], sta[1], sta[2]))
        return '\n'.join(sb)

    def get_dist(self):
        sb = []
        sb.append('Slope distance observations:')
        sb.append('%10s%10s%15s%15s%10s' % ('From', 'To', 'Mea', 'Reg', 'Dl'))
        sb.append('-' * 68)
        for mea in self.solver.meas:
            if not isinstance(mea, Distance):
                continue
            i = self.solver.meas.index(mea)
            sb.append('%10s%10s%15.4f%15.4f%15.4f' % (mea.p1.name, mea.p2.name,
                                                      mea.value(), mea.estimate(),
                                                      np.sqrt(self.std[i, i]) * 1000))
        return '\n'.join(sb)

    def get_pitch(self):
        sb = []
        sb.append('Pitch observations:')
        sb.append('%10s%10s%15s%15s%10s' % ('From', 'To', 'Mea', 'Reg', 'Da'))
        sb.append('-' * 68)
        for mea in self.solver.meas:
            if not isinstance(mea, Pitch):
                continue
            i = self.solver.meas.index(mea)
            sb.append('%10s%10s%15.4f%15.4f%15.4f' % (mea.p1.name, mea.p2.name,
                                                      mea.value(), mea.estimate(),
                                                      np.sqrt(self.std[i, i]) / np.pi * 180 * 3600))
        return '\n'.join(sb)

    def get_dir(self):
        sb = []
        sb.append('Direction observations:')
        sb.append('%10s%10s%15s%15s%10s' % ('From', 'To', 'Mea', 'Reg', 'Da'))
        sb.append('-' * 68)
        for mea in self.solver.meas:
            if not isinstance(mea, Direction):
                continue
            i = self.solver.meas.index(mea)
            sb.append('%10s%10s%15.4f%15.4f%15.4f' % (mea.p1.name, mea.p2.name,
                                                      mea.value(), mea.estimate(),
                                                      np.sqrt(self.std[i, i]) / np.pi * 180 * 3600))
        return '\n'.join(sb)

    def get_angle(self):
        sb = []
        sb.append('Angle observations:')
        sb.append('%10s%10s%10s%15s%15s%10s' % ('From', 'O', 'To', 'Mea', 'Reg', 'Da'))
        sb.append('-' * 72)
        for mea in self.solver.meas:
            if not isinstance(mea, Angle):
                continue
            i = self.solver.meas.index(mea)
            sb.append('%10s%10s%10s%15.4f%15.4f%15.4f' % (mea.p1.name, mea.p2.name, mea.p3.name,
                                                          mea.value(), mea.estimate(),
                                                          np.sqrt(self.std[i, i]) / np.pi * 180 * 3600))
        return '\n'.join(sb)

    def get_level(self):
        sb = []
        sb.append('Leveling observations:')
        sb.append('%10s%10s%15s%15s%10s' % ('From', 'To', 'Mea', 'Reg', 'Da'))
        sb.append('-' * 68)
        for mea in self.solver.meas:
            if not isinstance(mea, Level):
                continue
            i = self.solver.meas.index(mea)
            sb.append('%10s%10s%15.4f%15.4f%15.4f' % (mea.p1.name, mea.p2.name,
                                                      mea.value(), mea.estimate(),
                                                      np.sqrt(self.std[i, i]) * 1000))
        return '\n'.join(sb)

    def get_sum(self):
        sb, solver = [], self.solver
        sb.append('Summary:')
        sb.append('-' * 38)
        sb.append('\t\t%-15s:%.4f' % ('Sta Error', self.solver.staAfter))
        sb.append('\t\t%-15s:%s' % ('Free Item', solver.B.shape[1]))
        sb.append('\t\t%-15s:%s' % ('Mea Count', solver.B.shape[0]))
        sb.append('\t\t%-15s:%s' % ('Redundancy', solver.B.shape[0] - solver.B.shape[1]))
        return '\n'.join(sb)
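# The sqrt(Dx[i, i] * staAfter) expression in get_result is standard
# least-squares error propagation: the diagonal of the cofactor matrix of the
# unknowns scaled by the posterior variance of unit weight. A self-contained
# numpy illustration with made-up numbers (not the solver's real data):
import numpy as np

Dx = np.array([[4.0, 1.0],
               [1.0, 9.0]])    # cofactor matrix of two unknowns
sta_after = 0.25               # posterior variance of unit weight
sigmas = np.sqrt(np.diag(Dx) * sta_after)
print(sigmas)                  # [1.   1.5] -- one standard deviation per unknown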
scotchApp.controller("fornecedoresController", ["$scope", "$firebaseArray", "$routeParams", "$timeout",
    function($scope, $firebaseArray, $routeParams, $timeout) {
        isLoggedIn();
        $('.loaderDiv').show();
        // Wait until the document is ready
        $timeout(function() {
            var refBaseConta = new Firebase(AppUrl + "contas/" + selectedAccount + '/fornecedores');
            $scope.fornecedores = $firebaseArray(refBaseConta);
            $scope.fornecedores.$watch(function(event) {
                $scope.filteredItems = $scope.fornecedores.length;
                $scope.totalItems = $scope.fornecedores.length;
            });
            $scope.currentPage = 1; // current page
            $scope.entryLimit = 10; // max number of items to display per page
            // Initial values, before any filter is applied
            $scope.filteredItems = 0;
            $scope.totalItems = 0;
            $scope.fornecedores.$loaded(
                function(data) {
                    $scope.filteredItems = $scope.fornecedores.length;
                    $scope.totalItems = $scope.fornecedores.length;
                    $('.loaderDiv').hide();
                },
                function(error) {
                    console.error("Error:", error);
                    $('.loaderDiv').hide();
                }
            );
            $scope.setPage = function(pageNo) {
                $scope.currentPage = pageNo;
            };
            $scope.filter = function() {
                $timeout(function() {
                    $scope.filteredItems = $scope.filtered.length;
                }, 10);
            };
            $scope.sort_by = function(predicate) {
                $scope.predicate = predicate;
                $scope.reverse = !$scope.reverse;
            };
            // Add a new record
            $scope.saveObject = function(e) {
                $scope.fornecedores.$add(createFornecedorObj($scope)).then(function(ref) {
                    var id = ref.key();
                    alert("added record with id " + id);
                }).catch(function(error) {
                    alert('Error!');
                });
            };
        }, 1200);
    }
]);

scotchApp.controller("fornecedoresVerController", ["$scope", "$firebaseArray", "$routeParams", "$timeout",
    function($scope, $firebaseArray, $routeParams, $timeout) {
        var currentId = $routeParams.id;
        $('.loaderDiv').show();
        // Wait until the document is ready
        $timeout(function() {
            var refBaseContaFornecedores = new Firebase(AppUrl + "contas/" + selectedAccount + '/fornecedores/' + currentId);
            $scope.fornecedores = $firebaseArray(refBaseContaFornecedores);
            setTimeout(function() {
                $('.loaderDiv').hide();
            }, 1000);
        }, 1200);
    }
]);

scotchApp.controller("fornecedoresEditarController", ["$scope", "$firebaseArray", "$routeParams", "$firebaseObject", "$timeout",
    function($scope, $firebaseArray, $routeParams, $firebaseObject, $timeout) {
        var currentId = $routeParams.id;
        $('.loaderDiv').show();
        $timeout(function() {
            var refBaseContaFornecedores = new Firebase(AppUrl + "contas/" + selectedAccount + '/fornecedores/' + currentId);
            $scope.fornecedores = $firebaseArray(refBaseContaFornecedores);
            setTimeout(function() {
                $('.loaderDiv').hide();
            }, 1000);
            $scope.editObject = function(e) {
                $scope.fornecedores.$save(0).then(function(ref) {
                    ref.key() === $scope.fornecedores[0].$id; // true
                }, function(error) {
                    console.log("Error:", error);
                });
            };
        }, 1200);
    }
]);
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

module.exports = {

  isValidEmail: function(value) {
    var emailRegex = /^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))$/i;
    return emailRegex.test(value);
  },

  isValidInt: function(value) {
    var intRegex = /^-?\d+$/;
    return intRegex.test(value);
  },

  isValidUNIXUser: function(value) {
    var regex = /^[a-z_][a-z0-9_-]{0,31}$/;
    return regex.test(value);
  },

  isValidFloat: function(value) {
    if (typeof value === 'string' && value.trim() === '') {
      return false;
    }
    var floatRegex = /^-?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d+)?$/;
    return floatRegex.test(value);
  },

  /**
   * validate directory with slash or drive at the start
   * @param value
   * @return {Boolean}
   */
  isValidDir: function(value) {
    var floatRegex = /^\/[0-9a-z]*/;
    var winRegex = /^[a-z]:\\[0-9a-zA-Z]*/;
    var winUrlRegex = /^file:\/\/\/[a-zA-Z]:\/[0-9a-zA-Z]*/;
    var dirs = value.split(',');
    if (dirs.some(function(i) { return i.startsWith(' '); })) {
      return false;
    }
    for (var i = 0; i < dirs.length; i++) {
      if (!floatRegex.test(dirs[i]) && !winRegex.test(dirs[i]) && !winUrlRegex.test(dirs[i])) {
        return false;
      }
    }
    return true;
  },

  /**
   * defines if config value looks like link to other config
   * @param value
   * @returns {boolean}
   */
  isConfigValueLink: function(value) {
    return /^\${.+}$/.test(value);
  },

  /**
   * validate directory with slash at the start
   * @param value
   * @returns {boolean}
   */
  isValidDataNodeDir: function(value) {
    var dirRegex = /^(\[[0-9a-zA-Z]+\])?(\/[0-9a-z]*)/;
    var winRegex = /^(\[[0-9a-zA-Z]+\])?[a-zA-Z]:\\[0-9a-zA-Z]*/;
    var winUrlRegex = /^(\[[0-9a-zA-Z]+\])?file:\/\/\/[a-zA-Z]:\/[0-9a-zA-Z]*/;
    var dirs = value.split(',');
    if (dirs.some(function (i) { return i.startsWith(' '); })) {
      return false;
    }
    for (var i = 0; i < dirs.length; i++) {
      if (!dirRegex.test(dirs[i]) && !winRegex.test(dirs[i]) && !winUrlRegex.test(dirs[i])) {
        return false;
      }
    }
    return true;
  },

  /**
   * validate that a directory doesn't start with "home" or "homes"
   * @param value
   * @returns {boolean}
   */
  isAllowedDir: function(value) {
    var dirs = value.replace(/,/g, ' ').trim().split(new RegExp("\\s+", "g"));
    for (var i = 0; i < dirs.length; i++) {
      if (dirs[i].startsWith('/home') || dirs[i].startsWith('/homes')) {
        return false;
      }
    }
    return true;
  },

  /**
   * validate ip address with port
   * @param value
   * @return {Boolean}
   */
  isIpAddress: function(value) {
    var ipRegex = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)($|\:[0-9]{1,5})$/;
    return ipRegex.test(value);
  },

  /**
   * validate hostname
   * @param value
   * @return {Boolean}
   */
  isHostname: function(value) {
    var regex = /(?=^.{3,254}$)(^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*(\.[a-zA-Z]{1,62})$)/;
    return value === 'localhost' || regex.test(value);
  },

  hasSpaces: function(value) {
    var regex = /(\s+)/;
    return regex.test(value);
  },

  isNotTrimmed: function(value) {
    var regex = /(^\s+|\s+$)/;
    return regex.test(value);
  },

  /**
   * Check if string ends with spaces.
   * For multiline content only the last line will be checked.
   *
   * @method isNotTrimmedRight
   * @param {String} value
   * @returns {Boolean} - <code>true</code> if ends with spaces
   */
  isNotTrimmedRight: function(value) {
    return /\s+$/.test(("" + value).split(/\n/).slice(-1)[0]);
  },

  /**
   * validate domain name with port
   * @param value
   * @return {Boolean}
   */
  isDomainName: function(value) {
    var domainRegex = /^([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}$/;
    return domainRegex.test(value);
  },

  /**
   * validate username
   * @param value
   * @return {Boolean}
   */
  isValidUserName: function(value) {
    var usernameRegex = /^[a-z]([-a-z0-9]{0,30})$/;
    return usernameRegex.test(value);
  },

  /**
   * validate db name
   * @param value
   * @returns {boolean}
   */
  isValidDbName: function(value) {
    var dbPattern = /^\S+$/;
    return dbPattern.test(value);
  },

  /**
   * validate key of configurations
   * @param value
   * @return {Boolean}
   */
  isValidConfigKey: function(value) {
    var configKeyRegex = /^[0-9a-z_\-\.\*]+$/i;
    return configKeyRegex.test(value);
  },

  /**
   * validate configuration group name
   * @param value
   * @return {Boolean}
   */
  isValidConfigGroupName: function(value) {
    var configKeyRegex = /^[\s0-9a-z_\-]+$/i;
    return configKeyRegex.test(value);
  },

  /**
   * validate alert group name
   * @param value
   * @return {Boolean}
   */
  isValidAlertGroupName: function(value) {
    var configKeyRegex = /^[\s0-9a-z_\-]+$/i;
    return configKeyRegex.test(value);
  },

  empty: function (e) {
    switch (e) {
      case "":
      case 0:
      case "0":
      case null:
      case false:
      case undefined:
      case typeof this == "undefined":
        return true;
      default:
        return false;
    }
  },

  /**
   * Validate string that will be passed as a parameter to the .matches() url param.
   * Tries to prevent an invalid regexp.
   * For example: /api/v1/clusters/c1/hosts?Hosts/host_name.matches(.*localhost.)
   *
   * @param {String} value - string to validate
   * @return {Boolean}
   * @method isValidMatchesRegexp
   */
  isValidMatchesRegexp: function(value) {
    var checkPair = function(chars) {
      chars = chars.map(function(c) { return '\\' + c; });
      var charsReg = new RegExp(chars.join('|'), 'g');
      if (charsReg.test(value)) {
        var pairContentReg = new RegExp(chars.join('.*'), 'g');
        if (!pairContentReg.test(value)) return false;
        var pairCounts = chars.map(function(c) { return value.match(new RegExp(c, 'g')).length; });
        if (pairCounts[0] != pairCounts[1]) return false;
      }
      return true;
    };
    if (/^[\?\|\*\!,]/.test(value)) return false;
    return /^((\.\*?)?([\w\s\[\]\/\?\-_,\|\*\!\{\}]*)?)+(\.\*?)?$/g.test(value) && (checkPair(['[', ']'])) && (checkPair(['{', '}']));
  },

  /**
   * Remove validation messages for components which are already installed
   */
  filterNotInstalledComponents: function(validationData) {
    var hostComponents = App.HostComponent.find();
    return validationData.resources[0].items.filter(function(item) {
      // true if there is no host with this component
      return hostComponents.filterProperty("componentName", item["component-name"]).filterProperty("hostName", item.host).length === 0;
    });
  },

  isValidRackId: function(path) {
    // See app/message.js:hostPopup.setRackId.invalid
    return /^\/[/.\w-]+$/.test(path);
  },

  /**
   * Validate url
   * @param value
   * @return {Boolean}
   */
  isValidURL: function(value) {
    var urlRegex = /^(https?|ftp):\/\/(((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:)*@)?(((\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5]))|((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?)(:\d*)?)(\/((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)+(\/(([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)*)*)?)?(\?((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|[\uE000-\uF8FF]|\/|\?)*)?(\#((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|\/|\?)*)?$/i;
    return urlRegex.test(value);
  },

  /**
   * Validate base URL
   * @param {string} value
   * @returns {boolean}
   */
  isValidBaseUrl: function (value) {
    var remotePattern = /^(?:(?:https?|ftp):\/{2})(?:\S+(?::\S*)?@)?(?:(?:(?:[\w\-.]))*)(?::[0-9]+)?(?:\/\S*)?$/,
        localPattern = /^file:\/{2,3}([a-zA-Z][:|]\/){0,1}[\w~!*'();@&=\/\\\-+$,?%#.\[\]]+$/;
    return remotePattern.test(value) || localPattern.test(value);
  }
};
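# The pair-balancing idea inside isValidMatchesRegexp above translates directly
# to other languages. A rough Python equivalent of its checkPair helper (a
# sketch of the same logic, not a drop-in replacement for the JS module):
import re

def check_pair(value, open_ch, close_ch):
    """If either bracket occurs, require an open..close sequence and equal counts."""
    o, c = re.escape(open_ch), re.escape(close_ch)
    if re.search(o + '|' + c, value):
        if not re.search(o + '.*' + c, value):
            return False
        if value.count(open_ch) != value.count(close_ch):
            return False
    return True

print(check_pair('host[1-3]', '[', ']'))  # True
print(check_pair('host[1-3', '[', ']'))   # False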
import archive from "./../../api/archive";
import { added } from "./addedMsg";
import deleteSP from "./deleteAllAction";
import getSujetPlanif from "./getSujetPlanif";

const archiveAction = (
  e,
  dbName,
  dbNameS,
  presentateurId,
  ArchId,
  Sujet,
  Presentateur,
  date,
  Lien,
  Lien2
) => {
  return dispatch => {
    e.preventDefault();
    archive(dbName, e, ArchId, Sujet, Presentateur, date, Lien, Lien2)
      .then(u => {
        dispatch(added("archive", true, " "));
        dispatch(deleteSP(dbNameS, presentateurId, dispatch));
        dispatch(getSujetPlanif());
      })
      .catch(err => {
        dispatch(added("archive", false, "Erreur"));
      });
  };
};

export default archiveAction;
import { debounce } from '../../utils/underscore';
import { SettingsMenu } from 'view/controls/components/settings/menu';
import {
    addCaptionsSubmenu,
    removeCaptionsSubmenu,
    addQualitiesSubmenu,
    removeQualitiesSubmenu,
    addAudioTracksSubmenu,
    removeAudioTracksSubmenu,
    addPlaybackRatesSubmenu,
    removePlaybackRatesSubmenu
} from 'view/utils/submenu-factory';

export function createSettingsMenu(controlbar, onVisibility, localization) {
    const settingsButton = controlbar.elements.settingsButton;
    const settingsMenu = SettingsMenu(onVisibility, {
        hide: () => settingsButton.hide(),
        show: () => settingsButton.show()
    }, localization);

    controlbar.on('settingsInteraction', (submenuName, isDefault, event) => {
        const submenu = settingsMenu.getSubmenu(submenuName);
        const nonKeyboardInteraction = event && event.type !== 'enter';
        const delayedOpen = debounce(settingsMenu.open, 10);

        if (!submenu && !isDefault) {
            // Do nothing if activating an invalid submenu
            // An invalid submenu is one which does not exist
            // The default submenu may not exist, but this case has defined behavior
            return;
        }

        if (settingsMenu.visible) {
            if (isDefault || submenu.active) {
                // Close the submenu if clicking the default button (the gear) or if we're already at that submenu
                settingsMenu.close();
            } else {
                // Tab to the newly activated submenu
                settingsMenu.activateSubmenu(submenuName, false, nonKeyboardInteraction);
            }
        } else {
            if (submenu) {
                // Activate the selected submenu
                settingsMenu.activateSubmenu(submenuName, false, nonKeyboardInteraction);
            } else {
                // Activate the first submenu if clicking the default button
                settingsMenu.activateFirstSubmenu(nonKeyboardInteraction);
            }
            delayedOpen(isDefault, event);
        }
    });

    return settingsMenu;
}

function showSettingsMenuIcon(settingsMenu, controlbar) {
    // Show or hide the settings menu icon depending on the number of submenus
    const submenuNames = settingsMenu.getSubmenuNames();
    const toggleIcon = submenuNames.length > 1 ||
        submenuNames.some(name => (name === 'quality' || name === 'playbackRates'));
    controlbar.elements.settingsButton.toggle(toggleIcon);
}

export function setupSubmenuListeners(settingsMenu, controlbar, viewModel, api) {
    const model = viewModel.player;

    const activateSubmenuItem = (submenuName, itemIndex) => {
        const submenu = settingsMenu.getSubmenu(submenuName);
        if (submenu) {
            submenu.activateItem(itemIndex);
        }
    };

    const onAudiotracksChanged = (changedModel, audioTracks) => {
        if (!audioTracks || audioTracks.length <= 1) {
            removeAudioTracksSubmenu(settingsMenu);
            return;
        }
        addAudioTracksSubmenu(
            settingsMenu,
            audioTracks,
            (index) => api.setCurrentAudioTrack(index),
            model.get('currentAudioTrack'),
            model.get('localization').audioTracks
        );
    };

    const onQualitiesChanged = (changedModel, levels) => {
        if (!levels || levels.length <= 1) {
            removeQualitiesSubmenu(settingsMenu);
        } else {
            const { hd, auto } = model.get('localization');
            addQualitiesSubmenu(
                settingsMenu,
                levels,
                (index) => api.setCurrentQuality(index),
                model.get('currentLevel'),
                hd,
                auto
            );
        }
        showSettingsMenuIcon(settingsMenu, controlbar);
    };

    const onCaptionsChanged = (changedModel, captionsList) => {
        const controlbarButton = controlbar.elements.captionsButton;
        if (!captionsList || captionsList.length <= 1) {
            removeCaptionsSubmenu(settingsMenu);
            controlbarButton.hide();
            return;
        }
        const { cc, off } = model.get('localization');
        addCaptionsSubmenu(
            settingsMenu,
            captionsList,
            (index) => api.setCurrentCaptions(index),
            model.get('captionsIndex'),
            cc,
            off
        );
        controlbar.toggleCaptionsButtonState(!!model.get('captionsIndex'));
        controlbarButton.show();
    };

    const setupPlaybackRatesMenu = (changedModel, playbackRates) => {
        const showPlaybackRateControls =
            model.get('supportsPlaybackRate') &&
            model.get('streamType') !== 'LIVE' &&
            model.get('playbackRateControls') &&
            playbackRates.length > 1;

        if (!showPlaybackRateControls) {
            removePlaybackRatesSubmenu(settingsMenu);
            return;
        }
        addPlaybackRatesSubmenu(
            settingsMenu,
            playbackRates,
            (playbackRate) => api.setPlaybackRate(playbackRate),
            playbackRates.indexOf(model.get('playbackRate')),
            model.get('localization').playbackRates
        );
    };

    const changeAutoLabel = function (qualityLevel, qualitySubMenu, currentIndex) {
        const levels = model.get('levels');
        // Return early if the label isn't "Auto" (html5 provider with multiple mp4 sources)
        if (!levels || levels[0].label !== 'Auto') {
            return;
        }
        const items = qualitySubMenu.getItems();
        const item = items[0].element().querySelector('.jw-auto-label');
        const level = levels[qualityLevel.index] || { label: '' };
        item.textContent = currentIndex ? '' : level.label;
    };

    // Quality Levels
    model.change('levels', onQualitiesChanged, settingsMenu);
    model.on('change:currentLevel', (changedModel, currentIndex) => {
        const qualitySubMenu = settingsMenu.getSubmenu('quality');
        const visualQuality = model.get('visualQuality');
        if (visualQuality && qualitySubMenu) {
            changeAutoLabel(visualQuality.level, qualitySubMenu, currentIndex);
        }
        activateSubmenuItem('quality', currentIndex);
    }, settingsMenu);

    // Audio Tracks
    model.change('audioTracks', onAudiotracksChanged, settingsMenu);
    model.on('change:currentAudioTrack', (changedModel, currentAudioTrack) => {
        activateSubmenuItem('audioTracks', currentAudioTrack);
    }, settingsMenu);

    // Captions
    model.on('change:playlistItem', () => {
        // captions.js silently clears captions when the playlist item changes. It clears
        // them silently instead of dispatching an event because we don't want to emit
        // 'captionsList' if the new list is empty.
        removeCaptionsSubmenu(settingsMenu);
        controlbar.elements.captionsButton.hide();
        // Settings menu should not be visible when switching playlist items via controls or .load()
        if (settingsMenu.visible) {
            settingsMenu.close();
        }
    });
    model.change('captionsList', onCaptionsChanged, settingsMenu);
    model.change('captionsIndex', (changedModel, index) => {
        const captionsSubmenu = settingsMenu.getSubmenu('captions');
        if (captionsSubmenu) {
            captionsSubmenu.activateItem(index);
            controlbar.toggleCaptionsButtonState(!!index);
        }
    }, settingsMenu);

    // Playback Rates
    model.change('playbackRates', setupPlaybackRatesMenu, settingsMenu);
    model.change('playbackRate', (changedModel, playbackRate) => {
        const rates = model.get('playbackRates');
        if (rates) {
            activateSubmenuItem('playbackRates', rates.indexOf(playbackRate));
        }
    }, settingsMenu);
    model.on('change:playbackRateControls', () => {
        setupPlaybackRatesMenu(model, model.get('playbackRates'));
    });

    // Visual Quality
    model.on('change:visualQuality', (changedModel, quality) => {
        const qualitySubMenu = settingsMenu.getSubmenu('quality');
        if (quality && qualitySubMenu) {
            changeAutoLabel(quality.level, qualitySubMenu, model.get('currentLevel'));
        }
    });

    // Remove the audio tracks, qualities, and playback rates submenus when casting
    model.on('change:castActive', (changedModel, active, previousState) => {
        if (active === previousState) {
            return;
        }
        if (active) {
            removeAudioTracksSubmenu(settingsMenu);
            removeQualitiesSubmenu(settingsMenu);
            removePlaybackRatesSubmenu(settingsMenu);
        } else {
            onAudiotracksChanged(model, model.get('audioTracks'));
            onQualitiesChanged(model, model.get('levels'));
            setupPlaybackRatesMenu(model, model.get('playbackRates'));
        }
    }, settingsMenu);

    model.on('change:streamType', () => {
        setupPlaybackRatesMenu(model, model.get('playbackRates'));
    }, settingsMenu);
}
(self.webpackChunk=self.webpackChunk||[]).push([[773],{7080:(e,a,s)=>{s(1689)},1689:(e,a,s)=>{window._=s(6486),window.axios=s(9669),window.axios.defaults.headers.common["X-Requested-With"]="XMLHttpRequest",window.jQuery=window.$=s(9755),window.moment=s(381),s(8981),s(3734),s(3248),window.Swal=s(6455),s(686),s(1842),s(4712),s(2278);var t=document.head.querySelector('meta[name="csrf-token"]');t?window.axios.defaults.headers.common["X-CSRF-TOKEN"]=t.content:console.error("CSRF token not found: https://laravel.com/docs/csrf#csrf-x-csrf-token")},2278:()=>{"use strict";document.addEventListener("show-message",(function(e){var a=e.detail.type,s=e.detail.message;Swal.fire({icon:a,text:s})}))},4712:()=>{"use strict";window.Chart&&(Chart.defaults.global.defaultFontFamily="'Nunito', 'Segoe UI', 'Arial'",Chart.defaults.global.defaultFontSize=12,Chart.defaults.global.defaultFontStyle=500,Chart.defaults.global.defaultFontColor="#999",Chart.defaults.global.tooltips.backgroundColor="#000",Chart.defaults.global.tooltips.bodyFontColor="rgba(255,255,255,.7)",Chart.defaults.global.tooltips.titleMarginBottom=10,Chart.defaults.global.tooltips.titleFontSize=14,Chart.defaults.global.tooltips.titleFontFamily="'Nunito', 'Segoe UI', 'Arial'",Chart.defaults.global.tooltips.titleFontColor="#fff",Chart.defaults.global.tooltips.xPadding=15,Chart.defaults.global.tooltips.yPadding=15,Chart.defaults.global.tooltips.displayColors=!1,Chart.defaults.global.tooltips.intersect=!1,Chart.defaults.global.tooltips.mode="nearest"),window.Dropzone&&(Dropzone.autoDiscover=!1),$("[data-confirm]").each((function(){var me=$(this),me_data=me.data("confirm");me_data=me_data.split("|"),me.fireModal({title:me_data[0],body:me_data[1],buttons:[{text:me.data("confirm-text-yes")||"Yes",class:"btn btn-danger btn-shadow",handler:function handler(){eval(me.data("confirm-yes"))}},{text:me.data("confirm-text-cancel")||"Cancel",class:"btn btn-secondary",handler:function handler(modal){$.destroyModal(modal),eval(me.data("confirm-no"))}}]})})),$((function(){var sidebar_nicescroll_opts={cursoropacitymin:0,cursoropacitymax:.8,zindex:892},now_layout_class=null,sidebar_sticky=function(){$("body").hasClass("layout-2")&&($("body.layout-2 #sidebar-wrapper").stick_in_parent({parent:$("body")}),$("body.layout-2 #sidebar-wrapper").stick_in_parent({recalc_every:1}))},sidebar_nicescroll;sidebar_sticky();var update_sidebar_nicescroll=function(){var e=setInterval((function(){null!=sidebar_nicescroll&&sidebar_nicescroll.resize()}),10);setTimeout((function(){clearInterval(e)}),600)},sidebar_dropdown=function(){$(".main-sidebar").length&&($(".main-sidebar").niceScroll(sidebar_nicescroll_opts),sidebar_nicescroll=$(".main-sidebar").getNiceScroll(),$(".main-sidebar .sidebar-menu li a.has-dropdown").off("click").on("click",(function(){var e=$(this),a=!1;return e.parent().hasClass("active")&&(a=!0),$(".main-sidebar .sidebar-menu li.active > .dropdown-menu").slideUp(500,(function(){return update_sidebar_nicescroll(),!1})),$(".main-sidebar .sidebar-menu li.active").removeClass("active"),1==a?(e.parent().removeClass("active"),e.parent().find("> .dropdown-menu").slideUp(500,(function(){return update_sidebar_nicescroll(),!1}))):(e.parent().addClass("active"),e.parent().find("> .dropdown-menu").slideDown(500,(function(){return update_sidebar_nicescroll(),!1}))),!1})),$(".main-sidebar .sidebar-menu li.active > .dropdown-menu").slideDown(500,(function(){return 
update_sidebar_nicescroll(),!1})))};sidebar_dropdown(),$("#top-5-scroll").length&&$("#top-5-scroll").css({height:315}).niceScroll(),$(".main-content").css({minHeight:$(window).outerHeight()-108}),$(".nav-collapse-toggle").click((function(){return $(this).parent().find(".navbar-nav").toggleClass("show"),!1})),$(document).on("click",(function(e){$(".nav-collapse .navbar-nav").removeClass("show")}));var toggle_sidebar_mini=function(e){var a=$("body");e?(a.addClass("sidebar-mini"),a.removeClass("sidebar-show"),sidebar_nicescroll.remove(),sidebar_nicescroll=null,$(".main-sidebar .sidebar-menu > li").each((function(){var e=$(this);e.find("> .dropdown-menu").length?(e.find("> .dropdown-menu").hide(),e.find("> .dropdown-menu").prepend('<li class="dropdown-title pt-3">'+e.find("> a").text()+"</li>")):(e.find("> a").attr("data-toggle","tooltip"),e.find("> a").attr("data-original-title",e.find("> a").text()),$("[data-toggle='tooltip']").tooltip({placement:"right"}))}))):(a.removeClass("sidebar-mini"),$(".main-sidebar").css({overflow:"hidden"}),setTimeout((function(){$(".main-sidebar").niceScroll(sidebar_nicescroll_opts),sidebar_nicescroll=$(".main-sidebar").getNiceScroll()}),500),$(".main-sidebar .sidebar-menu > li > ul .dropdown-title").remove(),$(".main-sidebar .sidebar-menu > li > a").removeAttr("data-toggle"),$(".main-sidebar .sidebar-menu > li > a").removeAttr("data-original-title"),$(".main-sidebar .sidebar-menu > li > a").removeAttr("title"))};$("[data-toggle='sidebar']").click((function(){var e=$("body");return $(window).outerWidth()<=1024?(e.removeClass("search-show search-gone"),e.hasClass("sidebar-gone")?(e.removeClass("sidebar-gone"),e.addClass("sidebar-show")):(e.addClass("sidebar-gone"),e.removeClass("sidebar-show")),update_sidebar_nicescroll()):(e.removeClass("search-show search-gone"),e.hasClass("sidebar-mini")?toggle_sidebar_mini(!1):toggle_sidebar_mini(!0)),!1}));var toggleLayout=function(){var e=$(window),a=$("body").attr("class")||"",s=a.trim().length>0?a.split(" "):"";if(s.length>0&&s.forEach((function(e){-1!=e.indexOf("layout-")&&(now_layout_class=e)})),e.outerWidth()<=1024){if($("body").hasClass("sidebar-mini")&&(toggle_sidebar_mini(!1),$(".main-sidebar").niceScroll(sidebar_nicescroll_opts),sidebar_nicescroll=$(".main-sidebar").getNiceScroll()),$("body").addClass("sidebar-gone"),$("body").removeClass("layout-2 layout-3 sidebar-mini sidebar-show"),$("body").off("click touchend").on("click touchend",(function(e){($(e.target).hasClass("sidebar-show")||$(e.target).hasClass("search-show"))&&($("body").removeClass("sidebar-show"),$("body").addClass("sidebar-gone"),$("body").removeClass("search-show"),update_sidebar_nicescroll())})),update_sidebar_nicescroll(),"layout-3"==now_layout_class){var t=$(".navbar-secondary").attr("class"),o=$(".navbar-secondary");o.attr("data-nav-classes",t),o.removeAttr("class"),o.addClass("main-sidebar");var i=$(".main-sidebar");i.find(".container").addClass("sidebar-wrapper").removeClass("container"),i.find(".navbar-nav").addClass("sidebar-menu").removeClass("navbar-nav"),i.find(".sidebar-menu .nav-item.dropdown.show a").click(),i.find(".sidebar-brand").remove(),i.find(".sidebar-menu").before($("<div>",{class:"sidebar-brand"}).append($("<a>",{href:$(".navbar-brand").attr("href")}).html($(".navbar-brand").html()))),setTimeout((function(){sidebar_nicescroll=i.niceScroll(sidebar_nicescroll_opts),sidebar_nicescroll=i.getNiceScroll()}),700),sidebar_dropdown(),$(".main-wrapper").removeClass("container")}}else{$("body").removeClass("sidebar-gone 
sidebar-show"),now_layout_class&&$("body").addClass(now_layout_class);var n=$(".main-sidebar").attr("data-nav-classes"),r=$(".main-sidebar");if("layout-3"==now_layout_class&&r.hasClass("main-sidebar")){r.find(".sidebar-menu li a.has-dropdown").off("click"),r.find(".sidebar-brand").remove(),r.removeAttr("class"),r.addClass(n);var l=$(".navbar-secondary");l.find(".sidebar-wrapper").addClass("container").removeClass("sidebar-wrapper"),l.find(".sidebar-menu").addClass("navbar-nav").removeClass("sidebar-menu"),l.find(".dropdown-menu").hide(),l.removeAttr("style"),l.removeAttr("tabindex"),l.removeAttr("data-nav-classes"),$(".main-wrapper").addClass("container")}else"layout-2"==now_layout_class?$("body").addClass("layout-2"):update_sidebar_nicescroll()}};toggleLayout(),$(window).resize(toggleLayout),$("[data-toggle='search']").click((function(){var e=$("body");e.hasClass("search-gone")?(e.addClass("search-gone"),e.removeClass("search-show")):(e.removeClass("search-gone"),e.addClass("search-show"))})),$("[data-toggle='tooltip']").tooltip(),$('[data-toggle="popover"]').popover({container:"body"}),jQuery().select2&&$(".select2").select2(),jQuery().selectric&&$(".selectric").selectric({disableOnMobile:!1,nativeOnMobile:!1}),$(".notification-toggle").dropdown(),$(".notification-toggle").parent().on("shown.bs.dropdown",(function(){$(".dropdown-list-icons").niceScroll({cursoropacitymin:.3,cursoropacitymax:.8,cursorwidth:7})})),$(".message-toggle").dropdown(),$(".message-toggle").parent().on("shown.bs.dropdown",(function(){$(".dropdown-list-message").niceScroll({cursoropacitymin:.3,cursoropacitymax:.8,cursorwidth:7})})),$(".chat-content").length&&($(".chat-content").niceScroll({cursoropacitymin:.3,cursoropacitymax:.8}),$(".chat-content").getNiceScroll(0).doScrollTop($(".chat-content").height())),jQuery().summernote&&($(".summernote").summernote({dialogsInBody:!0,minHeight:250}),$(".summernote-simple").summernote({dialogsInBody:!0,minHeight:150,toolbar:[["style",["bold","italic","underline","clear"]],["font",["strikethrough"]],["para",["paragraph"]]]})),window.CodeMirror&&$(".codeeditor").each((function(){CodeMirror.fromTextArea(this,{lineNumbers:!0,theme:"duotone-dark",mode:"javascript",height:200}).setSize("100%",200)})),$(".follow-btn, .following-btn").each((function(){var me=$(this),follow_text="Follow",unfollow_text="Following";me.click((function(){return me.hasClass("following-btn")?(me.removeClass("btn-danger"),me.removeClass("following-btn"),me.addClass("btn-primary"),me.html(follow_text),eval(me.data("unfollow-action"))):(me.removeClass("btn-primary"),me.addClass("btn-danger"),me.addClass("following-btn"),me.html(unfollow_text),eval(me.data("follow-action"))),!1}))})),$("[data-dismiss]").each((function(){var e=$(this),a=e.data("dismiss");e.click((function(){return $(a).fadeOut((function(){$(a).remove()})),!1}))})),$("[data-collapse]").each((function(){var e=$(this),a=e.data("collapse");e.click((function(){return $(a).collapse("toggle"),$(a).on("shown.bs.collapse",(function(a){a.stopPropagation(),e.html('<i class="fas fa-minus"></i>')})),$(a).on("hidden.bs.collapse",(function(a){a.stopPropagation(),e.html('<i class="fas fa-plus"></i>')})),!1}))})),$(".gallery .gallery-item").each((function(){var 
e=$(this);e.attr("href",e.data("image")),e.attr("title",e.data("title")),e.parent().hasClass("gallery-fw")&&(e.css({height:e.parent().data("item-height")}),e.find("div").css({lineHeight:e.parent().data("item-height")+"px"})),e.css({backgroundImage:'url("'+e.data("image")+'")'})})),jQuery().Chocolat&&$(".gallery").Chocolat({className:"gallery",imageSelector:".gallery-item"}),$("[data-background]").each((function(){var e=$(this);e.css({backgroundImage:"url("+e.data("background")+")"})})),$("[data-tab]").each((function(){var e=$(this);e.click((function(){if(!e.hasClass("active")){$('[data-tab-group="'+e.data("tab")+'"]');var a=$('[data-tab-group="'+e.data("tab")+'"].active'),s=$(e.attr("href"));$('[data-tab="'+e.data("tab")+'"]').removeClass("active"),e.addClass("active"),s.addClass("active"),a.removeClass("active")}return!1}))})),$(".needs-validation").submit((function(){var e=$(this);!1===e[0].checkValidity()&&(event.preventDefault(),event.stopPropagation()),e.addClass("was-validated")})),$(".alert-dismissible").each((function(){var e=$(this);e.find(".close").click((function(){e.alert("close")}))})),$(".main-navbar").length,$("[data-crop-image]").each((function(e){$(this).css({overflow:"hidden",position:"relative",height:$(this).data("crop-image")})})),$("[data-toggle-slide]").click((function(){var e=$(this).data("toggle-slide");return $(e).slideToggle(),!1})),$("[data-dismiss=modal]").click((function(){return $(this).closest(".modal").modal("hide"),!1})),$("[data-width]").each((function(){$(this).css({width:$(this).data("width")})})),$("[data-height]").each((function(){$(this).css({height:$(this).data("height")})})),$(".chocolat-parent").length&&jQuery().Chocolat&&$(".chocolat-parent").Chocolat(),$(".sortable-card").length&&jQuery().sortable&&$(".sortable-card").sortable({handle:".card-header",opacity:.8,tolerance:"pointer"}),jQuery().daterangepicker&&($(".datepicker").length&&$(".datepicker").daterangepicker({locale:{format:"YYYY-MM-DD"},singleDatePicker:!0}),$(".datetimepicker").length&&$(".datetimepicker").daterangepicker({locale:{format:"YYYY-MM-DD hh:mm"},singleDatePicker:!0,timePicker:!0,timePicker24Hour:!0}),$(".daterange").length&&$(".daterange").daterangepicker({locale:{format:"YYYY-MM-DD"},drops:"down",opens:"right"})),jQuery().timepicker&&$(".timepicker").length&&$(".timepicker").timepicker({icons:{up:"fas fa-chevron-up",down:"fas fa-chevron-down"}})}))},1842:function(){"use strict";function e(a){return(e="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(a)}!function(a,s,t){a.fn.fireModal=function(s){s=a.extend({size:"modal-md",center:!1,animation:!0,title:"Modal Title",closeButton:!0,header:!0,bodyClass:"",footerClass:"",body:"",buttons:[],autoFocus:!0,removeOnDismiss:!1,created:function(){},appended:function(){},onFormSubmit:function(){},modal:{}},s);this.each((function(){var o="fire-modal-"+ ++t,i="trigger--"+o;a("."+i);a(this).addClass(i);var n=s.body;if("object"==e(n))if(n.length){var r=n;n=n.removeAttr("id").clone().removeClass("modal-part"),r.remove()}else n='<div class="text-danger">Modal part element not found!</div>';var l,d=' <div class="modal'+(1==s.animation?" fade":"")+'" tabindex="-1" role="dialog" id="'+o+'"> <div class="modal-dialog '+s.size+(s.center?" modal-dialog-centered":"")+'" role="document"> <div class="modal-content"> '+(1==s.header?' 
<div class="modal-header"> <h5 class="modal-title">'+s.title+"</h5> "+(1==s.closeButton?' <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> ':"")+" </div> ":"")+' <div class="modal-body"> </div> '+(s.buttons.length>0?' <div class="modal-footer"> </div> ':"")+" </div> </div> </div> ";d=a(d);s.buttons.forEach((function(e){var s="id"in e?e.id:"";l='<button type="'+("submit"in e&&1==e.submit?"submit":"button")+'" class="'+e.class+'" id="'+s+'">'+e.text+"</button>",l=a(l).off("click").on("click",(function(){e.handler.call(this,d)})),a(d).find(".modal-footer").append(l)})),a(d).find(".modal-body").append(n),s.bodyClass&&a(d).find(".modal-body").addClass(s.bodyClass),s.footerClass&&a(d).find(".modal-footer").addClass(s.footerClass),s.created.call(this,d,s);var c=a(d).find(".modal-body form"),m=d.find("button[type=submit]");if(a("body").append(d),s.appended.call(this,a("#"+o),c,s),c.length){s.autoFocus&&a(d).on("shown.bs.modal",(function(){"boolean"==typeof s.autoFocus?c.find("input:eq(0)").focus():"string"==typeof s.autoFocus&&c.find(s.autoFocus).length&&c.find(s.autoFocus).focus()}));var u={startProgress:function(){d.addClass("modal-progress")},stopProgress:function(){d.removeClass("modal-progress")}};c.find("button").length||a(c).append('<button class="d-none" id="'+o+'-submit"></button>'),m.click((function(){c.submit()})),c.submit((function(e){u.startProgress(),s.onFormSubmit.call(this,d,e,u)}))}a(document).on("click","."+i,(function(){var e=a("#"+o).modal(s.modal);return s.removeOnDismiss&&e.on("hidden.bs.modal",(function(){e.remove()})),!1}))}))},a.destroyModal=function(e){e.modal("hide"),e.on("hidden.bs.modal",(function(){}))},a.cardProgress=function(e,s){s=a.extend({dismiss:!1,dismissText:"Cancel",spinner:!0,onDismiss:function(){}},s);var t=a(e);if(t.addClass("card-progress"),0==s.spinner&&t.addClass("remove-spinner"),1==s.dismiss){var o='<a class="btn btn-danger card-progress-dismiss">'+s.dismissText+"</a>";o=a(o).off("click").on("click",(function(){t.removeClass("card-progress"),t.find(".card-progress-dismiss").remove(),s.onDismiss.call(this,t)})),t.append(o)}return{dismiss:function(e){a.cardProgressDismiss(t,e)}}},a.cardProgressDismiss=function(e,s){var t=a(e);t.removeClass("card-progress"),t.find(".card-progress-dismiss").remove(),s&&s.call(this,t)},a.chatCtrl=function(e,s){s=a.extend({position:"chat-right",text:"",time:moment((new Date).toISOString()).format("hh:mm"),picture:"",type:"text",timeout:0,onShow:function(){}},s);var t=a(e),o=(e='<div class="chat-item '+s.position+'" style="display:none"><img src="'+s.picture+'"><div class="chat-details"><div class="chat-text">'+s.text+'</div><div class="chat-time">'+s.time+"</div></div></div>",'<div class="chat-item chat-left chat-typing" style="display:none"><img src="'+s.picture+'"><div class="chat-details"><div class="chat-text"></div></div></div>'),i=e;"typing"==s.type&&(i=o),s.timeout>0?setTimeout((function(){t.find(".chat-content").append(a(i).fadeIn())}),s.timeout):t.find(".chat-content").append(a(i).fadeIn());var n=0;t.find(".chat-content .chat-item").each((function(){n+=a(this).outerHeight()})),setTimeout((function(){t.find(".chat-content").scrollTop(n,-1)}),100),s.onShow.call(this,i)}}(jQuery,0,0)},7425:()=>{},6700:(e,a,s)=>{var 
t={"./af":2786,"./af.js":2786,"./ar":867,"./ar-dz":4130,"./ar-dz.js":4130,"./ar-kw":6135,"./ar-kw.js":6135,"./ar-ly":6440,"./ar-ly.js":6440,"./ar-ma":7702,"./ar-ma.js":7702,"./ar-sa":6040,"./ar-sa.js":6040,"./ar-tn":7100,"./ar-tn.js":7100,"./ar.js":867,"./az":1083,"./az.js":1083,"./be":9808,"./be.js":9808,"./bg":8338,"./bg.js":8338,"./bm":7438,"./bm.js":7438,"./bn":8905,"./bn-bd":6225,"./bn-bd.js":6225,"./bn.js":8905,"./bo":1560,"./bo.js":1560,"./br":1278,"./br.js":1278,"./bs":622,"./bs.js":622,"./ca":2468,"./ca.js":2468,"./cs":5822,"./cs.js":5822,"./cv":877,"./cv.js":877,"./cy":7373,"./cy.js":7373,"./da":4780,"./da.js":4780,"./de":9740,"./de-at":217,"./de-at.js":217,"./de-ch":894,"./de-ch.js":894,"./de.js":9740,"./dv":5300,"./dv.js":5300,"./el":837,"./el.js":837,"./en-au":8348,"./en-au.js":8348,"./en-ca":7925,"./en-ca.js":7925,"./en-gb":2243,"./en-gb.js":2243,"./en-ie":6436,"./en-ie.js":6436,"./en-il":7207,"./en-il.js":7207,"./en-in":4175,"./en-in.js":4175,"./en-nz":6319,"./en-nz.js":6319,"./en-sg":1662,"./en-sg.js":1662,"./eo":2915,"./eo.js":2915,"./es":7093,"./es-do":5251,"./es-do.js":5251,"./es-mx":6112,"./es-mx.js":6112,"./es-us":1146,"./es-us.js":1146,"./es.js":7093,"./et":5603,"./et.js":5603,"./eu":7763,"./eu.js":7763,"./fa":6959,"./fa.js":6959,"./fi":1897,"./fi.js":1897,"./fil":2549,"./fil.js":2549,"./fo":4694,"./fo.js":4694,"./fr":4470,"./fr-ca":3049,"./fr-ca.js":3049,"./fr-ch":2330,"./fr-ch.js":2330,"./fr.js":4470,"./fy":5044,"./fy.js":5044,"./ga":9295,"./ga.js":9295,"./gd":2101,"./gd.js":2101,"./gl":8794,"./gl.js":8794,"./gom-deva":7884,"./gom-deva.js":7884,"./gom-latn":3168,"./gom-latn.js":3168,"./gu":5349,"./gu.js":5349,"./he":4206,"./he.js":4206,"./hi":94,"./hi.js":94,"./hr":316,"./hr.js":316,"./hu":2138,"./hu.js":2138,"./hy-am":1423,"./hy-am.js":1423,"./id":9218,"./id.js":9218,"./is":135,"./is.js":135,"./it":626,"./it-ch":150,"./it-ch.js":150,"./it.js":626,"./ja":9183,"./ja.js":9183,"./jv":4286,"./jv.js":4286,"./ka":2105,"./ka.js":2105,"./kk":7772,"./kk.js":7772,"./km":8758,"./km.js":8758,"./kn":9282,"./kn.js":9282,"./ko":3730,"./ko.js":3730,"./ku":1408,"./ku.js":1408,"./ky":3291,"./ky.js":3291,"./lb":6841,"./lb.js":6841,"./lo":5466,"./lo.js":5466,"./lt":7010,"./lt.js":7010,"./lv":7595,"./lv.js":7595,"./me":9861,"./me.js":9861,"./mi":5493,"./mi.js":5493,"./mk":5966,"./mk.js":5966,"./ml":7341,"./ml.js":7341,"./mn":5115,"./mn.js":5115,"./mr":370,"./mr.js":370,"./ms":9847,"./ms-my":1237,"./ms-my.js":1237,"./ms.js":9847,"./mt":2126,"./mt.js":2126,"./my":6165,"./my.js":6165,"./nb":4924,"./nb.js":4924,"./ne":6744,"./ne.js":6744,"./nl":3901,"./nl-be":9814,"./nl-be.js":9814,"./nl.js":3901,"./nn":3877,"./nn.js":3877,"./oc-lnc":2135,"./oc-lnc.js":2135,"./pa-in":5858,"./pa-in.js":5858,"./pl":4495,"./pl.js":4495,"./pt":9520,"./pt-br":7971,"./pt-br.js":7971,"./pt.js":9520,"./ro":6459,"./ro.js":6459,"./ru":238,"./ru.js":238,"./sd":950,"./sd.js":950,"./se":490,"./se.js":490,"./si":124,"./si.js":124,"./sk":4249,"./sk.js":4249,"./sl":4985,"./sl.js":4985,"./sq":1104,"./sq.js":1104,"./sr":9131,"./sr-cyrl":9915,"./sr-cyrl.js":9915,"./sr.js":9131,"./ss":5893,"./ss.js":5893,"./sv":8760,"./sv.js":8760,"./sw":1172,"./sw.js":1172,"./ta":7333,"./ta.js":7333,"./te":3110,"./te.js":3110,"./tet":2095,"./tet.js":2095,"./tg":7321,"./tg.js":7321,"./th":9041,"./th.js":9041,"./tk":9005,"./tk.js":9005,"./tl-ph":5768,"./tl-ph.js":5768,"./tlh":9444,"./tlh.js":9444,"./tr":2397,"./tr.js":2397,"./tzl":8254,"./tzl.js":8254,"./tzm":1106,"./tzm-latn":699,"./tzm-latn.js":699,"./tzm.js":1106,"./ug-cn":9288,"./ug-cn.js":9
288,"./uk":7691,"./uk.js":7691,"./ur":3795,"./ur.js":3795,"./uz":6791,"./uz-latn":588,"./uz-latn.js":588,"./uz.js":6791,"./vi":5666,"./vi.js":5666,"./x-pseudo":4378,"./x-pseudo.js":4378,"./yo":5805,"./yo.js":5805,"./zh-cn":3839,"./zh-cn.js":3839,"./zh-hk":5726,"./zh-hk.js":5726,"./zh-mo":9807,"./zh-mo.js":9807,"./zh-tw":4152,"./zh-tw.js":4152};function o(e){var a=i(e);return s(a)}function i(e){if(!s.o(t,e)){var a=new Error("Cannot find module '"+e+"'");throw a.code="MODULE_NOT_FOUND",a}return t[e]}o.keys=function(){return Object.keys(t)},o.resolve=i,e.exports=o,o.id=6700}},e=>{var a=a=>e(e.s=a);e.O(0,[170,898],(()=>(a(7080),a(7425))));e.O()}]);
# Generated by Django 1.11.14 on 2018-08-03 13:47
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('case_importer', '0010_caseuploadformrecord'),
    ]

    operations = [
        migrations.RunSQL(
            # This migration is not reversible because blobs created
            # since the migration will no longer be accessible after
            # reversing because the old blob db would use the wrong path
            """
            UPDATE case_importer_caseuploadfilemeta
            SET identifier = 'case_importer/' || identifier
            WHERE identifier NOT LIKE 'case_importer/%'
            """
        ),
    ]
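# For contrast with the intentionally one-way migration above: when a data
# rewrite IS safe to undo, RunSQL accepts a reverse_sql argument. A generic
# sketch with a hypothetical table and column (not part of this app):
from django.db import migrations

reversible_op = migrations.RunSQL(
    sql="UPDATE myapp_item SET slug = 'v2/' || slug "
        "WHERE slug NOT LIKE 'v2/%'",
    reverse_sql="UPDATE myapp_item SET slug = substr(slug, 4) "
                "WHERE slug LIKE 'v2/%'",
)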
YUI.add('async-queue-tests', function(Y) {

var suite = new Y.Test.Suite("AsyncQueue");

// FIXME: remove this and update the tests to handle the asynchronicity
Y.AsyncQueue.defaults.timeout = -1;

function f() {}

suite.add(new Y.Test.Case({
    name : "Queue instantiation",

    test_instantiation : function () {
        var basic         = new Y.AsyncQueue(),
            withCallbacks = new Y.AsyncQueue(f, f, f, f);

        Y.Assert.areSame(true, basic instanceof Y.AsyncQueue);
        Y.Assert.areSame(0, basic.size());
        Y.Assert.areSame(0, basic._q.length);

        Y.Assert.areSame(4, withCallbacks.size());
        Y.Assert.isFunction(withCallbacks._q[0]);
        Y.Assert.isFunction(withCallbacks.next());
        Y.Assert.areSame(f, withCallbacks.next().fn);
    }
}));

suite.add(new Y.Test.Case({
    name : "queue-base",

    test_next : function () {
        var i = 0;

        YUI({
            useBrowserConsole : false,
            logInclude : { TestRunner: true }
        }).use('queue-base', function (Y) {
            function inc() { i++; }

            var callback,
                q = new Y.Queue(inc, inc, "string", inc);

            while ((callback = q.next())) {
                if (Y.Lang.isFunction(callback)) {
                    callback();
                }
            }
        });

        Y.Assert.areSame(3, i);
    }
}));

suite.add(new Y.Test.Case({
    name : "Test API",

    test_chaining : function () {
        var q = new Y.AsyncQueue();

        q.defaults = { timeout : 10 };

        Y.Assert.areSame(q, q.add());
        Y.Assert.areSame(q, q.add(f));
        Y.Assert.areSame(q, q.add(f, f, { fn: f, id: 'a' }, "garbage"));
        Y.Assert.areSame(q, q.pause());
        Y.Assert.areSame(q, q.promote('a'));
        Y.Assert.areSame(q, q.remove('a'));
        Y.Assert.areSame(q, q.run());
        Y.Assert.areSame(q, q.stop());
    },

    test_add : function () {
        var q = new Y.AsyncQueue(f);

        Y.Assert.areSame(1, q.size());

        q = new Y.AsyncQueue().add(f);
        Y.Assert.areSame(1, q.size());

        q.add(f, f).add(f, f, f);
        Y.Assert.areSame(6, q.size());

        q.add("Only functions and objects are allowed", undefined, null, 1, true);
        Y.Assert.areSame(6, q.size());

        q.add({}, {}); // empty objects are ok, since config can be defaulted
        Y.Assert.areSame(8, q.size());

        // Add from within a callback
        var count = 0;
        function x() { count++; }
        function addToQueue() { this.add(x); }

        // Three x calls scheduled. A fourth added during a callback
        q = new Y.AsyncQueue(x, f, x, addToQueue, f, x).run();

        Y.Assert.areSame(4, count);
    },

    test_remove : function () {
        var results = '',
            self = this,
            q = new Y.AsyncQueue(
                function () {
                    Y.Assert.areSame(7, this.size());
                    results += 'R';
                },
                { id: "remove me", fn: X },
                { id: "not removed",
                  fn: function () {
                    results += 'E';
                    this.remove('me too');
                  },
                  timeout: 10 },
                { id: "me too", fn: X },
                function () {
                    this.remove("fail");
                    if (q.size() !== 4) {
                        self.resume(function () {
                            Y.Assert.fail("Expected 3, got " + q.size() +
                                " - remove(n) should defer until callback completion");
                        });
                    }
                    results += 'M';
                },
                { id: "fail",
                  fn: function () {
                    self.resume(function () {
                        Y.Assert.fail("This callback should have been removed");
                    });
                  } },
                function () {
                    if (q.size() !== 2) {
                        self.resume(function () {
                            Y.Assert.fail("Size should be 1");
                        });
                    }
                    results += 'OV';
                },
                function () {
                    self.resume(function () {
                        results += 'E';
                        Y.Assert.areSame('REMOVE', results);
                    });
                });

        function X() {
            q.run();
            results += 'X';
        }

        Y.Assert.areSame(8, q.size());

        // Removal when the Queue is inactive is immediate
        q.remove("remove me");
        Y.Assert.areSame(7, q.size());

        q.run();

        Y.Assert.areSame('R', results);
        Y.Assert.areSame(6, q.size());

        q.remove("not removed");
        Y.Assert.areSame(6, q.size());

        this.wait();
    },

    test_promote : function () {
        function O() { results += 'O'; }

        var results = '',
            self = this,
            q = new Y.AsyncQueue(
                function () { results += "R"; },
                { id: "p", fn: function () { results += 'P'; } },
                O,
                { id: 'm',
                  fn: function () {
                    if (this.count++ > 3) {
                        results += 'M';
                    } else if (!this.count) {
                        q.promote('o');
                    }
                  },
                  context : { count : 0 },
                  iterations : 5 },
                { id : 'o', fn: O, timeout: 10 },
                function () { results += 'E'; },
                { id : 't', fn : function () { results += 'T'; } },
                function () {
                    self.resume(function () {
                        Y.Assert.areSame('PROMOTE', results);
                    });
                });

        Y.Assert.isUndefined(q._q[0].id);
        q.promote('p');
        Y.Assert.areSame('p', q._q[0].id);

        q.run();
        Y.Assert.areSame('PROM', results);
        q.promote('t');

        this.wait();
    },

    test_pause : function () {
        var results = '',
            self = this,
            q = new Y.AsyncQueue(
                function () { results += 'P'; },
                { fn: function () { results += 'A'; }, timeout : 10 },
                function () { results += 'U'; },
                function () {
                    results += 'S';
                    this.pause();
                    self.resume(function () {
                        Y.Assert.areSame('PAUS', results);
                        setTimeout(function () {
                            q.run();
                        }, 10);
                        self.wait();
                    });
                },
                function () {
                    results += 'E';
                    self.resume(function () {
                        Y.Assert.areSame('PAUSE', results);
                    });
                });

        Y.Assert.areSame(5, q.size());

        q.run();

        // Test during timeout
        Y.Assert.areSame('P', results);
        q.pause();

        setTimeout(function () {
            self.resume(function () {
                q.run();
                self.wait();
            });
        }, 20);

        this.wait();
    },

    test_stop : function () {
        var results = "",
            self = this,
            q = new Y.AsyncQueue(
                function () { results += 'S'; },
                function () { results += 'T'; },
                function () { results += 'O'; },
                function () { results += 'P'; },
                { fn: function () {
                    self.resume(function () {
                        Y.Assert.fail("Synchronous q.stop() should have cleared this async callback");
                    });
                  },
                  timeout: 10 });

        q.run();
        q.stop();

        Y.Assert.areSame('STOP', results);
        Y.Assert.areSame(0, q.size());

        setTimeout(function () {
            self.resume(function () {
                Y.Assert.areSame('STOP', results);
                Y.Assert.areSame(false, q.isRunning());
            });
        }, 100);

        q.run();
        this.wait();
    },

    test_stop_inside_the_callback : function () {
        var results = "",
            self = this,
            completeEvt = false,
            q = new Y.AsyncQueue(
                function () { this.stop(); },
                function () {
                    Y.Assert.fail("q.stop() should have cleared this callback");
                });

        q.defaults = { timeout: -1 }; // sync queue

        q.on('complete', function () {
            completeEvt = true;
        });

        q.run();
        Y.Assert.areSame(0, q.size());
        Y.Assert.areSame(false, q.isRunning());
        if (!completeEvt) {
            Y.Assert.fail("q.stop() should fire the 'complete' event");
        }
    },

    test_stop_inside_the_callback_async : function () {
        var results = "",
            self = this,
            q = new Y.AsyncQueue(
                function () { this.stop(); },
                function () {
                    self.resume(function () {
                        Y.Assert.fail("q.stop() should have cleared this callback");
                    });
                });

        q.defaults = { timeout: 10 }; // async queue

        q.on('complete', function () {
            self.resume(function () {
                Y.Assert.areSame(0, q.size());
                Y.Assert.areSame(false, q.isRunning());
            });
        });

        q.run();
        this.wait();
    },

    test_getCallback : function () {
        var c,
            q = new Y.AsyncQueue(
                { id : 'a', test: 1 },
                { id : 'b', test: 2, fn: function () { this.pause(); } },
                { id : 'c', test: 3 },
                { id : 'd', test: 4,
                  fn: function () {
                    Y.Assert.areSame(this._q[0], this.getCallback('d'));
                  } },
                { id : 'a', test: 5 });

        q.defaults = { fn: function () {} };

        c = q.getCallback('a');
        Y.Assert.isObject(c);
        Y.Assert.areSame(1, c.test);

        q.run();

        c = q.getCallback('a');
        Y.Assert.isObject(c);
        Y.Assert.areSame(5, c.test);

        q.run();
    },

    test_isRunning : function () {
        var self = this,
            q = new Y.AsyncQueue(
                function () {
                    Y.Assert.areSame(true, this.isRunning());
                },
                { fn: function () {
                    q.pause();
                    self.resume(function () {
                        Y.Assert.areSame(false, q.isRunning());
                    });
                  },
                  timeout: 10 });

        Y.Assert.areSame(false, q.isRunning());

        q.run();

        Y.Assert.areSame(true, q.isRunning());

        /*
        setTimeout(function () {
            self.resume(function () {
                Y.Assert.areSame(false, q.isRunning());

                q.run(); // run to completion
                Y.Assert.areSame(false, q.isRunning());
            });
        }, 100);
        */

        this.wait();
    }
}));

suite.add(new Y.Test.Case({
    name : "Test callback config",

    test_fn : function () {
        var results = '',
            q = new Y.AsyncQueue(
                function () { results += 'R'; },
                {},
                function () { results += 'N'; });

        q.defaults = { fn: function () { results += 'U'; } };

        q.run();
        Y.Assert.areSame("RUN", results);

        q.add(
            { fn : "results += 'X'" },
            { fn : /results += 'X'/ },
            { fn : function () {
                Y.Assert.areSame("RUN", results);
            } }).run();
    },

    test_context : function () {
        var a = {
                id : 'a',
                test : 'A',
                fn : function () {
                    Y.Assert.areSame('A', this.test);
                }
            },
            q = new Y.AsyncQueue(
                { test : 'callbacks exec from Queue ctx by default' },
                function () {
                    Y.Assert.areSame('X', this.test);
                },
                { fn: function () {
                    Y.Assert.areSame('X', this.test);
                    this.test = 'Z';
                } },
                function () {
                    Y.Assert.areSame('Z', this.test);
                },
                a,
                { fn: function () {
                    Y.Assert.areSame('B', this.test);
                  },
                  context : { test : 'B' } });

        q.getCallback('a').context = a;

        q.test = 'X';

        q.run();
    },

    test_args : function () {
        (new Y.AsyncQueue(
            function () {
                Y.Assert.areSame(0, arguments.length);
            },
            { fn: function () {
                Y.ArrayAssert.itemsAreSame([1, 2, 3], arguments);
              },
              args : [1, 2, 3] },
            { fn: function () {
                Y.ArrayAssert.itemsAreSame(['X'], arguments);
              },
              args : 'X' })).run();
    },

    test_iterations : function () {
        var results = '',
            self = this;

        (new Y.AsyncQueue(
            function () { results += 'A'; },
            { fn: function () { results += 'B'; } },
            { fn: function () { results += 'C'; }, iterations: 3 },
            { fn: function () { results += 'D'; }, iterations: 3, timeout: 10 },
            { fn: function () {
                self.resume(function () {
                    Y.Assert.areSame('ABCCCDDD', results);
                });
            } })).run();

        this.wait();
    },

    test_until : function () {
        var results = '',
            self = this;

        (new Y.AsyncQueue(
            function () { results += 'A'; },
            { fn: function () { results += 'B'; },
              until: function () {
                this.data = this.data.slice(1);
                return !this.data;
}, data : '1234' }, { fn: function () { results += 'C'; }, until: function () { return results.length >= 7; }, timeout: 10 }, { fn: function () { self.resume(function () { Y.Assert.areSame('ABBBCCC', results); }); } })).run(); Y.Assert.areSame('ABBB', results); this.wait(); }, test_until_after_paused_callback: function () { var results = '', self = this; (new Y.AsyncQueue( function () { this.pause(); Y.later(0, this, function () { results += 'A'; this.run(); }); }, { fn: function () { results += 'B'; // should be executed once }, until: function () { return results === '' || results === 'AB'; } }, { fn: function () { self.resume(function () { Y.Assert.areSame('AB', results); }); } })).run(); Y.Assert.areSame('', results); this.wait(); }, test_timeout : function () { function inc() { ++results; } var results = 0, self = this, // default timeout -1 triggers synchronous mode q = new Y.AsyncQueue( inc, // -1 == sync { fn: inc }, // -1 == sync { fn: inc, timeout: 10, iterations: 4 }, { fn: inc, timeout: -300, iterations: 4 }, // neg == sync // garbage timeout doesn't throw error, but is async { fn: inc, timeout: 'a', until: function () { return results >= 10; } }, function () { self.resume(function () { Y.Assert.areSame(10,results); }); }).run(); Y.Assert.areSame(2, results); this.wait(); } /* test_waitForIOResponse : function () { function good() { var url = 'queue.html?cachebuster='+Y.guid(); Y.io(url, { on : { success : function () { results.success++; }, failure : function () { results.failure++; } } }); } function bad() { var url = Y.guid() + (Math.random() * 1000) + '.html'; // 404 Y.io(url, { on : { success : function () { results.success++; }, failure : function () { results.failure++; } } }); } function late() { var url = 'io_timeout.php?cachebuster=' + Y.guid(); Y.io(url, { on : { success : function () { results.success++; }, failure : function () { results.failure++; }, abort : function () { results.failure++; } }, timeout : 10 }); } function test(s,f,step) { return function () { var msg = "Incorrect number of ", data; if (results.success !== s) { msg += 'successes'; data = [s,results.success]; } else if (results.failure !== f) { msg += 'failures'; data = [f,results.failure]; } else { msg = ''; } if (msg) { msg += ' at step ' + step + '. 
Expected ' + data[0] + ', got ' + data[1]; q.stop(); self.resume(function () { Y.Assert.fail(msg); }); } } } var results = { success: 0, failure: 0 }, self = this, q = new Y.AsyncQueue( { fn : good, waitForIOResponse: true }, test(1,0,1), { fn : function () { good(); good(); good(); }, waitForIOResponse: true }, test(4,0,2), { fn : function () { bad(); good(); late(); }, waitForIOResponse: true }, test(5,2,3), { fn : function () { late(); good(); }, waitForIOResponse: true }, test(6,3,4), { // wait not triggered fn : function () { bad(); bad(); } }, test(6,3,5), function () { self.resume(function () {}); }).run(); this.wait(); } */ })); suite.add(new Y.Test.Case({ name : "Test Events", _should: { ignore: { //Ignored because it uses IO to make a request // and the request is not formatted propertly in Nodejs test_events: Y.UA.nodejs } }, test_events : function () { var results = [], self = this, q = new Y.AsyncQueue( function () { results.push("E"); this.pause(); }, { fn: function () { results.push("E"); }, until: function () { return results.length > 25; }, timeout: 10 }, { id: 'x', fn: function () { results.push("X"); } }, { id: 'v', fn: function () { results.push("V"); }, iterations: 3 }, { fn: function () { results.push("N"); } }); q.on('execute',function () { results.push("(onExec)"); }); q.after('execute', function () { results.push("(afterExec)"); }); q.on("shift", function () { results.push("(onShift)"); }); q.after("shift", function () { results.push("(afterShift)"); }); q.on("remove", function () { results.push("(onRemove)"); }); q.after("remove", function () { results.push("(afterRemove)"); }); q.on("add", function (e) { results.push("(onAdd)"); }); q.after("add", function (e) { var data = e.added; results.push("(afterAdd)"); if (!data || data.length !== 4) { self.resume(function () { Y.Assert.fail("add args not right"); }); } }); q.on("promote", function () { results.push("(onPromote)"); }); q.after("promote", function () { results.push("(afterPromote)"); setTimeout(function () { q.run(); }, 0); }); q.on("complete", function () { results.push("(onComplete)"); self.resume(function () { Y.ArrayAssert.itemsAreEqual([ "(onAdd)", "(afterAdd)", "(onRemove)", "(afterRemove)", "(onExec)", "E", "(afterExec)", "(onPromote)", "(afterPromote)", "(onExec)", "V", "(afterExec)", "(onExec)", "V", "(afterExec)", "(onExec)", "V", "(afterExec)", "(onShift)", "(afterShift)", "(onShift)", "(afterShift)", "(onExec)", "E", "(afterExec)", "(onExec)", "E", "(afterExec)", "(onShift)", "(afterShift)", "(onExec)", "N", "(afterExec)", "(onShift)", "(afterShift)", /* "(onExec)", "T", "(afterExec)", "(onShift)", "(afterShift)", */ "(onExec)", "S", "(afterExec)", /* // no shift because stop() flushed _q "(onShift)", "(afterShift)", */ "(onComplete)" ], results); }); }); q.add(function () { results.push("S"); this.stop(); },f,f,f); q.remove('x'); q.run(); q.promote('v'); this.wait(); }, test_preventCallback : function () { function inc () { i++; } var i = 0, q = new Y.AsyncQueue(inc,inc, { foo: true, fn: inc, iterations: 20 }, { fn: inc, until : function () { return i >= 10; } }); q.on('execute', function (e) { if (e.callback.foo) { e.preventDefault(); } }); q.run(); Y.Assert.areSame(10,i); q = new Y.AsyncQueue(inc, inc, inc, inc, inc, inc, inc, inc, inc, inc); q.on('shift', function (e) { if (i % 2) { e.preventDefault(); q._q[0].iterations++; } }); q.run(); Y.Assert.areSame(30, i); } })); suite.add(new Y.Test.Case({ name : "From bugs", // Bug 2528602 test_double_exec_when_pause_and_run_async : function () { 
var q = new Y.AsyncQueue(), register = 0, self = this; q.defaults.timeout = 10; q.add({ id: 'one', fn: function() { q.pause(); register += 1; q.run(); } }, { id: 'two', fn: function() { register += 10; }, iterations: 1 }); q.on( 'complete', function () { self.resume( function () { Y.log( register ); Y.Assert.areSame( 11, register ); } ); } ); q.run(); this.wait(); } })); Y.Test.Runner.add(suite); }, '@VERSION@' ,{requires:['async-queue', 'test', 'io-base']});
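# The suite above leans on one convention throughout: an AsyncQueue callback
# with a negative timeout runs synchronously during run(), while a
# non-negative timeout defers it (hence "defaults.timeout = -1" at the top,
# and asserts like "-1 == sync" in test_timeout). A minimal Python sketch of
# that scheduling rule, for illustration only -- this is not the YUI
# implementation, and SketchQueue is a made-up name:
import threading

class SketchQueue:
    def __init__(self, *callbacks, timeout=-1):
        self._q = list(callbacks)
        self.timeout = timeout  # seconds; negative means "run synchronously"

    def run(self):
        if self.timeout < 0:
            # synchronous mode: drain the whole queue before run() returns
            while self._q:
                self._q.pop(0)()
        else:
            self._step()

    def _step(self):
        if not self._q:
            return
        self._q.pop(0)()
        # asynchronous mode: schedule the next callback on a timer
        threading.Timer(self.timeout, self._step).start()

results = []
q = SketchQueue(lambda: results.append('a'), lambda: results.append('b'))
q.run()
assert results == ['a', 'b']  # both callbacks ran before run() returned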
from torchvision.datasets import MNIST as torchMNIST
from torchvision.datasets import FashionMNIST
from torch.utils.data import Subset
import torchvision.transforms.functional as TF
import torchvision.transforms as transforms
import torch
import numpy as np
import os

import nn_ood


class MNIST(Subset):
    def __init__(self, split, N=None):
        dataset = torchMNIST
        root = os.path.join(nn_ood.DATASET_FOLDER, "MNIST")
        if split == "train":
            target_criterion = lambda x: x < 5
        elif split == "val":
            target_criterion = lambda x: x < 5
        elif split == "ood":
            target_criterion = lambda x: x >= 5
        elif split == "fashion":
            dataset = FashionMNIST
            root = os.path.join(nn_ood.DATASET_FOLDER, "FashionMNIST")
            target_criterion = lambda x: x == x  # elementwise True: keep all
        else:
            raise ValueError("unknown split: %r" % split)

        self.mnist = dataset(root=root, train=(split == "train"), download=True)
        self.normalize = transforms.Normalize((0.1307,), (0.3081,))

        # keep only the indices whose targets satisfy this split's criterion
        valid_idx = np.flatnonzero(target_criterion(self.mnist.targets))
        if N is not None:
            valid_idx = valid_idx[:N]

        super().__init__(self.mnist, valid_idx)

    def __getitem__(self, i):
        input, target = super(MNIST, self).__getitem__(i)
        target = target % 5
        input = transforms.ToTensor()(input)
        input = self.normalize(input)
        return input, target
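# Usage sketch for the dataset above (assumes the nn_ood package is
# importable and nn_ood.DATASET_FOLDER points at a writable directory;
# torchvision downloads MNIST/FashionMNIST there on first use).
from torch.utils.data import DataLoader

train_set = MNIST("train", N=1000)  # digits 0-4 only, first 1000 samples
ood_set = MNIST("ood")              # digits 5-9, remapped into 0-4 by `% 5`
loader = DataLoader(train_set, batch_size=32, shuffle=True)

images, targets = next(iter(loader))
print(images.shape)   # torch.Size([32, 1, 28, 28]), normalized tensors
print(targets.max())  # always < 5 after the `% 5` remapping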
//%includeGuardStart {
#ifndef INFERNALMAP_H
#define INFERNALMAP_H
//%includeGuardStart } droX77VdyETABT3BzCPPFg

//%Header {
/*****************************************************************************
 *
 * File: src/Infernal/InfernalMap.h
 *
 * Copyright: Andy Southgate 2002-2007, 2020
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 ****************************************************************************/
//%Header } vaki6/uxfoFMQn+qaEOcVg

/*
 * $Id: InfernalMap.h,v 1.7 2006/06/01 20:12:58 southa Exp $
 * $Log: InfernalMap.h,v $
 * Revision 1.7  2006/06/01 20:12:58  southa
 * Initial texture caching
 *
 * Revision 1.6  2006/06/01 15:39:03  southa
 * DrawArray verification and fixes
 *
 * Revision 1.5  2005/05/19 13:02:05  southa
 * Mac release work
 *
 * Revision 1.4  2004/01/06 20:46:50  southa
 * Build fixes
 *
 * Revision 1.3  2004/01/02 21:13:08  southa
 * Source conditioning
 *
 * Revision 1.2  2003/10/04 12:44:35  southa
 * File renaming
 *
 * Revision 1.1  2003/10/04 12:23:05  southa
 * File renaming
 *
 * Revision 1.16  2003/09/17 19:40:32  southa
 * Source conditioning upgrades
 *
 * Revision 1.15  2003/08/21 23:08:47  southa
 * Fixed file headers
 *
 * Revision 1.14  2003/01/13 14:31:59  southa
 * Build frameworks for Mac OS X
 *
 * Revision 1.13  2003/01/12 17:32:55  southa
 * Mushcore work
 *
 * Revision 1.12  2003/01/11 13:03:13  southa
 * Use Mushcore header
 *
 * Revision 1.11  2003/01/07 17:13:43  southa
 * Fixes for gcc 3.1
 *
 * Revision 1.10  2002/12/20 13:17:40  southa
 * Namespace changes, licence changes and source conditioning
 *
 * Revision 1.9  2002/10/22 20:42:05  southa
 * Source conditioning
 *
 * Revision 1.8  2002/10/11 14:01:13  southa
 * Lighting work
 *
 * Revision 1.7  2002/10/10 18:25:15  southa
 * Light links and test lights
 */

#include "mushMushcore.h"
#include "mushGL.h"

template<class T>
class InfernalMap
{
public:
    InfernalMap();
    void SizeSet(Mushware::U32 inX, Mushware::U32 inY);
    const T& ElementGet(const GLPoint& inPoint) const
    {
        return ElementGet(inPoint.U32XGet(), inPoint.U32YGet());
    }
    const T& ElementGet(Mushware::U32 inX, Mushware::U32 inY) const;
    void ElementSet(const T& inValue, Mushware::U32 inX, Mushware::U32 inY);

private:
    Mushware::U32 m_xSize;
    Mushware::U32 m_ySize;
    MushwareValarray<T> m_map;
};

template<class T>
InfernalMap<T>::InfernalMap() :
    m_xSize(0),
    m_ySize(0)
{
}

template<class T>
inline void
InfernalMap<T>::SizeSet(Mushware::U32 inX, Mushware::U32 inY)
{
    m_xSize = inX;
    m_ySize = inY;
    m_map.resize(m_xSize * m_ySize);
}

template<class T>
inline void
InfernalMap<T>::ElementSet(const T& inValue, Mushware::U32 inX, Mushware::U32 inY)
{
    MUSHCOREASSERT(inX < m_xSize);
    MUSHCOREASSERT(inY < m_ySize);
    m_map[inY * m_xSize + inX] = inValue;
}

template<class T>
inline const T&
InfernalMap<T>::ElementGet(Mushware::U32 inX, Mushware::U32 inY) const
{
    if (inX >= m_xSize)
    {
        std::cerr << "inX=" << inX << ", m_xSize=" << m_xSize << std::endl;
    }
    MUSHCOREASSERT(inX < m_xSize);
    MUSHCOREASSERT(inY < m_ySize);
    return m_map[inY * m_xSize + inX];
}

//%includeGuardEnd {
#endif
//%includeGuardEnd } hNb4yLSsimk5RFvFdUzHEw
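# InfernalMap stores a 2D grid in one flat array and addresses element (x, y)
# at index y * xSize + x. A row-major indexing sketch in Python, for
# illustration only (FlatMap is a made-up name, not part of the codebase):
class FlatMap:
    def __init__(self, x_size, y_size, fill=None):
        self.x_size, self.y_size = x_size, y_size
        self.data = [fill] * (x_size * y_size)

    def get(self, x, y):
        # mirrors the MUSHCOREASSERT bounds checks above
        assert x < self.x_size and y < self.y_size
        return self.data[y * self.x_size + x]

    def set(self, value, x, y):
        assert x < self.x_size and y < self.y_size
        self.data[y * self.x_size + x] = value

m = FlatMap(4, 3)
m.set("wall", 2, 1)
assert m.get(2, 1) == "wall"  # element (2, 1) -> flat index 1 * 4 + 2 == 6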
#include <linux/autoconf.h>
#include <linux/module.h>
#include <linux/version.h>
#include <linux/ioport.h>

void skull_release(unsigned int port, unsigned int range)
{
    release_region(port, range);
}

void skull_cleanup(void)
{
    /* should put real values here, e.g.: skull_release(0, 0); */
}

module_exit(skull_cleanup);
#ifndef v1_pod_anti_affinity_TEST
#define v1_pod_anti_affinity_TEST

// the following is to include only the main from the first c file
#ifndef TEST_MAIN
#define TEST_MAIN
#define v1_pod_anti_affinity_MAIN
#endif // TEST_MAIN

#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <stdbool.h>
#include "../external/cJSON.h"
#include "../model/v1_pod_anti_affinity.h"

v1_pod_anti_affinity_t* instantiate_v1_pod_anti_affinity(int include_optional);

v1_pod_anti_affinity_t* instantiate_v1_pod_anti_affinity(int include_optional) {
    v1_pod_anti_affinity_t* v1_pod_anti_affinity = NULL;
    if (include_optional) {
        v1_pod_anti_affinity = v1_pod_anti_affinity_create(
            list_createList(),
            list_createList()
        );
    } else {
        v1_pod_anti_affinity = v1_pod_anti_affinity_create(
            list_createList(),
            list_createList()
        );
    }
    return v1_pod_anti_affinity;
}

#ifdef v1_pod_anti_affinity_MAIN

void test_v1_pod_anti_affinity(int include_optional) {
    v1_pod_anti_affinity_t* v1_pod_anti_affinity_1 = instantiate_v1_pod_anti_affinity(include_optional);
    cJSON* jsonv1_pod_anti_affinity_1 = v1_pod_anti_affinity_convertToJSON(v1_pod_anti_affinity_1);
    printf("v1_pod_anti_affinity :\n%s\n", cJSON_Print(jsonv1_pod_anti_affinity_1));
    v1_pod_anti_affinity_t* v1_pod_anti_affinity_2 = v1_pod_anti_affinity_parseFromJSON(jsonv1_pod_anti_affinity_1);
    cJSON* jsonv1_pod_anti_affinity_2 = v1_pod_anti_affinity_convertToJSON(v1_pod_anti_affinity_2);
    printf("repeating v1_pod_anti_affinity:\n%s\n", cJSON_Print(jsonv1_pod_anti_affinity_2));
}

int main() {
    test_v1_pod_anti_affinity(1);
    test_v1_pod_anti_affinity(0);
    printf("Hello world \n");
    return 0;
}

#endif // v1_pod_anti_affinity_MAIN
#endif // v1_pod_anti_affinity_TEST
#!/usr/bin/env python # Copyright (C) 2015-2018 Swift Navigation Inc. # Contact: https://support.swiftnav.com # # This source is subject to the license found in the file 'LICENSE' which must # be be distributed together with this source. All other rights reserved. # # THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, # EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. """ Linux state monitoring. """ import json import construct from sbp.msg import SBP, SENDER_ID from sbp.utils import fmt_repr, exclude_fields, walk_json_dict, containerize # Automatically generated from piksi/yaml/swiftnav/sbp/linux.yaml with generate.py. # Please do not hand edit! SBP_MSG_LINUX_CPU_STATE = 0x7F00 class MsgLinuxCpuState(SBP): """SBP class for message MSG_LINUX_CPU_STATE (0x7F00). You can have MSG_LINUX_CPU_STATE inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. This message indicates the process state of the top 10 heaviest consumers of CPU on the system. Parameters ---------- sbp : SBP SBP parent object to inherit from. index : int sequence of this status message, values from 0-9 pid : int the PID of the process pcpu : int percent of cpu used, expressed as a fraction of 256 tname : string fixed length string representing the thread name cmdline : string the command line (as much as it fits in the remaining packet) sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). """ _parser = construct.Struct( 'index' / construct.Int8ul, 'pid' / construct.Int16ul, 'pcpu' / construct.Int8ul, 'tname'/ construct.Bytes(15), 'cmdline' / construct.GreedyBytes,) __slots__ = [ 'index', 'pid', 'pcpu', 'tname', 'cmdline', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxCpuState, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxCpuState, self).__init__() self.msg_type = SBP_MSG_LINUX_CPU_STATE self.sender = kwargs.pop('sender', SENDER_ID) self.index = kwargs.pop('index') self.pid = kwargs.pop('pid') self.pcpu = kwargs.pop('pcpu') self.tname = kwargs.pop('tname') self.cmdline = kwargs.pop('cmdline') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxCpuState.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxCpuState(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxCpuState._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxCpuState._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxCpuState._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxCpuState, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_MEM_STATE = 0x7F01 class MsgLinuxMemState(SBP): """SBP class for message MSG_LINUX_MEM_STATE (0x7F01). 
You can have MSG_LINUX_MEM_STATE inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. This message indicates the process state of the top 10 heaviest consumers of memory on the system. Parameters ---------- sbp : SBP SBP parent object to inherit from. index : int sequence of this status message, values from 0-9 pid : int the PID of the process pmem : int percent of memory used, expressed as a fraction of 256 tname : string fixed length string representing the thread name cmdline : string the command line (as much as it fits in the remaining packet) sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). """ _parser = construct.Struct( 'index' / construct.Int8ul, 'pid' / construct.Int16ul, 'pmem' / construct.Int8ul, 'tname'/ construct.Bytes(15), 'cmdline' / construct.GreedyBytes,) __slots__ = [ 'index', 'pid', 'pmem', 'tname', 'cmdline', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxMemState, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxMemState, self).__init__() self.msg_type = SBP_MSG_LINUX_MEM_STATE self.sender = kwargs.pop('sender', SENDER_ID) self.index = kwargs.pop('index') self.pid = kwargs.pop('pid') self.pmem = kwargs.pop('pmem') self.tname = kwargs.pop('tname') self.cmdline = kwargs.pop('cmdline') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxMemState.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxMemState(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxMemState._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxMemState._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxMemState._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxMemState, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_SYS_STATE = 0x7F02 class MsgLinuxSysState(SBP): """SBP class for message MSG_LINUX_SYS_STATE (0x7F02). You can have MSG_LINUX_SYS_STATE inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. This presents a summary of CPU and memory utilization. Parameters ---------- sbp : SBP SBP parent object to inherit from. mem_total : int total system memory pcpu : int percent of total cpu currently utilized pmem : int percent of total memory currently utilized procs_starting : int number of processes that started during collection phase procs_stopping : int number of processes that stopped during collection phase pid_count : int the count of processes on the system sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). 
""" _parser = construct.Struct( 'mem_total' / construct.Int16ul, 'pcpu' / construct.Int8ul, 'pmem' / construct.Int8ul, 'procs_starting' / construct.Int16ul, 'procs_stopping' / construct.Int16ul, 'pid_count' / construct.Int16ul,) __slots__ = [ 'mem_total', 'pcpu', 'pmem', 'procs_starting', 'procs_stopping', 'pid_count', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxSysState, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxSysState, self).__init__() self.msg_type = SBP_MSG_LINUX_SYS_STATE self.sender = kwargs.pop('sender', SENDER_ID) self.mem_total = kwargs.pop('mem_total') self.pcpu = kwargs.pop('pcpu') self.pmem = kwargs.pop('pmem') self.procs_starting = kwargs.pop('procs_starting') self.procs_stopping = kwargs.pop('procs_stopping') self.pid_count = kwargs.pop('pid_count') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxSysState.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxSysState(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxSysState._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxSysState._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxSysState._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxSysState, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_PROCESS_SOCKET_COUNTS = 0x7F03 class MsgLinuxProcessSocketCounts(SBP): """SBP class for message MSG_LINUX_PROCESS_SOCKET_COUNTS (0x7F03). You can have MSG_LINUX_PROCESS_SOCKET_COUNTS inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Top 10 list of processes with high socket counts. Parameters ---------- sbp : SBP SBP parent object to inherit from. index : int sequence of this status message, values from 0-9 pid : int the PID of the process in question socket_count : int the number of sockets the process is using socket_types : int A bitfield indicating the socket types used: 0x1 (tcp), 0x2 (udp), 0x4 (unix stream), 0x8 (unix dgram), 0x10 (netlink), and 0x8000 (unknown) socket_states : int A bitfield indicating the socket states: 0x1 (established), 0x2 (syn-sent), 0x4 (syn-recv), 0x8 (fin-wait-1), 0x10 (fin-wait-2), 0x20 (time-wait), 0x40 (closed), 0x80 (close-wait), 0x100 (last-ack), 0x200 (listen), 0x400 (closing), 0x800 (unconnected), and 0x8000 (unknown) cmdline : string the command line of the process in question sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). 
""" _parser = construct.Struct( 'index' / construct.Int8ul, 'pid' / construct.Int16ul, 'socket_count' / construct.Int16ul, 'socket_types' / construct.Int16ul, 'socket_states' / construct.Int16ul, 'cmdline' / construct.GreedyBytes,) __slots__ = [ 'index', 'pid', 'socket_count', 'socket_types', 'socket_states', 'cmdline', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxProcessSocketCounts, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxProcessSocketCounts, self).__init__() self.msg_type = SBP_MSG_LINUX_PROCESS_SOCKET_COUNTS self.sender = kwargs.pop('sender', SENDER_ID) self.index = kwargs.pop('index') self.pid = kwargs.pop('pid') self.socket_count = kwargs.pop('socket_count') self.socket_types = kwargs.pop('socket_types') self.socket_states = kwargs.pop('socket_states') self.cmdline = kwargs.pop('cmdline') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxProcessSocketCounts.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxProcessSocketCounts(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxProcessSocketCounts._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxProcessSocketCounts._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxProcessSocketCounts._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxProcessSocketCounts, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_PROCESS_SOCKET_QUEUES = 0x7F04 class MsgLinuxProcessSocketQueues(SBP): """SBP class for message MSG_LINUX_PROCESS_SOCKET_QUEUES (0x7F04). You can have MSG_LINUX_PROCESS_SOCKET_QUEUES inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Top 10 list of sockets with deep queues. Parameters ---------- sbp : SBP SBP parent object to inherit from. index : int sequence of this status message, values from 0-9 pid : int the PID of the process in question recv_queued : int the total amount of receive data queued for this process send_queued : int the total amount of send data queued for this process socket_types : int A bitfield indicating the socket types used: 0x1 (tcp), 0x2 (udp), 0x4 (unix stream), 0x8 (unix dgram), 0x10 (netlink), and 0x8000 (unknown) socket_states : int A bitfield indicating the socket states: 0x1 (established), 0x2 (syn-sent), 0x4 (syn-recv), 0x8 (fin-wait-1), 0x10 (fin-wait-2), 0x20 (time-wait), 0x40 (closed), 0x80 (close-wait), 0x100 (last-ack), 0x200 (listen), 0x400 (closing), 0x800 (unconnected), and 0x8000 (unknown) address_of_largest : string Address of the largest queue, remote or local depending on the directionality of the connection. cmdline : string the command line of the process in question sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). 
""" _parser = construct.Struct( 'index' / construct.Int8ul, 'pid' / construct.Int16ul, 'recv_queued' / construct.Int16ul, 'send_queued' / construct.Int16ul, 'socket_types' / construct.Int16ul, 'socket_states' / construct.Int16ul, 'address_of_largest'/ construct.Bytes(64), 'cmdline' / construct.GreedyBytes,) __slots__ = [ 'index', 'pid', 'recv_queued', 'send_queued', 'socket_types', 'socket_states', 'address_of_largest', 'cmdline', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxProcessSocketQueues, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxProcessSocketQueues, self).__init__() self.msg_type = SBP_MSG_LINUX_PROCESS_SOCKET_QUEUES self.sender = kwargs.pop('sender', SENDER_ID) self.index = kwargs.pop('index') self.pid = kwargs.pop('pid') self.recv_queued = kwargs.pop('recv_queued') self.send_queued = kwargs.pop('send_queued') self.socket_types = kwargs.pop('socket_types') self.socket_states = kwargs.pop('socket_states') self.address_of_largest = kwargs.pop('address_of_largest') self.cmdline = kwargs.pop('cmdline') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxProcessSocketQueues.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxProcessSocketQueues(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxProcessSocketQueues._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxProcessSocketQueues._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxProcessSocketQueues._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxProcessSocketQueues, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_SOCKET_USAGE = 0x7F05 class MsgLinuxSocketUsage(SBP): """SBP class for message MSG_LINUX_SOCKET_USAGE (0x7F05). You can have MSG_LINUX_SOCKET_USAGE inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Summaries the socket usage across the system. Parameters ---------- sbp : SBP SBP parent object to inherit from. avg_queue_depth : int average socket queue depths across all sockets on the system max_queue_depth : int the max queue depth seen within the reporting period socket_state_counts : array A count for each socket type reported in the `socket_types_reported` field, the first entry corresponds to the first enabled bit in `types_reported`. socket_type_counts : array A count for each socket type reported in the `socket_types_reported` field, the first entry corresponds to the first enabled bit in `types_reported`. sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). 
""" _parser = construct.Struct( 'avg_queue_depth' / construct.Int32ul, 'max_queue_depth' / construct.Int32ul, 'socket_state_counts' / construct.Array(16, construct.Int16ul), 'socket_type_counts' / construct.Array(16, construct.Int16ul),) __slots__ = [ 'avg_queue_depth', 'max_queue_depth', 'socket_state_counts', 'socket_type_counts', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxSocketUsage, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxSocketUsage, self).__init__() self.msg_type = SBP_MSG_LINUX_SOCKET_USAGE self.sender = kwargs.pop('sender', SENDER_ID) self.avg_queue_depth = kwargs.pop('avg_queue_depth') self.max_queue_depth = kwargs.pop('max_queue_depth') self.socket_state_counts = kwargs.pop('socket_state_counts') self.socket_type_counts = kwargs.pop('socket_type_counts') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxSocketUsage.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxSocketUsage(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxSocketUsage._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxSocketUsage._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxSocketUsage._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxSocketUsage, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_PROCESS_FD_COUNT = 0x7F06 class MsgLinuxProcessFdCount(SBP): """SBP class for message MSG_LINUX_PROCESS_FD_COUNT (0x7F06). You can have MSG_LINUX_PROCESS_FD_COUNT inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Top 10 list of processes with a large number of open file descriptors. Parameters ---------- sbp : SBP SBP parent object to inherit from. index : int sequence of this status message, values from 0-9 pid : int the PID of the process in question fd_count : int a count of the number of file descriptors opened by the process cmdline : string the command line of the process in question sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). 
""" _parser = construct.Struct( 'index' / construct.Int8ul, 'pid' / construct.Int16ul, 'fd_count' / construct.Int16ul, 'cmdline' / construct.GreedyBytes,) __slots__ = [ 'index', 'pid', 'fd_count', 'cmdline', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxProcessFdCount, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxProcessFdCount, self).__init__() self.msg_type = SBP_MSG_LINUX_PROCESS_FD_COUNT self.sender = kwargs.pop('sender', SENDER_ID) self.index = kwargs.pop('index') self.pid = kwargs.pop('pid') self.fd_count = kwargs.pop('fd_count') self.cmdline = kwargs.pop('cmdline') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgLinuxProcessFdCount.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxProcessFdCount(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxProcessFdCount._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxProcessFdCount._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxProcessFdCount._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxProcessFdCount, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d SBP_MSG_LINUX_PROCESS_FD_SUMMARY = 0x7F07 class MsgLinuxProcessFdSummary(SBP): """SBP class for message MSG_LINUX_PROCESS_FD_SUMMARY (0x7F07). You can have MSG_LINUX_PROCESS_FD_SUMMARY inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Summary of open file descriptors on the system. Parameters ---------- sbp : SBP SBP parent object to inherit from. sys_fd_count : int count of total FDs open on the system most_opened : string A null delimited list of strings which alternates between a string representation of the process count and the file name whose count it being reported. That is, in C string syntax "32\0/var/log/syslog\012\0/tmp/foo\0" with the end of the list being 2 NULL terminators in a row. sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). """ _parser = construct.Struct( 'sys_fd_count' / construct.Int32ul, 'most_opened' / construct.GreedyBytes,) __slots__ = [ 'sys_fd_count', 'most_opened', ] def __init__(self, sbp=None, **kwargs): if sbp: super( MsgLinuxProcessFdSummary, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super( MsgLinuxProcessFdSummary, self).__init__() self.msg_type = SBP_MSG_LINUX_PROCESS_FD_SUMMARY self.sender = kwargs.pop('sender', SENDER_ID) self.sys_fd_count = kwargs.pop('sys_fd_count') self.most_opened = kwargs.pop('most_opened') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. 
""" d = json.loads(s) return MsgLinuxProcessFdSummary.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgLinuxProcessFdSummary(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgLinuxProcessFdSummary._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgLinuxProcessFdSummary._parser.build(c) return self.pack() def into_buffer(self, buf, offset): """Produce a framed/packed SBP message into the provided buffer and offset. """ self.payload = containerize(exclude_fields(self)) self.parser = MsgLinuxProcessFdSummary._parser self.stream_payload.reset(buf, offset) return self.pack_into(buf, offset, self._build_payload) def to_json_dict(self): self.to_binary() d = super( MsgLinuxProcessFdSummary, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d msg_classes = { 0x7F00: MsgLinuxCpuState, 0x7F01: MsgLinuxMemState, 0x7F02: MsgLinuxSysState, 0x7F03: MsgLinuxProcessSocketCounts, 0x7F04: MsgLinuxProcessSocketQueues, 0x7F05: MsgLinuxSocketUsage, 0x7F06: MsgLinuxProcessFdCount, 0x7F07: MsgLinuxProcessFdSummary, }
""" Forest of trees-based ensemble methods. Those methods include random forests and extremely randomized trees. The module structure is the following: - The ``BaseForest`` base class implements a common ``fit`` method for all the estimators in the module. The ``fit`` method of the base ``Forest`` class calls the ``fit`` method of each sub-estimator on random samples (with replacement, a.k.a. bootstrap) of the training set. The init of the sub-estimator is further delegated to the ``BaseEnsemble`` constructor. - The ``ForestClassifier`` and ``ForestRegressor`` base classes further implement the prediction logic by computing an average of the predicted outcomes of the sub-estimators. - The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived classes provide the user with concrete implementations of the forest ensemble method using classical, deterministic ``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as sub-estimator implementations. - The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived classes provide the user with concrete implementations of the forest ensemble method using the extremely randomized trees ``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as sub-estimator implementations. Single and multi-output problems are both handled. """ # Authors: Gilles Louppe <g.louppe@gmail.com> # Brian Holt <bdholt1@gmail.com> # Joly Arnaud <arnaud.v.joly@gmail.com> # Fares Hedayati <fares.hedayati@gmail.com> # # License: BSD 3 clause import numbers from warnings import catch_warnings, simplefilter, warn import threading from abc import ABCMeta, abstractmethod import numpy as np from scipy.sparse import issparse from scipy.sparse import hstack as sparse_hstack from joblib import Parallel from ..base import is_classifier from ..base import ClassifierMixin, MultiOutputMixin, RegressorMixin from ..metrics import accuracy_score, r2_score from ..preprocessing import OneHotEncoder from ..tree import ( DecisionTreeClassifier, DecisionTreeRegressor, ExtraTreeClassifier, ExtraTreeRegressor, ) from ..tree._tree import DTYPE, DOUBLE from ..utils import check_random_state, compute_sample_weight, deprecated from ..exceptions import DataConversionWarning from ._base import BaseEnsemble, _partition_estimators from ..utils.fixes import delayed from ..utils.fixes import _joblib_parallel_args from ..utils.multiclass import check_classification_targets, type_of_target from ..utils.validation import check_is_fitted, _check_sample_weight from ..utils.validation import _num_samples __all__ = [ "RandomForestClassifier", "RandomForestRegressor", "ExtraTreesClassifier", "ExtraTreesRegressor", "RandomTreesEmbedding", ] MAX_INT = np.iinfo(np.int32).max def _get_n_samples_bootstrap(n_samples, max_samples): """ Get the number of samples in a bootstrap sample. Parameters ---------- n_samples : int Number of samples in the dataset. max_samples : int or float The maximum number of samples to draw from the total available: - if float, this indicates a fraction of the total and should be the interval `(0.0, 1.0]`; - if int, this indicates the exact number of samples; - if None, this indicates the total number of samples. Returns ------- n_samples_bootstrap : int The total number of samples to draw for the bootstrap sample. 
""" if max_samples is None: return n_samples if isinstance(max_samples, numbers.Integral): if not (1 <= max_samples <= n_samples): msg = "`max_samples` must be in range 1 to {} but got value {}" raise ValueError(msg.format(n_samples, max_samples)) return max_samples if isinstance(max_samples, numbers.Real): if not (0 < max_samples <= 1): msg = "`max_samples` must be in range (0.0, 1.0] but got value {}" raise ValueError(msg.format(max_samples)) return round(n_samples * max_samples) msg = "`max_samples` should be int or float, but got type '{}'" raise TypeError(msg.format(type(max_samples))) def _generate_sample_indices(random_state, n_samples, n_samples_bootstrap): """ Private function used to _parallel_build_trees function.""" random_instance = check_random_state(random_state) sample_indices = random_instance.randint(0, n_samples, n_samples_bootstrap) return sample_indices def _generate_unsampled_indices(random_state, n_samples, n_samples_bootstrap): """ Private function used to forest._set_oob_score function.""" sample_indices = _generate_sample_indices( random_state, n_samples, n_samples_bootstrap ) sample_counts = np.bincount(sample_indices, minlength=n_samples) unsampled_mask = sample_counts == 0 indices_range = np.arange(n_samples) unsampled_indices = indices_range[unsampled_mask] return unsampled_indices def _parallel_build_trees( tree, forest, X, y, sample_weight, tree_idx, n_trees, verbose=0, class_weight=None, n_samples_bootstrap=None, ): """ Private function used to fit a single tree in parallel.""" if verbose > 1: print("building tree %d of %d" % (tree_idx + 1, n_trees)) if forest.bootstrap: n_samples = X.shape[0] if sample_weight is None: curr_sample_weight = np.ones((n_samples,), dtype=np.float64) else: curr_sample_weight = sample_weight.copy() indices = _generate_sample_indices( tree.random_state, n_samples, n_samples_bootstrap ) sample_counts = np.bincount(indices, minlength=n_samples) curr_sample_weight *= sample_counts if class_weight == "subsample": with catch_warnings(): simplefilter("ignore", DeprecationWarning) curr_sample_weight *= compute_sample_weight("auto", y, indices=indices) elif class_weight == "balanced_subsample": curr_sample_weight *= compute_sample_weight("balanced", y, indices=indices) tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False) else: tree.fit(X, y, sample_weight=sample_weight, check_input=False) return tree class BaseForest(MultiOutputMixin, BaseEnsemble, metaclass=ABCMeta): """ Base class for forests of trees. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__( self, base_estimator, n_estimators=100, *, estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, max_samples=None, ): super().__init__( base_estimator=base_estimator, n_estimators=n_estimators, estimator_params=estimator_params, ) self.bootstrap = bootstrap self.oob_score = oob_score self.n_jobs = n_jobs self.random_state = random_state self.verbose = verbose self.warm_start = warm_start self.class_weight = class_weight self.max_samples = max_samples def apply(self, X): """ Apply trees in the forest to X, return leaf indices. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. 
Returns ------- X_leaves : ndarray of shape (n_samples, n_estimators) For each datapoint x in X and for each tree in the forest, return the index of the leaf x ends up in. """ X = self._validate_X_predict(X) results = Parallel( n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer="threads"), )(delayed(tree.apply)(X, check_input=False) for tree in self.estimators_) return np.array(results).T def decision_path(self, X): """ Return the decision path in the forest. .. versionadded:: 0.18 Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- indicator : sparse matrix of shape (n_samples, n_nodes) Return a node indicator matrix where non zero elements indicates that the samples goes through the nodes. The matrix is of CSR format. n_nodes_ptr : ndarray of shape (n_estimators + 1,) The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]] gives the indicator value for the i-th estimator. """ X = self._validate_X_predict(X) indicators = Parallel( n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer="threads"), )( delayed(tree.decision_path)(X, check_input=False) for tree in self.estimators_ ) n_nodes = [0] n_nodes.extend([i.shape[1] for i in indicators]) n_nodes_ptr = np.array(n_nodes).cumsum() return sparse_hstack(indicators).tocsr(), n_nodes_ptr def fit(self, X, y, sample_weight=None): """ Build a forest of trees from the training set (X, y). Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The training input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csc_matrix``. y : array-like of shape (n_samples,) or (n_samples, n_outputs) The target values (class labels in classification, real numbers in regression). sample_weight : array-like of shape (n_samples,), default=None Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node. Returns ------- self : object Fitted estimator. """ # Validate or convert input data if issparse(y): raise ValueError("sparse multilabel-indicator for y is not supported.") X, y = self._validate_data( X, y, multi_output=True, accept_sparse="csc", dtype=DTYPE ) if sample_weight is not None: sample_weight = _check_sample_weight(sample_weight, X) if issparse(X): # Pre-sort indices to avoid that each individual tree of the # ensemble sorts the indices. X.sort_indices() y = np.atleast_1d(y) if y.ndim == 2 and y.shape[1] == 1: warn( "A column-vector y was passed when a 1d array was" " expected. Please change the shape of y to " "(n_samples,), for example using ravel().", DataConversionWarning, stacklevel=2, ) if y.ndim == 1: # reshape is necessary to preserve the data contiguity against vs # [:, np.newaxis] that does not. y = np.reshape(y, (-1, 1)) if self.criterion == "poisson": if np.any(y < 0): raise ValueError( "Some value(s) of y are negative which is " "not allowed for Poisson regression." ) if np.sum(y) <= 0: raise ValueError( "Sum of y is not strictly positive which " "is necessary for Poisson regression." 
) self.n_outputs_ = y.shape[1] y, expanded_class_weight = self._validate_y_class_weight(y) if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous: y = np.ascontiguousarray(y, dtype=DOUBLE) if expanded_class_weight is not None: if sample_weight is not None: sample_weight = sample_weight * expanded_class_weight else: sample_weight = expanded_class_weight if not self.bootstrap and self.max_samples is not None: raise ValueError( "`max_sample` cannot be set if `bootstrap=False`. " "Either switch to `bootstrap=True` or set " "`max_sample=None`." ) elif self.bootstrap: n_samples_bootstrap = _get_n_samples_bootstrap( n_samples=X.shape[0], max_samples=self.max_samples ) else: n_samples_bootstrap = None # Check parameters self._validate_estimator() # TODO: Remove in v1.2 if isinstance(self, (RandomForestRegressor, ExtraTreesRegressor)): if self.criterion == "mse": warn( "Criterion 'mse' was deprecated in v1.0 and will be " "removed in version 1.2. Use `criterion='squared_error'` " "which is equivalent.", FutureWarning, ) elif self.criterion == "mae": warn( "Criterion 'mae' was deprecated in v1.0 and will be " "removed in version 1.2. Use `criterion='absolute_error'` " "which is equivalent.", FutureWarning, ) if self.max_features == "auto": warn( "`max_features='auto'` has been deprecated in 1.1 " "and will be removed in 1.3. To keep the past behaviour, " "explicitly set `max_features=1.0` or remove this " "parameter as it is also the default value for " "RandomForestRegressors and ExtraTreesRegressors.", FutureWarning, ) elif isinstance(self, (RandomForestClassifier, ExtraTreesClassifier)): if self.max_features == "auto": warn( "`max_features='auto'` has been deprecated in 1.1 " "and will be removed in 1.3. To keep the past behaviour, " "explicitly set `max_features='sqrt'` or remove this " "parameter as it is also the default value for " "RandomForestClassifiers and ExtraTreesClassifiers.", FutureWarning, ) if not self.bootstrap and self.oob_score: raise ValueError("Out of bag estimation only available if bootstrap=True") random_state = check_random_state(self.random_state) if not self.warm_start or not hasattr(self, "estimators_"): # Free allocated memory, if any self.estimators_ = [] n_more_estimators = self.n_estimators - len(self.estimators_) if n_more_estimators < 0: raise ValueError( "n_estimators=%d must be larger or equal to " "len(estimators_)=%d when warm_start==True" % (self.n_estimators, len(self.estimators_)) ) elif n_more_estimators == 0: warn( "Warm-start fitting without increasing n_estimators does not " "fit new trees." ) else: if self.warm_start and len(self.estimators_) > 0: # We draw from the random state to get the random state we # would have got if we hadn't used a warm_start. random_state.randint(MAX_INT, size=len(self.estimators_)) trees = [ self._make_estimator(append=False, random_state=random_state) for i in range(n_more_estimators) ] # Parallel loop: we prefer the threading backend as the Cython code # for fitting the trees is internally releasing the Python GIL # making threading more efficient than multiprocessing in # that case. However, for joblib 0.12+ we respect any # parallel_backend contexts set at a higher level, # since correctness does not rely on using threads. 
trees = Parallel( n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer="threads"), )( delayed(_parallel_build_trees)( t, self, X, y, sample_weight, i, len(trees), verbose=self.verbose, class_weight=self.class_weight, n_samples_bootstrap=n_samples_bootstrap, ) for i, t in enumerate(trees) ) # Collect newly grown trees self.estimators_.extend(trees) if self.oob_score: y_type = type_of_target(y) if y_type in ("multiclass-multioutput", "unknown"): # FIXME: we could consider to support multiclass-multioutput if # we introduce or reuse a constructor parameter (e.g. # oob_score) allowing our user to pass a callable defining the # scoring strategy on OOB sample. raise ValueError( "The type of target cannot be used to compute OOB " f"estimates. Got {y_type} while only the following are " "supported: continuous, continuous-multioutput, binary, " "multiclass, multilabel-indicator." ) self._set_oob_score_and_attributes(X, y) # Decapsulate classes_ attributes if hasattr(self, "classes_") and self.n_outputs_ == 1: self.n_classes_ = self.n_classes_[0] self.classes_ = self.classes_[0] return self @abstractmethod def _set_oob_score_and_attributes(self, X, y): """Compute and set the OOB score and attributes. Parameters ---------- X : array-like of shape (n_samples, n_features) The data matrix. y : ndarray of shape (n_samples, n_outputs) The target matrix. """ def _compute_oob_predictions(self, X, y): """Compute and set the OOB score. Parameters ---------- X : array-like of shape (n_samples, n_features) The data matrix. y : ndarray of shape (n_samples, n_outputs) The target matrix. Returns ------- oob_pred : ndarray of shape (n_samples, n_classes, n_outputs) or \ (n_samples, 1, n_outputs) The OOB predictions. """ # Prediction requires X to be in CSR format if issparse(X): X = X.tocsr() n_samples = y.shape[0] n_outputs = self.n_outputs_ if is_classifier(self) and hasattr(self, "n_classes_"): # n_classes_ is a ndarray at this stage # all the supported type of target will have the same number of # classes in all outputs oob_pred_shape = (n_samples, self.n_classes_[0], n_outputs) else: # for regression, n_classes_ does not exist and we create an empty # axis to be consistent with the classification case and make # the array operations compatible with the 2 settings oob_pred_shape = (n_samples, 1, n_outputs) oob_pred = np.zeros(shape=oob_pred_shape, dtype=np.float64) n_oob_pred = np.zeros((n_samples, n_outputs), dtype=np.int64) n_samples_bootstrap = _get_n_samples_bootstrap( n_samples, self.max_samples, ) for estimator in self.estimators_: unsampled_indices = _generate_unsampled_indices( estimator.random_state, n_samples, n_samples_bootstrap, ) y_pred = self._get_oob_predictions(estimator, X[unsampled_indices, :]) oob_pred[unsampled_indices, ...] += y_pred n_oob_pred[unsampled_indices, :] += 1 for k in range(n_outputs): if (n_oob_pred == 0).any(): warn( "Some inputs do not have OOB scores. 
This probably means " "too few trees were used to compute any reliable OOB " "estimates.", UserWarning, ) n_oob_pred[n_oob_pred == 0] = 1 oob_pred[..., k] /= n_oob_pred[..., [k]] return oob_pred def _validate_y_class_weight(self, y): # Default implementation return y, None def _validate_X_predict(self, X): """ Validate X whenever one tries to predict, apply, or predict_proba.""" check_is_fitted(self) X = self._validate_data(X, dtype=DTYPE, accept_sparse="csr", reset=False) if issparse(X) and (X.indices.dtype != np.intc or X.indptr.dtype != np.intc): raise ValueError("No support for np.int64 index based sparse matrices") return X @property def feature_importances_(self): """ The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. Returns ------- feature_importances_ : ndarray of shape (n_features,) The values of this array sum to 1, unless all trees are single node trees consisting of only the root node, in which case it will be an array of zeros. """ check_is_fitted(self) all_importances = Parallel( n_jobs=self.n_jobs, **_joblib_parallel_args(prefer="threads") )( delayed(getattr)(tree, "feature_importances_") for tree in self.estimators_ if tree.tree_.node_count > 1 ) if not all_importances: return np.zeros(self.n_features_in_, dtype=np.float64) all_importances = np.mean(all_importances, axis=0, dtype=np.float64) return all_importances / np.sum(all_importances) # TODO: Remove in 1.2 # mypy error: Decorated property not supported @deprecated( # type: ignore "Attribute `n_features_` was deprecated in version 1.0 and will be " "removed in 1.2. Use `n_features_in_` instead." ) @property def n_features_(self): """Number of features when fitting the estimator.""" return self.n_features_in_ def _accumulate_prediction(predict, X, out, lock): """ This is a utility function for joblib's Parallel. It can't be defined locally inside ForestClassifier or ForestRegressor, because joblib complains that it cannot pickle it when placed there. """ prediction = predict(X, check_input=False) with lock: if len(out) == 1: out[0] += prediction else: for i in range(len(out)): out[i] += prediction[i] class ForestClassifier(ClassifierMixin, BaseForest, metaclass=ABCMeta): """ Base class for forest of trees-based classifiers. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__( self, base_estimator, n_estimators=100, *, estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, max_samples=None, ): super().__init__( base_estimator, n_estimators=n_estimators, estimator_params=estimator_params, bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, class_weight=class_weight, max_samples=max_samples, ) @staticmethod def _get_oob_predictions(tree, X): """Compute the OOB predictions for an individual tree. Parameters ---------- tree : DecisionTreeClassifier object A single decision tree classifier. X : ndarray of shape (n_samples, n_features) The OOB samples. Returns ------- y_pred : ndarray of shape (n_samples, n_classes, n_outputs) The OOB associated predictions.
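        Notes
        -----
        Shape handling, as a quick worked example (editor's note, derived
        from the code below): with 3 OOB samples and a binary target,
        ``tree.predict_proba`` returns an array of shape ``(3, 2)``, to
        which a trailing output axis is added to give ``(3, 2, 1)``; with
        2 outputs it returns a list of per-output arrays that stacks to
        ``(2, 3, n_classes)``, which is rolled to ``(3, n_classes, 2)`` so
        that both cases share the same accumulation code.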
""" y_pred = tree.predict_proba(X, check_input=False) y_pred = np.array(y_pred, copy=False) if y_pred.ndim == 2: # binary and multiclass y_pred = y_pred[..., np.newaxis] else: # Roll the first `n_outputs` axis to the last axis. We will reshape # from a shape of (n_outputs, n_samples, n_classes) to a shape of # (n_samples, n_classes, n_outputs). y_pred = np.rollaxis(y_pred, axis=0, start=3) return y_pred def _set_oob_score_and_attributes(self, X, y): """Compute and set the OOB score and attributes. Parameters ---------- X : array-like of shape (n_samples, n_features) The data matrix. y : ndarray of shape (n_samples, n_outputs) The target matrix. """ self.oob_decision_function_ = super()._compute_oob_predictions(X, y) if self.oob_decision_function_.shape[-1] == 1: # drop the n_outputs axis if there is a single output self.oob_decision_function_ = self.oob_decision_function_.squeeze(axis=-1) self.oob_score_ = accuracy_score( y, np.argmax(self.oob_decision_function_, axis=1) ) def _validate_y_class_weight(self, y): check_classification_targets(y) y = np.copy(y) expanded_class_weight = None if self.class_weight is not None: y_original = np.copy(y) self.classes_ = [] self.n_classes_ = [] y_store_unique_indices = np.zeros(y.shape, dtype=int) for k in range(self.n_outputs_): classes_k, y_store_unique_indices[:, k] = np.unique( y[:, k], return_inverse=True ) self.classes_.append(classes_k) self.n_classes_.append(classes_k.shape[0]) y = y_store_unique_indices if self.class_weight is not None: valid_presets = ("balanced", "balanced_subsample") if isinstance(self.class_weight, str): if self.class_weight not in valid_presets: raise ValueError( "Valid presets for class_weight include " '"balanced" and "balanced_subsample".' 'Given "%s".' % self.class_weight ) if self.warm_start: warn( 'class_weight presets "balanced" or ' '"balanced_subsample" are ' "not recommended for warm_start if the fitted data " "differs from the full dataset. In order to use " '"balanced" weights, use compute_class_weight ' '("balanced", classes, y). In place of y you can use ' "a large enough sample of the full training set " "target to properly estimate the class frequency " "distributions. Pass the resulting weights as the " "class_weight parameter." ) if self.class_weight != "balanced_subsample" or not self.bootstrap: if self.class_weight == "balanced_subsample": class_weight = "balanced" else: class_weight = self.class_weight expanded_class_weight = compute_sample_weight(class_weight, y_original) return y, expanded_class_weight def predict(self, X): """ Predict class for X. The predicted class of an input sample is a vote by the trees in the forest, weighted by their probability estimates. That is, the predicted class is the one with highest mean probability estimate across the trees. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- y : ndarray of shape (n_samples,) or (n_samples, n_outputs) The predicted classes. 
""" proba = self.predict_proba(X) if self.n_outputs_ == 1: return self.classes_.take(np.argmax(proba, axis=1), axis=0) else: n_samples = proba[0].shape[0] # all dtypes should be the same, so just take the first class_type = self.classes_[0].dtype predictions = np.empty((n_samples, self.n_outputs_), dtype=class_type) for k in range(self.n_outputs_): predictions[:, k] = self.classes_[k].take( np.argmax(proba[k], axis=1), axis=0 ) return predictions def predict_proba(self, X, uncertainty=[]): """ Predict class probabilities for X. The predicted class probabilities of an input sample are computed as the mean predicted class probabilities of the trees in the forest. The class probability of a single tree is the fraction of samples of the same class in a leaf. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- p : ndarray of shape (n_samples, n_classes), or a list of such arrays The class probabilities of the input samples. The order of the classes corresponds to that in the attribute :term:`classes_`. """ check_is_fitted(self) # Check data X = self._validate_X_predict(X) # Assign chunk of trees to jobs n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs) # avoid storing the output of every estimator by summing them here all_proba = [ np.zeros((X.shape[0], j), dtype=np.float64) for j in np.atleast_1d(self.n_classes_) ] lock = threading.Lock() Parallel( n_jobs=n_jobs, verbose=self.verbose, **_joblib_parallel_args(require="sharedmem"), )( delayed(_accumulate_prediction)(e.predict_proba, X, all_proba, lock) for e in self.estimators_ ) for proba in all_proba: proba /= len(self.estimators_) if len(all_proba) == 1: return all_proba[0] else: return all_proba def predict_log_proba(self, X): """ Predict class log-probabilities for X. The predicted class log-probabilities of an input sample is computed as the log of the mean predicted class probabilities of the trees in the forest. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- p : ndarray of shape (n_samples, n_classes), or a list of such arrays The class probabilities of the input samples. The order of the classes corresponds to that in the attribute :term:`classes_`. """ proba = self.predict_proba(X) if self.n_outputs_ == 1: return np.log(proba) else: for k in range(self.n_outputs_): proba[k] = np.log(proba[k]) return proba def _more_tags(self): return {"multilabel": True} class ForestRegressor(RegressorMixin, BaseForest, metaclass=ABCMeta): """ Base class for forest of trees-based regressors. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__( self, base_estimator, n_estimators=100, *, estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, max_samples=None, ): super().__init__( base_estimator, n_estimators=n_estimators, estimator_params=estimator_params, bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=max_samples, ) def predict(self, X): """ Predict regression target for X. 
The predicted regression target of an input sample is computed as the mean predicted regression targets of the trees in the forest. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- y : ndarray of shape (n_samples,) or (n_samples, n_outputs) The predicted values. """ check_is_fitted(self) # Check data X = self._validate_X_predict(X) # Assign chunk of trees to jobs n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs) # avoid storing the output of every estimator by summing them here if self.n_outputs_ > 1: y_hat = np.zeros((X.shape[0], self.n_outputs_), dtype=np.float64) else: y_hat = np.zeros((X.shape[0]), dtype=np.float64) # Parallel loop lock = threading.Lock() Parallel( n_jobs=n_jobs, verbose=self.verbose, **_joblib_parallel_args(require="sharedmem"), )( delayed(_accumulate_prediction)(e.predict, X, [y_hat], lock) for e in self.estimators_ ) y_hat /= len(self.estimators_) return y_hat @staticmethod def _get_oob_predictions(tree, X): """Compute the OOB predictions for an individual tree. Parameters ---------- tree : DecisionTreeRegressor object A single decision tree regressor. X : ndarray of shape (n_samples, n_features) The OOB samples. Returns ------- y_pred : ndarray of shape (n_samples, 1, n_outputs) The OOB associated predictions. """ y_pred = tree.predict(X, check_input=False) if y_pred.ndim == 1: # single output regression y_pred = y_pred[:, np.newaxis, np.newaxis] else: # multioutput regression y_pred = y_pred[:, np.newaxis, :] return y_pred def _set_oob_score_and_attributes(self, X, y): """Compute and set the OOB score and attributes. Parameters ---------- X : array-like of shape (n_samples, n_features) The data matrix. y : ndarray of shape (n_samples, n_outputs) The target matrix. """ self.oob_prediction_ = super()._compute_oob_predictions(X, y).squeeze(axis=1) if self.oob_prediction_.shape[-1] == 1: # drop the n_outputs axis if there is a single output self.oob_prediction_ = self.oob_prediction_.squeeze(axis=-1) self.oob_score_ = r2_score(y, self.oob_prediction_) def _compute_partial_dependence_recursion(self, grid, target_features): """Fast partial dependence computation. Parameters ---------- grid : ndarray of shape (n_samples, n_target_features) The grid points on which the partial dependence should be evaluated. target_features : ndarray of shape (n_target_features) The set of target features for which the partial dependence should be evaluated. Returns ------- averaged_predictions : ndarray of shape (n_samples,) The value of the partial dependence function on each grid point. """ grid = np.asarray(grid, dtype=DTYPE, order="C") averaged_predictions = np.zeros( shape=grid.shape[0], dtype=np.float64, order="C" ) for tree in self.estimators_: # Note: we don't sum in parallel because the GIL isn't released in # the fast method. tree.tree_.compute_partial_dependence( grid, target_features, averaged_predictions ) # Average over the forest averaged_predictions /= len(self.estimators_) return averaged_predictions def _more_tags(self): return {"multilabel": True} class RandomForestClassifier(ForestClassifier): """ A random forest classifier. A random forest is a meta estimator that fits a number of decision tree classifiers on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. 
The sub-sample size is controlled with the `max_samples` parameter if `bootstrap=True` (default), otherwise the whole dataset is used to build each tree. Read more in the :ref:`User Guide <forest>`. Parameters ---------- n_estimators : int, default=100 The number of trees in the forest. .. versionchanged:: 0.22 The default value of ``n_estimators`` changed from 10 to 100 in 0.22. criterion : {"gini", "entropy"}, default="gini" The function to measure the quality of a split. Supported criteria are "gini" for the Gini impurity and "entropy" for the information gain. Note: this parameter is tree-specific. max_depth : int, default=None The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. min_samples_split : int or float, default=2 The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. - If float, then `min_samples_split` is a fraction and `ceil(min_samples_split * n_samples)` are the minimum number of samples for each split. .. versionchanged:: 0.18 Added float values for fractions. min_samples_leaf : int or float, default=1 The minimum number of samples required to be at a leaf node. A split point at any depth will only be considered if it leaves at least ``min_samples_leaf`` training samples in each of the left and right branches. This may have the effect of smoothing the model, especially in regression. - If int, then consider `min_samples_leaf` as the minimum number. - If float, then `min_samples_leaf` is a fraction and `ceil(min_samples_leaf * n_samples)` are the minimum number of samples for each node. .. versionchanged:: 0.18 Added float values for fractions. min_weight_fraction_leaf : float, default=0.0 The minimum weighted fraction of the sum total of weights (of all the input samples) required to be at a leaf node. Samples have equal weight when sample_weight is not provided. max_features : {"sqrt", "log2", None}, int or float, default="sqrt" The number of features to consider when looking for the best split: - If int, then consider `max_features` features at each split. - If float, then `max_features` is a fraction and `round(max_features * n_features)` features are considered at each split. - If "auto", then `max_features=sqrt(n_features)`. - If "sqrt", then `max_features=sqrt(n_features)`. - If "log2", then `max_features=log2(n_features)`. - If None, then `max_features=n_features`. .. versionchanged:: 1.1 The default of `max_features` changed from `"auto"` to `"sqrt"`. .. deprecated:: 1.1 The `"auto"` option was deprecated in 1.1 and will be removed in 1.3. Note: the search for a split does not stop until at least one valid partition of the node samples is found, even if it requires to effectively inspect more than ``max_features`` features. max_leaf_nodes : int, default=None Grow trees with ``max_leaf_nodes`` in best-first fashion. Best nodes are defined as relative reduction in impurity. If None then unlimited number of leaf nodes. min_impurity_decrease : float, default=0.0 A node will be split if this split induces a decrease of the impurity greater than or equal to this value. 
The weighted impurity decrease equation is the following:: N_t / N * (impurity - N_t_R / N_t * right_impurity - N_t_L / N_t * left_impurity) where ``N`` is the total number of samples, ``N_t`` is the number of samples at the current node, ``N_t_L`` is the number of samples in the left child, and ``N_t_R`` is the number of samples in the right child. ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum, if ``sample_weight`` is passed. .. versionadded:: 0.19 bootstrap : bool, default=True Whether bootstrap samples are used when building trees. If False, the whole dataset is used to build each tree. oob_score : bool, default=False Whether to use out-of-bag samples to estimate the generalization score. Only available if bootstrap=True. n_jobs : int, default=None The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`, :meth:`decision_path` and :meth:`apply` are all parallelized over the trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance or None, default=None Controls both the randomness of the bootstrapping of the samples used when building trees (if ``bootstrap=True``) and the sampling of the features to consider when looking for the best split at each node (if ``max_features < n_features``). See :term:`Glossary <random_state>` for details. verbose : int, default=0 Controls the verbosity when fitting and predicting. warm_start : bool, default=False When set to ``True``, reuse the solution of the previous call to fit and add more estimators to the ensemble, otherwise, just fit a whole new forest. See :term:`the Glossary <warm_start>`. class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \ default=None Weights associated with classes in the form ``{class_label: weight}``. If not given, all classes are supposed to have weight one. For multi-output problems, a list of dicts can be provided in the same order as the columns of y. Note that for multioutput (including multilabel) weights should be defined for each class of every column in its own dict. For example, for four-class multilabel classification weights should be [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of [{1:1}, {2:5}, {3:1}, {4:1}]. The "balanced" mode uses the values of y to automatically adjust weights inversely proportional to class frequencies in the input data as ``n_samples / (n_classes * np.bincount(y))`` The "balanced_subsample" mode is the same as "balanced" except that weights are computed based on the bootstrap sample for every tree grown. For multi-output, the weights of each column of y will be multiplied. Note that these weights will be multiplied with sample_weight (passed through the fit method) if sample_weight is specified. ccp_alpha : non-negative float, default=0.0 Complexity parameter used for Minimal Cost-Complexity Pruning. The subtree with the largest cost complexity that is smaller than ``ccp_alpha`` will be chosen. By default, no pruning is performed. See :ref:`minimal_cost_complexity_pruning` for details. .. versionadded:: 0.22 max_samples : int or float, default=None If bootstrap is True, the number of samples to draw from X to train each base estimator. - If None (default), then draw `X.shape[0]` samples. - If int, then draw `max_samples` samples. - If float, then draw `max_samples * X.shape[0]` samples. Thus, `max_samples` should be in the interval `(0.0, 1.0]`. .. 
versionadded:: 0.22 Attributes ---------- base_estimator_ : DecisionTreeClassifier The child estimator template used to create the collection of fitted sub-estimators. estimators_ : list of DecisionTreeClassifier The collection of fitted sub-estimators. classes_ : ndarray of shape (n_classes,) or a list of such arrays The classes labels (single output problem), or a list of arrays of class labels (multi-output problem). n_classes_ : int or list The number of classes (single output problem), or a list containing the number of classes for each output (multi-output problem). n_features_ : int The number of features when ``fit`` is performed. .. deprecated:: 1.0 Attribute `n_features_` was deprecated in version 1.0 and will be removed in 1.2. Use `n_features_in_` instead. n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 feature_names_in_ : ndarray of shape (`n_features_in_`,) Names of features seen during :term:`fit`. Defined only when `X` has feature names that are all strings. .. versionadded:: 1.0 n_outputs_ : int The number of outputs when ``fit`` is performed. feature_importances_ : ndarray of shape (n_features,) The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. oob_score_ : float Score of the training dataset obtained using an out-of-bag estimate. This attribute exists only when ``oob_score`` is True. oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \ (n_samples, n_classes, n_outputs) Decision function computed with out-of-bag estimate on the training set. If n_estimators is small it might be possible that a data point was never left out during the bootstrap. In this case, `oob_decision_function_` might contain NaN. This attribute exists only when ``oob_score`` is True. See Also -------- sklearn.tree.DecisionTreeClassifier : A decision tree classifier. sklearn.ensemble.ExtraTreesClassifier : Ensemble of extremely randomized tree classifiers. Notes ----- The default values for the parameters controlling the size of the trees (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and unpruned trees which can potentially be very large on some data sets. To reduce memory consumption, the complexity and size of the trees should be controlled by setting those parameter values. The features are always randomly permuted at each split. Therefore, the best found split may vary, even with the same training data, ``max_features=n_features`` and ``bootstrap=False``, if the improvement of the criterion is identical for several splits enumerated during the search of the best split. To obtain a deterministic behaviour during fitting, ``random_state`` has to be fixed. References ---------- .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. Examples -------- >>> from sklearn.ensemble import RandomForestClassifier >>> from sklearn.datasets import make_classification >>> X, y = make_classification(n_samples=1000, n_features=4, ... n_informative=2, n_redundant=0, ... random_state=0, shuffle=False) >>> clf = RandomForestClassifier(max_depth=2, random_state=0) >>> clf.fit(X, y) RandomForestClassifier(...) 
>>> print(clf.predict([[0, 0, 0, 0]])) [1] """ def __init__( self, n_estimators=100, *, criterion="gini", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features="sqrt", max_leaf_nodes=None, min_impurity_decrease=0.0, bootstrap=True, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, ccp_alpha=0.0, max_samples=None, ): super().__init__( base_estimator=DecisionTreeClassifier(), n_estimators=n_estimators, estimator_params=( "criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_decrease", "random_state", "ccp_alpha", ), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, class_weight=class_weight, max_samples=max_samples, ) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.ccp_alpha = ccp_alpha class RandomForestRegressor(ForestRegressor): """ A random forest regressor. A random forest is a meta estimator that fits a number of decision tree regressors on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. The sub-sample size is controlled with the `max_samples` parameter if `bootstrap=True` (default), otherwise the whole dataset is used to build each tree. Read more in the :ref:`User Guide <forest>`. Parameters ---------- n_estimators : int, default=100 The number of trees in the forest. .. versionchanged:: 0.22 The default value of ``n_estimators`` changed from 10 to 100 in 0.22. criterion : {"squared_error", "absolute_error", "poisson"}, \ default="squared_error" The function to measure the quality of a split. Supported criteria are "squared_error" for the mean squared error, which is equal to variance reduction as feature selection criterion, "absolute_error" for the mean absolute error, and "poisson" which uses reduction in Poisson deviance to find splits. Training using "absolute_error" is significantly slower than when using "squared_error". .. versionadded:: 0.18 Mean Absolute Error (MAE) criterion. .. versionadded:: 1.0 Poisson criterion. .. deprecated:: 1.0 Criterion "mse" was deprecated in v1.0 and will be removed in version 1.2. Use `criterion="squared_error"` which is equivalent. .. deprecated:: 1.0 Criterion "mae" was deprecated in v1.0 and will be removed in version 1.2. Use `criterion="absolute_error"` which is equivalent. max_depth : int, default=None The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. min_samples_split : int or float, default=2 The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. - If float, then `min_samples_split` is a fraction and `ceil(min_samples_split * n_samples)` are the minimum number of samples for each split. .. versionchanged:: 0.18 Added float values for fractions. min_samples_leaf : int or float, default=1 The minimum number of samples required to be at a leaf node.
A split point at any depth will only be considered if it leaves at least ``min_samples_leaf`` training samples in each of the left and right branches. This may have the effect of smoothing the model, especially in regression. - If int, then consider `min_samples_leaf` as the minimum number. - If float, then `min_samples_leaf` is a fraction and `ceil(min_samples_leaf * n_samples)` are the minimum number of samples for each node. .. versionchanged:: 0.18 Added float values for fractions. min_weight_fraction_leaf : float, default=0.0 The minimum weighted fraction of the sum total of weights (of all the input samples) required to be at a leaf node. Samples have equal weight when sample_weight is not provided. max_features : {"sqrt", "log2", None}, int or float, default=1.0 The number of features to consider when looking for the best split: - If int, then consider `max_features` features at each split. - If float, then `max_features` is a fraction and `round(max_features * n_features)` features are considered at each split. - If "auto", then `max_features=n_features`. - If "sqrt", then `max_features=sqrt(n_features)`. - If "log2", then `max_features=log2(n_features)`. - If None or 1.0, then `max_features=n_features`. .. note:: The default of 1.0 is equivalent to bagged trees and more randomness can be achieved by setting smaller values, e.g. 0.3. .. versionchanged:: 1.1 The default of `max_features` changed from `"auto"` to 1.0. .. deprecated:: 1.1 The `"auto"` option was deprecated in 1.1 and will be removed in 1.3. Note: the search for a split does not stop until at least one valid partition of the node samples is found, even if it requires to effectively inspect more than ``max_features`` features. max_leaf_nodes : int, default=None Grow trees with ``max_leaf_nodes`` in best-first fashion. Best nodes are defined as relative reduction in impurity. If None then unlimited number of leaf nodes. min_impurity_decrease : float, default=0.0 A node will be split if this split induces a decrease of the impurity greater than or equal to this value. The weighted impurity decrease equation is the following:: N_t / N * (impurity - N_t_R / N_t * right_impurity - N_t_L / N_t * left_impurity) where ``N`` is the total number of samples, ``N_t`` is the number of samples at the current node, ``N_t_L`` is the number of samples in the left child, and ``N_t_R`` is the number of samples in the right child. ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum, if ``sample_weight`` is passed. .. versionadded:: 0.19 bootstrap : bool, default=True Whether bootstrap samples are used when building trees. If False, the whole dataset is used to build each tree. oob_score : bool, default=False Whether to use out-of-bag samples to estimate the generalization score. Only available if bootstrap=True. n_jobs : int, default=None The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`, :meth:`decision_path` and :meth:`apply` are all parallelized over the trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance or None, default=None Controls both the randomness of the bootstrapping of the samples used when building trees (if ``bootstrap=True``) and the sampling of the features to consider when looking for the best split at each node (if ``max_features < n_features``). See :term:`Glossary <random_state>` for details. 
verbose : int, default=0 Controls the verbosity when fitting and predicting. warm_start : bool, default=False When set to ``True``, reuse the solution of the previous call to fit and add more estimators to the ensemble, otherwise, just fit a whole new forest. See :term:`the Glossary <warm_start>`. ccp_alpha : non-negative float, default=0.0 Complexity parameter used for Minimal Cost-Complexity Pruning. The subtree with the largest cost complexity that is smaller than ``ccp_alpha`` will be chosen. By default, no pruning is performed. See :ref:`minimal_cost_complexity_pruning` for details. .. versionadded:: 0.22 max_samples : int or float, default=None If bootstrap is True, the number of samples to draw from X to train each base estimator. - If None (default), then draw `X.shape[0]` samples. - If int, then draw `max_samples` samples. - If float, then draw `max_samples * X.shape[0]` samples. Thus, `max_samples` should be in the interval `(0.0, 1.0]`. .. versionadded:: 0.22 Attributes ---------- base_estimator_ : DecisionTreeRegressor The child estimator template used to create the collection of fitted sub-estimators. estimators_ : list of DecisionTreeRegressor The collection of fitted sub-estimators. feature_importances_ : ndarray of shape (n_features,) The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. n_features_ : int The number of features when ``fit`` is performed. .. deprecated:: 1.0 Attribute `n_features_` was deprecated in version 1.0 and will be removed in 1.2. Use `n_features_in_` instead. n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 feature_names_in_ : ndarray of shape (`n_features_in_`,) Names of features seen during :term:`fit`. Defined only when `X` has feature names that are all strings. .. versionadded:: 1.0 n_outputs_ : int The number of outputs when ``fit`` is performed. oob_score_ : float Score of the training dataset obtained using an out-of-bag estimate. This attribute exists only when ``oob_score`` is True. oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs) Prediction computed with out-of-bag estimate on the training set. This attribute exists only when ``oob_score`` is True. See Also -------- sklearn.tree.DecisionTreeRegressor : A decision tree regressor. sklearn.ensemble.ExtraTreesRegressor : Ensemble of extremely randomized tree regressors. Notes ----- The default values for the parameters controlling the size of the trees (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and unpruned trees which can potentially be very large on some data sets. To reduce memory consumption, the complexity and size of the trees should be controlled by setting those parameter values. The features are always randomly permuted at each split. Therefore, the best found split may vary, even with the same training data, ``max_features=n_features`` and ``bootstrap=False``, if the improvement of the criterion is identical for several splits enumerated during the search of the best split. To obtain a deterministic behaviour during fitting, ``random_state`` has to be fixed. 
The default value ``max_features=1.0`` uses ``n_features`` rather than ``n_features / 3``. The latter was originally suggested in [1], whereas the former was more recently justified empirically in [2]. References ---------- .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. .. [2] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", Machine Learning, 63(1), 3-42, 2006. Examples -------- >>> from sklearn.ensemble import RandomForestRegressor >>> from sklearn.datasets import make_regression >>> X, y = make_regression(n_features=4, n_informative=2, ... random_state=0, shuffle=False) >>> regr = RandomForestRegressor(max_depth=2, random_state=0) >>> regr.fit(X, y) RandomForestRegressor(...) >>> print(regr.predict([[0, 0, 0, 0]])) [-8.32987858] """ def __init__( self, n_estimators=100, *, criterion="squared_error", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=1.0, max_leaf_nodes=None, min_impurity_decrease=0.0, bootstrap=True, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, ccp_alpha=0.0, max_samples=None, ): super().__init__( base_estimator=DecisionTreeRegressor(), n_estimators=n_estimators, estimator_params=( "criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_decrease", "random_state", "ccp_alpha", ), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=max_samples, ) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.ccp_alpha = ccp_alpha class ExtraTreesClassifier(ForestClassifier): """ An extra-trees classifier. This class implements a meta estimator that fits a number of randomized decision trees (a.k.a. extra-trees) on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. Read more in the :ref:`User Guide <forest>`. Parameters ---------- n_estimators : int, default=100 The number of trees in the forest. .. versionchanged:: 0.22 The default value of ``n_estimators`` changed from 10 to 100 in 0.22. criterion : {"gini", "entropy"}, default="gini" The function to measure the quality of a split. Supported criteria are "gini" for the Gini impurity and "entropy" for the information gain. max_depth : int, default=None The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. min_samples_split : int or float, default=2 The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. - If float, then `min_samples_split` is a fraction and `ceil(min_samples_split * n_samples)` are the minimum number of samples for each split. .. versionchanged:: 0.18 Added float values for fractions. min_samples_leaf : int or float, default=1 The minimum number of samples required to be at a leaf node. A split point at any depth will only be considered if it leaves at least ``min_samples_leaf`` training samples in each of the left and right branches. This may have the effect of smoothing the model, especially in regression.
- If int, then consider `min_samples_leaf` as the minimum number. - If float, then `min_samples_leaf` is a fraction and `ceil(min_samples_leaf * n_samples)` are the minimum number of samples for each node. .. versionchanged:: 0.18 Added float values for fractions. min_weight_fraction_leaf : float, default=0.0 The minimum weighted fraction of the sum total of weights (of all the input samples) required to be at a leaf node. Samples have equal weight when sample_weight is not provided. max_features : {"sqrt", "log2", None}, int or float, default="sqrt" The number of features to consider when looking for the best split: - If int, then consider `max_features` features at each split. - If float, then `max_features` is a fraction and `round(max_features * n_features)` features are considered at each split. - If "auto", then `max_features=sqrt(n_features)`. - If "sqrt", then `max_features=sqrt(n_features)`. - If "log2", then `max_features=log2(n_features)`. - If None, then `max_features=n_features`. .. versionchanged:: 1.1 The default of `max_features` changed from `"auto"` to `"sqrt"`. .. deprecated:: 1.1 The `"auto"` option was deprecated in 1.1 and will be removed in 1.3. Note: the search for a split does not stop until at least one valid partition of the node samples is found, even if it requires to effectively inspect more than ``max_features`` features. max_leaf_nodes : int, default=None Grow trees with ``max_leaf_nodes`` in best-first fashion. Best nodes are defined as relative reduction in impurity. If None then unlimited number of leaf nodes. min_impurity_decrease : float, default=0.0 A node will be split if this split induces a decrease of the impurity greater than or equal to this value. The weighted impurity decrease equation is the following:: N_t / N * (impurity - N_t_R / N_t * right_impurity - N_t_L / N_t * left_impurity) where ``N`` is the total number of samples, ``N_t`` is the number of samples at the current node, ``N_t_L`` is the number of samples in the left child, and ``N_t_R`` is the number of samples in the right child. ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum, if ``sample_weight`` is passed. .. versionadded:: 0.19 bootstrap : bool, default=False Whether bootstrap samples are used when building trees. If False, the whole dataset is used to build each tree. oob_score : bool, default=False Whether to use out-of-bag samples to estimate the generalization score. Only available if bootstrap=True. n_jobs : int, default=None The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`, :meth:`decision_path` and :meth:`apply` are all parallelized over the trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance or None, default=None Controls 3 sources of randomness: - the bootstrapping of the samples used when building trees (if ``bootstrap=True``) - the sampling of the features to consider when looking for the best split at each node (if ``max_features < n_features``) - the draw of the splits for each of the `max_features` See :term:`Glossary <random_state>` for details. verbose : int, default=0 Controls the verbosity when fitting and predicting. warm_start : bool, default=False When set to ``True``, reuse the solution of the previous call to fit and add more estimators to the ensemble, otherwise, just fit a whole new forest. See :term:`the Glossary <warm_start>`. 
class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \ default=None Weights associated with classes in the form ``{class_label: weight}``. If not given, all classes are supposed to have weight one. For multi-output problems, a list of dicts can be provided in the same order as the columns of y. Note that for multioutput (including multilabel) weights should be defined for each class of every column in its own dict. For example, for four-class multilabel classification weights should be [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of [{1:1}, {2:5}, {3:1}, {4:1}]. The "balanced" mode uses the values of y to automatically adjust weights inversely proportional to class frequencies in the input data as ``n_samples / (n_classes * np.bincount(y))`` The "balanced_subsample" mode is the same as "balanced" except that weights are computed based on the bootstrap sample for every tree grown. For multi-output, the weights of each column of y will be multiplied. Note that these weights will be multiplied with sample_weight (passed through the fit method) if sample_weight is specified. ccp_alpha : non-negative float, default=0.0 Complexity parameter used for Minimal Cost-Complexity Pruning. The subtree with the largest cost complexity that is smaller than ``ccp_alpha`` will be chosen. By default, no pruning is performed. See :ref:`minimal_cost_complexity_pruning` for details. .. versionadded:: 0.22 max_samples : int or float, default=None If bootstrap is True, the number of samples to draw from X to train each base estimator. - If None (default), then draw `X.shape[0]` samples. - If int, then draw `max_samples` samples. - If float, then draw `max_samples * X.shape[0]` samples. Thus, `max_samples` should be in the interval `(0.0, 1.0]`. .. versionadded:: 0.22 Attributes ---------- base_estimator_ : ExtraTreesClassifier The child estimator template used to create the collection of fitted sub-estimators. estimators_ : list of DecisionTreeClassifier The collection of fitted sub-estimators. classes_ : ndarray of shape (n_classes,) or a list of such arrays The classes labels (single output problem), or a list of arrays of class labels (multi-output problem). n_classes_ : int or list The number of classes (single output problem), or a list containing the number of classes for each output (multi-output problem). feature_importances_ : ndarray of shape (n_features,) The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. n_features_ : int The number of features when ``fit`` is performed. .. deprecated:: 1.0 Attribute `n_features_` was deprecated in version 1.0 and will be removed in 1.2. Use `n_features_in_` instead. n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 feature_names_in_ : ndarray of shape (`n_features_in_`,) Names of features seen during :term:`fit`. Defined only when `X` has feature names that are all strings. .. versionadded:: 1.0 n_outputs_ : int The number of outputs when ``fit`` is performed. oob_score_ : float Score of the training dataset obtained using an out-of-bag estimate. This attribute exists only when ``oob_score`` is True. 
oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \ (n_samples, n_classes, n_outputs) Decision function computed with out-of-bag estimate on the training set. If n_estimators is small it might be possible that a data point was never left out during the bootstrap. In this case, `oob_decision_function_` might contain NaN. This attribute exists only when ``oob_score`` is True. See Also -------- ExtraTreesRegressor : An extra-trees regressor with random splits. RandomForestClassifier : A random forest classifier with optimal splits. RandomForestRegressor : Ensemble regressor using trees with optimal splits. Notes ----- The default values for the parameters controlling the size of the trees (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and unpruned trees which can potentially be very large on some data sets. To reduce memory consumption, the complexity and size of the trees should be controlled by setting those parameter values. References ---------- .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", Machine Learning, 63(1), 3-42, 2006. Examples -------- >>> from sklearn.ensemble import ExtraTreesClassifier >>> from sklearn.datasets import make_classification >>> X, y = make_classification(n_features=4, random_state=0) >>> clf = ExtraTreesClassifier(n_estimators=100, random_state=0) >>> clf.fit(X, y) ExtraTreesClassifier(random_state=0) >>> clf.predict([[0, 0, 0, 0]]) array([1]) """ def __init__( self, n_estimators=100, *, criterion="gini", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features="sqrt", max_leaf_nodes=None, min_impurity_decrease=0.0, bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, ccp_alpha=0.0, max_samples=None, ): super().__init__( base_estimator=ExtraTreeClassifier(), n_estimators=n_estimators, estimator_params=( "criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_decrease", "random_state", "ccp_alpha", ), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, class_weight=class_weight, max_samples=max_samples, ) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.ccp_alpha = ccp_alpha class ExtraTreesRegressor(ForestRegressor): """ An extra-trees regressor. This class implements a meta estimator that fits a number of randomized decision trees (a.k.a. extra-trees) on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. Read more in the :ref:`User Guide <forest>`. Parameters ---------- n_estimators : int, default=100 The number of trees in the forest. .. versionchanged:: 0.22 The default value of ``n_estimators`` changed from 10 to 100 in 0.22. criterion : {"squared_error", "absolute_error"}, default="squared_error" The function to measure the quality of a split. Supported criteria are "squared_error" for the mean squared error, which is equal to variance reduction as feature selection criterion, and "absolute_error" for the mean absolute error. .. versionadded:: 0.18 Mean Absolute Error (MAE) criterion. .. 
deprecated:: 1.0 Criterion "mse" was deprecated in v1.0 and will be removed in version 1.2. Use `criterion="squared_error"` which is equivalent. .. deprecated:: 1.0 Criterion "mae" was deprecated in v1.0 and will be removed in version 1.2. Use `criterion="absolute_error"` which is equivalent. max_depth : int, default=None The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. min_samples_split : int or float, default=2 The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. - If float, then `min_samples_split` is a fraction and `ceil(min_samples_split * n_samples)` are the minimum number of samples for each split. .. versionchanged:: 0.18 Added float values for fractions. min_samples_leaf : int or float, default=1 The minimum number of samples required to be at a leaf node. A split point at any depth will only be considered if it leaves at least ``min_samples_leaf`` training samples in each of the left and right branches. This may have the effect of smoothing the model, especially in regression. - If int, then consider `min_samples_leaf` as the minimum number. - If float, then `min_samples_leaf` is a fraction and `ceil(min_samples_leaf * n_samples)` are the minimum number of samples for each node. .. versionchanged:: 0.18 Added float values for fractions. min_weight_fraction_leaf : float, default=0.0 The minimum weighted fraction of the sum total of weights (of all the input samples) required to be at a leaf node. Samples have equal weight when sample_weight is not provided. max_features : {"sqrt", "log2", None}, int or float, default=1.0 The number of features to consider when looking for the best split: - If int, then consider `max_features` features at each split. - If float, then `max_features` is a fraction and `round(max_features * n_features)` features are considered at each split. - If "auto", then `max_features=n_features`. - If "sqrt", then `max_features=sqrt(n_features)`. - If "log2", then `max_features=log2(n_features)`. - If None or 1.0, then `max_features=n_features`. .. note:: The default of 1.0 is equivalent to bagged trees and more randomness can be achieved by setting smaller values, e.g. 0.3. .. versionchanged:: 1.1 The default of `max_features` changed from `"auto"` to 1.0. .. deprecated:: 1.1 The `"auto"` option was deprecated in 1.1 and will be removed in 1.3. Note: the search for a split does not stop until at least one valid partition of the node samples is found, even if it requires to effectively inspect more than ``max_features`` features. max_leaf_nodes : int, default=None Grow trees with ``max_leaf_nodes`` in best-first fashion. Best nodes are defined as relative reduction in impurity. If None then unlimited number of leaf nodes. min_impurity_decrease : float, default=0.0 A node will be split if this split induces a decrease of the impurity greater than or equal to this value. The weighted impurity decrease equation is the following:: N_t / N * (impurity - N_t_R / N_t * right_impurity - N_t_L / N_t * left_impurity) where ``N`` is the total number of samples, ``N_t`` is the number of samples at the current node, ``N_t_L`` is the number of samples in the left child, and ``N_t_R`` is the number of samples in the right child. ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum, if ``sample_weight`` is passed. .. 
versionadded:: 0.19 bootstrap : bool, default=False Whether bootstrap samples are used when building trees. If False, the whole dataset is used to build each tree. oob_score : bool, default=False Whether to use out-of-bag samples to estimate the generalization score. Only available if bootstrap=True. n_jobs : int, default=None The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`, :meth:`decision_path` and :meth:`apply` are all parallelized over the trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance or None, default=None Controls 3 sources of randomness: - the bootstrapping of the samples used when building trees (if ``bootstrap=True``) - the sampling of the features to consider when looking for the best split at each node (if ``max_features < n_features``) - the draw of the splits for each of the `max_features` See :term:`Glossary <random_state>` for details. verbose : int, default=0 Controls the verbosity when fitting and predicting. warm_start : bool, default=False When set to ``True``, reuse the solution of the previous call to fit and add more estimators to the ensemble, otherwise, just fit a whole new forest. See :term:`the Glossary <warm_start>`. ccp_alpha : non-negative float, default=0.0 Complexity parameter used for Minimal Cost-Complexity Pruning. The subtree with the largest cost complexity that is smaller than ``ccp_alpha`` will be chosen. By default, no pruning is performed. See :ref:`minimal_cost_complexity_pruning` for details. .. versionadded:: 0.22 max_samples : int or float, default=None If bootstrap is True, the number of samples to draw from X to train each base estimator. - If None (default), then draw `X.shape[0]` samples. - If int, then draw `max_samples` samples. - If float, then draw `max_samples * X.shape[0]` samples. Thus, `max_samples` should be in the interval `(0.0, 1.0]`. .. versionadded:: 0.22 Attributes ---------- base_estimator_ : ExtraTreeRegressor The child estimator template used to create the collection of fitted sub-estimators. estimators_ : list of DecisionTreeRegressor The collection of fitted sub-estimators. feature_importances_ : ndarray of shape (n_features,) The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. n_features_ : int The number of features. .. deprecated:: 1.0 Attribute `n_features_` was deprecated in version 1.0 and will be removed in 1.2. Use `n_features_in_` instead. n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 feature_names_in_ : ndarray of shape (`n_features_in_`,) Names of features seen during :term:`fit`. Defined only when `X` has feature names that are all strings. .. versionadded:: 1.0 n_outputs_ : int The number of outputs. oob_score_ : float Score of the training dataset obtained using an out-of-bag estimate. This attribute exists only when ``oob_score`` is True. oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs) Prediction computed with out-of-bag estimate on the training set. 
This attribute exists only when ``oob_score`` is True. See Also -------- ExtraTreesClassifier : An extra-trees classifier with random splits. RandomForestClassifier : A random forest classifier with optimal splits. RandomForestRegressor : Ensemble regressor using trees with optimal splits. Notes ----- The default values for the parameters controlling the size of the trees (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and unpruned trees which can potentially be very large on some data sets. To reduce memory consumption, the complexity and size of the trees should be controlled by setting those parameter values. References ---------- .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", Machine Learning, 63(1), 3-42, 2006. Examples -------- >>> from sklearn.datasets import load_diabetes >>> from sklearn.model_selection import train_test_split >>> from sklearn.ensemble import ExtraTreesRegressor >>> X, y = load_diabetes(return_X_y=True) >>> X_train, X_test, y_train, y_test = train_test_split( ... X, y, random_state=0) >>> reg = ExtraTreesRegressor(n_estimators=100, random_state=0).fit( ... X_train, y_train) >>> reg.score(X_test, y_test) 0.2708... """ def __init__( self, n_estimators=100, *, criterion="squared_error", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_features=1.0, max_leaf_nodes=None, min_impurity_decrease=0.0, bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, ccp_alpha=0.0, max_samples=None, ): super().__init__( base_estimator=ExtraTreeRegressor(), n_estimators=n_estimators, estimator_params=( "criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_decrease", "random_state", "ccp_alpha", ), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=max_samples, ) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.ccp_alpha = ccp_alpha class RandomTreesEmbedding(BaseForest): """ An ensemble of totally random trees. An unsupervised transformation of a dataset to a high-dimensional sparse representation. A datapoint is coded according to which leaf of each tree it is sorted into. Using a one-hot encoding of the leaves, this leads to a binary coding with as many ones as there are trees in the forest. The dimensionality of the resulting representation is ``n_out <= n_estimators * max_leaf_nodes``. If ``max_leaf_nodes == None``, the number of leaf nodes is at most ``n_estimators * 2 ** max_depth``. Read more in the :ref:`User Guide <random_trees_embedding>`. Parameters ---------- n_estimators : int, default=100 Number of trees in the forest. .. versionchanged:: 0.22 The default value of ``n_estimators`` changed from 10 to 100 in 0.22. max_depth : int, default=5 The maximum depth of each tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. min_samples_split : int or float, default=2 The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. 
- If float, then `min_samples_split` is a fraction and `ceil(min_samples_split * n_samples)` is the minimum number of samples for each split. .. versionchanged:: 0.18 Added float values for fractions. min_samples_leaf : int or float, default=1 The minimum number of samples required to be at a leaf node. A split point at any depth will only be considered if it leaves at least ``min_samples_leaf`` training samples in each of the left and right branches. This may have the effect of smoothing the model, especially in regression. - If int, then consider `min_samples_leaf` as the minimum number. - If float, then `min_samples_leaf` is a fraction and `ceil(min_samples_leaf * n_samples)` is the minimum number of samples for each node. .. versionchanged:: 0.18 Added float values for fractions. min_weight_fraction_leaf : float, default=0.0 The minimum weighted fraction of the sum total of weights (of all the input samples) required to be at a leaf node. Samples have equal weight when sample_weight is not provided. max_leaf_nodes : int, default=None Grow trees with ``max_leaf_nodes`` in best-first fashion. Best nodes are defined as relative reduction in impurity. If None then unlimited number of leaf nodes. min_impurity_decrease : float, default=0.0 A node will be split if this split induces a decrease of the impurity greater than or equal to this value. The weighted impurity decrease equation is the following:: N_t / N * (impurity - N_t_R / N_t * right_impurity - N_t_L / N_t * left_impurity) where ``N`` is the total number of samples, ``N_t`` is the number of samples at the current node, ``N_t_L`` is the number of samples in the left child, and ``N_t_R`` is the number of samples in the right child. ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum, if ``sample_weight`` is passed. .. versionadded:: 0.19 sparse_output : bool, default=True Whether or not to return a sparse CSR matrix, as default behavior, or to return a dense array compatible with dense pipeline operators. n_jobs : int, default=None The number of jobs to run in parallel. :meth:`fit`, :meth:`transform`, :meth:`decision_path` and :meth:`apply` are all parallelized over the trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance or None, default=None Controls the generation of the random `y` used to fit the trees and the draw of the splits for each feature at the trees' nodes. See :term:`Glossary <random_state>` for details. verbose : int, default=0 Controls the verbosity when fitting and predicting. warm_start : bool, default=False When set to ``True``, reuse the solution of the previous call to fit and add more estimators to the ensemble, otherwise, just fit a whole new forest. See :term:`the Glossary <warm_start>`. Attributes ---------- base_estimator_ : :class:`~sklearn.tree.ExtraTreeClassifier` instance The child estimator template used to create the collection of fitted sub-estimators. estimators_ : list of :class:`~sklearn.tree.ExtraTreeClassifier` instances The collection of fitted sub-estimators. feature_importances_ : ndarray of shape (n_features,) The feature importances (the higher, the more important the feature). n_features_ : int The number of features when ``fit`` is performed. .. deprecated:: 1.0 Attribute `n_features_` was deprecated in version 1.0 and will be removed in 1.2. Use `n_features_in_` instead. 
n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 feature_names_in_ : ndarray of shape (`n_features_in_`,) Names of features seen during :term:`fit`. Defined only when `X` has feature names that are all strings. .. versionadded:: 1.0 n_outputs_ : int The number of outputs when ``fit`` is performed. one_hot_encoder_ : OneHotEncoder instance One-hot encoder used to create the sparse embedding. See Also -------- ExtraTreesClassifier : An extra-trees classifier. ExtraTreesRegressor : An extra-trees regressor. RandomForestClassifier : A random forest classifier. RandomForestRegressor : A random forest regressor. sklearn.tree.ExtraTreeClassifier: An extremely randomized tree classifier. sklearn.tree.ExtraTreeRegressor : An extremely randomized tree regressor. References ---------- .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", Machine Learning, 63(1), 3-42, 2006. .. [2] Moosmann, F. and Triggs, B. and Jurie, F. "Fast discriminative visual codebooks using randomized clustering forests" NIPS 2007 Examples -------- >>> from sklearn.ensemble import RandomTreesEmbedding >>> X = [[0,0], [1,0], [0,1], [-1,0], [0,-1]] >>> random_trees = RandomTreesEmbedding( ... n_estimators=5, random_state=0, max_depth=1).fit(X) >>> X_sparse_embedding = random_trees.transform(X) >>> X_sparse_embedding.toarray() array([[0., 1., 1., 0., 1., 0., 0., 1., 1., 0.], [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.], [0., 1., 0., 1., 0., 1., 0., 1., 0., 1.], [1., 0., 1., 0., 1., 0., 1., 0., 1., 0.], [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.]]) """ criterion = "squared_error" max_features = 1 def __init__( self, n_estimators=100, *, max_depth=5, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0, max_leaf_nodes=None, min_impurity_decrease=0.0, sparse_output=True, n_jobs=None, random_state=None, verbose=0, warm_start=False, ): super().__init__( base_estimator=ExtraTreeRegressor(), n_estimators=n_estimators, estimator_params=( "criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_decrease", "random_state", ), bootstrap=False, oob_score=False, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=None, ) self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.sparse_output = sparse_output def _set_oob_score_and_attributes(self, X, y): raise NotImplementedError("OOB score not supported by tree embedding") def fit(self, X, y=None, sample_weight=None): """ Fit estimator. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Use ``dtype=np.float32`` for maximum efficiency. Sparse matrices are also supported, use sparse ``csc_matrix`` for maximum efficiency. y : Ignored Not used, present for API consistency by convention. sample_weight : array-like of shape (n_samples,), default=None Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node. Returns ------- self : object Returns the instance itself. 
""" self.fit_transform(X, y, sample_weight=sample_weight) return self def fit_transform(self, X, y=None, sample_weight=None): """ Fit estimator and transform dataset. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Input data used to build forests. Use ``dtype=np.float32`` for maximum efficiency. y : Ignored Not used, present for API consistency by convention. sample_weight : array-like of shape (n_samples,), default=None Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node. Returns ------- X_transformed : sparse matrix of shape (n_samples, n_out) Transformed dataset. """ rnd = check_random_state(self.random_state) y = rnd.uniform(size=_num_samples(X)) super().fit(X, y, sample_weight=sample_weight) self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output) return self.one_hot_encoder_.fit_transform(self.apply(X)) def transform(self, X): """ Transform dataset. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Input data to be transformed. Use ``dtype=np.float32`` for maximum efficiency. Sparse matrices are also supported, use sparse ``csr_matrix`` for maximum efficiency. Returns ------- X_transformed : sparse matrix of shape (n_samples, n_out) Transformed dataset. """ check_is_fitted(self) return self.one_hot_encoder_.transform(self.apply(X))
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm

from accounts.models import UserProfile


class RegistrationForm(UserCreationForm):
    email = forms.EmailField(required=True)

    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        for fieldname in ['username', 'password1', 'password2']:
            self.fields[fieldname].help_text = None

    class Meta:
        model = User
        fields = (
            'username',
            'first_name',
            'last_name',
            'email',
            'password1',
            'password2',
        )

    def save(self, commit=True):
        user = super(RegistrationForm, self).save(commit=False)
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.email = self.cleaned_data['email']
        if commit:
            user.save()
        return user


class EditProfileForm(forms.ModelForm):
    class Meta:
        model = User
        fields = (
            'email',
            'first_name',
            'last_name',
        )


class EditProfileForm2(forms.ModelForm):
    class Meta:
        model = UserProfile
        fields = (
            'description',
            'city',
            'website',
            'phone',
            'image',
        )
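# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original app): a view that edits
# the User form and the UserProfile form together. The view name, template
# path, URL name and the ``userprofile`` reverse accessor are assumptions.
# ---------------------------------------------------------------------------
from django.shortcuts import redirect, render


def edit_profile(request):
    user_form = EditProfileForm(request.POST or None, instance=request.user)
    profile_form = EditProfileForm2(
        request.POST or None,
        request.FILES or None,               # required for the 'image' field
        instance=request.user.userprofile,   # assumes a OneToOne accessor
    )
    if request.method == 'POST' and user_form.is_valid() and profile_form.is_valid():
        user_form.save()
        profile_form.save()
        return redirect('profile')           # assumed URL name
    return render(request, 'accounts/edit_profile.html',
                  {'user_form': user_form, 'profile_form': profile_form})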
import asyncio
from typing import NoReturn

import pytest
from pytest_asyncio.plugin import event_loop

from periodic_coroutine import Periodic


async def awake_msg() -> str:
    await asyncio.sleep(0.5)
    return "Good Morning!"


async def just_raise() -> NoReturn:
    await asyncio.sleep(0.1)
    raise ValueError("Not a case of asyncio.CancelledError or asyncio.TimeoutError")


@pytest.mark.asyncio
async def test_interval_is_longer_than_blocking(event_loop):
    """
    *awake_msg* blocks for 0.5 seconds, which is shorter than the 2 second
    period - everything should proceed fine
    """
    p = Periodic(awake_msg, 2.0)
    event_loop.call_soon(p.start)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to start
    res = await p.get_result()
    assert res == "Good Morning!"
    event_loop.call_soon(p.stop)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to end


@pytest.mark.asyncio
async def test_interval_is_shorter_than_blocking_without_ignore(event_loop):
    """
    *awake_msg* blocks for 0.5 seconds, which is longer than the 0.2 second
    period - if we choose not to ignore exceptions (default), *Periodic* will
    raise an *AttributeError* since _result is not set (and never will be).
    """
    p = Periodic(awake_msg, 0.2)
    event_loop.call_soon(p.start)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to start
    with pytest.raises(AttributeError):
        await p.get_result()
    event_loop.call_soon(p.stop)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to end


@pytest.mark.asyncio
async def test_interval_is_shorter_than_blocking_with_ignore(event_loop):
    """
    *awake_msg* blocks for 0.5 seconds, which is longer than the 0.2 second
    period - if we choose to ignore exceptions, *Periodic* will chug along,
    although always cancelling the task and awaiting *get_result* will result
    in a sentinel value
    """
    p = Periodic(awake_msg, 0.2, ignore_exceptions=True)
    event_loop.call_soon(p.start)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to start
    res = await p.get_result()
    assert res is not None  # Chugs along, with nothing meaningful as a result
    assert p._main in asyncio.all_tasks(event_loop)  # Periodic is still running
    assert p.running
    event_loop.call_soon(p.stop)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to end


@pytest.mark.asyncio
async def test_coroutine_throws_unhandled_exception(event_loop):
    """
    Let the coroutine throw an exception which is not handled - this affects
    the inner task and also the main task. This test serves as a reminder that
    a coroutine and its error handling are tightly coupled: to have a robust
    periodic scheduling mechanism, one needs to design it more carefully...
    """
    p = Periodic(just_raise, 1, ignore_exceptions=False)
    event_loop.call_soon(p.start)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to start
    with pytest.raises(ValueError):
        await p._task
    await asyncio.sleep(1.5)  # Wait for any necessary clean-up
    assert p._main not in asyncio.all_tasks(event_loop)  # Periodic is NOT running...
    assert p.running  # ...but the state was not switched because of the exception
    assert len(asyncio.all_tasks()) == 1  # Only the test coroutine is running
    event_loop.call_soon(p.stop)
    await asyncio.sleep(0.01)  # Yielding control to the loop is necessary to end
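# ---------------------------------------------------------------------------
# Illustrative sketch (not the real periodic_coroutine module): a minimal
# Periodic exposing the interface the tests above rely on (start/stop,
# get_result, running, _task, _main, ignore_exceptions). The timeout handling,
# the run spacing and the sentinel value are assumptions for illustration.
# ---------------------------------------------------------------------------
class PeriodicSketch:
    _SENTINEL = object()

    def __init__(self, coro_factory, interval, ignore_exceptions=False):
        self._coro_factory = coro_factory
        self._interval = interval
        self._ignore = ignore_exceptions
        self.running = False

    def start(self):
        self.running = True
        self._main = asyncio.ensure_future(self._loop())

    def stop(self):
        self.running = False
        self._main.cancel()

    async def _loop(self):
        while self.running:
            self._task = asyncio.ensure_future(self._coro_factory())
            try:
                # Give each run at most one period; an unhandled exception in
                # the coroutine propagates here and kills this main task.
                # (A real implementation would also space runs by the period.)
                self._result = await asyncio.wait_for(
                    asyncio.shield(self._task), timeout=self._interval)
            except (asyncio.TimeoutError, asyncio.CancelledError):
                self._task.cancel()
                if self._ignore:
                    self._result = self._SENTINEL  # sentinel, never None

    async def get_result(self):
        try:
            return await asyncio.shield(self._task)
        except asyncio.CancelledError:
            # The current run was cancelled; fall back to the stored result
            # (raises AttributeError if no result was ever produced).
            return self._result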
/* global exports */

/**
 * @param {string} property - method to call on object
 * @param {number} argsCount - number of (curried) arguments
 * @param {effectRunnerWrapper} effectRunnerWrapper - a function to override
 * effect runner with. `toAffE` for `Aff`, `identity` for `Effect`.
 *
 * E.g. these are equivalent:
 *
 *   function (browser) {
 *     return function () {
 *       return browser.close();
 *     };
 *   }
 *
 * and
 *
 *   effectfulGetter('close', 0, identity);
 */
function effectfulGetter (property, argsCount, effectRunnerWrapper) {
  var args = [];
  return function (object) {
    function effectRunner () {
      return object[property].apply(object, args);
    }
    var affectRunner = effectRunnerWrapper(effectRunner);
    function chooseNext () {
      return argsCount > 0 ? argsConsumer : affectRunner;
    }
    function argsConsumer (arg) {
      if (argsCount === 0) {
        return affectRunner;
      } else {
        args.push(arg);
        argsCount--;
        return chooseNext();
      }
    }
    return chooseNext();
  };
}

function identity (x) {
  return x;
}

exports.unsafeEffCall = function (method) {
  return function (argsCount) {
    return effectfulGetter(method, argsCount, identity);
  };
};

exports.unsafeAffCall = function (toAffE) {
  return function (method) {
    return function (argsCount) {
      return effectfulGetter(method, argsCount, toAffE);
    };
  };
};

exports.effProp = function (prop) {
  return function (object) {
    return function () {
      return object[prop];
    };
  };
};
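// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original module), assuming a
// hypothetical object `page` with a one-argument `goto` method:
//
//   var run = exports.unsafeEffCall('goto')(1)(page)('https://example.com');
//   run(); // calls page.goto('https://example.com') when the Effect executes
// ---------------------------------------------------------------------------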
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _react = _interopRequireDefault(require("react")); var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _default = (0, _createSvgIcon.default)(_react.default.createElement(_react.default.Fragment, null, _react.default.createElement("path", { fill: "none", d: "M0 0h24v24H0V0z" }), _react.default.createElement("g", null, _react.default.createElement("path", { d: "M21 4H3c-.55 0-1 .45-1 1v14c0 .55.45 1 1 1h18c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM8 18H4V6h4v12zm6 0h-4V6h4v12zm6 0h-4V6h4v12z" }), _react.default.createElement("path", { d: "M10 6h4v12h-4zM16 6h4v12h-4zM4 6h4v12H4z", opacity: ".3" }))), 'ViewWeekTwoTone'); exports.default = _default;
/** * External dependencies */ import { __ } from '@wordpress/i18n'; import { registerBlockType } from '@wordpress/blocks'; import { DEFAULT_COLUMNS } from '@woocommerce/block-settings'; import { Icon, widgets } from '@woocommerce/icons'; /** * Internal dependencies */ import './editor.scss'; import Block from './block'; import { deprecatedConvertToShortcode } from '../../utils/deprecations'; registerBlockType( 'woocommerce/handpicked-products', { title: __( 'Hand-picked Products', 'woocommerce' ), icon: { src: <Icon srcElement={ widgets } />, foreground: '#96588a', }, category: 'woocommerce', keywords: [ __( 'Handpicked Products', 'woocommerce' ), __( 'WooCommerce', 'woocommerce' ), ], description: __( 'Display a selection of hand-picked products in a grid.', 'woocommerce' ), supports: { align: [ 'wide', 'full' ], html: false, }, example: { attributes: { isPreview: true, }, }, attributes: { /** * Alignment of product grid */ align: { type: 'string', }, /** * Number of columns. */ columns: { type: 'number', default: DEFAULT_COLUMNS, }, /** * Toggle for edit mode in the block preview. */ editMode: { type: 'boolean', default: true, }, /** * Content visibility setting */ contentVisibility: { type: 'object', default: { title: true, price: true, rating: true, button: true, }, }, /** * How to order the products: 'date', 'popularity', 'price_asc', 'price_desc' 'rating', 'title'. */ orderby: { type: 'string', default: 'date', }, /** * The list of product IDs to display */ products: { type: 'array', default: [], }, /** * How to align cart buttons. */ alignButtons: { type: 'boolean', default: false, }, /** * Are we previewing? */ isPreview: { type: 'boolean', default: false, }, }, deprecated: [ { // Deprecate shortcode save method in favor of dynamic rendering. attributes: { align: { type: 'string', }, columns: { type: 'number', default: DEFAULT_COLUMNS, }, editMode: { type: 'boolean', default: true, }, contentVisibility: { type: 'object', default: { title: true, price: true, rating: true, button: true, }, }, orderby: { type: 'string', default: 'date', }, products: { type: 'array', default: [], }, }, save: deprecatedConvertToShortcode( 'woocommerce/handpicked-products' ), }, ], /** * Renders and manages the block. * * @param {Object} props Props to pass to block. */ edit( props ) { return <Block { ...props } />; }, save() { return null; }, } );
(function(d3, fc) {
    'use strict';

    var data = fc.data.random.financial().startDate(new Date(2014, 1, 1))(50);

    var width = 600, height = 250;

    var container = d3.select('#envelope')
        .append('svg')
        .attr('width', width)
        .attr('height', height);

    // Create scale for x axis
    var dateScale = fc.scale.dateTime()
        .domain(fc.util.extent().fields('date')(data))
        .range([0, width])
        .nice();

    // Create scale for y axis
    var priceScale = d3.scale.linear()
        .domain(fc.util.extent().fields(['high', 'low'])(data))
        .range([height, 0])
        .nice();

    // Create the candlestick series
    var candlestick = fc.series.candlestick()
        .xScale(dateScale)
        .yScale(priceScale);

    // Create and apply the EMA
    var movingAverage = fc.indicator.algorithm.exponentialMovingAverage();
    movingAverage(data);

    // Create a line that renders the result
    var ema = fc.series.line()
        .yValue(function(d) { return d.exponentialMovingAverage; })
        .xScale(dateScale)
        .yScale(priceScale);

    // Add it to the container
    container.append('g')
        .datum(data)
        .call(ema);

    // Create and apply the envelopes algorithm to the exponential moving average
    var envelopeAlgorithm = fc.indicator.algorithm.envelope()
        .factor(0.01)
        .value(function(d) { return d.exponentialMovingAverage; });
    envelopeAlgorithm(data);

    // Create the renderer
    var envelope = fc.indicator.renderer.envelope()
        .xScale(dateScale)
        .yScale(priceScale);

    // Add it to the container
    container.append('g')
        .datum(data)
        .call(envelope);

    // Add the candlestick series to the container
    container.append('g')
        .datum(data)
        .call(candlestick);
})(d3, fc);
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy
import functools

from oslo_context import context
from oslo_utils import timeutils
import six

from zun.common import exception
from zun.common import policy


class RequestContext(context.RequestContext):
    """Extends security contexts from the OpenStack common library."""

    def __init__(self, auth_token=None, domain_id=None,
                 domain_name=None, user_name=None, user_id=None,
                 user_domain_name=None, user_domain_id=None,
                 project_name=None, project_id=None, roles=None,
                 is_admin=None, read_only=False, show_deleted=False,
                 request_id=None, trust_id=None, auth_token_info=None,
                 all_projects=False, password=None, timestamp=None, **kwargs):
        """Stores several additional request parameters:

        :param domain_id: The ID of the domain.
        :param domain_name: The name of the domain.
        :param user_domain_id: The ID of the domain to
                               authenticate a user against.
        :param user_domain_name: The name of the domain to
                                 authenticate a user against.
        """
        super(RequestContext, self).__init__(auth_token=auth_token,
                                             user_id=user_id,
                                             project_id=project_id,
                                             is_admin=is_admin,
                                             read_only=read_only,
                                             show_deleted=show_deleted,
                                             request_id=request_id,
                                             roles=roles)
        self.user_name = user_name
        self.user_id = user_id
        self.project_name = project_name
        self.project_id = project_id
        self.domain_id = domain_id
        self.domain_name = domain_name
        self.user_domain_id = user_domain_id
        self.user_domain_name = user_domain_name
        self.auth_token_info = auth_token_info
        self.trust_id = trust_id
        self.all_projects = all_projects
        self.password = password
        if is_admin is None:
            self.is_admin = policy.check_is_admin(self)
        else:
            self.is_admin = is_admin
        if not timestamp:
            timestamp = timeutils.utcnow()
        if isinstance(timestamp, six.string_types):
            timestamp = timeutils.parse_strtime(timestamp)
        self.timestamp = timestamp

    def to_dict(self):
        value = super(RequestContext, self).to_dict()
        value.update({'auth_token': self.auth_token,
                      'domain_id': self.domain_id,
                      'domain_name': self.domain_name,
                      'user_domain_id': self.user_domain_id,
                      'user_domain_name': self.user_domain_name,
                      'user_name': self.user_name,
                      'user_id': self.user_id,
                      'project_name': self.project_name,
                      'project_id': self.project_id,
                      'is_admin': self.is_admin,
                      'read_only': self.read_only,
                      'roles': self.roles,
                      'show_deleted': self.show_deleted,
                      'request_id': self.request_id,
                      'trust_id': self.trust_id,
                      'auth_token_info': self.auth_token_info,
                      'password': self.password,
                      'all_projects': self.all_projects,
                      'timestamp': timeutils.strtime(self.timestamp) if
                      hasattr(self, 'timestamp') else None
                      })
        return value

    def to_policy_values(self):
        values = super(RequestContext, self).to_policy_values()
        values['is_admin'] = self.is_admin
        return values

    @classmethod
    def from_dict(cls, values):
        return cls(**values)

    def elevated(self):
        """Return a version of this context with admin flag set."""
        context = copy.copy(self)
        # context.roles must be deepcopied to leave original roles
        # without changes
        context.roles = copy.deepcopy(self.roles)
        context.is_admin = True
        if 'admin' not in context.roles:
            context.roles.append('admin')
        return context

    def can(self, action, target=None, fatal=True, might_not_exist=False):
        """Verifies that the given action is valid on the target in this context.

        :param action: string representing the action to be checked.
        :param target: dictionary representing the object of the action for
            object creation this should be a dictionary representing the
            location of the object e.g. ``{'project_id': context.project_id}``.
            If None, then this default target will be considered:
            {'project_id': self.project_id, 'user_id': self.user_id}
        :param fatal: if False, will return False when an
            exception.NotAuthorized occurs.
        :param might_not_exist: If True the policy check is skipped (and the
            function returns True) if the specified policy does not exist.
            Defaults to false.

        :raises zun.common.exception.NotAuthorized: if verification fails and
            fatal is True.

        :return: returns a non-False value (not necessarily "True") if
            authorized and False if not authorized and fatal is False.
        """
        if target is None:
            target = {'project_id': self.project_id,
                      'user_id': self.user_id}
        try:
            return policy.authorize(self, action, target,
                                    might_not_exist=might_not_exist)
        except exception.NotAuthorized:
            if fatal:
                raise
            return False


def make_context(*args, **kwargs):
    return RequestContext(*args, **kwargs)


def get_admin_context(show_deleted=False, all_projects=False):
    """Create an administrator context.

    :param show_deleted: if True, will show deleted items when query db
    :param all_projects: if True, include resources from all projects
    """
    context = RequestContext(user_id=None,
                             project_id=None,
                             is_admin=True,
                             show_deleted=show_deleted,
                             all_projects=all_projects)
    return context


def set_context(func):
    @functools.wraps(func)
    def handler(self, ctx):
        if ctx is None:
            ctx = get_admin_context(all_projects=True)
        func(self, ctx)
    return handler
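# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): how a caller
# might combine ``can`` and ``elevated``. The policy action name and the
# container object below are assumptions made for illustration.
# ---------------------------------------------------------------------------
def _delete_container_example(ctx, container):
    # Enforce the (assumed) policy rule against the owning project first;
    # this raises NotAuthorized if the check fails.
    ctx.can('container:delete', target={'project_id': container.project_id})
    # Then perform privileged cleanup with an admin-flagged copy of the
    # context; the original ctx and its roles are left untouched.
    admin_ctx = ctx.elevated()
    return admin_ctx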
var Promise = require('bluebird');
var parseDeviceType = require('./parser.devicetype.js');
var parseRoom = require('./parser.room.js');
var parseHouse = require('./parser.house.js');
var parseTime = require('./parser.time.js');
var parseChannel = require('./parser.channel.js');

module.exports.parse = function parse(text) {
  var deviceTypes = [];
  var rooms = [];
  var houses = [];
  var times = [];
  var allHouses = [];   // declared here to avoid leaking implicit globals
  var channel = null;   // declared here to avoid leaking implicit globals
  var replacedText = '';

  return parseRoom(text)
    .then((result) => {
      rooms = result.rooms;
      return parseDeviceType(result.text);
    })
    .then((result) => {
      deviceTypes = result.deviceTypes;
      return parseTime(result.text);
    })
    .then((result) => {
      times = result.times;
      return parseHouse(result.text);
    })
    .then((result) => {
      houses = result.houses;
      allHouses = result.allHouses;
      return parseChannel(result.text);
    })
    .then((result) => {
      channel = result.channel;
      replacedText = result.text;
      return Promise.resolve({deviceTypes, rooms, houses, channel, times, replacedText, allHouses});
    });
};
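// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original module); the module
// path, the sample sentence and the shape of the sub-parser results are
// assumptions made for illustration:
//
//   require('./parser').parse('turn on the lights in the kitchen at 8pm')
//     .then((result) => {
//       console.log(result.deviceTypes, result.rooms, result.times);
//     });
// ---------------------------------------------------------------------------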
var namespaceg24__lib = [ [ "CUDA_kernel_size", "namespaceg24__lib_1_1_c_u_d_a__kernel__size.html", "namespaceg24__lib_1_1_c_u_d_a__kernel__size" ], [ "exceptions", "namespaceg24__lib_1_1exceptions.html", "namespaceg24__lib_1_1exceptions" ], [ "internals", "namespaceg24__lib_1_1internals.html", "namespaceg24__lib_1_1internals" ], [ "MemoryContext", "namespaceg24__lib_1_1_memory_context.html", "namespaceg24__lib_1_1_memory_context" ], [ "NonMatrixLinearSolvers", "namespaceg24__lib_1_1_non_matrix_linear_solvers.html", "namespaceg24__lib_1_1_non_matrix_linear_solvers" ], [ "Parallelism", "namespaceg24__lib_1_1_parallelism.html", "namespaceg24__lib_1_1_parallelism" ], [ "RungeKuttaMethods", "namespaceg24__lib_1_1_runge_kutta_methods.html", null ], [ "view", "namespaceg24__lib_1_1view.html", "namespaceg24__lib_1_1view" ], [ "carried_bool_vector", "classg24__lib_1_1carried__bool__vector.html", "classg24__lib_1_1carried__bool__vector" ], [ "class_info", "structg24__lib_1_1class__info.html", "structg24__lib_1_1class__info" ], [ "coll", "classg24__lib_1_1coll.html", "classg24__lib_1_1coll" ], [ "comp", "classg24__lib_1_1comp.html", "classg24__lib_1_1comp" ], [ "constexpr_fraction", "structg24__lib_1_1constexpr__fraction.html", "structg24__lib_1_1constexpr__fraction" ], [ "CUDA_memory_manager", "structg24__lib_1_1_c_u_d_a__memory__manager.html", "structg24__lib_1_1_c_u_d_a__memory__manager" ], [ "default_memory_manager", "structg24__lib_1_1default__memory__manager.html", "structg24__lib_1_1default__memory__manager" ], [ "Definitions", "structg24__lib_1_1_definitions.html", "structg24__lib_1_1_definitions" ], [ "derivator", "classg24__lib_1_1derivator.html", "classg24__lib_1_1derivator" ], [ "dummy_function", "structg24__lib_1_1dummy__function.html", "structg24__lib_1_1dummy__function" ], [ "dummy_void_function", "structg24__lib_1_1dummy__void__function.html", "structg24__lib_1_1dummy__void__function" ], [ "empty_array", "structg24__lib_1_1empty__array.html", "structg24__lib_1_1empty__array" ], [ "fixed_return_array", "structg24__lib_1_1fixed__return__array.html", "structg24__lib_1_1fixed__return__array" ], [ "flat_imitation_struct", "structg24__lib_1_1flat__imitation__struct.html", "structg24__lib_1_1flat__imitation__struct" ], [ "fraction", "structg24__lib_1_1fraction.html", "structg24__lib_1_1fraction" ], [ "fspoint", "classg24__lib_1_1fspoint.html", "classg24__lib_1_1fspoint" ], [ "identity_functor", "structg24__lib_1_1identity__functor.html", "structg24__lib_1_1identity__functor" ], [ "managed_object", "classg24__lib_1_1managed__object.html", "classg24__lib_1_1managed__object" ], [ "memory_manager_base", "structg24__lib_1_1memory__manager__base.html", "structg24__lib_1_1memory__manager__base" ], [ "ndview", "classg24__lib_1_1ndview.html", "classg24__lib_1_1ndview" ], [ "nested_imitation_struct", "structg24__lib_1_1nested__imitation__struct.html", "structg24__lib_1_1nested__imitation__struct" ], [ "operation_array", "structg24__lib_1_1operation__array.html", "structg24__lib_1_1operation__array" ], [ "point", "classg24__lib_1_1point.html", "classg24__lib_1_1point" ], [ "return_ith_functor", "structg24__lib_1_1return__ith__functor.html", "structg24__lib_1_1return__ith__functor" ], [ "set_ith_functor", "structg24__lib_1_1set__ith__functor.html", "structg24__lib_1_1set__ith__functor" ], [ "simple_array", "classg24__lib_1_1simple__array.html", "classg24__lib_1_1simple__array" ], [ "simple_evolve_result", "structg24__lib_1_1simple__evolve__result.html", "structg24__lib_1_1simple__evolve__result" ], [ 
"split_member_array", "structg24__lib_1_1split__member__array.html", "structg24__lib_1_1split__member__array" ], [ "temporal_evolver", "classg24__lib_1_1temporal__evolver.html", "classg24__lib_1_1temporal__evolver" ], [ "value_padded_array", "structg24__lib_1_1value__padded__array.html", "structg24__lib_1_1value__padded__array" ], [ "wildcard", "structg24__lib_1_1wildcard.html", "structg24__lib_1_1wildcard" ] ];
/* Angel Kids / Space Position hardware driver driver by David Haywood with some help from Steph (DSWs, Inputs, other bits here and there) 2 Board System, Uses Boards X090-PC-A & X090-PC-B Both games appear to be joint Sega / Nasco efforts (although all I see in Angel Kids is 'Exa Planning' but I think that has something to do with Nasco ) Space Position is encrypted, the main processor is D317-0005 (NEC Z80 Custom), see machine/segacrpt.c for details on this encryption scheme */ /* started 23/01/2002 */ /* notes / todo: Decrypt Space Position Somehow (not something I can do) Unknown Reads / Writes Whats the Prom for? nothing important? Clock Speeds etc. Is the level order correct? the progress sprite on the side of the screen re-appears at the bottom when you get to the top, but the wrap-around is needed for other things, actual game bug? */ /* readme's ------------------------------------------------------------------------ Angel Kids 833-6599-01 Sega 1988 Nasco X090-PC-A (Sega 837-6600) SW1 SW2 8255 8255 11429 6116 Z80 YM2203 YM2203 11424 11425 11426 11427 - - - - 5M5165 11428 Z80 4MHz 6MHz Nasco X090-PC-B 2016-55 11437 11445 2016-55 2016-55 U5 11436 11444 11435 11443 11434 11442 11433 11441 2016-55 2016-55 11432 11440 11431 11439 11446 2016-55 11148 11147 2016-55 2016-55 2016-55 18.432MHz 11430 11438 ------------------------------------------------------------------------ Space Position (JPN Ver.) (c)1986 Sega / Nasco X090-PC-A 171-5383 X090-PC-B 171-5384 CPU :D317-0005 (NEC Z80 Custom) Sound :NEC D780C-1 :YM2203C x 2 OSC :4.000MHz 6.000MHz :18.432MHz EPR10120.C1 prg EPR10121.C2 | EPR10122.C3 | EPR10123.C4 | EPR10124.C5 | EPR10125.C10 / EPR10126.D4 snd EPR10127.06 EPR10128.07 EPR10129.08 EPR10130.14 EPR10131.15 EPR10132.16 EPR10133.17 EPR10134.18 EPR10135.19 63S081N.U5 --- Team Japump!!! --- Dumped by Chackn 02/25/2000 ------------------------------------------------------------------------ */ #include "emu.h" #include "cpu/z80/z80.h" #include "machine/segacrp2.h" #include "sound/2203intf.h" #include "includes/angelkds.h" static READ8_HANDLER( angelkds_main_sound_r ); static WRITE8_HANDLER( angelkds_main_sound_w ); static READ8_HANDLER( angelkds_sub_sound_r ); static WRITE8_HANDLER( angelkds_sub_sound_w ); /*** CPU Banking */ static WRITE8_HANDLER( angelkds_cpu_bank_write ) { memory_set_bank(space->machine(), "bank1", data & 0x0f); // shall we check (data & 0x0f) < # of available banks (8 or 10 resp.)? } /*** Fake Inputs these make the game a bit easier for testing purposes */ #define FAKEINPUTS 0 #if FAKEINPUTS static READ8_HANDLER( angelkds_input_r ) { int fake; static const char *const portnames[] = { "I81", "I82" }; static const char *const fakenames[] = { "FAKE1", "FAKE2" }; fake = input_port_read(space->machine(), fakenames[offset]); return ((fake & 0x01) ? fake : input_port_read(space->machine(), portnames[offset])); } #else static READ8_HANDLER( angelkds_input_r ) { static const char *const portnames[] = { "I81", "I82" }; return input_port_read(space->machine(), portnames[offset]); } #endif /*** Memory Structures Angel Kids: I would have expected f003 to be the scroll register for the bottom part of the screen, in the attract mode this works fine, but in the game it doesn't, so maybe it wasn't really hooked up and instead only one of the register (f001) is used for both part? update, it is correct, the screen is meant to split in two when the kid goes what would be offscreen, just looked kinda odd Interesting note, each Bank in the 0x8000 - 0xbfff appears to contain a level. 
*/ static ADDRESS_MAP_START( main_map, AS_PROGRAM, 8 ) AM_RANGE(0x0000, 0x7fff) AM_ROM AM_RANGE(0x8000, 0xbfff) AM_ROMBANK("bank1") AM_RANGE(0xc000, 0xdfff) AM_RAM AM_RANGE(0xe000, 0xe3ff) AM_RAM_WRITE(angelkds_bgtopvideoram_w) AM_BASE_MEMBER(angelkds_state, m_bgtopvideoram) /* Top Half of Screen */ AM_RANGE(0xe400, 0xe7ff) AM_RAM_WRITE(angelkds_bgbotvideoram_w) AM_BASE_MEMBER(angelkds_state, m_bgbotvideoram) /* Bottom Half of Screen */ AM_RANGE(0xe800, 0xebff) AM_RAM_WRITE(angelkds_txvideoram_w) AM_BASE_MEMBER(angelkds_state, m_txvideoram) AM_RANGE(0xec00, 0xecff) AM_RAM AM_BASE_MEMBER(angelkds_state, m_spriteram) AM_RANGE(0xed00, 0xeeff) AM_RAM_WRITE(angelkds_paletteram_w) AM_BASE_MEMBER(angelkds_state, m_paletteram) AM_RANGE(0xef00, 0xefff) AM_RAM AM_RANGE(0xf000, 0xf000) AM_WRITE(angelkds_bgtopbank_write) AM_RANGE(0xf001, 0xf001) AM_WRITE(angelkds_bgtopscroll_write) AM_RANGE(0xf002, 0xf002) AM_WRITE(angelkds_bgbotbank_write) AM_RANGE(0xf003, 0xf003) AM_WRITE(angelkds_bgbotscroll_write) AM_RANGE(0xf004, 0xf004) AM_WRITE(angelkds_txbank_write) AM_RANGE(0xf005, 0xf005) AM_WRITE(angelkds_layer_ctrl_write) ADDRESS_MAP_END static ADDRESS_MAP_START( main_portmap, AS_IO, 8 ) ADDRESS_MAP_GLOBAL_MASK(0xff) AM_RANGE(0x00, 0x00) AM_WRITENOP // 00 on start-up, not again AM_RANGE(0x42, 0x42) AM_WRITE(angelkds_cpu_bank_write) AM_RANGE(0x43, 0x43) AM_WRITENOP // 9a on start-up, not again AM_RANGE(0x40, 0x40) AM_READ_PORT("I40") /* "Coinage" Dip Switches */ AM_RANGE(0x41, 0x41) AM_READ_PORT("I41") /* Other Dip Switches */ AM_RANGE(0x42, 0x42) AM_READ_PORT("I42") /* Players inputs (not needed ?) */ AM_RANGE(0x80, 0x80) AM_READ_PORT("I80") /* System inputs */ AM_RANGE(0x81, 0x82) AM_READ(angelkds_input_r) /* Players inputs */ AM_RANGE(0x83, 0x83) AM_WRITENOP // 9b on start-up, not again AM_RANGE(0xc0, 0xc3) AM_READWRITE(angelkds_main_sound_r, angelkds_main_sound_w) // 02 various points ADDRESS_MAP_END /* sub cpu */ static ADDRESS_MAP_START( sub_map, AS_PROGRAM, 8 ) AM_RANGE(0x0000, 0x7fff) AM_ROM AM_RANGE(0x8000, 0x87ff) AM_RAM AM_RANGE(0xaaa9, 0xaaa9) AM_READNOP AM_RANGE(0xaaab, 0xaaab) AM_READNOP AM_RANGE(0xaaac, 0xaaac) AM_READNOP ADDRESS_MAP_END static ADDRESS_MAP_START( sub_portmap, AS_IO, 8 ) ADDRESS_MAP_GLOBAL_MASK(0xff) AM_RANGE(0x00, 0x01) AM_DEVREADWRITE("ym1", ym2203_r, ym2203_w) AM_RANGE(0x40, 0x41) AM_DEVREADWRITE("ym2", ym2203_r, ym2203_w) AM_RANGE(0x80, 0x83) AM_READWRITE(angelkds_sub_sound_r, angelkds_sub_sound_w) // spcpostn ADDRESS_MAP_END /* Input Ports */ #define ANGELDSK_PLAYERS_INPUT( player ) \ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_JOYSTICKRIGHT_UP ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_JOYSTICKRIGHT_DOWN ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_JOYSTICKRIGHT_LEFT ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_JOYSTICKRIGHT_RIGHT ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_JOYSTICKLEFT_UP ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_JOYSTICKLEFT_DOWN ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_JOYSTICKLEFT_LEFT ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_JOYSTICKLEFT_RIGHT ) PORT_PLAYER(player) PORT_8WAY #define ANGELDSK_FAKE_PLAYERS_INPUT( player ) \ PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_BUTTON2 ) PORT_PLAYER(player) /* To enter initials */ \ PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_UNKNOWN ) /* Unused */ \ PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_JOYSTICK_RIGHT ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x10, 
IP_ACTIVE_LOW, IPT_JOYSTICK_UP ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_JOYSTICK_DOWN ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_JOYSTICK_LEFT ) PORT_PLAYER(player) PORT_8WAY \ PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_BUTTON1 ) PORT_PLAYER(player) /* To shorten the rope and */ \ /* move right in hiscores table */ static INPUT_PORTS_START( angelkds ) /* Free Play: Set SW1:1-8 ON (A:Free Play & B:Free Play). Sound Test: Set SW1:1-8 ON (A:Free Play & B:Free Play), hold test switch and reboot. Joystick Test: Set SW1:1-7 ON & SW1:8 OFF (A:Free Play & B:3C_1C), hold test switch and reboot. Joystick Test Coin_A & Coin_B seem to be switched, only works when setting A to 3C_1C and B to Free Play. */ PORT_START("I40") /* inport $40 */ PORT_DIPNAME( 0xf0, 0xf0, DEF_STR( Coin_A ) ) PORT_DIPLOCATION("SW1:1,2,3,4") PORT_DIPSETTING( 0x70, DEF_STR( 4C_1C ) ) PORT_DIPSETTING( 0x80, DEF_STR( 3C_1C ) ) PORT_DIPSETTING( 0x90, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x60, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x50, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x40, DEF_STR( 2C_1C ) ) PORT_DIPSETTING( 0xf0, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x30, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x20, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x10, DEF_STR( 1C_1C ) ) PORT_DIPSETTING( 0xe0, DEF_STR( 1C_2C ) ) PORT_DIPSETTING( 0xd0, DEF_STR( 1C_3C ) ) PORT_DIPSETTING( 0xc0, DEF_STR( 1C_4C ) ) PORT_DIPSETTING( 0xb0, DEF_STR( 1C_5C ) ) PORT_DIPSETTING( 0xa0, DEF_STR( 1C_6C ) ) PORT_DIPSETTING( 0x00, DEF_STR( Free_Play ) ) PORT_DIPNAME( 0x0f, 0x0f, DEF_STR( Coin_B ) ) PORT_DIPLOCATION("SW1:5,6,7,8") PORT_DIPSETTING( 0x07, DEF_STR( 4C_1C ) ) PORT_DIPSETTING( 0x08, DEF_STR( 3C_1C ) ) PORT_DIPSETTING( 0x09, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x06, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x05, DEF_STR( 2C_1C ) ) // PORT_DIPSETTING( 0x04, DEF_STR( 2C_1C ) ) PORT_DIPSETTING( 0x0f, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x03, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x02, DEF_STR( 1C_1C ) ) // PORT_DIPSETTING( 0x01, DEF_STR( 1C_1C ) ) PORT_DIPSETTING( 0x0e, DEF_STR( 1C_2C ) ) PORT_DIPSETTING( 0x0d, DEF_STR( 1C_3C ) ) PORT_DIPSETTING( 0x0c, DEF_STR( 1C_4C ) ) PORT_DIPSETTING( 0x0b, DEF_STR( 1C_5C ) ) PORT_DIPSETTING( 0x0a, DEF_STR( 1C_6C ) ) PORT_DIPSETTING( 0x00, DEF_STR( Free_Play ) ) PORT_START("I41") /* inport $41 */ PORT_DIPNAME( 0x01, 0x00, DEF_STR( Cabinet ) ) PORT_DIPLOCATION("SW2:1") PORT_DIPSETTING( 0x00, DEF_STR( Upright ) ) PORT_DIPSETTING( 0x01, DEF_STR( Cocktail ) ) PORT_DIPNAME( 0x02, 0x00, "High Score Characters" ) PORT_DIPLOCATION("SW2:2") PORT_DIPSETTING( 0x00, "3" ) PORT_DIPSETTING( 0x02, "10" ) PORT_DIPNAME( 0x0c, 0x08, DEF_STR( Bonus_Life ) ) PORT_DIPLOCATION("SW2:3,4") PORT_DIPSETTING( 0x0c, "20k, 50k, 100k, 200k and 500k" ) PORT_DIPSETTING( 0x08, "50k, 100k, 200k and 500k" ) PORT_DIPSETTING( 0x04, "100k, 200k and 500k" ) PORT_DIPSETTING( 0x00, DEF_STR( None ) ) PORT_DIPNAME( 0x30, 0x30, DEF_STR( Lives ) ) PORT_DIPLOCATION("SW2:5,6") PORT_DIPSETTING( 0x30, "3" ) PORT_DIPSETTING( 0x20, "4" ) PORT_DIPSETTING( 0x10, "5" ) PORT_DIPSETTING( 0x00, "99 (Cheat)" ) PORT_DIPNAME( 0xc0, 0xc0, DEF_STR( Difficulty ) ) PORT_DIPLOCATION("SW2:7,8") /* Stored at 0xc023 */ PORT_DIPSETTING( 0xc0, DEF_STR( Very_Easy ) ) PORT_DIPSETTING( 0x40, DEF_STR( Easy ) ) PORT_DIPSETTING( 0x80, DEF_STR( Hard ) ) PORT_DIPSETTING( 0x00, DEF_STR( Very_Hard ) ) PORT_START("I42") /* inport $42 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x04, 
IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_UNKNOWN ) // duplicated IPT_JOYSTICK_LEFTRIGHT PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_8WAY // duplicated IPT_JOYSTICK_LEFTRIGHT PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_8WAY PORT_COCKTAIL PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_START("I80") /* inport $80 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_COIN1 ) PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_COIN2 ) PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_SERVICE1 ) PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_START1 ) PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_START2 ) PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_SERVICE( 0x80, IP_ACTIVE_LOW ) PORT_START("I81") /* inport $81 */ ANGELDSK_PLAYERS_INPUT( 1 ) PORT_START("I82") /* inport $82 */ ANGELDSK_PLAYERS_INPUT( 2 ) #if FAKEINPUTS /* Fake inputs to allow to play the game with 1 joystick instead of 2 */ PORT_START("FAKE1") PORT_DIPNAME( 0x01, 0x00, "FAKE (for debug) Joysticks (Player 1)" ) PORT_DIPSETTING( 0x01, "1" ) PORT_DIPSETTING( 0x00, "2" ) ANGELDSK_FAKE_PLAYERS_INPUT( 1 ) PORT_START("FAKE2") PORT_DIPNAME( 0x01, 0x00, "FAKE (for debug) Joysticks (Player 2)" ) PORT_DIPSETTING( 0x01, "1" ) PORT_DIPSETTING( 0x00, "2" ) ANGELDSK_FAKE_PLAYERS_INPUT( 2 ) #endif INPUT_PORTS_END static INPUT_PORTS_START( spcpostn ) PORT_START("I40") /* inport $40 */ PORT_DIPNAME( 0x0f, 0x0f, DEF_STR( Coin_A ) ) PORT_DIPLOCATION("SW1:1,2,3,4") PORT_DIPSETTING( 0x02, DEF_STR( 4C_1C ) ) PORT_DIPSETTING( 0x05, DEF_STR( 3C_1C ) ) PORT_DIPSETTING( 0x08, DEF_STR( 2C_1C ) ) PORT_DIPSETTING( 0x04, DEF_STR( 3C_2C ) ) PORT_DIPSETTING( 0x01, DEF_STR( 4C_3C ) ) PORT_DIPSETTING( 0x0f, DEF_STR( 1C_1C ) ) PORT_DIPSETTING( 0x03, DEF_STR( 3C_4C ) ) PORT_DIPSETTING( 0x07, DEF_STR( 2C_3C ) ) PORT_DIPSETTING( 0x0e, DEF_STR( 1C_2C ) ) PORT_DIPSETTING( 0x06, DEF_STR( 2C_5C ) ) PORT_DIPSETTING( 0x0d, DEF_STR( 1C_3C ) ) PORT_DIPSETTING( 0x0c, DEF_STR( 1C_4C ) ) PORT_DIPSETTING( 0x0b, DEF_STR( 1C_5C ) ) PORT_DIPSETTING( 0x0a, DEF_STR( 1C_6C ) ) PORT_DIPSETTING( 0x09, DEF_STR( 1C_7C ) ) PORT_DIPSETTING( 0x00, DEF_STR( Free_Play ) ) PORT_DIPNAME( 0xf0, 0xf0, DEF_STR( Coin_B ) ) PORT_DIPLOCATION("SW1:5,6,7,8") PORT_DIPSETTING( 0x00, DEF_STR( Off ) ) PORT_DIPSETTING( 0x20, DEF_STR( 4C_1C ) ) PORT_DIPSETTING( 0x50, DEF_STR( 3C_1C ) ) PORT_DIPSETTING( 0x80, DEF_STR( 2C_1C ) ) PORT_DIPSETTING( 0x40, DEF_STR( 3C_2C ) ) PORT_DIPSETTING( 0x10, DEF_STR( 4C_3C ) ) PORT_DIPSETTING( 0xf0, DEF_STR( 1C_1C ) ) PORT_DIPSETTING( 0x30, DEF_STR( 3C_4C ) ) PORT_DIPSETTING( 0x70, DEF_STR( 2C_3C ) ) PORT_DIPSETTING( 0xe0, DEF_STR( 1C_2C ) ) PORT_DIPSETTING( 0x60, DEF_STR( 2C_5C ) ) PORT_DIPSETTING( 0xd0, DEF_STR( 1C_3C ) ) PORT_DIPSETTING( 0xc0, DEF_STR( 1C_4C ) ) PORT_DIPSETTING( 0xb0, DEF_STR( 1C_5C ) ) PORT_DIPSETTING( 0xa0, DEF_STR( 1C_6C ) ) PORT_DIPSETTING( 0x90, DEF_STR( 1C_7C ) ) PORT_START("I41") /* inport $41 */ PORT_DIPNAME( 0x01, 0x01, DEF_STR(Allow_Continue ) ) PORT_DIPLOCATION("SW2:1") PORT_DIPSETTING( 0x01, DEF_STR( No ) ) PORT_DIPSETTING( 0x00, DEF_STR( Yes ) ) PORT_DIPNAME( 0x02, 0x02, "Obstruction Car" ) PORT_DIPLOCATION("SW2:2") PORT_DIPSETTING( 0x02, DEF_STR( Normal ) ) PORT_DIPSETTING( 0x00, DEF_STR( Hard ) ) PORT_DIPNAME( 0x0c, 0x08, "Time Limit" ) PORT_DIPLOCATION("SW2:3,4") PORT_DIPSETTING( 0x00, "1:10" ) PORT_DIPSETTING( 0x04, "1:20" ) PORT_DIPSETTING( 0x08, "1:30" ) PORT_DIPSETTING( 0x0c, "1:40" ) PORT_DIPNAME( 0x30, 0x20, "Power Down" ) PORT_DIPLOCATION("SW2:5,6") PORT_DIPSETTING( 0x30, 
"Slow" ) PORT_DIPSETTING( 0x20, DEF_STR( Normal ) ) PORT_DIPSETTING( 0x10, "Fast" ) PORT_DIPSETTING( 0x00, "Fastest" ) PORT_DIPNAME( 0x40, 0x00, DEF_STR( Demo_Sounds ) ) PORT_DIPLOCATION("SW2:7") PORT_DIPSETTING( 0x40, DEF_STR( Off ) ) PORT_DIPSETTING( 0x00, DEF_STR( On ) ) PORT_DIPUNUSED_DIPLOC( 0x80, 0x80, "SW2:8" ) /* Listed as "Unused" */ PORT_START("I42") /* inport $42 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_START("I80") /* inport $80 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_COIN1 ) PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_COIN2 ) PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_SERVICE1 ) PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_START1 ) PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_START2 ) PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNKNOWN ) PORT_SERVICE( 0x80, IP_ACTIVE_LOW ) PORT_START("I81") /* inport $81 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_JOYSTICK_UP ) PORT_PLAYER(1) PORT_8WAY PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_JOYSTICK_DOWN ) PORT_PLAYER(1) PORT_8WAY PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_JOYSTICK_LEFT ) PORT_PLAYER(1) PORT_8WAY PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_JOYSTICK_RIGHT ) PORT_PLAYER(1) PORT_8WAY PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_BUTTON1 ) PORT_PLAYER(1) PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_BUTTON2 ) PORT_PLAYER(1) PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNUSED ) PORT_PLAYER(1) // probably unused PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_UNUSED ) PORT_PLAYER(1) // probably unused PORT_START("I82") /* inport $82 */ PORT_BIT( 0x01, IP_ACTIVE_LOW, IPT_JOYSTICK_UP ) PORT_PLAYER(2) PORT_8WAY PORT_BIT( 0x02, IP_ACTIVE_LOW, IPT_JOYSTICK_DOWN ) PORT_PLAYER(2) PORT_8WAY PORT_BIT( 0x04, IP_ACTIVE_LOW, IPT_JOYSTICK_LEFT ) PORT_PLAYER(2) PORT_8WAY PORT_BIT( 0x08, IP_ACTIVE_LOW, IPT_JOYSTICK_RIGHT ) PORT_PLAYER(2) PORT_8WAY PORT_BIT( 0x10, IP_ACTIVE_LOW, IPT_BUTTON1 ) PORT_PLAYER(2) PORT_BIT( 0x20, IP_ACTIVE_LOW, IPT_BUTTON2 ) PORT_PLAYER(2) PORT_BIT( 0x40, IP_ACTIVE_LOW, IPT_UNUSED ) PORT_PLAYER(2) // probably unused PORT_BIT( 0x80, IP_ACTIVE_LOW, IPT_UNUSED ) PORT_PLAYER(2) // probably unused INPUT_PORTS_END /*** Sound Hardware todo: verify / correct things seems a bit strange are all the addresses really sound related ? */ static WRITE8_HANDLER( angelkds_main_sound_w ) { angelkds_state *state = space->machine().driver_data<angelkds_state>(); state->m_sound[offset] = data; } static READ8_HANDLER( angelkds_main_sound_r ) { angelkds_state *state = space->machine().driver_data<angelkds_state>(); return state->m_sound2[offset]; } static WRITE8_HANDLER( angelkds_sub_sound_w ) { angelkds_state *state = space->machine().driver_data<angelkds_state>(); state->m_sound2[offset] = data; } static READ8_HANDLER( angelkds_sub_sound_r ) { angelkds_state *state = space->machine().driver_data<angelkds_state>(); return state->m_sound[offset]; } static void irqhandler( device_t *device, int irq ) { angelkds_state *state = device->machine().driver_data<angelkds_state>(); device_set_input_line(state->m_subcpu, 0, irq ? 
ASSERT_LINE : CLEAR_LINE); } static const ym2203_interface ym2203_config = { { AY8910_LEGACY_OUTPUT, AY8910_DEFAULT_LOADS, DEVCB_NULL, DEVCB_NULL, DEVCB_NULL, DEVCB_NULL }, irqhandler }; /*** Graphics Decoding all the 8x8 tiles are in one format, the 16x16 sprites in another */ static const gfx_layout angelkds_charlayout = { 8,8, RGN_FRAC(1,1), 4, { 0,1,2,3 }, { 0, 4, 8, 12, 16, 20, 24, 28 }, { 0*32, 1*32, 2*32, 3*32, 4*32, 5*32, 6*32, 7*32 }, 8*32 }; static const gfx_layout angelkds_spritelayout = { 16,16, RGN_FRAC(1,2), 4, { 0,4, RGN_FRAC(1,2)+0, RGN_FRAC(1,2)+4 }, { 0, 1, 2, 3, 8, 9, 10, 11, 16,17,18,19, 24,25,26,27 }, { 0*32, 1*32, 2*32, 3*32, 4*32, 5*32, 6*32, 7*32, 8*32, 9*32, 10*32, 11*32, 12*32, 13*32, 14*32, 15*32 }, 16*32 }; static GFXDECODE_START( angelkds ) GFXDECODE_ENTRY( "gfx1", 0, angelkds_charlayout, 0x30, 1 ) GFXDECODE_ENTRY( "gfx3", 0, angelkds_charlayout, 0, 16 ) GFXDECODE_ENTRY( "gfx4", 0, angelkds_charlayout, 0, 16 ) GFXDECODE_ENTRY( "gfx2", 0, angelkds_spritelayout, 0x20, 0x0d ) GFXDECODE_END /*** Machine Driver 2 x z80 (one for game, one for sound) 2 x YM2203 (for sound) all fairly straightforward */ static MACHINE_START( angelkds ) { angelkds_state *state = machine.driver_data<angelkds_state>(); state->m_subcpu = machine.device("sub"); state->save_item(NAME(state->m_layer_ctrl)); state->save_item(NAME(state->m_txbank)); state->save_item(NAME(state->m_bgbotbank)); state->save_item(NAME(state->m_bgtopbank)); state->save_item(NAME(state->m_sound)); state->save_item(NAME(state->m_sound2)); } static MACHINE_RESET( angelkds ) { angelkds_state *state = machine.driver_data<angelkds_state>(); int i; for (i = 0; i < 4; i++) { state->m_sound[i] = 0; state->m_sound2[i] = 0; } state->m_layer_ctrl = 0; state->m_txbank = 0; state->m_bgbotbank = 0; state->m_bgtopbank = 0; } static MACHINE_CONFIG_START( angelkds, angelkds_state ) MCFG_CPU_ADD("maincpu", Z80, 8000000) /* 8MHz? 6 seems too slow? */ MCFG_CPU_PROGRAM_MAP(main_map) MCFG_CPU_IO_MAP(main_portmap) MCFG_CPU_VBLANK_INT("screen", irq0_line_hold) MCFG_CPU_ADD("sub", Z80, 4000000) /* 8 MHz? 
*/ MCFG_CPU_PROGRAM_MAP(sub_map) MCFG_CPU_IO_MAP(sub_portmap) MCFG_MACHINE_START(angelkds) MCFG_MACHINE_RESET(angelkds) MCFG_QUANTUM_TIME(attotime::from_hz(6000)) /* video hardware */ MCFG_SCREEN_ADD("screen", RASTER) MCFG_SCREEN_REFRESH_RATE(60) MCFG_SCREEN_VBLANK_TIME(ATTOSECONDS_IN_USEC(0)) MCFG_SCREEN_FORMAT(BITMAP_FORMAT_INDEXED16) MCFG_SCREEN_SIZE(32*8, 32*8) MCFG_SCREEN_VISIBLE_AREA(0*8, 32*8-1, 1*8, 31*8-1) MCFG_SCREEN_UPDATE(angelkds) MCFG_GFXDECODE(angelkds) MCFG_PALETTE_LENGTH(0x100) MCFG_VIDEO_START(angelkds) MCFG_SPEAKER_STANDARD_MONO("mono") MCFG_SOUND_ADD("ym1", YM2203, 4000000) MCFG_SOUND_CONFIG(ym2203_config) MCFG_SOUND_ROUTE(0, "mono", 0.65) MCFG_SOUND_ROUTE(1, "mono", 0.65) MCFG_SOUND_ROUTE(2, "mono", 0.65) MCFG_SOUND_ROUTE(3, "mono", 0.45) MCFG_SOUND_ADD("ym2", YM2203, 4000000) MCFG_SOUND_ROUTE(0, "mono", 0.65) MCFG_SOUND_ROUTE(1, "mono", 0.65) MCFG_SOUND_ROUTE(2, "mono", 0.65) MCFG_SOUND_ROUTE(3, "mono", 0.45) MACHINE_CONFIG_END /*** Rom Loading "maincpu" for the main code "user1" for the banked data "sub" for the sound cpu code "gfx1" for the 8x8 Txt Layer Tiles "gfx2" for the 16x16 Sprites "gfx3" for the 8x8 Bg Layer Tiles (top tilemap) "gfx4" for the 8x8 Bg Layer Tiles (bottom tilemap) "proms" for the Prom (same between games) */ ROM_START( angelkds ) /* Nasco X090-PC-A (Sega 837-6600) */ ROM_REGION( 0x10000, "maincpu", 0 ) ROM_LOAD( "11428.c10", 0x00000, 0x08000, CRC(90daacd2) SHA1(7e50ad1cbed0c1e6bad04ef1611cad25538c905f) ) ROM_REGION( 0x20000, "user1", 0 ) /* Banked Code */ ROM_LOAD( "11424.c1", 0x00000, 0x08000, CRC(b55997f6) SHA1(7ed746becac1851f39591f1fdbeff64aa97d6206) ) ROM_LOAD( "11425.c2", 0x08000, 0x08000, CRC(299359de) SHA1(f531dd3bfe6f64e9e043cb4f85d5657455241dc7) ) ROM_LOAD( "11426.c3", 0x10000, 0x08000, CRC(5fad8bd3) SHA1(4d865342eb10dcfb779eee4ac1e159bb9ec140cb) ) ROM_LOAD( "11427.c4", 0x18000, 0x08000, CRC(ef920c74) SHA1(81c0fbe4ace5441e4cd99ba423e0190cc541da31) ) ROM_REGION( 0x10000, "sub", 0 ) ROM_LOAD( "11429.d4", 0x00000, 0x08000, CRC(0ca50a66) SHA1(cccb081b447419138b1ebd309e7f291e392a44d5) ) /* Nasco X090-PC-B */ ROM_REGION( 0x08000, "gfx1", 0 ) ROM_LOAD( "11446", 0x00000, 0x08000, CRC(45052470) SHA1(c2312a9f814d6dbe42aa465147a04a2bd9b2aa1b) ) ROM_REGION( 0x10000, "gfx2", 0 ) ROM_LOAD( "11447.f7", 0x08000, 0x08000, CRC(b3afc5b3) SHA1(376d527f60e9044f18d19a5535bca77606efbd4c) ) ROM_LOAD( "11448.h7", 0x00000, 0x08000, CRC(05dab626) SHA1(73feaca6e23c673a7d8c9e972714b20bd8f2d51e) ) /* both tilemaps on angelkds use the same gfx */ ROM_REGION( 0x40000, "gfx3", 0 ) ROM_LOAD( "11437", 0x00000, 0x08000, CRC(a520b628) SHA1(2b51f59e760e740e5e6b06dad61bbc23fc84a72b) ) ROM_LOAD( "11436", 0x08000, 0x08000, CRC(469ab216) SHA1(8223f072a6f9135ff84841c95410368bcea073d8) ) ROM_LOAD( "11435", 0x10000, 0x08000, CRC(b0f8c245) SHA1(882e27eaceac46c397fdae8427a082caa7d6b7dc) ) ROM_LOAD( "11434", 0x18000, 0x08000, CRC(cbde81f5) SHA1(5d5b8e709c9dd09a45dfced6f3d4a9c52500da6b) ) ROM_LOAD( "11433", 0x20000, 0x08000, CRC(b63fa414) SHA1(25adcafd7e17ab0be0fed2ec44245124febd74b3) ) ROM_LOAD( "11432", 0x28000, 0x08000, CRC(00dc747b) SHA1(041b73aa48b45162af33b5f416ccc0c0dbbd995b) ) ROM_LOAD( "11431", 0x30000, 0x08000, CRC(ac2025af) SHA1(2aba145df3ccdb1a7f0fec524bd2de3f9aab4161) ) ROM_LOAD( "11430", 0x38000, 0x08000, CRC(d640f89e) SHA1(38fb67bcb2a3d1ad614fc62e42f22a66bc757137) ) ROM_REGION( 0x40000, "gfx4", 0 ) ROM_LOAD( "11445", 0x00000, 0x08000, CRC(a520b628) SHA1(2b51f59e760e740e5e6b06dad61bbc23fc84a72b) ) ROM_LOAD( "11444", 0x08000, 0x08000, CRC(469ab216) 
SHA1(8223f072a6f9135ff84841c95410368bcea073d8) ) ROM_LOAD( "11443", 0x10000, 0x08000, CRC(b0f8c245) SHA1(882e27eaceac46c397fdae8427a082caa7d6b7dc) ) ROM_LOAD( "11442", 0x18000, 0x08000, CRC(cbde81f5) SHA1(5d5b8e709c9dd09a45dfced6f3d4a9c52500da6b) ) ROM_LOAD( "11441", 0x20000, 0x08000, CRC(b63fa414) SHA1(25adcafd7e17ab0be0fed2ec44245124febd74b3) ) ROM_LOAD( "11440", 0x28000, 0x08000, CRC(00dc747b) SHA1(041b73aa48b45162af33b5f416ccc0c0dbbd995b) ) ROM_LOAD( "11439", 0x30000, 0x08000, CRC(ac2025af) SHA1(2aba145df3ccdb1a7f0fec524bd2de3f9aab4161) ) ROM_LOAD( "11438", 0x38000, 0x08000, CRC(d640f89e) SHA1(38fb67bcb2a3d1ad614fc62e42f22a66bc757137) ) ROM_REGION( 0x20, "proms", 0 ) ROM_LOAD( "63s081n.u5", 0x00, 0x20, CRC(36b98627) SHA1(d2d54d92d1d47e7cc85104989ee421ce5d80a42a) ) ROM_END ROM_START( spcpostn ) /* X090-PC-A 171-5383 */ ROM_REGION( 2*0x10000, "maincpu", 0 ) /* D317-0005 (NEC Z80 Custom) */ ROM_LOAD( "epr10125.c10", 0x00000, 0x08000, CRC(bffd38c6) SHA1(af02907124343ddecd21439d25f1ebb81ef9f51a) ) /* encrypted */ ROM_REGION( 0x28000, "user1", 0 ) /* Banked Code */ ROM_LOAD( "epr10120.c1", 0x00000, 0x08000, CRC(d6399f99) SHA1(4c7d19a8798e5a10b688bf793ca74f5170fd9b51) ) ROM_LOAD( "epr10121.c2", 0x08000, 0x08000, CRC(d4861560) SHA1(74d28c36a08880abbd3c398cc3e990e8986caccb) ) ROM_LOAD( "epr10122.c3", 0x10000, 0x08000, CRC(7a1bff1b) SHA1(e1bda8430fd632c1813dd78e0f210a358e1b0d2f) ) ROM_LOAD( "epr10123.c4", 0x18000, 0x08000, CRC(6aed2925) SHA1(75848c8086c460b72494da2367f592d7d5dcf9f1) ) ROM_LOAD( "epr10124.c5", 0x20000, 0x08000, CRC(a1d7ae6b) SHA1(ec81fecf63e0515cae2077e2623262227adfdf37) ) ROM_REGION( 0x10000, "sub", 0 ) /* NEC D780C-1 */ ROM_LOAD( "epr10126.d4", 0x00000, 0x08000, CRC(ab17f852) SHA1(dc0db427ddb4df97bb40dfb6fc65cb9354a6b9ad) ) /* X090-PC-B 171-5384 */ ROM_REGION( 0x08000, "gfx1", 0 ) ROM_LOAD( "epr10133.17", 0x00000, 0x08000, CRC(642e6609) SHA1(2dfb4cc66f89543b55ed2a5b914e2c9304e821ca) ) ROM_REGION( 0x10000, "gfx2", 0 ) ROM_LOAD( "epr10134.18", 0x08000, 0x08000, CRC(c674ff88) SHA1(9f240910a1ffb7c9e09d2326de280e6a5dd84565) ) ROM_LOAD( "epr10135.19", 0x00000, 0x08000, CRC(0685c4fa) SHA1(6950d9ad9ec13236cf24e83e87adb62aa53af7bb) ) ROM_REGION( 0x30000, "gfx3", 0 ) ROM_LOAD( "epr10130.14", 0x10000, 0x08000, CRC(b68fcb36) SHA1(3943dd550b13f2911d56d8dad675410da79196e6) ) ROM_LOAD( "epr10131.15", 0x08000, 0x08000, CRC(de223817) SHA1(1860db0a19c926fcfaabe676cb57fff38c4df8e6) ) ROM_LOAD( "epr10132.16", 0x00000, 0x08000, CRC(2df8b1bd) SHA1(cad8befa3f2c158d2aa74073066ccd2b54e68825) ) ROM_REGION( 0x18000, "gfx4", 0 ) ROM_LOAD( "epr10127.06", 0x10000, 0x08000, CRC(b68fcb36) SHA1(3943dd550b13f2911d56d8dad675410da79196e6) ) ROM_LOAD( "epr10128.07", 0x08000, 0x08000, CRC(de223817) SHA1(1860db0a19c926fcfaabe676cb57fff38c4df8e6) ) ROM_LOAD( "epr10129.08", 0x00000, 0x08000, CRC(a6f21023) SHA1(8d573446a2d3d3428409707d0c59b118d1463131) ) ROM_REGION( 0x20, "proms", 0 ) ROM_LOAD( "63s081n.u5", 0x00, 0x20, CRC(36b98627) SHA1(d2d54d92d1d47e7cc85104989ee421ce5d80a42a) ) ROM_END static DRIVER_INIT( angelkds ) { UINT8 *RAM = machine.region("user1")->base(); memory_configure_bank(machine, "bank1", 0, 8, &RAM[0x0000], 0x4000); } static DRIVER_INIT( spcpostn ) { UINT8 *RAM = machine.region("user1")->base(); sega_317_0005_decode(machine, "maincpu"); memory_configure_bank(machine, "bank1", 0, 10, &RAM[0x0000], 0x4000); } GAME( 1988, angelkds, 0, angelkds, angelkds, angelkds, ROT90, "Sega / Nasco?", "Angel Kids (Japan)" , GAME_SUPPORTS_SAVE) /* Nasco not displayed but 'Exa Planning' is */ GAME( 1986, spcpostn, 0, 
angelkds, spcpostn, spcpostn, ROT90, "Sega / Nasco", "Space Position (Japan)" , GAME_SUPPORTS_SAVE) /* encrypted */
# pylint: disable=g-bad-file-header # Copyright 2015 The Bazel Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Installs an Android application, possibly in an incremental way.""" import collections import hashlib import logging import os import posixpath import re import shutil import subprocess import sys import tempfile import time import zipfile from third_party.py import gflags from third_party.py.concurrent import futures gflags.DEFINE_string("split_main_apk", None, "The main APK for split install") gflags.DEFINE_multistring("split_apk", [], "Split APKs to install") gflags.DEFINE_string("dexmanifest", None, "The .dex manifest") gflags.DEFINE_multistring("native_lib", None, "Native libraries to install") gflags.DEFINE_string("resource_apk", None, "The resource .apk") gflags.DEFINE_string("apk", None, "The app .apk. If not specified, " "do incremental deployment") gflags.DEFINE_string("adb", None, "ADB to use") gflags.DEFINE_string("stub_datafile", None, "The stub data file") gflags.DEFINE_string("output_marker", None, "The output marker file") gflags.DEFINE_multistring("extra_adb_arg", [], "Extra arguments to adb") gflags.DEFINE_string("execroot", ".", "The exec root") gflags.DEFINE_integer("adb_jobs", 2, "The number of instances of adb to use in parallel to " "update files on the device", lower_bound=1) gflags.DEFINE_enum("start", "no", ["no", "cold", "warm", "debug"], "Whether/how to start the app after installing it. 'cold' " "and 'warm' will both cause the app to be started, 'warm' " "will start it with previously saved application state, " "'debug' will wait for the debugger before a clean start.") gflags.DEFINE_boolean("start_app", False, "Deprecated, use 'start'.") gflags.DEFINE_string("user_home_dir", None, "Path to the user's home directory") gflags.DEFINE_string("flagfile", None, "Path to a file to read additional flags from") gflags.DEFINE_string("verbosity", None, "Logging verbosity") FLAGS = gflags.FLAGS DEVICE_DIRECTORY = "/data/local/tmp/incrementaldeployment" # Some devices support ABIs other than those reported by getprop. In this case, # if the most specific ABI is not available in the .apk, we push the more # general ones. 
COMPATIBLE_ABIS = { "armeabi-v7a": ["armeabi"], "arm64-v8a": ["armeabi-v7a", "armeabi"] } class AdbError(Exception): """An exception class signaling an error in an adb invocation.""" def __init__(self, args, returncode, stdout, stderr): self.args = args self.returncode = returncode self.stdout = stdout self.stderr = stderr details = "\n".join([ "adb command: %s" % args, "return code: %s" % returncode, "stdout: %s" % stdout, "stderr: %s" % stderr, ]) super(AdbError, self).__init__(details) class DeviceNotFoundError(Exception): """Raised when the device could not be found.""" class MultipleDevicesError(Exception): """Raised when > 1 device is attached and no device serial was given.""" @staticmethod def CheckError(s): return re.search("more than one (device and emulator|device|emulator)", s) class DeviceUnauthorizedError(Exception): """Raised when the local machine is not authorized to the device.""" class TimestampException(Exception): """Raised when there is a problem with timestamp reading/writing.""" class OldSdkException(Exception): """Raised when the SDK on the target device is older than the app allows.""" hostpath = os.path targetpath = posixpath class Adb(object): """A class to handle interaction with adb.""" def __init__(self, adb_path, temp_dir, adb_jobs, user_home_dir): self._adb_path = adb_path self._temp_dir = temp_dir self._user_home_dir = user_home_dir self._file_counter = 1 self._executor = futures.ThreadPoolExecutor(max_workers=adb_jobs) def _Exec(self, adb_args): """Executes the given adb command + args.""" args = [self._adb_path] + FLAGS.extra_adb_arg + adb_args # TODO(ahumesky): Because multiple instances of adb are executed in # parallel, these debug logging lines will get interleaved. logging.debug("Executing: %s", " ".join(args)) # adb sometimes requires the user's home directory to access things in # $HOME/.android (e.g. keys to authorize with the device). To avoid any # potential problems with python picking up things in the user's home # directory, HOME is not set in the environment around python and is instead # passed explicitly as a flag. env = {} if self._user_home_dir: env["HOME"] = self._user_home_dir adb = subprocess.Popen( args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) stdout, stderr = adb.communicate() stdout = stdout.strip() stderr = stderr.strip() logging.debug("adb ret: %s", adb.returncode) logging.debug("adb out: %s", stdout) logging.debug("adb err: %s", stderr) # Check these first so that the more specific error gets raised instead of # the more generic AdbError. if "device not found" in stderr: raise DeviceNotFoundError() elif "device unauthorized" in stderr: raise DeviceUnauthorizedError() elif MultipleDevicesError.CheckError(stderr): # The error messages are from adb's transport.c, but something adds # "error: " to the beginning, so take it off so that we don't end up # printing "Error: error: ..." 
raise MultipleDevicesError(re.sub("^error: ", "", stderr)) elif "INSTALL_FAILED_OLDER_SDK" in stdout: raise OldSdkException() if adb.returncode != 0: raise AdbError(args, adb.returncode, stdout, stderr) return adb.returncode, stdout, stderr, args def _ExecParallel(self, adb_args): return self._executor.submit(self._Exec, adb_args) def _CreateLocalFile(self): """Returns a path to a temporary local file in the temp directory.""" local = hostpath.join(self._temp_dir, "adbfile_%d" % self._file_counter) self._file_counter += 1 return local def GetInstallTime(self, package): """Get the installation time of a package.""" _, stdout, _, _ = self._Shell("dumpsys package %s" % package) match = re.search("firstInstallTime=(.*)$", stdout, re.MULTILINE) if match: return match.group(1) else: return None def GetAbi(self): """Returns the ABI the device supports.""" _, stdout, _, _ = self._Shell("getprop ro.product.cpu.abi") return stdout def Push(self, local, remote): """Invoke 'adb push' in parallel.""" return self._ExecParallel(["push", local, remote]) def PushString(self, contents, remote): """Push a given string to a given path on the device in parallel.""" local = self._CreateLocalFile() with file(local, "w") as f: f.write(contents) return self.Push(local, remote) def Pull(self, remote): """Invoke 'adb pull'. Args: remote: The path to the remote file to pull. Returns: The contents of a file or None if the file didn't exist. """ local = self._CreateLocalFile() try: self._Exec(["pull", remote, local]) with file(local) as f: return f.read() except (AdbError, IOError): return None def InstallMultiple(self, apk, pkg=None): """Invoke 'adb install-multiple'.""" pkg_args = ["-p", pkg] if pkg else [] ret, stdout, stderr, args = self._Exec( ["install-multiple", "-r"] + pkg_args + [apk]) if "FAILED" in stdout or "FAILED" in stderr: raise AdbError(args, ret, stdout, stderr) def Install(self, apk): """Invoke 'adb install'.""" ret, stdout, stderr, args = self._Exec(["install", "-r", apk]) # adb install could fail with a message on stdout like this: # # pkg: /data/local/tmp/Gmail_dev_sharded_incremental.apk # Failure [INSTALL_PARSE_FAILED_INCONSISTENT_CERTIFICATES] # # and yet it will still have a return code of 0. At least for the install # command, it will print "Success" if it succeeded, so check for that in # standard out instead of relying on the return code. if "FAILED" in stdout or "FAILED" in stderr: raise AdbError(args, ret, stdout, stderr) def Uninstall(self, pkg): """Invoke 'adb uninstall'.""" self._Exec(["uninstall", pkg]) # No error checking. If this fails, we assume that the app was not installed # in the first place. def Delete(self, remote): """Delete the given file (or directory) on the device.""" self.DeleteMultiple([remote]) def DeleteMultiple(self, remote_files): """Delete the given files (or directories) on the device.""" files_str = " ".join(remote_files) if files_str: self._Shell("rm -fr %s" % files_str) def Mkdir(self, d): """Invokes mkdir with the specified directory on the device.""" self._Shell("mkdir -p %s" % d) def StopApp(self, package): """Force stops the app with the given package.""" self._Shell("am force-stop %s" % package) def StopAppAndSaveState(self, package): """Stops the app with the given package, saving state for the next run.""" # 'am kill' will only kill processes in the background, so we must make sure # our process is in the background first. We accomplish this by bringing up # the app switcher. 
self._Shell("input keyevent KEYCODE_APP_SWITCH") self._Shell("am kill %s" % package) def StartApp(self, package, start_type): """Starts the app with the given package.""" if start_type == "debug": self._Shell("am set-debug-app -w --persistent %s" % package) else: self._Shell("am clear-debug-app %s" % package) self._Shell("monkey -p %s -c android.intent.category.LAUNCHER 1" % package) def _Shell(self, cmd): """Invoke 'adb shell'.""" return self._Exec(["shell", cmd]) ManifestEntry = collections.namedtuple( "ManifestEntry", ["input_file", "zippath", "installpath", "sha256"]) def ParseManifest(contents): """Parses a dexmanifest file. Args: contents: the contents of the manifest file to be parsed. Returns: A dict of install path -> ManifestEntry. """ result = {} for l in contents.split("\n"): entry = ManifestEntry(*(l.strip().split(" "))) result[entry.installpath] = entry return result def GetAppPackage(stub_datafile): """Returns the app package specified in a stub data file.""" with file(stub_datafile) as f: return f.readlines()[1].strip() def UploadDexes(adb, execroot, app_dir, temp_dir, dexmanifest, full_install): """Uploads dexes to the device so that the state. Does the minimum amount of work necessary to make the state of the device consistent with what was built. Args: adb: the Adb instance representing the device to install to execroot: the execroot app_dir: the directory things should be installed under on the device temp_dir: a local temporary directory dexmanifest: contents of the dex manifest full_install: whether to do a full install Returns: None. """ # Fetch the manifest on the device dex_dir = targetpath.join(app_dir, "dex") adb.Mkdir(dex_dir) old_manifest = None if not full_install: logging.info("Fetching dex manifest from device...") old_manifest_contents = adb.Pull(targetpath.join(dex_dir, "manifest")) if old_manifest_contents: old_manifest = ParseManifest(old_manifest_contents) else: logging.info("Dex manifest not found on device") if old_manifest is None: # If the manifest is not found, maybe a previous installation attempt # was interrupted. Wipe the slate clean. Do this also in case we do a full # installation. old_manifest = {} adb.Delete(targetpath.join(dex_dir, "*")) new_manifest = ParseManifest(dexmanifest) dexes_to_delete = set(old_manifest) - set(new_manifest) # Figure out which dexes to upload: those that are present in the new manifest # but not in the old one and those whose checksum was changed common_dexes = set(new_manifest).intersection(old_manifest) dexes_to_upload = set(d for d in common_dexes if new_manifest[d].sha256 != old_manifest[d].sha256) dexes_to_upload.update(set(new_manifest) - set(old_manifest)) if not dexes_to_delete and not dexes_to_upload: # If we have nothing to do, don't bother removing and rewriting the manifest logging.info("Application dexes up-to-date") return # Delete the manifest so that we know how to get back to a consistent state # if we are interrupted. adb.Delete(targetpath.join(dex_dir, "manifest")) # Tuple of (local, remote) files to push to the device. files_to_push = [] # Sort dexes to be uploaded by the zip file they are in so that we only need # to open each zip only once. 
dexzips_in_upload = set(new_manifest[d].input_file for d in dexes_to_upload if new_manifest[d].zippath != "-") for i, dexzip_name in enumerate(dexzips_in_upload): zip_dexes = [ d for d in dexes_to_upload if new_manifest[d].input_file == dexzip_name] dexzip_tempdir = hostpath.join(temp_dir, "dex", str(i)) with zipfile.ZipFile(hostpath.join(execroot, dexzip_name)) as dexzip: for dex in zip_dexes: zippath = new_manifest[dex].zippath dexzip.extract(zippath, dexzip_tempdir) files_to_push.append((hostpath.join(dexzip_tempdir, zippath), targetpath.join(dex_dir, dex))) # Now gather all the dexes that are not within a .zip file. dexes_to_upload = set( d for d in dexes_to_upload if new_manifest[d].zippath == "-") for dex in dexes_to_upload: files_to_push.append((new_manifest[dex].input_file, targetpath.join( dex_dir, dex))) num_files = len(dexes_to_delete) + len(files_to_push) logging.info("Updating %d dex%s...", num_files, "es" if num_files > 1 else "") # Delete the dexes that are not in the new manifest adb.DeleteMultiple(targetpath.join(dex_dir, dex) for dex in dexes_to_delete) # Upload all the files. upload_walltime_start = time.time() fs = [adb.Push(local, remote) for local, remote in files_to_push] done, not_done = futures.wait(fs, return_when=futures.FIRST_EXCEPTION) upload_walltime = time.time() - upload_walltime_start logging.debug("Dex upload walltime: %s seconds", upload_walltime) # If there is anything in not_done, then some adb call failed and we # can cancel the rest. if not_done: for f in not_done: f.cancel() # If any adb call resulted in an exception, re-raise it. for f in done: f.result() # If no dex upload failed, upload the manifest. If any upload failed, the # exception should have been re-raised above. # Call result() to raise the exception if there was one. adb.PushString(dexmanifest, targetpath.join(dex_dir, "manifest")).result() def Checksum(filename): """Compute the SHA-256 checksum of a file.""" h = hashlib.sha256() with file(filename, "r") as f: while True: data = f.read(65536) if not data: break h.update(data) return h.hexdigest() def UploadResources(adb, resource_apk, app_dir): """Uploads resources to the device. Args: adb: The Adb instance representing the device to install to. resource_apk: Path to the resource apk. app_dir: The directory things should be installed under on the device. Returns: None. """ # Compute the checksum of the new resources file new_checksum = Checksum(resource_apk) # Fetch the checksum of the resources file on the device, if it exists device_checksum_file = targetpath.join(app_dir, "resources_checksum") old_checksum = adb.Pull(device_checksum_file) if old_checksum == new_checksum: logging.info("Application resources up-to-date") return logging.info("Updating application resources...") # Remove the checksum file on the device so that if the transfer is # interrupted, we know how to get the device back to a consistent state. adb.Delete(device_checksum_file) adb.Push(resource_apk, targetpath.join(app_dir, "resources.ap_")).result() # Write the new checksum to the device. 
adb.PushString(new_checksum, device_checksum_file).result() def ConvertNativeLibs(args): """Converts the --native_libs command line argument to an arch -> libs map.""" native_libs = {} if args is not None: for native_lib in args: abi, path = native_lib.split(":") if abi not in native_libs: native_libs[abi] = set() native_libs[abi].add(path) return native_libs def FindAbi(device_abi, app_abis): """Selects which ABI native libs should be installed for.""" if device_abi in app_abis: return device_abi if device_abi in COMPATIBLE_ABIS: for abi in COMPATIBLE_ABIS[device_abi]: if abi in app_abis: logging.warn("App does not have native libs for ABI '%s'. Using ABI " "'%s'.", device_abi, abi) return abi logging.warn("No native libs for device ABI '%s'. App has native libs for " "ABIs: %s", device_abi, ", ".join(app_abis)) return None def UploadNativeLibs(adb, native_lib_args, app_dir, full_install): """Uploads native libraries to the device.""" native_libs = ConvertNativeLibs(native_lib_args) libs = set() if native_libs: abi = FindAbi(adb.GetAbi(), native_libs.keys()) if abi: libs = native_libs[abi] basename_to_path = {} install_checksums = {} for lib in sorted(libs): install_checksums[os.path.basename(lib)] = Checksum(lib) basename_to_path[os.path.basename(lib)] = lib device_manifest = None if not full_install: device_manifest = adb.Pull( targetpath.join(app_dir, "native", "native_manifest")) device_checksums = {} if device_manifest is None: # If we couldn't fetch the device manifest or if this is a non-incremental # install, wipe the slate clean adb.Delete(targetpath.join(app_dir, "native")) else: # Otherwise, parse the manifest. Note that this branch is also taken if the # manifest is empty. for manifest_line in device_manifest.split("\n"): if manifest_line: name, checksum = manifest_line.split(" ") device_checksums[name] = checksum libs_to_delete = set(device_checksums) - set(install_checksums) libs_to_upload = set(install_checksums) - set(device_checksums) common_libs = set(install_checksums).intersection(set(device_checksums)) libs_to_upload.update([l for l in common_libs if install_checksums[l] != device_checksums[l]]) libs_to_push = [(basename_to_path[lib], targetpath.join( app_dir, "native", lib)) for lib in libs_to_upload] if not libs_to_delete and not libs_to_push and device_manifest is not None: logging.info("Native libs up-to-date") return num_files = len(libs_to_delete) + len(libs_to_push) logging.info("Updating %d native lib%s...", num_files, "s" if num_files != 1 else "") adb.Delete(targetpath.join(app_dir, "native", "native_manifest")) if libs_to_delete: adb.DeleteMultiple( [targetpath.join(app_dir, "native", lib) for lib in libs_to_delete]) upload_walltime_start = time.time() fs = [adb.Push(local, remote) for local, remote in libs_to_push] done, not_done = futures.wait(fs, return_when=futures.FIRST_EXCEPTION) upload_walltime = time.time() - upload_walltime_start logging.debug("Native library upload walltime: %s seconds", upload_walltime) # If there is anything in not_done, then some adb call failed and we # can cancel the rest. if not_done: for f in not_done: f.cancel() # If any adb call resulted in an exception, re-raise it. 
for f in done: f.result() install_manifest = [ name + " " + checksum for name, checksum in install_checksums.iteritems()] adb.PushString("\n".join(install_manifest), targetpath.join(app_dir, "native", "native_manifest")).result() def VerifyInstallTimestamp(adb, app_package): """Verifies that the app is unchanged since the last mobile-install.""" expected_timestamp = adb.Pull( targetpath.join(DEVICE_DIRECTORY, app_package, "install_timestamp")) if not expected_timestamp: raise TimestampException( "Cannot verify last mobile install. At least one non-incremental " "'mobile-install' must precede incremental installs") actual_timestamp = adb.GetInstallTime(app_package) if actual_timestamp is None: raise TimestampException( "Package '%s' is not installed on the device. At least one " "non-incremental 'mobile-install' must precede incremental " "installs." % app_package) if actual_timestamp != expected_timestamp: raise TimestampException("Installed app '%s' has an unexpected timestamp. " "Did you last install the app in a way other than " "'mobile-install'?" % app_package) def SplitIncrementalInstall(adb, app_package, execroot, split_main_apk, split_apks): """Does incremental installation using split packages.""" app_dir = targetpath.join(DEVICE_DIRECTORY, app_package) device_manifest_path = targetpath.join(app_dir, "split_manifest") device_manifest = adb.Pull(device_manifest_path) expected_timestamp = adb.Pull(targetpath.join(app_dir, "install_timestamp")) actual_timestamp = adb.GetInstallTime(app_package) device_checksums = {} if device_manifest is not None: for manifest_line in device_manifest.split("\n"): if manifest_line: name, checksum = manifest_line.split(" ") device_checksums[name] = checksum install_checksums = {} install_checksums["__MAIN__"] = Checksum( hostpath.join(execroot, split_main_apk)) for apk in split_apks: install_checksums[apk] = Checksum(hostpath.join(execroot, apk)) reinstall_main = False if (device_manifest is None or actual_timestamp is None or actual_timestamp != expected_timestamp or install_checksums["__MAIN__"] != device_checksums["__MAIN__"] or set(device_checksums.keys()) != set(install_checksums.keys())): # The main app is not up to date or not present or something happened # with the on-device manifest. Start from scratch. Notably, we cannot # uninstall a split package, so if the set of packages changes, we also # need to do a full reinstall. 
reinstall_main = True device_checksums = {} apks_to_update = [ apk for apk in split_apks if apk not in device_checksums or device_checksums[apk] != install_checksums[apk]] if not apks_to_update and not reinstall_main: # Nothing to do return # Delete the device manifest so that if something goes wrong, we do a full # reinstall next time adb.Delete(device_manifest_path) if reinstall_main: logging.info("Installing main APK...") adb.Uninstall(app_package) adb.InstallMultiple(targetpath.join(execroot, split_main_apk)) adb.PushString( adb.GetInstallTime(app_package), targetpath.join(app_dir, "install_timestamp")).result() logging.info("Reinstalling %s APKs...", len(apks_to_update)) for apk in apks_to_update: adb.InstallMultiple(targetpath.join(execroot, apk), app_package) install_manifest = [ name + " " + checksum for name, checksum in install_checksums.iteritems()] adb.PushString("\n".join(install_manifest), targetpath.join(app_dir, "split_manifest")).result() def IncrementalInstall(adb_path, execroot, stub_datafile, output_marker, adb_jobs, start_type, dexmanifest=None, apk=None, native_libs=None, resource_apk=None, split_main_apk=None, split_apks=None, user_home_dir=None): """Performs an incremental install. Args: adb_path: Path to the adb executable. execroot: Exec root. stub_datafile: The stub datafile containing the app's package name. output_marker: Path to the output marker file. adb_jobs: The number of instances of adb to use in parallel. start_type: A string describing whether/how to start the app after installing it. Can be 'no', 'cold', or 'warm'. dexmanifest: Path to the .dex manifest file. apk: Path to the .apk file. May be None to perform an incremental install. native_libs: Native libraries to install. resource_apk: Path to the apk containing the app's resources. split_main_apk: the split main .apk if split installation is desired. split_apks: the list of split .apks to be installed. user_home_dir: Path to the user's home directory. """ temp_dir = tempfile.mkdtemp() try: adb = Adb(adb_path, temp_dir, adb_jobs, user_home_dir) app_package = GetAppPackage(hostpath.join(execroot, stub_datafile)) app_dir = targetpath.join(DEVICE_DIRECTORY, app_package) if split_main_apk: SplitIncrementalInstall(adb, app_package, execroot, split_main_apk, split_apks) else: if not apk: VerifyInstallTimestamp(adb, app_package) with file(hostpath.join(execroot, dexmanifest)) as f: dexmanifest = f.read() UploadDexes(adb, execroot, app_dir, temp_dir, dexmanifest, bool(apk)) # TODO(ahumesky): UploadDexes waits for all the dexes to be uploaded, and # then UploadResources is called. We could instead enqueue everything # onto the threadpool so that uploading resources happens sooner. UploadResources(adb, hostpath.join(execroot, resource_apk), app_dir) UploadNativeLibs(adb, native_libs, app_dir, bool(apk)) if apk: apk_path = targetpath.join(execroot, apk) adb.Install(apk_path) future = adb.PushString( adb.GetInstallTime(app_package), targetpath.join(DEVICE_DIRECTORY, app_package, "install_timestamp")) future.result() else: if start_type == "warm": adb.StopAppAndSaveState(app_package) else: adb.StopApp(app_package) if start_type in ["cold", "warm", "debug"]: logging.info("Starting application %s", app_package) adb.StartApp(app_package, start_type) with file(output_marker, "w") as _: pass except DeviceNotFoundError: sys.exit("Error: Device not found") except DeviceUnauthorizedError: sys.exit("Error: Device unauthorized. 
Please check the confirmation " "dialog on your device.") except MultipleDevicesError as e: sys.exit("Error: " + e.message + "\nTry specifying a device serial with " "\"blaze mobile-install --adb_arg=-s --adb_arg=$ANDROID_SERIAL\"") except OldSdkException as e: sys.exit("Error: The device does not support the API level specified in " "the application's manifest. Check minSdkVersion in " "AndroidManifest.xml") except TimestampException as e: sys.exit("Error:\n%s" % e.message) except AdbError as e: sys.exit("Error:\n%s" % e.message) finally: shutil.rmtree(temp_dir, True) def main(): if FLAGS.verbosity == "1": level = logging.DEBUG fmt = "%(levelname)-5s %(asctime)s %(module)s:%(lineno)3d] %(message)s" else: level = logging.INFO fmt = "%(message)s" logging.basicConfig(stream=sys.stdout, level=level, format=fmt) start_type = FLAGS.start if FLAGS.start_app and start_type == "no": start_type = "cold" IncrementalInstall( adb_path=FLAGS.adb, adb_jobs=FLAGS.adb_jobs, execroot=FLAGS.execroot, stub_datafile=FLAGS.stub_datafile, output_marker=FLAGS.output_marker, start_type=start_type, native_libs=FLAGS.native_lib, split_main_apk=FLAGS.split_main_apk, split_apks=FLAGS.split_apk, dexmanifest=FLAGS.dexmanifest, apk=FLAGS.apk, resource_apk=FLAGS.resource_apk, user_home_dir=FLAGS.user_home_dir) if __name__ == "__main__": FLAGS(sys.argv) # process any additional flags in --flagfile if FLAGS.flagfile: with open(FLAGS.flagfile) as flagsfile: FLAGS.Reset() FLAGS(sys.argv + [line.strip() for line in flagsfile.readlines()]) main()
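# ----------------------------------------------------------------------------
# Example invocation (a sketch; every path below is hypothetical, only the
# flag names are the ones defined at the top of this file). Omitting --apk
# selects the incremental path: dexes, resources and native libs are diffed
# against the manifests cached on the device under
# /data/local/tmp/incrementaldeployment.
#
#   python incremental_install.py \
#       --adb=/sdk/platform-tools/adb \
#       --execroot=bazel-out/execroot \
#       --stub_datafile=app/stub_application_data.txt \
#       --dexmanifest=app/dexmanifest.txt \
#       --resource_apk=app/resources.ap_ \
#       --output_marker=app/install_marker \
#       --start=cold
# ----------------------------------------------------------------------------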
/**
 * Load required plugins.
 */

window.$ = window.jQuery = require('jquery');
window.Popper = require('popper.js/dist/umd/popper');
require('bootstrap');
window.SmoothScroll = require('smoothscroll-for-websites');
require('objectFitPolyfill');

/**
 * Create window.page and init the application.
 */

+function($, window){

  var page = {
    name: 'TheSaaS',
    version: '2.1.4',
    vendors: [],

    // Cache popular elements
    body:   $('body'),
    navbar: $('.navbar'),
    header: $('.header'),
    footer: $('.footer'),
  }

  page.defaults = {
    googleApiKey:       null,
    googleAnalyticsKey: null,
    reCaptchaSiteKey:   null,
    reCaptchaLanguage:  null,
    disableAOSonMobile: true,
    smoothScroll:       false,
  }

  /**
   * Call all the required initializers.
   */
  page.init = function() {

    // Vendors
    //
    page.initVendors();

    // Partials
    //
    page.initBind();
    page.initDrawer();
    page.initFont();
    page.initForm();
    page.initMailer();
    page.initModal();
    page.initNavbar();
    page.initOffcanvas();
    page.initPopup();
    page.initScroll();
    page.initSection();
    page.initSidebar();
    page.initVideo();

    // Anchor for headings
    //
    $('[data-provide="anchor"]').each(function() {
      var heading = $(this);
      heading.append('<a class="anchor" href="#'+ heading.attr('id') +'"></a>');
    });
  }

  /**
   * Initialize all of the loaded vendors.
   */
  page.initVendors = function() {
    page.vendors.forEach(function(vendor) {
      var fn = window.page[ "init"+ vendor ];
      if (typeof fn === 'function') {
        fn();
      }
    });
  }

  /**
   * Register a loaded vendor to be initialized after DOM load.
   * The name is case sensitive, since it is used to call the
   * corresponding "initVendorName" method.
   */
  page.registerVendor = function($name) {
    page.vendors.push($name);
  }

  window.page = page;

}(jQuery, window);

/**
 * Once the DOM is loaded, start the magic.
 */
$(function () {
  page.init();
});
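/**
 * Usage sketch for the vendor hook above. 'Swiper' and initSwiper are
 * hypothetical names; a registered vendor only needs a matching
 * window.page["init" + name] function, which page.initVendors() calls
 * from page.init():
 *
 *   window.page.initSwiper = function() { ... };  // set up Swiper instances
 *   window.page.registerVendor('Swiper');
 */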
/**
 * Copyright (c) INOVUA SOFTWARE TECHNOLOGIES.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import Menu from '../Menu';

/**
 * Given a wrapper instance from enzyme.mount,
 * returns its submenu (or null if none is rendered).
 */
export default wrapper => {
  const subMenu = wrapper.find(Menu).reduce((acc, menu) => {
    if (menu.props().subMenu) {
      acc = menu;
    }
    return acc;
  }, null);

  return subMenu;
};
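// Usage sketch (assumes enzyme's mount; the helper name getSubMenu is
// hypothetical, matching however this file is imported):
//
//   import { mount } from 'enzyme';
//   import getSubMenu from './getSubMenu';
//
//   const wrapper = mount(<Menu items={items} />);
//   const subMenu = getSubMenu(wrapper); // null when no nested Menu has a
//                                        // truthy subMenu prop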
// Copyright (c) 2019 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. import React, {PureComponent, createRef} from 'react'; import styled from 'styled-components'; import PropTypes from 'prop-types'; import LayerHoverInfoFactory from './layer-hover-info'; import CoordinateInfoFactory from './coordinate-info'; import {Pin} from 'components/common/icons'; import ErrorBoundary from 'components/common/error-boundary'; const MAX_WIDTH = 500; const MAX_HEIGHT = 600; const StyledMapPopover = styled.div` ${props => props.theme.scrollBar} font-size: 11px; font-weight: 500; background-color: ${props => props.theme.panelBackground}; color: ${props => props.theme.textColor}; z-index: 1001; position: absolute; overflow-x: auto; .gutter { height: 6px; } table { margin: 2px 12px 12px 12px; width: auto; tbody { border-top: transparent; border-bottom: transparent; } td { border-color: transparent; padding: 4px; color: ${props => props.theme.textColor}; } td.row__value { text-align: right; font-weight: 500; color: ${props => props.theme.textColorHl}; } } `; const StyledPin = styled.div` position: absolute; left: 50%; transform: rotate(30deg); top: 10px; color: ${props => props.theme.primaryBtnBgd}; :hover { cursor: pointer; color: ${props => props.theme.linkBtnColor}; } `; MapPopoverFactory.deps = [LayerHoverInfoFactory, CoordinateInfoFactory]; export default function MapPopoverFactory(LayerHoverInfo, CoordinateInfo) { class MapPopover extends PureComponent { static propTypes = { layerHoverProp: PropTypes.object, coordinate: PropTypes.oneOfType([PropTypes.array, PropTypes.bool]), freezed: PropTypes.bool, x: PropTypes.number, y: PropTypes.number, mapW: PropTypes.number.isRequired, mapH: PropTypes.number.isRequired, onClose: PropTypes.func.isRequired }; constructor(props) { super(props); this.state = { width: 380, height: 160 }; } componentDidMount() { this._setContainerSize(); } componentDidUpdate() { this._setContainerSize(); } popover = createRef(); _setContainerSize() { const node = this.popover.current; if (!node) { return; } const width = Math.min(Math.round(node.scrollWidth), MAX_WIDTH); const height = Math.min(Math.round(node.scrollHeight), MAX_HEIGHT); if (width !== this.state.width || height !== this.state.height) { this.setState({width, height}); } } _getPosition(x, y) { const topOffset = 20; const leftOffset = 20; const {mapW, mapH} = this.props; const {width, height} = this.state; const pos = {}; if (x + leftOffset + width > mapW) { pos.right = mapW - x + 
leftOffset; } else { pos.left = x + leftOffset; } if (y + topOffset + height > mapH) { pos.bottom = 10; } else { pos.top = y + topOffset; } return pos; } render() { const {x, y, freezed, coordinate, layerHoverProp} = this.props; const style = Number.isFinite(x) && Number.isFinite(y) ? this._getPosition(x, y) : {}; return ( <ErrorBoundary> <StyledMapPopover ref={this.popover} className="map-popover" style={{ ...style, maxWidth: MAX_WIDTH }} > {freezed ? ( <div className="map-popover__top"> <div className="gutter" /> <StyledPin className="popover-pin" onClick={this.props.onClose}> <Pin height="16px" /> </StyledPin> </div> ) : null} {Array.isArray(coordinate) && ( <CoordinateInfo coordinate={coordinate} /> )} {layerHoverProp && <LayerHoverInfo {...layerHoverProp} />} </StyledMapPopover> </ErrorBoundary> ); } } return MapPopover; }
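// Positioning sketch for _getPosition above (numbers are illustrative):
// with mapW = 800, width = 380 and leftOffset = 20, a hover at x = 500
// overflows the right edge (500 + 20 + 380 > 800), so the popover is
// anchored with `right: mapW - x + leftOffset`, flipping it to the left of
// the cursor instead of using `left`. The vertical check works the same
// way, falling back to `bottom: 10` when the popover would leave the map.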
import $ from 'jquery'; class MobileMenu { constructor() { this.siteHeader = $('.site-header'); this.menuIcon = $('.site-header__menu-icon'); this.menuContent = $('.site-header__menu-content'); this.events(); } events() { this.menuIcon.click(this.toggleMenu.bind(this)); } toggleMenu() { this.menuContent.toggleClass("site-header__menu-content--is-visible"); this.siteHeader.toggleClass("site-header--is-expanded"); this.menuIcon.toggleClass("site-header__menu-icon--close-x"); } } export default MobileMenu;
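// Usage sketch (the import path is hypothetical): instantiate once so the
// click handler is bound; toggleMenu then flips the three BEM modifier
// classes on each tap of the menu icon.
//
//   import MobileMenu from './modules/MobileMenu';
//   new MobileMenu();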
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #ifndef BITCOIN_MAIN_H #define BITCOIN_MAIN_H #include "bignum.h" #include "sync.h" #include "net.h" #include "script.h" #include "scrypt.h" #include "zerocoin/Zerocoin.h" #include <list> class CWallet; class CBlock; class CBlockIndex; class CKeyItem; class CReserveKey; class COutPoint; class CAddress; class CInv; class CRequestTracker; class CNode; class CTxMemPool; static const int LAST_POW_BLOCK = 1000; /** The maximum allowed size for a serialized block, in bytes (network rule) */ static const unsigned int MAX_BLOCK_SIZE = 1000000; /** The maximum size for mined blocks */ static const unsigned int MAX_BLOCK_SIZE_GEN = MAX_BLOCK_SIZE/2; /** The maximum size for transactions we're willing to relay/mine **/ static const unsigned int MAX_STANDARD_TX_SIZE = MAX_BLOCK_SIZE_GEN/5; /** The maximum allowed number of signature check operations in a block (network rule) */ static const unsigned int MAX_BLOCK_SIGOPS = MAX_BLOCK_SIZE/50; /** The maximum number of orphan transactions kept in memory */ static const unsigned int MAX_ORPHAN_TRANSACTIONS = MAX_BLOCK_SIZE/100; /** The maximum number of entries in an 'inv' protocol message */ static const unsigned int MAX_INV_SZ = 50000; /** Fees smaller than this (in satoshi) are considered zero fee (for transaction creation) */ static const int64_t MIN_TX_FEE = 10000; /** Fees smaller than this (in satoshi) are considered zero fee (for relaying) */ static const int64_t MIN_RELAY_TX_FEE = MIN_TX_FEE; /** No amount larger than this (in satoshi) is valid */ static const int64_t MAX_MONEY = 7000000000 * COIN; inline bool MoneyRange(int64_t nValue) { return (nValue >= 0 && nValue <= MAX_MONEY); } /** Threshold for nLockTime: below this value it is interpreted as block number, otherwise as UNIX timestamp. 
*/ static const unsigned int LOCKTIME_THRESHOLD = 500000000; // Tue Nov 5 00:53:20 1985 UTC static const int64_t COIN_YEAR_REWARD = 5 * CENT; static const uint256 hashGenesisBlock("0x00000e7dfb02571e4460d7075ab7576c9e5ff4a165fee8ebb1a367a6d2272e41"); static const uint256 hashGenesisBlockTestNet("0x00000e7dfb02571e4460d7075ab7576c9e5ff4a165fee8ebb1a367a6d2272e41"); inline int64_t PastDrift(int64_t nTime) { return nTime - 10 * 60; } // up to 10 minutes from the past inline int64_t FutureDrift(int64_t nTime) { return nTime + 10 * 60; } // up to 10 minutes from the future extern libzerocoin::Params* ZCParams; extern CScript COINBASE_FLAGS; extern CCriticalSection cs_main; extern std::map<uint256, CBlockIndex*> mapBlockIndex; extern std::set<std::pair<COutPoint, unsigned int> > setStakeSeen; extern CBlockIndex* pindexGenesisBlock; extern unsigned int nTargetSpacing; extern unsigned int nStakeMinAge; extern unsigned int nStakeMaxAge; extern unsigned int nNodeLifespan; extern int nCoinbaseMaturity; extern int nBestHeight; extern uint256 nBestChainTrust; extern uint256 nBestInvalidTrust; extern uint256 hashBestChain; extern CBlockIndex* pindexBest; extern unsigned int nTransactionsUpdated; extern uint64_t nLastBlockTx; extern uint64_t nLastBlockSize; extern int64_t nLastCoinStakeSearchInterval; extern const std::string strMessageMagic; extern int64_t nTimeBestReceived; extern CCriticalSection cs_setpwalletRegistered; extern std::set<CWallet*> setpwalletRegistered; extern unsigned char pchMessageStart[4]; extern std::map<uint256, CBlock*> mapOrphanBlocks; // Settings extern int64_t nTransactionFee; extern int64_t nReserveBalance; extern int64_t nMinimumInputValue; extern bool fUseFastIndex; extern unsigned int nDerivationMethodIndex; extern bool fEnforceCanonical; // Minimum disk space required - used in CheckDiskSpace() static const uint64_t nMinDiskSpace = 52428800; class CReserveKey; class CTxDB; class CTxIndex; void RegisterWallet(CWallet* pwalletIn); void UnregisterWallet(CWallet* pwalletIn); void SyncWithWallets(const CTransaction& tx, const CBlock* pblock = NULL, bool fUpdate = false, bool fConnect = true); bool ProcessBlock(CNode* pfrom, CBlock* pblock); bool CheckDiskSpace(uint64_t nAdditionalBytes=0); FILE* OpenBlockFile(unsigned int nFile, unsigned int nBlockPos, const char* pszMode="rb"); FILE* AppendBlockFile(unsigned int& nFileRet); bool LoadBlockIndex(bool fAllowNew=true); void PrintBlockTree(); CBlockIndex* FindBlockByHeight(int nHeight); bool ProcessMessages(CNode* pfrom); bool SendMessages(CNode* pto, bool fSendTrickle); bool LoadExternalBlockFile(FILE* fileIn); bool CheckProofOfWork(uint256 hash, unsigned int nBits); unsigned int GetNextTargetRequired(const CBlockIndex* pindexLast, bool fProofOfStake); int64_t GetProofOfWorkReward(int64_t nFees); int64_t GetProofOfStakeReward(int64_t nCoinAge, int64_t nFees); unsigned int ComputeMinWork(unsigned int nBase, int64_t nTime); unsigned int ComputeMinStake(unsigned int nBase, int64_t nTime, unsigned int nBlockTime); int GetNumBlocksOfPeers(); bool IsInitialBlockDownload(); std::string GetWarnings(std::string strFor); bool GetTransaction(const uint256 &hash, CTransaction &tx, uint256 &hashBlock); uint256 WantedByOrphan(const CBlock* pblockOrphan); const CBlockIndex* GetLastBlockIndex(const CBlockIndex* pindex, bool fProofOfStake); void StakeMiner(CWallet *pwallet); void ResendWalletTransactions(bool fForce = false); /** (try to) add transaction to memory pool **/ bool AcceptToMemoryPool(CTxMemPool& pool, CTransaction &tx, bool* 
pfMissingInputs); bool GetWalletFile(CWallet* pwallet, std::string &strWalletFileOut); /** Position on disk for a particular transaction. */ class CDiskTxPos { public: unsigned int nFile; unsigned int nBlockPos; unsigned int nTxPos; CDiskTxPos() { SetNull(); } CDiskTxPos(unsigned int nFileIn, unsigned int nBlockPosIn, unsigned int nTxPosIn) { nFile = nFileIn; nBlockPos = nBlockPosIn; nTxPos = nTxPosIn; } IMPLEMENT_SERIALIZE( READWRITE(FLATDATA(*this)); ) void SetNull() { nFile = (unsigned int) -1; nBlockPos = 0; nTxPos = 0; } bool IsNull() const { return (nFile == (unsigned int) -1); } friend bool operator==(const CDiskTxPos& a, const CDiskTxPos& b) { return (a.nFile == b.nFile && a.nBlockPos == b.nBlockPos && a.nTxPos == b.nTxPos); } friend bool operator!=(const CDiskTxPos& a, const CDiskTxPos& b) { return !(a == b); } std::string ToString() const { if (IsNull()) return "null"; else return strprintf("(nFile=%u, nBlockPos=%u, nTxPos=%u)", nFile, nBlockPos, nTxPos); } void print() const { printf("%s", ToString().c_str()); } }; /** An inpoint - a combination of a transaction and an index n into its vin */ class CInPoint { public: CTransaction* ptx; unsigned int n; CInPoint() { SetNull(); } CInPoint(CTransaction* ptxIn, unsigned int nIn) { ptx = ptxIn; n = nIn; } void SetNull() { ptx = NULL; n = (unsigned int) -1; } bool IsNull() const { return (ptx == NULL && n == (unsigned int) -1); } }; /** An outpoint - a combination of a transaction hash and an index n into its vout */ class COutPoint { public: uint256 hash; unsigned int n; COutPoint() { SetNull(); } COutPoint(uint256 hashIn, unsigned int nIn) { hash = hashIn; n = nIn; } IMPLEMENT_SERIALIZE( READWRITE(FLATDATA(*this)); ) void SetNull() { hash = 0; n = (unsigned int) -1; } bool IsNull() const { return (hash == 0 && n == (unsigned int) -1); } friend bool operator<(const COutPoint& a, const COutPoint& b) { return (a.hash < b.hash || (a.hash == b.hash && a.n < b.n)); } friend bool operator==(const COutPoint& a, const COutPoint& b) { return (a.hash == b.hash && a.n == b.n); } friend bool operator!=(const COutPoint& a, const COutPoint& b) { return !(a == b); } std::string ToString() const { return strprintf("COutPoint(%s, %u)", hash.ToString().substr(0,10).c_str(), n); } void print() const { printf("%s\n", ToString().c_str()); } }; /** An input of a transaction. It contains the location of the previous * transaction's output that it claims and a signature that matches the * output's public key. 
*/ class CTxIn { public: COutPoint prevout; CScript scriptSig; unsigned int nSequence; CTxIn() { nSequence = std::numeric_limits<unsigned int>::max(); } explicit CTxIn(COutPoint prevoutIn, CScript scriptSigIn=CScript(), unsigned int nSequenceIn=std::numeric_limits<unsigned int>::max()) { prevout = prevoutIn; scriptSig = scriptSigIn; nSequence = nSequenceIn; } CTxIn(uint256 hashPrevTx, unsigned int nOut, CScript scriptSigIn=CScript(), unsigned int nSequenceIn=std::numeric_limits<unsigned int>::max()) { prevout = COutPoint(hashPrevTx, nOut); scriptSig = scriptSigIn; nSequence = nSequenceIn; } IMPLEMENT_SERIALIZE ( READWRITE(prevout); READWRITE(scriptSig); READWRITE(nSequence); ) bool IsFinal() const { return (nSequence == std::numeric_limits<unsigned int>::max()); } friend bool operator==(const CTxIn& a, const CTxIn& b) { return (a.prevout == b.prevout && a.scriptSig == b.scriptSig && a.nSequence == b.nSequence); } friend bool operator!=(const CTxIn& a, const CTxIn& b) { return !(a == b); } std::string ToStringShort() const { return strprintf(" %s %d", prevout.hash.ToString().c_str(), prevout.n); } std::string ToString() const { std::string str; str += "CTxIn("; str += prevout.ToString(); if (prevout.IsNull()) str += strprintf(", coinbase %s", HexStr(scriptSig).c_str()); else str += strprintf(", scriptSig=%s", scriptSig.ToString().substr(0,24).c_str()); if (nSequence != std::numeric_limits<unsigned int>::max()) str += strprintf(", nSequence=%u", nSequence); str += ")"; return str; } void print() const { printf("%s\n", ToString().c_str()); } }; /** An output of a transaction. It contains the public key that the next input * must be able to sign with to claim it. */ class CTxOut { public: int64_t nValue; CScript scriptPubKey; CTxOut() { SetNull(); } CTxOut(int64_t nValueIn, CScript scriptPubKeyIn) { nValue = nValueIn; scriptPubKey = scriptPubKeyIn; } IMPLEMENT_SERIALIZE ( READWRITE(nValue); READWRITE(scriptPubKey); ) void SetNull() { nValue = -1; scriptPubKey.clear(); } bool IsNull() { return (nValue == -1); } void SetEmpty() { nValue = 0; scriptPubKey.clear(); } bool IsEmpty() const { return (nValue == 0 && scriptPubKey.empty()); } uint256 GetHash() const { return SerializeHash(*this); } friend bool operator==(const CTxOut& a, const CTxOut& b) { return (a.nValue == b.nValue && a.scriptPubKey == b.scriptPubKey); } friend bool operator!=(const CTxOut& a, const CTxOut& b) { return !(a == b); } std::string ToStringShort() const { return strprintf(" out %s %s", FormatMoney(nValue).c_str(), scriptPubKey.ToString(true).c_str()); } std::string ToString() const { if (IsEmpty()) return "CTxOut(empty)"; return strprintf("CTxOut(nValue=%s, scriptPubKey=%s)", FormatMoney(nValue).c_str(), scriptPubKey.ToString().c_str()); } void print() const { printf("%s\n", ToString().c_str()); } }; enum GetMinFee_mode { GMF_BLOCK, GMF_RELAY, GMF_SEND, }; typedef std::map<uint256, std::pair<CTxIndex, CTransaction> > MapPrevTx; /** The basic transaction that is broadcasted on the network and contained in * blocks. A transaction can contain multiple inputs and outputs. 
*/ class CTransaction { public: static const int CURRENT_VERSION=1; int nVersion; unsigned int nTime; std::vector<CTxIn> vin; std::vector<CTxOut> vout; unsigned int nLockTime; // Denial-of-service detection: mutable int nDoS; bool DoS(int nDoSIn, bool fIn) const { nDoS += nDoSIn; return fIn; } CTransaction() { SetNull(); } IMPLEMENT_SERIALIZE ( READWRITE(this->nVersion); nVersion = this->nVersion; READWRITE(nTime); READWRITE(vin); READWRITE(vout); READWRITE(nLockTime); ) void SetNull() { nVersion = CTransaction::CURRENT_VERSION; nTime = GetAdjustedTime(); vin.clear(); vout.clear(); nLockTime = 0; nDoS = 0; // Denial-of-service prevention } bool IsNull() const { return (vin.empty() && vout.empty()); } uint256 GetHash() const { return SerializeHash(*this); } bool IsNewerThan(const CTransaction& old) const { if (vin.size() != old.vin.size()) return false; for (unsigned int i = 0; i < vin.size(); i++) if (vin[i].prevout != old.vin[i].prevout) return false; bool fNewer = false; unsigned int nLowest = std::numeric_limits<unsigned int>::max(); for (unsigned int i = 0; i < vin.size(); i++) { if (vin[i].nSequence != old.vin[i].nSequence) { if (vin[i].nSequence <= nLowest) { fNewer = false; nLowest = vin[i].nSequence; } if (old.vin[i].nSequence < nLowest) { fNewer = true; nLowest = old.vin[i].nSequence; } } } return fNewer; } bool IsCoinBase() const { return (vin.size() == 1 && vin[0].prevout.IsNull() && vout.size() >= 1); } bool IsCoinStake() const { // ppcoin: the coin stake transaction is marked with the first output empty return (vin.size() > 0 && (!vin[0].prevout.IsNull()) && vout.size() >= 2 && vout[0].IsEmpty()); } /** Check for standard transaction types @param[in] mapInputs Map of previous transactions that have outputs we're spending @return True if all inputs (scriptSigs) use only standard transaction forms @see CTransaction::FetchInputs */ bool AreInputsStandard(const MapPrevTx& mapInputs) const; /** Count ECDSA signature operations the old-fashioned (pre-0.6) way @return number of sigops this transaction's outputs will produce when spent @see CTransaction::FetchInputs */ unsigned int GetLegacySigOpCount() const; /** Count ECDSA signature operations in pay-to-script-hash inputs. @param[in] mapInputs Map of previous transactions that have outputs we're spending @return maximum number of sigops required to validate this transaction's inputs @see CTransaction::FetchInputs */ unsigned int GetP2SHSigOpCount(const MapPrevTx& mapInputs) const; /** Amount of bitcoins spent by this transaction. @return sum of all outputs (note: does not include fees) */ int64_t GetValueOut() const { int64_t nValueOut = 0; BOOST_FOREACH(const CTxOut& txout, vout) { nValueOut += txout.nValue; if (!MoneyRange(txout.nValue) || !MoneyRange(nValueOut)) throw std::runtime_error("CTransaction::GetValueOut() : value out of range"); } return nValueOut; } /** Amount of bitcoins coming in to this transaction Note that lightweight clients may not know anything besides the hash of previous transactions, so may not be able to calculate this. @param[in] mapInputs Map of previous transactions that have outputs we're spending @return Sum of value of all inputs (scriptSigs) @see CTransaction::FetchInputs */ int64_t GetValueIn(const MapPrevTx& mapInputs) const; int64_t GetMinFee(unsigned int nBlockSize=1, enum GetMinFee_mode mode=GMF_BLOCK, unsigned int nBytes = 0) const; bool ReadFromDisk(CDiskTxPos pos, FILE** pfileRet=NULL) { CAutoFile filein = CAutoFile(OpenBlockFile(pos.nFile, 0, pfileRet ? 
"rb+" : "rb"), SER_DISK, CLIENT_VERSION); if (!filein) return error("CTransaction::ReadFromDisk() : OpenBlockFile failed"); // Read transaction if (fseek(filein, pos.nTxPos, SEEK_SET) != 0) return error("CTransaction::ReadFromDisk() : fseek failed"); try { filein >> *this; } catch (std::exception &e) { return error("%s() : deserialize or I/O error", __PRETTY_FUNCTION__); } // Return file pointer if (pfileRet) { if (fseek(filein, pos.nTxPos, SEEK_SET) != 0) return error("CTransaction::ReadFromDisk() : second fseek failed"); *pfileRet = filein.release(); } return true; } friend bool operator==(const CTransaction& a, const CTransaction& b) { return (a.nVersion == b.nVersion && a.nTime == b.nTime && a.vin == b.vin && a.vout == b.vout && a.nLockTime == b.nLockTime); } friend bool operator!=(const CTransaction& a, const CTransaction& b) { return !(a == b); } std::string ToStringShort() const { std::string str; str += strprintf("%s %s", GetHash().ToString().c_str(), IsCoinBase()? "base" : (IsCoinStake()? "stake" : "user")); return str; } std::string ToString() const { std::string str; str += IsCoinBase()? "Coinbase" : (IsCoinStake()? "Coinstake" : "CTransaction"); str += strprintf("(hash=%s, nTime=%d, ver=%d, vin.size=%"PRIszu", vout.size=%"PRIszu", nLockTime=%d)\n", GetHash().ToString().substr(0,10).c_str(), nTime, nVersion, vin.size(), vout.size(), nLockTime); for (unsigned int i = 0; i < vin.size(); i++) str += " " + vin[i].ToString() + "\n"; for (unsigned int i = 0; i < vout.size(); i++) str += " " + vout[i].ToString() + "\n"; return str; } void print() const { printf("%s", ToString().c_str()); } bool ReadFromDisk(CTxDB& txdb, COutPoint prevout, CTxIndex& txindexRet); bool ReadFromDisk(CTxDB& txdb, COutPoint prevout); bool ReadFromDisk(COutPoint prevout); bool DisconnectInputs(CTxDB& txdb); /** Fetch from memory and/or disk. inputsRet keys are transaction hashes. @param[in] txdb Transaction database @param[in] mapTestPool List of pending changes to the transaction index database @param[in] fBlock True if being called to add a new best-block to the chain @param[in] fMiner True if being called by CreateNewBlock @param[out] inputsRet Pointers to this transaction's inputs @param[out] fInvalid returns true if transaction is invalid @return Returns true if all inputs are in txdb or mapTestPool */ bool FetchInputs(CTxDB& txdb, const std::map<uint256, CTxIndex>& mapTestPool, bool fBlock, bool fMiner, MapPrevTx& inputsRet, bool& fInvalid); /** Sanity check previous transactions, then, if all checks succeed, mark them as spent by this transaction. 
@param[in] inputs Previous transactions (from FetchInputs) @param[out] mapTestPool Keeps track of inputs that need to be updated on disk @param[in] posThisTx Position of this transaction on disk @param[in] pindexBlock @param[in] fBlock true if called from ConnectBlock @param[in] fMiner true if called from CreateNewBlock @return Returns true if all checks succeed */ bool ConnectInputs(CTxDB& txdb, MapPrevTx inputs, std::map<uint256, CTxIndex>& mapTestPool, const CDiskTxPos& posThisTx, const CBlockIndex* pindexBlock, bool fBlock, bool fMiner); bool CheckTransaction() const; bool GetCoinAge(CTxDB& txdb, uint64_t& nCoinAge) const; // ppcoin: get transaction coin age protected: const CTxOut& GetOutputFor(const CTxIn& input, const MapPrevTx& inputs) const; }; /** Check for standard transaction types @return True if all outputs (scriptPubKeys) use only standard transaction forms */ bool IsStandardTx(const CTransaction& tx); bool IsFinalTx(const CTransaction &tx, int nBlockHeight = 0, int64_t nBlockTime = 0); /** A transaction with a merkle branch linking it to the block chain. */ class CMerkleTx : public CTransaction { private: int GetDepthInMainChainINTERNAL(CBlockIndex* &pindexRet) const; public: uint256 hashBlock; std::vector<uint256> vMerkleBranch; int nIndex; // memory only mutable bool fMerkleVerified; CMerkleTx() { Init(); } CMerkleTx(const CTransaction& txIn) : CTransaction(txIn) { Init(); } void Init() { hashBlock = 0; nIndex = -1; fMerkleVerified = false; } IMPLEMENT_SERIALIZE ( nSerSize += SerReadWrite(s, *(CTransaction*)this, nType, nVersion, ser_action); nVersion = this->nVersion; READWRITE(hashBlock); READWRITE(vMerkleBranch); READWRITE(nIndex); ) int SetMerkleBranch(const CBlock* pblock=NULL); // Return depth of transaction in blockchain: // -1 : not in blockchain, and not in memory pool (conflicted transaction) // 0 : in memory pool, waiting to be included in a block // >=1 : this many blocks deep in the main chain int GetDepthInMainChain(CBlockIndex* &pindexRet) const; int GetDepthInMainChain() const { CBlockIndex *pindexRet; return GetDepthInMainChain(pindexRet); } bool IsInMainChain() const { CBlockIndex *pindexRet; return GetDepthInMainChainINTERNAL(pindexRet) > 0; } int GetBlocksToMaturity() const; bool AcceptToMemoryPool(); }; /** A txdb record that contains the disk location of a transaction and the * locations of transactions that spend its outputs. vSpent is really only * used as a flag, but having the location is very helpful for debugging. */ class CTxIndex { public: CDiskTxPos pos; std::vector<CDiskTxPos> vSpent; CTxIndex() { SetNull(); } CTxIndex(const CDiskTxPos& posIn, unsigned int nOutputs) { pos = posIn; vSpent.resize(nOutputs); } IMPLEMENT_SERIALIZE ( if (!(nType & SER_GETHASH)) READWRITE(nVersion); READWRITE(pos); READWRITE(vSpent); ) void SetNull() { pos.SetNull(); vSpent.clear(); } bool IsNull() { return pos.IsNull(); } friend bool operator==(const CTxIndex& a, const CTxIndex& b) { return (a.pos == b.pos && a.vSpent == b.vSpent); } friend bool operator!=(const CTxIndex& a, const CTxIndex& b) { return !(a == b); } int GetDepthInMainChain() const; }; /** Nodes collect new transactions into a block, hash them into a hash tree, * and scan through nonce values to make the block's hash satisfy proof-of-work * requirements. When they solve the proof-of-work, they broadcast the block * to everyone and the block is added to the block chain. The first transaction * in the block is a special one that creates a new coin owned by the creator * of the block. 
* * Blocks are appended to blk0001.dat files on disk. Their location on disk * is indexed by CBlockIndex objects in memory. */ class CBlock { public: // header static const int CURRENT_VERSION=6; int nVersion; uint256 hashPrevBlock; uint256 hashMerkleRoot; unsigned int nTime; unsigned int nBits; unsigned int nNonce; // network and disk std::vector<CTransaction> vtx; // ppcoin: block signature - signed by one of the coin base txout[N]'s owner std::vector<unsigned char> vchBlockSig; // memory only mutable std::vector<uint256> vMerkleTree; // Denial-of-service detection: mutable int nDoS; bool DoS(int nDoSIn, bool fIn) const { nDoS += nDoSIn; return fIn; } CBlock() { SetNull(); } IMPLEMENT_SERIALIZE ( READWRITE(this->nVersion); nVersion = this->nVersion; READWRITE(hashPrevBlock); READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); READWRITE(nNonce); // ConnectBlock depends on vtx following header to generate CDiskTxPos if (!(nType & (SER_GETHASH|SER_BLOCKHEADERONLY))) { READWRITE(vtx); READWRITE(vchBlockSig); } else if (fRead) { const_cast<CBlock*>(this)->vtx.clear(); const_cast<CBlock*>(this)->vchBlockSig.clear(); } ) void SetNull() { nVersion = CBlock::CURRENT_VERSION; hashPrevBlock = 0; hashMerkleRoot = 0; nTime = 0; nBits = 0; nNonce = 0; vtx.clear(); vchBlockSig.clear(); vMerkleTree.clear(); nDoS = 0; } bool IsNull() const { return (nBits == 0); } uint256 GetHash() const { return GetPoWHash(); } uint256 GetPoWHash() const { return scrypt_blockhash(CVOIDBEGIN(nVersion)); } int64_t GetBlockTime() const { return (int64_t)nTime; } void UpdateTime(const CBlockIndex* pindexPrev); // entropy bit for stake modifier if chosen by modifier unsigned int GetStakeEntropyBit() const { // Take last bit of block hash as entropy bit unsigned int nEntropyBit = ((GetHash().Get64()) & 1llu); if (fDebug && GetBoolArg("-printstakemodifier")) printf("GetStakeEntropyBit: hashBlock=%s nEntropyBit=%u\n", GetHash().ToString().c_str(), nEntropyBit); return nEntropyBit; } // ppcoin: two types of block: proof-of-work or proof-of-stake bool IsProofOfStake() const { return (vtx.size() > 1 && vtx[1].IsCoinStake()); } bool IsProofOfWork() const { return !IsProofOfStake(); } std::pair<COutPoint, unsigned int> GetProofOfStake() const { return IsProofOfStake()? std::make_pair(vtx[1].vin[0].prevout, vtx[1].nTime) : std::make_pair(COutPoint(), (unsigned int)0); } // ppcoin: get max transaction timestamp int64_t GetMaxTransactionTime() const { int64_t maxTransactionTime = 0; BOOST_FOREACH(const CTransaction& tx, vtx) maxTransactionTime = std::max(maxTransactionTime, (int64_t)tx.nTime); return maxTransactionTime; } uint256 BuildMerkleTree() const { vMerkleTree.clear(); BOOST_FOREACH(const CTransaction& tx, vtx) vMerkleTree.push_back(tx.GetHash()); int j = 0; for (int nSize = vtx.size(); nSize > 1; nSize = (nSize + 1) / 2) { for (int i = 0; i < nSize; i += 2) { int i2 = std::min(i+1, nSize-1); vMerkleTree.push_back(Hash(BEGIN(vMerkleTree[j+i]), END(vMerkleTree[j+i]), BEGIN(vMerkleTree[j+i2]), END(vMerkleTree[j+i2]))); } j += nSize; } return (vMerkleTree.empty() ? 
0 : vMerkleTree.back()); } std::vector<uint256> GetMerkleBranch(int nIndex) const { if (vMerkleTree.empty()) BuildMerkleTree(); std::vector<uint256> vMerkleBranch; int j = 0; for (int nSize = vtx.size(); nSize > 1; nSize = (nSize + 1) / 2) { int i = std::min(nIndex^1, nSize-1); vMerkleBranch.push_back(vMerkleTree[j+i]); nIndex >>= 1; j += nSize; } return vMerkleBranch; } static uint256 CheckMerkleBranch(uint256 hash, const std::vector<uint256>& vMerkleBranch, int nIndex) { if (nIndex == -1) return 0; BOOST_FOREACH(const uint256& otherside, vMerkleBranch) { if (nIndex & 1) hash = Hash(BEGIN(otherside), END(otherside), BEGIN(hash), END(hash)); else hash = Hash(BEGIN(hash), END(hash), BEGIN(otherside), END(otherside)); nIndex >>= 1; } return hash; } bool WriteToDisk(unsigned int& nFileRet, unsigned int& nBlockPosRet) { // Open history file to append CAutoFile fileout = CAutoFile(AppendBlockFile(nFileRet), SER_DISK, CLIENT_VERSION); if (!fileout) return error("CBlock::WriteToDisk() : AppendBlockFile failed"); // Write index header unsigned int nSize = fileout.GetSerializeSize(*this); fileout << FLATDATA(pchMessageStart) << nSize; // Write block long fileOutPos = ftell(fileout); if (fileOutPos < 0) return error("CBlock::WriteToDisk() : ftell failed"); nBlockPosRet = fileOutPos; fileout << *this; // Flush stdio buffers and commit to disk before returning fflush(fileout); if (!IsInitialBlockDownload() || (nBestHeight+1) % 500 == 0) FileCommit(fileout); return true; } bool ReadFromDisk(unsigned int nFile, unsigned int nBlockPos, bool fReadTransactions=true) { SetNull(); // Open history file to read CAutoFile filein = CAutoFile(OpenBlockFile(nFile, nBlockPos, "rb"), SER_DISK, CLIENT_VERSION); if (!filein) return error("CBlock::ReadFromDisk() : OpenBlockFile failed"); if (!fReadTransactions) filein.nType |= SER_BLOCKHEADERONLY; // Read block try { filein >> *this; } catch (std::exception &e) { return error("%s() : deserialize or I/O error", __PRETTY_FUNCTION__); } // Check the header if (fReadTransactions && IsProofOfWork() && !CheckProofOfWork(GetPoWHash(), nBits)) return error("CBlock::ReadFromDisk() : errors in block header"); return true; } void print() const { printf("CBlock(hash=%s, ver=%d, hashPrevBlock=%s, hashMerkleRoot=%s, nTime=%u, nBits=%08x, nNonce=%u, vtx=%"PRIszu", vchBlockSig=%s)\n", GetHash().ToString().c_str(), nVersion, hashPrevBlock.ToString().c_str(), hashMerkleRoot.ToString().c_str(), nTime, nBits, nNonce, vtx.size(), HexStr(vchBlockSig.begin(), vchBlockSig.end()).c_str()); for (unsigned int i = 0; i < vtx.size(); i++) { printf(" "); vtx[i].print(); } printf(" vMerkleTree: "); for (unsigned int i = 0; i < vMerkleTree.size(); i++) printf("%s ", vMerkleTree[i].ToString().substr(0,10).c_str()); printf("\n"); } bool DisconnectBlock(CTxDB& txdb, CBlockIndex* pindex); bool ConnectBlock(CTxDB& txdb, CBlockIndex* pindex, bool fJustCheck=false); bool ReadFromDisk(const CBlockIndex* pindex, bool fReadTransactions=true); bool SetBestChain(CTxDB& txdb, CBlockIndex* pindexNew); bool AddToBlockIndex(unsigned int nFile, unsigned int nBlockPos, const uint256& hashProof); bool CheckBlock(bool fCheckPOW=true, bool fCheckMerkleRoot=true, bool fCheckSig=true) const; bool AcceptBlock(); bool GetCoinAge(uint64_t& nCoinAge) const; // ppcoin: calculate total coin age spent in block bool SignBlock(CWallet& keystore, int64_t nFees); bool CheckBlockSignature() const; private: bool SetBestChainInner(CTxDB& txdb, CBlockIndex *pindexNew); }; /** The block chain is a tree shaped structure starting with the * 
genesis block at the root, with each block potentially having multiple * candidates to be the next block. pprev and pnext link a path through the * main/longest chain. A blockindex may have multiple pprev pointing back * to it, but pnext will only point forward to the longest branch, or will * be null if the block is not part of the longest chain. */ class CBlockIndex { public: const uint256* phashBlock; CBlockIndex* pprev; CBlockIndex* pnext; unsigned int nFile; unsigned int nBlockPos; uint256 nChainTrust; // ppcoin: trust score of block chain int nHeight; int64_t nMint; int64_t nMoneySupply; unsigned int nFlags; // ppcoin: block index flags enum { BLOCK_PROOF_OF_STAKE = (1 << 0), // is proof-of-stake block BLOCK_STAKE_ENTROPY = (1 << 1), // entropy bit for stake modifier BLOCK_STAKE_MODIFIER = (1 << 2), // regenerated stake modifier }; uint64_t nStakeModifier; // hash modifier for proof-of-stake unsigned int nStakeModifierChecksum; // checksum of index; in-memeory only // proof-of-stake specific fields COutPoint prevoutStake; unsigned int nStakeTime; uint256 hashProof; // block header int nVersion; uint256 hashMerkleRoot; unsigned int nTime; unsigned int nBits; unsigned int nNonce; CBlockIndex() { phashBlock = NULL; pprev = NULL; pnext = NULL; nFile = 0; nBlockPos = 0; nHeight = 0; nChainTrust = 0; nMint = 0; nMoneySupply = 0; nFlags = 0; nStakeModifier = 0; nStakeModifierChecksum = 0; hashProof = 0; prevoutStake.SetNull(); nStakeTime = 0; nVersion = 0; hashMerkleRoot = 0; nTime = 0; nBits = 0; nNonce = 0; } CBlockIndex(unsigned int nFileIn, unsigned int nBlockPosIn, CBlock& block) { phashBlock = NULL; pprev = NULL; pnext = NULL; nFile = nFileIn; nBlockPos = nBlockPosIn; nHeight = 0; nChainTrust = 0; nMint = 0; nMoneySupply = 0; nFlags = 0; nStakeModifier = 0; nStakeModifierChecksum = 0; hashProof = 0; if (block.IsProofOfStake()) { SetProofOfStake(); prevoutStake = block.vtx[1].vin[0].prevout; nStakeTime = block.vtx[1].nTime; } else { prevoutStake.SetNull(); nStakeTime = 0; } nVersion = block.nVersion; hashMerkleRoot = block.hashMerkleRoot; nTime = block.nTime; nBits = block.nBits; nNonce = block.nNonce; } CBlock GetBlockHeader() const { CBlock block; block.nVersion = nVersion; if (pprev) block.hashPrevBlock = pprev->GetBlockHash(); block.hashMerkleRoot = hashMerkleRoot; block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; return block; } uint256 GetBlockHash() const { return *phashBlock; } int64_t GetBlockTime() const { return (int64_t)nTime; } uint256 GetBlockTrust() const; bool IsInMainChain() const { return (pnext || this == pindexBest); } bool CheckIndex() const { return true; } int64_t GetPastTimeLimit() const { return GetMedianTimePast(); } enum { nMedianTimeSpan=11 }; int64_t GetMedianTimePast() const { int64_t pmedian[nMedianTimeSpan]; int64_t* pbegin = &pmedian[nMedianTimeSpan]; int64_t* pend = &pmedian[nMedianTimeSpan]; const CBlockIndex* pindex = this; for (int i = 0; i < nMedianTimeSpan && pindex; i++, pindex = pindex->pprev) *(--pbegin) = pindex->GetBlockTime(); std::sort(pbegin, pend); return pbegin[(pend - pbegin)/2]; } /** * Returns true if there are nRequired or more blocks of minVersion or above * in the last nToCheck blocks, starting at pstart and going backwards. 
*/ static bool IsSuperMajority(int minVersion, const CBlockIndex* pstart, unsigned int nRequired, unsigned int nToCheck); bool IsProofOfWork() const { return !(nFlags & BLOCK_PROOF_OF_STAKE); } bool IsProofOfStake() const { return (nFlags & BLOCK_PROOF_OF_STAKE); } void SetProofOfStake() { nFlags |= BLOCK_PROOF_OF_STAKE; } unsigned int GetStakeEntropyBit() const { return ((nFlags & BLOCK_STAKE_ENTROPY) >> 1); } bool SetStakeEntropyBit(unsigned int nEntropyBit) { if (nEntropyBit > 1) return false; nFlags |= (nEntropyBit? BLOCK_STAKE_ENTROPY : 0); return true; } bool GeneratedStakeModifier() const { return (nFlags & BLOCK_STAKE_MODIFIER); } void SetStakeModifier(uint64_t nModifier, bool fGeneratedStakeModifier) { nStakeModifier = nModifier; if (fGeneratedStakeModifier) nFlags |= BLOCK_STAKE_MODIFIER; } std::string ToString() const { return strprintf("CBlockIndex(nprev=%p, pnext=%p, nFile=%u, nBlockPos=%-6d nHeight=%d, nMint=%s, nMoneySupply=%s, nFlags=(%s)(%d)(%s), nStakeModifier=%016"PRIx64", nStakeModifierChecksum=%08x, hashProof=%s, prevoutStake=(%s), nStakeTime=%d merkle=%s, hashBlock=%s)", pprev, pnext, nFile, nBlockPos, nHeight, FormatMoney(nMint).c_str(), FormatMoney(nMoneySupply).c_str(), GeneratedStakeModifier() ? "MOD" : "-", GetStakeEntropyBit(), IsProofOfStake()? "PoS" : "PoW", nStakeModifier, nStakeModifierChecksum, hashProof.ToString().c_str(), prevoutStake.ToString().c_str(), nStakeTime, hashMerkleRoot.ToString().c_str(), GetBlockHash().ToString().c_str()); } void print() const { printf("%s\n", ToString().c_str()); } }; /** Used to marshal pointers into hashes for db storage. */ class CDiskBlockIndex : public CBlockIndex { private: uint256 blockHash; public: uint256 hashPrev; uint256 hashNext; CDiskBlockIndex() { hashPrev = 0; hashNext = 0; blockHash = 0; } explicit CDiskBlockIndex(CBlockIndex* pindex) : CBlockIndex(*pindex) { hashPrev = (pprev ? pprev->GetBlockHash() : 0); hashNext = (pnext ? pnext->GetBlockHash() : 0); } IMPLEMENT_SERIALIZE ( if (!(nType & SER_GETHASH)) READWRITE(nVersion); READWRITE(hashNext); READWRITE(nFile); READWRITE(nBlockPos); READWRITE(nHeight); READWRITE(nMint); READWRITE(nMoneySupply); READWRITE(nFlags); READWRITE(nStakeModifier); if (IsProofOfStake()) { READWRITE(prevoutStake); READWRITE(nStakeTime); } else if (fRead) { const_cast<CDiskBlockIndex*>(this)->prevoutStake.SetNull(); const_cast<CDiskBlockIndex*>(this)->nStakeTime = 0; } READWRITE(hashProof); // block header READWRITE(this->nVersion); READWRITE(hashPrev); READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); READWRITE(nNonce); READWRITE(blockHash); ) uint256 GetBlockHash() const { if (fUseFastIndex && (nTime < GetAdjustedTime() - 24 * 60 * 60) && blockHash != 0) return blockHash; CBlock block; block.nVersion = nVersion; block.hashPrevBlock = hashPrev; block.hashMerkleRoot = hashMerkleRoot; block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; const_cast<CDiskBlockIndex*>(this)->blockHash = block.GetHash(); return blockHash; } std::string ToString() const { std::string str = "CDiskBlockIndex("; str += CBlockIndex::ToString(); str += strprintf("\n hashBlock=%s, hashPrev=%s, hashNext=%s)", GetBlockHash().ToString().c_str(), hashPrev.ToString().c_str(), hashNext.ToString().c_str()); return str; } void print() const { printf("%s\n", ToString().c_str()); } }; /** Describes a place in the block chain to another node such that if the * other node doesn't have the same branch, it can find a recent common trunk. * The further back it is, the further before the fork it may be. 
*/ class CBlockLocator { protected: std::vector<uint256> vHave; public: CBlockLocator() { } explicit CBlockLocator(const CBlockIndex* pindex) { Set(pindex); } explicit CBlockLocator(uint256 hashBlock) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock); if (mi != mapBlockIndex.end()) Set((*mi).second); } CBlockLocator(const std::vector<uint256>& vHaveIn) { vHave = vHaveIn; } IMPLEMENT_SERIALIZE ( if (!(nType & SER_GETHASH)) READWRITE(nVersion); READWRITE(vHave); ) void SetNull() { vHave.clear(); } bool IsNull() { return vHave.empty(); } void Set(const CBlockIndex* pindex) { vHave.clear(); int nStep = 1; while (pindex) { vHave.push_back(pindex->GetBlockHash()); // Exponentially larger steps back for (int i = 0; pindex && i < nStep; i++) pindex = pindex->pprev; if (vHave.size() > 10) nStep *= 2; } vHave.push_back((!fTestNet ? hashGenesisBlock : hashGenesisBlockTestNet)); } int GetDistanceBack() { // Retrace how far back it was in the sender's branch int nDistance = 0; int nStep = 1; BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return nDistance; } nDistance += nStep; if (nDistance > 10) nStep *= 2; } return nDistance; } CBlockIndex* GetBlockIndex() { // Find the first block the caller has in the main chain BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return pindex; } } return pindexGenesisBlock; } uint256 GetBlockHash() { // Find the first block the caller has in the main chain BOOST_FOREACH(const uint256& hash, vHave) { std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) return hash; } } return (!fTestNet ? hashGenesisBlock : hashGenesisBlockTestNet); } int GetHeight() { CBlockIndex* pindex = GetBlockIndex(); if (!pindex) return 0; return pindex->nHeight; } }; class CTxMemPool { public: mutable CCriticalSection cs; std::map<uint256, CTransaction> mapTx; std::map<COutPoint, CInPoint> mapNextTx; bool addUnchecked(const uint256& hash, CTransaction &tx); bool remove(const CTransaction &tx, bool fRecursive = false); bool removeConflicts(const CTransaction &tx); void clear(); void queryHashes(std::vector<uint256>& vtxid); unsigned long size() const { LOCK(cs); return mapTx.size(); } bool exists(uint256 hash) const { LOCK(cs); return (mapTx.count(hash) != 0); } bool lookup(uint256 hash, CTransaction& result) const { LOCK(cs); std::map<uint256, CTransaction>::const_iterator i = mapTx.find(hash); if (i == mapTx.end()) return false; result = i->second; return true; } }; extern CTxMemPool mempool; #endif
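# Editor's note: the C++ header above builds and verifies Merkle branches
# (GetMerkleBranch / CheckMerkleBranch). The following is an illustrative
# Python sketch of that verification step, assuming the usual Bitcoin-style
# double-SHA256 over the concatenated 32-byte hashes; the function and
# variable names below are ours, not from the C++ source.
import hashlib


def _hash2(a: bytes, b: bytes) -> bytes:
    """Double SHA-256 of the concatenation, as Hash(BEGIN(a), END(a), ...) does."""
    return hashlib.sha256(hashlib.sha256(a + b).digest()).digest()


def check_merkle_branch(leaf: bytes, branch: list, index: int) -> bytes:
    """Fold a leaf hash up the tree. At each level the low bit of the index
    says whether our hash is the right child (index & 1) or the left child,
    mirroring the nIndex >>= 1 loop in CheckMerkleBranch."""
    h = leaf
    for sibling in branch:
        if index & 1:
            h = _hash2(sibling, h)  # our hash is the right child
        else:
            h = _hash2(h, sibling)  # our hash is the left child
        index >>= 1
    return h  # equals the Merkle root iff branch and index are consistent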
# -*- coding: utf-8 -*- """ babel.core ~~~~~~~~~~ Core locale representation and locale data access. :copyright: (c) 2013 by the Babel Team. :license: BSD, see LICENSE for more details. """ import os from babel import localedata from babel._compat import pickle, string_types from babel.plural import PluralRule __all__ = ['UnknownLocaleError', 'Locale', 'default_locale', 'negotiate_locale', 'parse_locale'] _global_data = None _default_plural_rule = PluralRule({}) def _raise_no_data_error(): raise RuntimeError('The babel data files are not available. ' 'This usually happens because you are using ' 'a source checkout from Babel and you did ' 'not build the data files. Just make sure ' 'to run "python setup.py import_cldr" before ' 'installing the library.') def get_global(key): """Return the dictionary for the given key in the global data. The global data is stored in the ``babel/global.dat`` file and contains information independent of individual locales. >>> get_global('zone_aliases')['UTC'] u'Etc/GMT' >>> get_global('zone_territories')['Europe/Berlin'] u'DE' The keys available are: - ``currency_fractions`` - ``language_aliases`` - ``likely_subtags`` - ``parent_exceptions`` - ``script_aliases`` - ``territory_aliases`` - ``territory_currencies`` - ``territory_languages`` - ``territory_zones`` - ``variant_aliases`` - ``windows_zone_mapping`` - ``zone_aliases`` - ``zone_territories`` .. note:: The internal structure of the data may change between versions. .. versionadded:: 0.9 :param key: the data key """ global _global_data if _global_data is None: dirname = os.path.join(os.path.dirname(__file__)) filename = os.path.join(dirname, 'global.dat') if not os.path.isfile(filename): _raise_no_data_error() with open(filename, 'rb') as fileobj: _global_data = pickle.load(fileobj) return _global_data.get(key, {}) LOCALE_ALIASES = { 'ar': 'ar_SY', 'bg': 'bg_BG', 'bs': 'bs_BA', 'ca': 'ca_ES', 'cs': 'cs_CZ', 'da': 'da_DK', 'de': 'de_DE', 'el': 'el_GR', 'en': 'en_US', 'es': 'es_ES', 'et': 'et_EE', 'fa': 'fa_IR', 'fi': 'fi_FI', 'fr': 'fr_FR', 'gl': 'gl_ES', 'he': 'he_IL', 'hu': 'hu_HU', 'id': 'id_ID', 'is': 'is_IS', 'it': 'it_IT', 'ja': 'ja_JP', 'km': 'km_KH', 'ko': 'ko_KR', 'lt': 'lt_LT', 'lv': 'lv_LV', 'mk': 'mk_MK', 'nl': 'nl_NL', 'nn': 'nn_NO', 'no': 'nb_NO', 'pl': 'pl_PL', 'pt': 'pt_PT', 'ro': 'ro_RO', 'ru': 'ru_RU', 'sk': 'sk_SK', 'sl': 'sl_SI', 'sv': 'sv_SE', 'th': 'th_TH', 'tr': 'tr_TR', 'uk': 'uk_UA' } class UnknownLocaleError(Exception): """Exception thrown when a locale is requested for which no locale data is available. """ def __init__(self, identifier): """Create the exception. :param identifier: the identifier string of the unsupported locale """ Exception.__init__(self, 'unknown locale %r' % identifier) #: The identifier of the locale that could not be found. self.identifier = identifier class Locale(object): """Representation of a specific locale. >>> locale = Locale('en', 'US') >>> repr(locale) "Locale('en', territory='US')" >>> locale.display_name u'English (United States)' A `Locale` object can also be instantiated from a raw locale string: >>> locale = Locale.parse('en-US', sep='-') >>> repr(locale) "Locale('en', territory='US')" `Locale` objects provide access to a collection of locale data, such as territory and language names, number and date format patterns, and more: >>> locale.number_symbols['decimal'] u'.' If a locale is requested for which no locale data is available, an `UnknownLocaleError` is raised: >>> Locale.parse('en_XX') Traceback (most recent call last): ... 
UnknownLocaleError: unknown locale 'en_XX' For more information see :rfc:`3066`. """ def __init__(self, language, territory=None, script=None, variant=None): """Initialize the locale object from the given identifier components. >>> locale = Locale('en', 'US') >>> locale.language 'en' >>> locale.territory 'US' :param language: the language code :param territory: the territory (country or region) code :param script: the script code :param variant: the variant code :raise `UnknownLocaleError`: if no locale data is available for the requested locale """ #: the language code self.language = language #: the territory (country or region) code self.territory = territory #: the script code self.script = script #: the variant code self.variant = variant self.__data = None identifier = str(self) if not localedata.exists(identifier): raise UnknownLocaleError(identifier) @classmethod def default(cls, category=None, aliases=LOCALE_ALIASES): """Return the system default locale for the specified category. >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES']: ... os.environ[name] = '' >>> os.environ['LANG'] = 'fr_FR.UTF-8' >>> Locale.default('LC_MESSAGES') Locale('fr', territory='FR') The following fallbacks to the variable are always considered: - ``LANGUAGE`` - ``LC_ALL`` - ``LC_CTYPE`` - ``LANG`` :param category: one of the ``LC_XXX`` environment variable names :param aliases: a dictionary of aliases for locale identifiers """ # XXX: use likely subtag expansion here instead of the # aliases dictionary. locale_string = default_locale(category, aliases=aliases) return cls.parse(locale_string) @classmethod def negotiate(cls, preferred, available, sep='_', aliases=LOCALE_ALIASES): """Find the best match between available and requested locale strings. >>> Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT']) Locale('de', territory='DE') >>> Locale.negotiate(['de_DE', 'en_US'], ['en', 'de']) Locale('de') >>> Locale.negotiate(['de_DE', 'de'], ['en_US']) You can specify the character used in the locale identifiers to separate the differnet components. This separator is applied to both lists. Also, case is ignored in the comparison: >>> Locale.negotiate(['de-DE', 'de'], ['en-us', 'de-de'], sep='-') Locale('de', territory='DE') :param preferred: the list of locale identifers preferred by the user :param available: the list of locale identifiers available :param aliases: a dictionary of aliases for locale identifiers """ identifier = negotiate_locale(preferred, available, sep=sep, aliases=aliases) if identifier: return Locale.parse(identifier, sep=sep) @classmethod def parse(cls, identifier, sep='_', resolve_likely_subtags=True): """Create a `Locale` instance for the given locale identifier. >>> l = Locale.parse('de-DE', sep='-') >>> l.display_name u'Deutsch (Deutschland)' If the `identifier` parameter is not a string, but actually a `Locale` object, that object is returned: >>> Locale.parse(l) Locale('de', territory='DE') This also can perform resolving of likely subtags which it does by default. This is for instance useful to figure out the most likely locale for a territory you can use ``'und'`` as the language tag: >>> Locale.parse('und_AT') Locale('de', territory='AT') :param identifier: the locale identifier string :param sep: optional component separator :param resolve_likely_subtags: if this is specified then a locale will have its likely subtag resolved if the locale otherwise does not exist. 
For instance ``zh_TW`` by itself is not a locale that exists but Babel can automatically expand it to the full form of ``zh_hant_TW``. Note that this expansion is only taking place if no locale exists otherwise. For instance there is a locale ``en`` that can exist by itself. :raise `ValueError`: if the string does not appear to be a valid locale identifier :raise `UnknownLocaleError`: if no locale data is available for the requested locale """ if identifier is None: return None elif isinstance(identifier, Locale): return identifier elif not isinstance(identifier, string_types): raise TypeError('Unexpected value for identifier: %r' % (identifier,)) parts = parse_locale(identifier, sep=sep) input_id = get_locale_identifier(parts) def _try_load(parts): try: return cls(*parts) except UnknownLocaleError: return None def _try_load_reducing(parts): # Success on first hit, return it. locale = _try_load(parts) if locale is not None: return locale # Now try without script and variant locale = _try_load(parts[:2]) if locale is not None: return locale locale = _try_load(parts) if locale is not None: return locale if not resolve_likely_subtags: raise UnknownLocaleError(input_id) # From here onwards is some very bad likely subtag resolving. This # whole logic is not entirely correct but good enough (tm) for the # time being. This has been added so that zh_TW does not cause # errors for people when they upgrade. Later we should properly # implement ICU like fuzzy locale objects and provide a way to # maximize and minimize locale tags. language, territory, script, variant = parts language = get_global('language_aliases').get(language, language) territory = get_global('territory_aliases').get(territory, (territory,))[0] script = get_global('script_aliases').get(script, script) variant = get_global('variant_aliases').get(variant, variant) if territory == 'ZZ': territory = None if script == 'Zzzz': script = None parts = language, territory, script, variant # First match: try the whole identifier new_id = get_locale_identifier(parts) likely_subtag = get_global('likely_subtags').get(new_id) if likely_subtag is not None: locale = _try_load_reducing(parse_locale(likely_subtag)) if locale is not None: return locale # If we did not find anything so far, try again with a # simplified identifier that is just the language likely_subtag = get_global('likely_subtags').get(language) if likely_subtag is not None: language2, _, script2, variant2 = parse_locale(likely_subtag) locale = _try_load_reducing((language2, territory, script2, variant2)) if locale is not None: return locale raise UnknownLocaleError(input_id) def __eq__(self, other): for key in ('language', 'territory', 'script', 'variant'): if not hasattr(other, key): return False return (self.language == other.language) and \ (self.territory == other.territory) and \ (self.script == other.script) and \ (self.variant == other.variant) def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash((self.language, self.territory, self.script, self.variant)) def __repr__(self): parameters = [''] for key in ('territory', 'script', 'variant'): value = getattr(self, key) if value is not None: parameters.append('%s=%r' % (key, value)) parameter_string = '%r' % self.language + ', '.join(parameters) return 'Locale(%s)' % parameter_string def __str__(self): return get_locale_identifier((self.language, self.territory, self.script, self.variant)) @property def _data(self): if self.__data is None: self.__data = 
localedata.LocaleDataDict(localedata.load(str(self))) return self.__data def get_display_name(self, locale=None): """Return the display name of the locale using the given locale. The display name will include the language, territory, script, and variant, if those are specified. >>> Locale('zh', 'CN', script='Hans').get_display_name('en') u'Chinese (Simplified, China)' :param locale: the locale to use """ if locale is None: locale = self locale = Locale.parse(locale) retval = locale.languages.get(self.language) if self.territory or self.script or self.variant: details = [] if self.script: details.append(locale.scripts.get(self.script)) if self.territory: details.append(locale.territories.get(self.territory)) if self.variant: details.append(locale.variants.get(self.variant)) details = filter(None, details) if details: retval += ' (%s)' % u', '.join(details) return retval display_name = property(get_display_name, doc="""\ The localized display name of the locale. >>> Locale('en').display_name u'English' >>> Locale('en', 'US').display_name u'English (United States)' >>> Locale('sv').display_name u'svenska' :type: `unicode` """) def get_language_name(self, locale=None): """Return the language of this locale in the given locale. >>> Locale('zh', 'CN', script='Hans').get_language_name('de') u'Chinesisch' .. versionadded:: 1.0 :param locale: the locale to use """ if locale is None: locale = self locale = Locale.parse(locale) return locale.languages.get(self.language) language_name = property(get_language_name, doc="""\ The localized language name of the locale. >>> Locale('en', 'US').language_name u'English' """) def get_territory_name(self, locale=None): """Return the territory name in the given locale.""" if locale is None: locale = self locale = Locale.parse(locale) return locale.territories.get(self.territory) territory_name = property(get_territory_name, doc="""\ The localized territory name of the locale if available. >>> Locale('de', 'DE').territory_name u'Deutschland' """) def get_script_name(self, locale=None): """Return the script name in the given locale.""" if locale is None: locale = self locale = Locale.parse(locale) return locale.scripts.get(self.script) script_name = property(get_script_name, doc="""\ The localized script name of the locale if available. >>> Locale('sr', 'ME', script='Latn').script_name u'latinica' """) @property def english_name(self): """The english display name of the locale. >>> Locale('de').english_name u'German' >>> Locale('de', 'DE').english_name u'German (Germany)' :type: `unicode`""" return self.get_display_name(Locale('en')) # { General Locale Display Names @property def languages(self): """Mapping of language codes to translated language names. >>> Locale('de', 'DE').languages['ja'] u'Japanisch' See `ISO 639 <http://www.loc.gov/standards/iso639-2/>`_ for more information. """ return self._data['languages'] @property def scripts(self): """Mapping of script codes to translated script names. >>> Locale('en', 'US').scripts['Hira'] u'Hiragana' See `ISO 15924 <http://www.evertype.com/standards/iso15924/>`_ for more information. """ return self._data['scripts'] @property def territories(self): """Mapping of script codes to translated script names. >>> Locale('es', 'CO').territories['DE'] u'Alemania' See `ISO 3166 <http://www.iso.org/iso/en/prods-services/iso3166ma/>`_ for more information. """ return self._data['territories'] @property def variants(self): """Mapping of script codes to translated script names. 
>>> Locale('de', 'DE').variants['1901'] u'Alte deutsche Rechtschreibung' """ return self._data['variants'] # { Number Formatting @property def currencies(self): """Mapping of currency codes to translated currency names. This only returns the generic form of the currency name, not the count specific one. If an actual number is requested use the :func:`babel.numbers.get_currency_name` function. >>> Locale('en').currencies['COP'] u'Colombian Peso' >>> Locale('de', 'DE').currencies['COP'] u'Kolumbianischer Peso' """ return self._data['currency_names'] @property def currency_symbols(self): """Mapping of currency codes to symbols. >>> Locale('en', 'US').currency_symbols['USD'] u'$' >>> Locale('es', 'CO').currency_symbols['USD'] u'US$' """ return self._data['currency_symbols'] @property def number_symbols(self): """Symbols used in number formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('fr', 'FR').number_symbols['decimal'] u',' """ return self._data['number_symbols'] @property def decimal_formats(self): """Locale patterns for decimal number formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').decimal_formats[None] <NumberPattern u'#,##0.###'> """ return self._data['decimal_formats'] @property def currency_formats(self): """Locale patterns for currency number formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').currency_formats['standard'] <NumberPattern u'\\xa4#,##0.00'> >>> Locale('en', 'US').currency_formats['accounting'] <NumberPattern u'\\xa4#,##0.00;(\\xa4#,##0.00)'> """ return self._data['currency_formats'] @property def percent_formats(self): """Locale patterns for percent number formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').percent_formats[None] <NumberPattern u'#,##0%'> """ return self._data['percent_formats'] @property def scientific_formats(self): """Locale patterns for scientific number formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').scientific_formats[None] <NumberPattern u'#E0'> """ return self._data['scientific_formats'] # { Calendar Information and Date Formatting @property def periods(self): """Locale display names for day periods (AM/PM). >>> Locale('en', 'US').periods['am'] u'AM' """ try: return self._data['day_periods']['stand-alone']['wide'] except KeyError: return {} @property def day_periods(self): """Locale display names for various day periods (not necessarily only AM/PM). These are not meant to be used without the relevant `day_period_rules`. """ return self._data['day_periods'] @property def day_period_rules(self): """Day period rules for the locale. Used by `get_period_id`. """ return self._data.get('day_period_rules', {}) @property def days(self): """Locale display names for weekdays. >>> Locale('de', 'DE').days['format']['wide'][3] u'Donnerstag' """ return self._data['days'] @property def months(self): """Locale display names for months. >>> Locale('de', 'DE').months['format']['wide'][10] u'Oktober' """ return self._data['months'] @property def quarters(self): """Locale display names for quarters. >>> Locale('de', 'DE').quarters['format']['wide'][1] u'1. Quartal' """ return self._data['quarters'] @property def eras(self): """Locale display names for eras. .. note:: The format of the value returned may change between Babel versions. 
>>> Locale('en', 'US').eras['wide'][1] u'Anno Domini' >>> Locale('en', 'US').eras['abbreviated'][0] u'BC' """ return self._data['eras'] @property def time_zones(self): """Locale display names for time zones. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').time_zones['Europe/London']['long']['daylight'] u'British Summer Time' >>> Locale('en', 'US').time_zones['America/St_Johns']['city'] u'St. John\u2019s' """ return self._data['time_zones'] @property def meta_zones(self): """Locale display names for meta time zones. Meta time zones are basically groups of different Olson time zones that have the same GMT offset and daylight savings time. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').meta_zones['Europe_Central']['long']['daylight'] u'Central European Summer Time' .. versionadded:: 0.9 """ return self._data['meta_zones'] @property def zone_formats(self): """Patterns related to the formatting of time zones. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').zone_formats['fallback'] u'%(1)s (%(0)s)' >>> Locale('pt', 'BR').zone_formats['region'] u'Hor\\xe1rio %s' .. versionadded:: 0.9 """ return self._data['zone_formats'] @property def first_week_day(self): """The first day of a week, with 0 being Monday. >>> Locale('de', 'DE').first_week_day 0 >>> Locale('en', 'US').first_week_day 6 """ return self._data['week_data']['first_day'] @property def weekend_start(self): """The day the weekend starts, with 0 being Monday. >>> Locale('de', 'DE').weekend_start 5 """ return self._data['week_data']['weekend_start'] @property def weekend_end(self): """The day the weekend ends, with 0 being Monday. >>> Locale('de', 'DE').weekend_end 6 """ return self._data['week_data']['weekend_end'] @property def min_week_days(self): """The minimum number of days in a week so that the week is counted as the first week of a year or month. >>> Locale('de', 'DE').min_week_days 4 """ return self._data['week_data']['min_days'] @property def date_formats(self): """Locale patterns for date formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').date_formats['short'] <DateTimePattern u'M/d/yy'> >>> Locale('fr', 'FR').date_formats['long'] <DateTimePattern u'd MMMM y'> """ return self._data['date_formats'] @property def time_formats(self): """Locale patterns for time formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en', 'US').time_formats['short'] <DateTimePattern u'h:mm a'> >>> Locale('fr', 'FR').time_formats['long'] <DateTimePattern u'HH:mm:ss z'> """ return self._data['time_formats'] @property def datetime_formats(self): """Locale patterns for datetime formatting. .. note:: The format of the value returned may change between Babel versions. >>> Locale('en').datetime_formats['full'] u"{1} 'at' {0}" >>> Locale('th').datetime_formats['medium'] u'{1} {0}' """ return self._data['datetime_formats'] @property def datetime_skeletons(self): """Locale patterns for formatting parts of a datetime. >>> Locale('en').datetime_skeletons['MEd'] <DateTimePattern u'E, M/d'> >>> Locale('fr').datetime_skeletons['MEd'] <DateTimePattern u'E dd/MM'> >>> Locale('fr').datetime_skeletons['H'] <DateTimePattern u"HH 'h'"> """ return self._data['datetime_skeletons'] @property def interval_formats(self): """Locale patterns for interval formatting. .. 
note:: The format of the value returned may change between Babel versions. How to format date intervals in Finnish when the day is the smallest changing component: >>> Locale('fi_FI').interval_formats['MEd']['d'] [u'E d. \u2013 ', u'E d.M.'] .. seealso:: The primary API to use this data is :py:func:`babel.dates.format_interval`. :rtype: dict[str, dict[str, list[str]]] """ return self._data['interval_formats'] @property def plural_form(self): """Plural rules for the locale. >>> Locale('en').plural_form(1) 'one' >>> Locale('en').plural_form(0) 'other' >>> Locale('fr').plural_form(0) 'one' >>> Locale('ru').plural_form(100) 'many' """ return self._data.get('plural_form', _default_plural_rule) @property def list_patterns(self): """Patterns for generating lists .. note:: The format of the value returned may change between Babel versions. >>> Locale('en').list_patterns['start'] u'{0}, {1}' >>> Locale('en').list_patterns['end'] u'{0}, and {1}' >>> Locale('en_GB').list_patterns['end'] u'{0} and {1}' """ return self._data['list_patterns'] @property def ordinal_form(self): """Plural rules for the locale. >>> Locale('en').ordinal_form(1) 'one' >>> Locale('en').ordinal_form(2) 'two' >>> Locale('en').ordinal_form(3) 'few' >>> Locale('fr').ordinal_form(2) 'other' >>> Locale('ru').ordinal_form(100) 'other' """ return self._data.get('ordinal_form', _default_plural_rule) @property def measurement_systems(self): """Localized names for various measurement systems. >>> Locale('fr', 'FR').measurement_systems['US'] u'am\\xe9ricain' >>> Locale('en', 'US').measurement_systems['US'] u'US' """ return self._data['measurement_systems'] @property def character_order(self): """The text direction for the language. >>> Locale('de', 'DE').character_order 'left-to-right' >>> Locale('ar', 'SA').character_order 'right-to-left' """ return self._data['character_order'] @property def text_direction(self): """The text direction for the language in CSS short-hand form. >>> Locale('de', 'DE').text_direction 'ltr' >>> Locale('ar', 'SA').text_direction 'rtl' """ return ''.join(word[0] for word in self.character_order.split('-')) @property def unit_display_names(self): """Display names for units of measurement. .. seealso:: You may want to use :py:func:`babel.units.get_unit_name` instead. .. note:: The format of the value returned may change between Babel versions. """ return self._data['unit_display_names'] def default_locale(category=None, aliases=LOCALE_ALIASES): """Returns the system default locale for a given category, based on environment variables. >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE']: ... 
os.environ[name] = '' >>> os.environ['LANG'] = 'fr_FR.UTF-8' >>> default_locale('LC_MESSAGES') 'fr_FR' The "C" or "POSIX" pseudo-locales are treated as aliases for the "en_US_POSIX" locale: >>> os.environ['LC_MESSAGES'] = 'POSIX' >>> default_locale('LC_MESSAGES') 'en_US_POSIX' The following fallbacks to the variable are always considered: - ``LANGUAGE`` - ``LC_ALL`` - ``LC_CTYPE`` - ``LANG`` :param category: one of the ``LC_XXX`` environment variable names :param aliases: a dictionary of aliases for locale identifiers """ varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG') for name in filter(None, varnames): locale = os.getenv(name) if locale: if name == 'LANGUAGE' and ':' in locale: # the LANGUAGE variable may contain a colon-separated list of # language codes; we just pick the language on the list locale = locale.split(':')[0] if locale.split('.')[0] in ('C', 'POSIX'): locale = 'en_US_POSIX' elif aliases and locale in aliases: locale = aliases[locale] try: return get_locale_identifier(parse_locale(locale)) except ValueError: pass def negotiate_locale(preferred, available, sep='_', aliases=LOCALE_ALIASES): """Find the best match between available and requested locale strings. >>> negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT']) 'de_DE' >>> negotiate_locale(['de_DE', 'en_US'], ['en', 'de']) 'de' Case is ignored by the algorithm, the result uses the case of the preferred locale identifier: >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) 'de_DE' >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) 'de_DE' By default, some web browsers unfortunately do not include the territory in the locale identifier for many locales, and some don't even allow the user to easily add the territory. So while you may prefer using qualified locale identifiers in your web-application, they would not normally match the language-only locale sent by such browsers. To workaround that, this function uses a default mapping of commonly used langauge-only locale identifiers to identifiers including the territory: >>> negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US']) 'ja_JP' Some browsers even use an incorrect or outdated language code, such as "no" for Norwegian, where the correct locale identifier would actually be "nb_NO" (Bokmål) or "nn_NO" (Nynorsk). The aliases are intended to take care of such cases, too: >>> negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']) 'nb_NO' You can override this default mapping by passing a different `aliases` dictionary to this function, or you can bypass the behavior althogher by setting the `aliases` parameter to `None`. :param preferred: the list of locale strings preferred by the user :param available: the list of locale strings available :param sep: character that separates the different parts of the locale strings :param aliases: a dictionary of aliases for locale identifiers """ available = [a.lower() for a in available if a] for locale in preferred: ll = locale.lower() if ll in available: return locale if aliases: alias = aliases.get(ll) if alias: alias = alias.replace('_', sep) if alias.lower() in available: return alias parts = locale.split(sep) if len(parts) > 1 and parts[0].lower() in available: return parts[0] return None def parse_locale(identifier, sep='_'): """Parse a locale identifier into a tuple of the form ``(language, territory, script, variant)``. 
>>> parse_locale('zh_CN') ('zh', 'CN', None, None) >>> parse_locale('zh_Hans_CN') ('zh', 'CN', 'Hans', None) The default component separator is "_", but a different separator can be specified using the `sep` parameter: >>> parse_locale('zh-CN', sep='-') ('zh', 'CN', None, None) If the identifier cannot be parsed into a locale, a `ValueError` exception is raised: >>> parse_locale('not_a_LOCALE_String') Traceback (most recent call last): ... ValueError: 'not_a_LOCALE_String' is not a valid locale identifier Encoding information and locale modifiers are removed from the identifier: >>> parse_locale('it_IT@euro') ('it', 'IT', None, None) >>> parse_locale('en_US.UTF-8') ('en', 'US', None, None) >>> parse_locale('de_DE.iso885915@euro') ('de', 'DE', None, None) See :rfc:`4646` for more information. :param identifier: the locale identifier string :param sep: character that separates the different components of the locale identifier :raise `ValueError`: if the string does not appear to be a valid locale identifier """ if '.' in identifier: # this is probably the charset/encoding, which we don't care about identifier = identifier.split('.', 1)[0] if '@' in identifier: # this is a locale modifier such as @euro, which we don't care about # either identifier = identifier.split('@', 1)[0] parts = identifier.split(sep) lang = parts.pop(0).lower() if not lang.isalpha(): raise ValueError('expected only letters, got %r' % lang) script = territory = variant = None if parts: if len(parts[0]) == 4 and parts[0].isalpha(): script = parts.pop(0).title() if parts: if len(parts[0]) == 2 and parts[0].isalpha(): territory = parts.pop(0).upper() elif len(parts[0]) == 3 and parts[0].isdigit(): territory = parts.pop(0) if parts: if len(parts[0]) == 4 and parts[0][0].isdigit() or \ len(parts[0]) >= 5 and parts[0][0].isalpha(): variant = parts.pop() if parts: raise ValueError('%r is not a valid locale identifier' % identifier) return lang, territory, script, variant def get_locale_identifier(tup, sep='_'): """The reverse of :func:`parse_locale`. It creates a locale identifier out of a ``(language, territory, script, variant)`` tuple. Items can be set to ``None`` and trailing ``None``\s can also be left out of the tuple. >>> get_locale_identifier(('de', 'DE', None, '1999')) 'de_DE_1999' .. versionadded:: 1.0 :param tup: the tuple as returned by :func:`parse_locale`. :param sep: the separator for the identifier. """ tup = tuple(tup[:4]) lang, territory, script, variant = tup + (None,) * (4 - len(tup)) return sep.join(filter(None, (lang, script, territory, variant)))
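# Editor's note: a short usage sketch for the helpers defined above. The
# expected values follow the module's own doctests and assume the CLDR data
# files have been built (see _raise_no_data_error above).
from babel.core import Locale, negotiate_locale, parse_locale, get_locale_identifier

# Encoding suffixes and @modifiers are stripped during parsing, and
# get_locale_identifier() is the documented inverse of parse_locale().
parts = parse_locale('de_DE.iso885915@euro')
assert parts == ('de', 'DE', None, None)
assert get_locale_identifier(parts) == 'de_DE'

# Negotiation ignores case and consults LOCALE_ALIASES ('no' -> 'nb_NO').
assert negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']) == 'nb_NO'

# Likely-subtag resolution turns an 'und' language into the most likely locale.
assert str(Locale.parse('und_AT')) == 'de_AT'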
import Component from '@ember/component';
import { inject as service } from '@ember/service';

export default Component.extend({
  blog: service(),

  tagName: 'ul',
  classNames: ['nav']
});
// This is a placeholder until we can test forwardRef with Enzyme.
test('[Placeholder test for createHoC]', () => {
  expect(true).toEqual(true);
});
const { DateTime } = require('luxon'),
  EventEmitter = require('events');

// Emits on a fixed interval; each tick is truncated to the start of the
// current second so listeners receive stable, second-aligned millisecond
// timestamps regardless of timer jitter. Note that the timestamp itself is
// used as the event name passed to emit().
module.exports = class Clock extends EventEmitter {
  constructor (precision) {
    super();
    this.interval = null;
    this.precision = precision; // tick period in milliseconds
  }

  stop () {
    clearInterval(this.interval);
    this.interval = null;
    return this.interval;
  }

  start () {
    this.interval = setInterval(() => {
      const now = DateTime.local().startOf('second'),
        timestamp = now.toMillis();
      this.emit(timestamp);
    }, this.precision);
    return this.interval;
  }
};
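# Editor's note: a rough Python analogue of the Clock class above, included
# for consistency with the Python files in this collection. It reproduces the
# key design choice, truncating each tick to the start of the second so
# subscribers see stable, second-aligned timestamps; all names are ours.
import threading
import time


class TickClock:
    def __init__(self, precision_ms, on_tick):
        self.precision = precision_ms / 1000.0  # tick period in seconds
        self.on_tick = on_tick                  # called with a ms timestamp
        self._timer = None

    def start(self):
        self._schedule()

    def stop(self):
        if self._timer is not None:
            self._timer.cancel()
            self._timer = None

    def _schedule(self):
        self._timer = threading.Timer(self.precision, self._fire)
        self._timer.daemon = True
        self._timer.start()

    def _fire(self):
        timestamp = int(time.time()) * 1000  # start of the current second, in ms
        self.on_tick(timestamp)
        self._schedule()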
import os import pickle import uuid from dagster import ( AssetMaterialization, ExpectationResult, Failure, Materialization, ModeDefinition, PipelineDefinition, SolidDefinition, TypeCheck, ) from dagster import _check as check from dagster.core.definitions.dependency import NodeHandle from dagster.core.definitions.events import RetryRequested from dagster.core.definitions.pipeline_base import InMemoryPipeline from dagster.core.definitions.reconstruct import ReconstructablePipeline from dagster.core.definitions.resource_definition import ScopedResourcesBuilder from dagster.core.events import DagsterEvent from dagster.core.execution.api import scoped_pipeline_context from dagster.core.execution.plan.outputs import StepOutputHandle from dagster.core.execution.plan.plan import ExecutionPlan from dagster.core.execution.resources_init import ( get_required_resource_keys_to_init, resource_initialization_event_generator, ) from dagster.core.instance import DagsterInstance from dagster.core.storage.pipeline_run import DagsterRun, PipelineRunStatus from dagster.core.system_config.objects import ResolvedRunConfig from dagster.core.utils import make_new_run_id from dagster.loggers import colored_console_logger from dagster.serdes import unpack_value from dagster.utils import EventGenerationManager, ensure_gen from .context import DagstermillExecutionContext, DagstermillRuntimeExecutionContext from .errors import DagstermillError from .serialize import PICKLE_PROTOCOL class DagstermillResourceEventGenerationManager(EventGenerationManager): """Utility class to explicitly manage setup/teardown of resource events. Overrides the default `generate_teardown_events` method so that teardown is deferred until explicitly called by the dagstermill Manager """ def generate_teardown_events(self): return iter(()) def teardown(self): return [ teardown_event for teardown_event in super( DagstermillResourceEventGenerationManager, self ).generate_teardown_events() ] class Manager: def __init__(self): self.pipeline = None self.solid_def = None self.in_pipeline = False self.marshal_dir = None self.context = None self.resource_manager = None def _setup_resources( self, resource_defs, resource_configs, log_manager, execution_plan, pipeline_run, resource_keys_to_init, instance, emit_persistent_events, pipeline_def_for_backwards_compat, ): """ Drop-in replacement for `dagster.core.execution.resources_init.resource_initialization_manager`. It uses a `DagstermillResourceEventGenerationManager` and explicitly calls `teardown` on it """ generator = resource_initialization_event_generator( resource_defs=resource_defs, resource_configs=resource_configs, log_manager=log_manager, execution_plan=execution_plan, pipeline_run=pipeline_run, resource_keys_to_init=resource_keys_to_init, instance=instance, emit_persistent_events=emit_persistent_events, pipeline_def_for_backwards_compat=pipeline_def_for_backwards_compat, ) self.resource_manager = DagstermillResourceEventGenerationManager( generator, ScopedResourcesBuilder ) return self.resource_manager def reconstitute_pipeline_context( self, output_log_path=None, marshal_dir=None, run_config=None, executable_dict=None, pipeline_run_dict=None, solid_handle_kwargs=None, instance_ref_dict=None, step_key=None, ): """Reconstitutes a context for dagstermill-managed execution. You'll see this function called to reconstruct a pipeline context within the ``injected parameters`` cell of a dagstermill output notebook. 
Users should not call this function interactively except when debugging output notebooks. Use :func:`dagstermill.get_context` in the ``parameters`` cell of your notebook to define a context for interactive exploration and development. This call will be replaced by one to :func:`dagstermill.reconstitute_pipeline_context` when the notebook is executed by dagstermill. """ check.opt_str_param(output_log_path, "output_log_path") check.opt_str_param(marshal_dir, "marshal_dir") run_config = check.opt_dict_param(run_config, "run_config", key_type=str) check.dict_param(pipeline_run_dict, "pipeline_run_dict") check.dict_param(executable_dict, "executable_dict") check.dict_param(solid_handle_kwargs, "solid_handle_kwargs") check.dict_param(instance_ref_dict, "instance_ref_dict") check.str_param(step_key, "step_key") pipeline = ReconstructablePipeline.from_dict(executable_dict) pipeline_def = pipeline.get_definition() try: instance_ref = unpack_value(instance_ref_dict) instance = DagsterInstance.from_ref(instance_ref) except Exception as err: raise DagstermillError( "Error when attempting to resolve DagsterInstance from serialized InstanceRef" ) from err pipeline_run = unpack_value(pipeline_run_dict) solid_handle = NodeHandle.from_dict(solid_handle_kwargs) solid = pipeline_def.get_solid(solid_handle) solid_def = solid.definition self.marshal_dir = marshal_dir self.in_pipeline = True self.solid_def = solid_def self.pipeline = pipeline resolved_run_config = ResolvedRunConfig.build( pipeline_def, run_config, mode=pipeline_run.mode ) execution_plan = ExecutionPlan.build( self.pipeline, resolved_run_config, step_keys_to_execute=pipeline_run.step_keys_to_execute, ) with scoped_pipeline_context( execution_plan, pipeline, run_config, pipeline_run, instance, scoped_resources_builder_cm=self._setup_resources, # Set this flag even though we're not in test for clearer error reporting raise_on_error=True, ) as pipeline_context: self.context = DagstermillRuntimeExecutionContext( pipeline_context=pipeline_context, pipeline_def=pipeline_def, solid_config=run_config.get("solids", {}).get(solid.name, {}).get("config"), resource_keys_to_init=get_required_resource_keys_to_init( execution_plan, pipeline_def, resolved_run_config, ), solid_name=solid.name, solid_handle=solid_handle, step_context=pipeline_context.for_step(execution_plan.get_step_by_key(step_key)), ) return self.context def get_context(self, solid_config=None, mode_def=None, run_config=None): """Get a dagstermill execution context for interactive exploration and development. Args: solid_config (Optional[Any]): If specified, this value will be made available on the context as its ``solid_config`` property. mode_def (Optional[:class:`dagster.ModeDefinition`]): If specified, defines the mode to use to construct the context. Specify this if you would like a context constructed with specific ``resource_defs`` or ``logger_defs``. By default, an ephemeral mode with a console logger will be constructed. run_config(Optional[dict]): The config dict with which to construct the context. Returns: :py:class:`~dagstermill.DagstermillExecutionContext` """ check.opt_inst_param(mode_def, "mode_def", ModeDefinition) run_config = check.opt_dict_param(run_config, "run_config", key_type=str) # If we are running non-interactively, and there is already a context reconstituted, return # that context rather than overwriting it. 
if self.context is not None and isinstance( self.context, DagstermillRuntimeExecutionContext ): return self.context if not mode_def: mode_def = ModeDefinition(logger_defs={"dagstermill": colored_console_logger}) run_config["loggers"] = {"dagstermill": {}} solid_def = SolidDefinition( name="this_solid", input_defs=[], compute_fn=lambda *args, **kwargs: None, output_defs=[], description="Ephemeral solid constructed by dagstermill.get_context()", required_resource_keys=mode_def.resource_key_set, ) pipeline_def = PipelineDefinition( [solid_def], mode_defs=[mode_def], name="ephemeral_dagstermill_pipeline" ) run_id = make_new_run_id() # construct stubbed PipelineRun for notebook exploration... # The actual pipeline run during pipeline execution will be serialized and reconstituted # in the `reconstitute_pipeline_context` call pipeline_run = DagsterRun( pipeline_name=pipeline_def.name, run_id=run_id, run_config=run_config, mode=mode_def.name, step_keys_to_execute=None, status=PipelineRunStatus.NOT_STARTED, tags=None, ) self.in_pipeline = False self.solid_def = solid_def self.pipeline = pipeline_def resolved_run_config = ResolvedRunConfig.build(pipeline_def, run_config, mode=mode_def.name) pipeline = InMemoryPipeline(pipeline_def) execution_plan = ExecutionPlan.build(pipeline, resolved_run_config) with scoped_pipeline_context( execution_plan, pipeline, run_config, pipeline_run, DagsterInstance.ephemeral(), scoped_resources_builder_cm=self._setup_resources, ) as pipeline_context: self.context = DagstermillExecutionContext( pipeline_context=pipeline_context, pipeline_def=pipeline_def, solid_config=solid_config, resource_keys_to_init=get_required_resource_keys_to_init( execution_plan, pipeline_def, resolved_run_config, ), solid_name=solid_def.name, solid_handle=NodeHandle(solid_def.name, parent=None), ) return self.context def yield_result(self, value, output_name="result"): """Yield a result directly from notebook code. When called interactively or in development, returns its input. Args: value (Any): The value to yield. output_name (Optional[str]): The name of the result to yield (default: ``'result'``). """ if not self.in_pipeline: return value # deferred import for perf import scrapbook if not self.solid_def.has_output(output_name): raise DagstermillError( f"Solid {self.solid_def.name} does not have output named {output_name}." f"Expected one of {[str(output_def.name) for output_def in self.solid_def.output_defs]}" ) # pass output value cross process boundary using io manager step_context = self.context._step_context # pylint: disable=protected-access # Note: yield_result currently does not support DynamicOutput step_output_handle = StepOutputHandle( step_key=step_context.step.key, output_name=output_name ) output_context = step_context.get_output_context(step_output_handle) io_manager = step_context.get_io_manager(step_output_handle) # Note that we assume io manager is symmetric, i.e handle_input(handle_output(X)) == X io_manager.handle_output(output_context, value) # record that the output has been yielded scrapbook.glue(output_name, "") def yield_event(self, dagster_event): """Yield a dagster event directly from notebook code. When called interactively or in development, returns its input. Args: dagster_event (Union[:class:`dagster.AssetMaterialization`, :class:`dagster.ExpectationResult`, :class:`dagster.TypeCheck`, :class:`dagster.Failure`, :class:`dagster.RetryRequested`]): An event to yield back to Dagster. 
""" valid_types = ( Materialization, AssetMaterialization, ExpectationResult, TypeCheck, Failure, RetryRequested, ) if not isinstance(dagster_event, valid_types): raise DagstermillError( f"Received invalid type {dagster_event} in yield_event. Expected a Dagster event type, one of {valid_types}." ) if not self.in_pipeline: return dagster_event # deferred import for perf import scrapbook event_id = "event-{event_uuid}".format(event_uuid=str(uuid.uuid4())) out_file_path = os.path.join(self.marshal_dir, event_id) with open(out_file_path, "wb") as fd: fd.write(pickle.dumps(dagster_event, PICKLE_PROTOCOL)) scrapbook.glue(event_id, out_file_path) def teardown_resources(self): if self.resource_manager is not None: self.resource_manager.teardown() def load_input_parameter(self, input_name: str): # load input from source step_context = self.context._step_context # pylint: disable=protected-access step_input = step_context.step.step_input_named(input_name) input_def = step_context.solid_def.input_def_named(input_name) for event_or_input_value in ensure_gen( step_input.source.load_input_object(step_context, input_def) ): if isinstance(event_or_input_value, DagsterEvent): continue else: return event_or_input_value MANAGER_FOR_NOTEBOOK_INSTANCE = Manager()
from functools import partial from urllib.parse import urlencode from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder from geopy.location import Location from geopy.util import logger __all__ = ("BANFrance", ) class BANFrance(Geocoder): """Geocoder using the Base Adresse Nationale France API. Documentation at: https://adresse.data.gouv.fr/api """ geocode_path = '/search' reverse_path = '/reverse' def __init__( self, *, domain='api-adresse.data.gouv.fr', scheme=None, timeout=DEFAULT_SENTINEL, proxies=DEFAULT_SENTINEL, user_agent=None, ssl_context=DEFAULT_SENTINEL, adapter_factory=None ): """ :param str domain: Currently it is ``'api-adresse.data.gouv.fr'``, can be changed for testing purposes. :param str scheme: See :attr:`geopy.geocoders.options.default_scheme`. :param int timeout: See :attr:`geopy.geocoders.options.default_timeout`. :param dict proxies: See :attr:`geopy.geocoders.options.default_proxies`. :param str user_agent: See :attr:`geopy.geocoders.options.default_user_agent`. :type ssl_context: :class:`ssl.SSLContext` :param ssl_context: See :attr:`geopy.geocoders.options.default_ssl_context`. :param callable adapter_factory: See :attr:`geopy.geocoders.options.default_adapter_factory`. .. versionadded:: 2.0 """ super().__init__( scheme=scheme, timeout=timeout, proxies=proxies, user_agent=user_agent, ssl_context=ssl_context, adapter_factory=adapter_factory, ) self.domain = domain.strip('/') self.geocode_api = ( '%s://%s%s' % (self.scheme, self.domain, self.geocode_path) ) self.reverse_api = ( '%s://%s%s' % (self.scheme, self.domain, self.reverse_path) ) def geocode( self, query, *, limit=None, exactly_one=True, timeout=DEFAULT_SENTINEL ): """ Return a location point by address. :param str query: The address or query you wish to geocode. :param int limit: Defines the maximum number of items in the response structure. If not provided and there are multiple results the BAN API will return 5 results by default. This will be reset to one if ``exactly_one`` is True. :param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. Set this only if you wish to override, on this call only, the value set during the geocoder's initialization. :param bool exactly_one: Return one result or a list of results, if available. :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if ``exactly_one=False``. """ params = { 'q': query, } if limit is not None: params['limit'] = limit url = "?".join((self.geocode_api, urlencode(params))) logger.debug("%s.geocode: %s", self.__class__.__name__, url) callback = partial(self._parse_json, exactly_one=exactly_one) return self._call_geocoder(url, callback, timeout=timeout) def reverse( self, query, *, exactly_one=True, timeout=DEFAULT_SENTINEL ): """ Return an address by location point. :param query: The coordinates for which you wish to obtain the closest human-readable addresses. :type query: :class:`geopy.point.Point`, list or tuple of ``(latitude, longitude)``, or string as ``"%(latitude)s, %(longitude)s"``. :param bool exactly_one: Return one result or a list of results, if available. :param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. Set this only if you wish to override, on this call only, the value set during the geocoder's initialization. :rtype: ``None``, :class:`geopy.location.Location` or a list of them, if ``exactly_one=False``. 
""" try: lat, lng = self._coerce_point_to_string(query).split(',') except ValueError: raise ValueError("Must be a coordinate pair or Point") params = { 'lat': lat, 'lng': lng, } url = "?".join((self.reverse_api, urlencode(params))) logger.debug("%s.reverse: %s", self.__class__.__name__, url) callback = partial(self._parse_json, exactly_one=exactly_one) return self._call_geocoder(url, callback, timeout=timeout) def _parse_feature(self, feature): # Parse each resource. latitude = feature.get('geometry', {}).get('coordinates', [])[1] longitude = feature.get('geometry', {}).get('coordinates', [])[0] placename = feature.get('properties', {}).get('label') return Location(placename, (latitude, longitude), feature) def _parse_json(self, response, exactly_one): if response is None or 'features' not in response: return None features = response['features'] if not len(features): return None if exactly_one: return self._parse_feature(features[0]) else: return [self._parse_feature(feature) for feature in features]
# -*- coding: utf-8 -*-
"""
/dms/exercisefile/utils.py

.. contains helper functions for exercise files

Django content Management System

Hans Rauch
hans.rauch@gmx.net

The programs of the dms system may be used freely and adapted to
specific needs.

0.01 07.05.2008 start of work
"""

import os, time

from django.utils.translation import ugettext as _

from dms.queries import get_site_url
from dms.settings import DOWNLOAD_PATH, DOWNLOAD_URL
from dms.settings import DOWNLOAD_PROTECTED_PATH

from dms_ext.extension import * # override dms functions

# -----------------------------------------------------
def get_actions(request, user_perms, item_container):
    """ renders the manage options for the current request """
    from django.template.loader import get_template
    from django.template import Context

    t = get_template('app/file/manage_options.html')
    nPos = max(request.path.rfind('/add/'), request.path.rfind('/edit/'))
    if nPos > -1:
        path = request.path[:nPos]
        show_mode = True
    else:
        path = request.path
        show_mode = False
    # while adding or editing, the edit options themselves are hidden
    if request.path.find('/add/') >= 0 or request.path.find('/edit/') >= 0:
        edit_mode = False
    else:
        edit_mode = request.user.is_authenticated()
    c = Context({
        'authenticated': request.user.is_authenticated(),
        'show_mode': show_mode,
        'edit_mode': edit_mode,
        'user_perms': user_perms,
        'user_name': request.user,
        'path': get_site_url(item_container, item_container.item.name),
    })
    return t.render(c)

# -----------------------------------------------------
def get_folder_name(item_container, is_protected):
    """ returns the folder that contains the file """
    if is_protected:
        return DOWNLOAD_PROTECTED_PATH + item_container.container.path
    else:
        return DOWNLOAD_PATH + item_container.container.path

# -----------------------------------------------------
def get_file_name(item_container, is_protected):
    """ returns the full path of the file """
    if is_protected:
        return DOWNLOAD_PROTECTED_PATH + item_container.container.path + item_container.item.name
    else:
        return DOWNLOAD_PATH + item_container.container.path + item_container.item.name

# -----------------------------------------------------
def get_file_size(item_container, is_protected=False):
    """ returns the file size in bytes, or -1 if the file is missing """
    filename = get_file_name(item_container, is_protected)
    try:
        return os.stat(filename).st_size
    except OSError:
        return -1

# -----------------------------------------------------
def get_file_modification_date(item_container, format='german', is_protected=False):
    """ returns the date of the last modification """
    filename = get_file_name(item_container, is_protected)
    try:
        mtime = os.stat(filename).st_mtime
        if format == '':
            return mtime
        elif format == 'german':
            return time.strftime('%d.%m.%Y %H:%M', time.localtime(mtime))
        return time.strftime('%Y-%m-%d %H:%M', time.localtime(mtime))
    except OSError:
        return '0.0.000'

# -----------------------------------------------------
def get_file_url(item_container, is_protected=False):
    """ returns the corresponding URL """
    if is_protected:
        return get_site_url(item_container, item_container.item.name) + '/download/'
    else:
        return DOWNLOAD_URL + item_container.container.path + item_container.item.name

# -----------------------------------------------------
def get_file_path(item_container):
    """ returns the base path of the item """
    if item_container.container.is_protected():
        file_path = DOWNLOAD_PROTECTED_PATH
    else:
        file_path = DOWNLOAD_PATH
    if item_container.item.app.is_folderish:
        return file_path + item_container.container.path
    else:
        return file_path + item_container.container.path + item_container.item.name

# -----------------------------------------------------
def save_file(name, files, item_container):
    """ saves the uploaded file and returns its full name """
    content = files['fname']['content']
    content_type = files['fname']['content-type']
    filename = files['fname']['filename']
    if item_container.container.is_protected():
        file_path = DOWNLOAD_PROTECTED_PATH
    else:
        file_path = DOWNLOAD_PATH
    file_path += item_container.container.path
    try:
        os.makedirs(file_path)
    except OSError:
        pass  # the folder exists already
    os.chmod(file_path, 0750)
    file_name = file_path + name
    f = open(file_name, 'wb')
    f.write(content)
    f.close()
    os.chmod(file_name, 0660)
    return file_name
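
# A minimal sketch (hypothetical values; real callers pass dms item_container
# instances) of how the path helpers above compose their results:
#
#   item_container.container.path = 'courses/algebra/'
#   item_container.item.name      = 'sheet01.pdf'
#
#   get_file_name(item_container, False)
#   # -> DOWNLOAD_PATH + 'courses/algebra/' + 'sheet01.pdf'
#   get_file_url(item_container)
#   # -> DOWNLOAD_URL + 'courses/algebra/sheet01.pdf'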
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@

import base64
from collections import defaultdict, OrderedDict
from functools import wraps
import hashlib
import logging
import os
import threading
import time
import types

from google.appengine.api import memcache as mod_memcache
from google.appengine.ext import db, ndb, deferred

from mcfw.consts import MISSING
from mcfw.serialization import serializer, s_bool, get_serializer, s_any, deserializer, get_deserializer, ds_bool, \
    ds_any, SerializedObjectOutOfDateException, get_list_serializer, List
from mcfw.utils import get_readable_key

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

CACHE_ATTR = u'cache_key'
CACHE_LOGGING = os.environ.get('SERVER_SOFTWARE', 'Development').startswith('Development')


class CachedModelMixIn(object):
    on_trans_committed = None

    def invalidateCache(self):
        raise NotImplementedError()

    def updateCache(self):
        pass

    def _trigger_invalidate_cache(self):
        def invalidate_cache():
            self.invalidateCache()
            logging.info("%s: Cache invalidated", self.__class__.__name__)

        if db.is_in_transaction() and self.on_trans_committed:
            self.updateCache()
            self.on_trans_committed(invalidate_cache)
        else:
            invalidate_cache()

    def put(self):
        super(CachedModelMixIn, self).put()
        self._trigger_invalidate_cache()

    def delete(self):
        if isinstance(self, ndb.Model):
            super(CachedModelMixIn, self).key.delete()
        else:
            super(CachedModelMixIn, self).delete()
        self._trigger_invalidate_cache()


class _TLocal(threading.local):
    def __init__(self):
        self.request_cache = dict()


_tlocal = _TLocal()
del _TLocal


def get_from_request_cache(key):
    return _tlocal.request_cache.get(key, MISSING)


def add_to_request_cache(key, success, value):
    _tlocal.request_cache[key] = (success, value)


def remove_from_request_cache(key):
    _tlocal.request_cache.pop(key, None)


def flush_request_cache():
    _tlocal.request_cache.clear()


def set_cache_key(wrapped, f):
    key = lambda: f.meta[CACHE_ATTR] if hasattr(f, 'meta') and CACHE_ATTR in f.meta else '%s.%s' % (
        f.__name__, f.__module__)
    if not hasattr(wrapped, 'meta'):
        wrapped.meta = {CACHE_ATTR: key()}
        return
    if CACHE_ATTR not in wrapped.meta:
        wrapped.meta[CACHE_ATTR] = key()


def ds_key(version, cache_key):
    return "%s-%s" % (version, hashlib.sha256(cache_key).hexdigest())


class DSCache(db.Model):
    creation_timestamp = db.IntegerProperty()
    description = db.StringProperty(indexed=False)
    value = db.BlobProperty()


def invalidate_cache(f, *args, **kwargs):
    f.invalidate_cache(*args, **kwargs)


cache_key_locks = defaultdict(lambda: threading.RLock())


def cached(version, lifetime=600, request=True, memcache=True, key=None, datastore=None,
           read_cache_in_transaction=False):
    """
    Caches the result of the decorated function and returns the cached version if it exists.

    @param version: Cache version, needs to be bumped every time the semantics of the function change.
    @type version: integer
    @param lifetime: Number of seconds the cached entry remains in memcache after it was created.
    @type lifetime: int
    @param request: Whether it needs to be cached in memory for the current request processing.
    @type request: bool
    @param memcache: Whether it needs to be cached in memcache.
    @type memcache: bool
    @param key: Function to create the cache_key.
    @type key: function
    @param datastore: Content description of the cache object in the datastore. Leave None to omit the datastore cache.
    @type datastore: str
    @param read_cache_in_transaction: Whether or not to read from the cache when the function is executed inside a transaction.
    @type read_cache_in_transaction: bool
    @raise ValueError: if neither request, memcache nor datastore is True
    """
    if not request and not memcache and not datastore:
        raise ValueError("Either request or memcache or datastore needs to be True")
    if datastore and lifetime != 0:
        raise ValueError("If datastore caching is used, values other than 0 for lifetime are not permitted.")

    def wrap(f):
        base_cache_key = f.meta[CACHE_ATTR]
        if base_cache_key == 'inner_wrapper.google.appengine.api.datastore':
            raise ValueError('Move @db.non_transactional inside the @cached method')
        f_args = f.meta["fargs"]
        f_ret = f.meta["return_type"]
        f_pure_default_args_dict = f.meta["pure_default_args_dict"]
        if isinstance(f_ret, list):
            f_ret = List(f_ret[0])
        if memcache or datastore:
            result_serializer = get_serializer(f_ret)
            result_deserializer = get_deserializer(f_ret)
        key_function = key
        if not key_function:
            def key_(kwargs):
                stream = StringIO()
                stream.write(base_cache_key)
                kwargt = f.meta["kwarg_types"]
                for a in sorted(kwargt.keys()):
                    if a in kwargs:
                        effective_value = kwargs[a]
                    else:
                        effective_value = f_pure_default_args_dict[a]
                    if isinstance(kwargt[a], list):
                        get_list_serializer(get_serializer(kwargt[a][0]))(stream, effective_value)
                    else:
                        get_serializer(kwargt[a])(stream, effective_value)
                return stream.getvalue()

            key_function = key_

        @serializer
        def serialize_result(stream, obj):
            s_bool(stream, obj[0])
            if obj[0]:
                result_serializer(stream, obj[1])
            else:
                s_any(stream, obj[1])

        f.serializer = serialize_result

        @deserializer
        def deserialize_result(stream):
            success = ds_bool(stream)
            if success:
                result = result_deserializer(stream)
            else:
                result = ds_any(stream)
            return success, result

        f.deserializer = deserialize_result

        def cache_key(*args, **kwargs):
            kwargs_ = dict(kwargs)
            kwargs_.update(dict(((f_args[0][i], args[i]) for i in xrange(len(args)))))
            return "v%s.%s" % (version, base64.b64encode(key_function(kwargs_)))

        f.cache_key = cache_key

        def invalidate_cache(*args, **kwargs):
            ck = cache_key(*args, **kwargs)
            with cache_key_locks[ck]:
                if datastore:
                    @db.non_transactional
                    def clear_dscache():
                        db.delete(db.Key.from_path(DSCache.kind(), ds_key(version, ck)))

                    clear_dscache()
                if memcache:
                    attempt = 1
                    while not mod_memcache.delete(ck):  # @UndefinedVariable
                        if attempt >= 3:
                            logging.critical("MEMCACHE FAILURE !!! COULD NOT INVALIDATE CACHE !!!")
                            raise RuntimeError("Could not invalidate memcache!")
                        logging.debug("Memcache failure. 
Retrying to invalidate cache.") time.sleep(0.25 * attempt) attempt += 1 if request and ck in _tlocal.request_cache: del _tlocal.request_cache[ck] def update_cache(*args, **kwargs): # update request cache only if not request: return if '_data' not in kwargs: raise ValueError('update_cache() takes a mandatory _data argument') data = kwargs.pop('_data') ck = cache_key(*args, **kwargs) with cache_key_locks[ck]: _tlocal.request_cache[ck] = (True, data) f.invalidate_cache = invalidate_cache f.update_cache = update_cache @wraps(f) def wrapped(*args, **kwargs): ck = cache_key(*args, **kwargs) log_ck = ck if len(ck) < 100 else '%s...(length=%d)' % (ck[:100], len(ck)) if not read_cache_in_transaction and db.is_in_transaction(): _log('Ignoring cache: %s, key %s', f.__name__, log_ck) return f(*args, **kwargs) ck = cache_key(*args, **kwargs) with cache_key_locks[ck]: if request and ck in _tlocal.request_cache: success, result = _tlocal.request_cache[ck] if success: _log('Hit(request): %s', f.__name__) return result if memcache: memcache_result = mod_memcache.get(ck) # @UndefinedVariable if memcache_result: buf = StringIO(memcache_result) try: success, result = deserialize_result(buf) if request: _tlocal.request_cache[ck] = (success, result) if success: _log('Hit(memcache): %s', f.__name__) return result except SerializedObjectOutOfDateException: pass if datastore: @db.non_transactional def get_from_dscache(): dscache = DSCache.get_by_key_name(ds_key(version, ck)) if dscache: buf = StringIO(str(dscache.value)) try: success, result = deserialize_result(buf) if request: _tlocal.request_cache[ck] = (success, result) if memcache: mod_memcache.set(ck, dscache.value, time=lifetime) # @UndefinedVariable if success: _log('Hit(ds): %s', f.__name__) return True, result except SerializedObjectOutOfDateException: pass return False, None cached, result = get_from_dscache() if cached: return result cache_value = None try: result = f(*args, **kwargs) if isinstance(result, types.GeneratorType): result = list(result) cache_value = (True, result) return result except Exception as e: cache_value = (False, e) raise finally: if cache_value and cache_value[0]: # Only store in request cache in case we're inside a transaction to avoid stale results if not db.is_in_transaction(): if datastore or memcache: buf = StringIO() serialize_result(buf, cache_value) serialized_cache_value = buf.getvalue() if datastore: _log('Saving(ds): %s, key %s', f.__name__, log_ck) @db.non_transactional def update_dscache(): dsm = DSCache(key_name=ds_key(version, ck)) dsm.description = datastore dsm.creation_timestamp = int(time.time()) dsm.value = db.Blob(serialized_cache_value) dsm.put() update_dscache() if memcache: _log('Saving(memcache): %s, key %s', f.__name__, log_ck) mod_memcache.set(ck, serialized_cache_value, time=lifetime) # @UndefinedVariable if request: _log('Saving(request): %s, key %s', f.__name__, log_ck) _tlocal.request_cache[ck] = cache_value return wrapped return wrap def _log(msg, *args): if CACHE_LOGGING: logging.debug('[Cache] %s', msg % args) def get_cached_model(model_key, cached_time=86400): # type: (db.Key, int) -> db.Model return get_cached_models([model_key], cached_time)[0] def get_cached_models(model_keys, cached_time=86400): """ Get models by their key from request cache, memcache or datastore. If the models weren't in either cache, they're cached in the request cache and memcache. 
The cache must be manually invalidated when the model is updated or deleted by calling invalidate_model_cache """ # type: (list[db.Key], int) -> list[db.Model] results = {} # type: dict[str, db.Model] cache_keys = OrderedDict() # type: dict[str, db.Key] for key in model_keys: cache_keys[get_readable_key(key)] = key # First try request cache for cache_key, model_key in cache_keys.iteritems(): result = get_from_request_cache(cache_key) if result is not MISSING: _log('Hit(request): %s', cache_key) results[cache_key] = result[1] # Try memcache & save results to request cache if len(results) != len(cache_keys): memcache_keys = [key for key in cache_keys if key not in results] memcache_result = mod_memcache.get_multi(memcache_keys) # type: dict for cache_key, value in memcache_result.iteritems(): model = ds_any(StringIO(value)) results[cache_key] = model add_to_request_cache(cache_key, True, model) _log('Hit(memcache): %s', cache_key) # Not found in either request cache or memcache, fallback to datastore & save results to memcache and request cache if len(results) != len(cache_keys): ds_keys = [key for key in cache_keys if key not in results] ds_models = db.get([cache_keys[key] for key in ds_keys]) for cache_key, model in zip(ds_keys, ds_models): if not model: continue add_to_request_cache(cache_key, True, model) results[cache_key] = model _log('Saving(all): %s', cache_key) mod_memcache.set_multi({key: _serialize_model(model) for key, model in zip(ds_keys, ds_models)}, cached_time) return [results.get(k) for k in cache_keys] # Keep original order def invalidate_model_cache(models_or_keys): if type(models_or_keys) is not list: models_or_keys = [models_or_keys] cache_keys = [get_readable_key(model.key() if isinstance(model, db.Model) else model) for model in models_or_keys] for cache_key in cache_keys: remove_from_request_cache(cache_key) if not mod_memcache.delete_multi(cache_keys): deferred.defer(mod_memcache.delete_multi, cache_keys) def _serialize_model(model): stream = StringIO() s_any(stream, model) return stream.getvalue()
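
# A minimal usage sketch of the @cached decorator defined above. It assumes
# the target function is also wrapped with mcfw's @returns/@arguments
# decorators, which populate f.meta ('fargs', 'return_type', 'kwarg_types',
# ...); the function and argument names below are illustrative only:
#
#   from mcfw.rpc import arguments, returns
#
#   @cached(version=1, lifetime=300, request=True, memcache=True)
#   @returns(unicode)
#   @arguments(user_id=(int, long))
#   def get_display_name(user_id):
#       return lookup_display_name(user_id)  # hypothetical backend call
#
#   get_display_name.invalidate_cache(user_id=42)  # drop a stale entry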
/*! @file : sdk_hal_uart0.c
 * @author Luis José Castrillo Fernández
 * @version 1.0.0
 * @date 30 Jan. 2021
 * @brief HAL driver for UART0 with an interrupt-fed circular RX buffer
 * @details
 *
 */
/*******************************************************************************
 * Includes
 ******************************************************************************/
#include "sdk_hal_uart0.h"

#include "fsl_lpsci.h"

/*******************************************************************************
 * Definitions
 ******************************************************************************/
/*! @brief Internal name of the function that services the UART0 data interrupt */
#define UART0_IRQ_FUNCTION UART0_IRQHandler
/*! @brief IRQ number used to enable the UART0 data interrupt in the NVIC */
#define UART_IRQ_INDEX UART0_IRQn

/*******************************************************************************
 * Private Prototypes
 ******************************************************************************/

/*******************************************************************************
 * External vars
 ******************************************************************************/

/*******************************************************************************
 * Local vars
 ******************************************************************************/
uint8_t uart0_buffer_circular[LONGITUD_BUFFER_CIRCULAR];
volatile uint16_t txIndex; /* Index of the next byte to be read out of the circular buffer. */
volatile uint16_t rxIndex; /* Index where the next received byte will be stored. */

/*******************************************************************************
 * Private Source Code
 ******************************************************************************/
/*--------------------------------------------*/
/*!
 * @brief Services the UART0 IRQ: stores each received byte in the circular buffer
 */
void UART0_IRQ_FUNCTION(void) {
    uint8_t data;

    if ((kLPSCI_RxDataRegFullFlag) & LPSCI_GetStatusFlags(UART0)) {
        data = LPSCI_ReadByte(UART0);

        /* Store the new byte unless the circular buffer is full. */
        if (((rxIndex + 1) % LONGITUD_BUFFER_CIRCULAR) != txIndex) {
            uart0_buffer_circular[rxIndex] = data;
            rxIndex++;
            rxIndex %= LONGITUD_BUFFER_CIRCULAR;
        }
    }
}
/*--------------------------------------------*/

/*******************************************************************************
 * Public Source Code
 ******************************************************************************/
/*--------------------------------------------*/
status_t uart0Inicializar(uint32_t baud_rate) {
    lpsci_config_t config;
    status_t status;

    LPSCI_GetDefaultConfig(&config);
    config.baudRate_Bps = baud_rate;
    config.enableTx = true;
    config.enableRx = true;

    status = LPSCI_Init(UART0, &config, CLOCK_GetFreq(kCLOCK_McgFllClk));
    if (status != kStatus_Success)
        return (status);

    LPSCI_EnableInterrupts(UART0, kLPSCI_RxDataRegFullInterruptEnable);
    status = EnableIRQ(UART_IRQ_INDEX);
    return (status);
}
/*--------------------------------------------*/
uint16_t uart0CuantosDatosHayEnBuffer(void) {
    /* Account for wrap-around: the plain difference rxIndex - txIndex
     * underflows once rxIndex has wrapped past the end of the buffer. */
    return ((uint16_t) ((rxIndex - txIndex + LONGITUD_BUFFER_CIRCULAR) % LONGITUD_BUFFER_CIRCULAR));
}
/*--------------------------------------------*/
status_t uart0LeerByteDesdeBuffer(uint8_t *nuevo_byte) {
    /* Consuming the software buffer only requires unread data to be present;
     * gating on kLPSCI_TxDataRegEmptyFlag, as an echo example would, is not
     * needed here. */
    if (rxIndex != txIndex) {
        *nuevo_byte = uart0_buffer_circular[txIndex];
        txIndex++;
        txIndex %= LONGITUD_BUFFER_CIRCULAR;
        return (kStatus_Success);
    } else {
        return (kStatus_Fail);
    }
}
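
/* A minimal usage sketch (illustrative only: assumes clocks and pins are
 * already configured by the board init code, and uses 115200 baud as an
 * example):
 *
 *     uint8_t byte;
 *     if (uart0Inicializar(115200) == kStatus_Success) {
 *         for (;;) {
 *             if (uart0CuantosDatosHayEnBuffer() > 0 &&
 *                 uart0LeerByteDesdeBuffer(&byte) == kStatus_Success) {
 *                 // process 'byte' here
 *             }
 *         }
 *     }
 */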
import sys import enso.platform if sys.platform != "darwin": raise enso.platform.PlatformUnsupportedError() def provideInterface( name ): if name == "input": import enso.platform.osx.input return enso.platform.osx.input elif name == "graphics": import enso.platform.osx.graphics return enso.platform.osx.graphics elif name == "cairo": import enso.platform.osx.cairo return enso.platform.osx.cairo elif name == "selection": import enso.platform.osx.selection return enso.platform.osx.selection elif name == "scripts_folder": from enso.platform.osx.scriptfolder import get_script_folder_name return get_script_folder_name else: return None
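
# A minimal usage sketch (assumes the enso package is importable and the
# process is running on macOS, since importing this module raises
# PlatformUnsupportedError elsewhere):
#
#   from enso.platform import osx
#
#   graphics = osx.provideInterface("graphics")  # a module, or None if unknown
#   if graphics is None:
#       raise RuntimeError("interface not provided on this platform")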
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import Optional

from canonicaljson import json

from twisted.internet import defer

from synapse.metrics.background_process_metrics import run_as_background_process

from . import engines

logger = logging.getLogger(__name__)


class BackgroundUpdatePerformance(object):
    """Tracks how long a background update is taking to update its items"""

    def __init__(self, name):
        self.name = name
        self.total_item_count = 0
        self.total_duration_ms = 0
        self.avg_item_count = 0
        self.avg_duration_ms = 0

    def update(self, item_count, duration_ms):
        """Update the stats after doing an update"""
        self.total_item_count += item_count
        self.total_duration_ms += duration_ms

        # Exponential moving averages for the number of items updated and
        # the duration.
        self.avg_item_count += 0.1 * (item_count - self.avg_item_count)
        self.avg_duration_ms += 0.1 * (duration_ms - self.avg_duration_ms)

    def average_items_per_ms(self):
        """An estimate of how many items we can update per millisecond,
        based on an exponential moving average.

        Returns:
            A rate in items per ms as a float, 0 if no duration has been
            recorded yet, or None if no items have been processed.
        """
        if self.avg_duration_ms == 0:
            return 0
        elif self.total_item_count == 0:
            return None
        else:
            # Use the exponential moving average so that we can adapt to
            # changes in how long the update process takes.
            return float(self.avg_item_count) / float(self.avg_duration_ms)

    def total_items_per_ms(self):
        """An estimate of how many items we have updated per millisecond,
        over the lifetime of this tracker.

        Returns:
            A rate in items per ms as a float, 0 if no duration has been
            recorded yet, or None if no items have been processed.
        """
        if self.total_duration_ms == 0:
            return 0
        elif self.total_item_count == 0:
            return None
        else:
            return float(self.total_item_count) / float(self.total_duration_ms)


class BackgroundUpdater(object):
    """ Background updates are updates to the database that run in the
    background. Each update processes a batch of data at once. We attempt to
    limit the impact of each update by monitoring how long each batch takes to
    process and autotuning the batch size.
    """

    MINIMUM_BACKGROUND_BATCH_SIZE = 100
    DEFAULT_BACKGROUND_BATCH_SIZE = 100
    BACKGROUND_UPDATE_INTERVAL_MS = 1000
    BACKGROUND_UPDATE_DURATION_MS = 100

    def __init__(self, hs, database):
        self._clock = hs.get_clock()
        self.db = database

        self._background_update_performance = {}
        self._background_update_queue = []
        self._background_update_handlers = {}
        self._all_done = False

    def start_doing_background_updates(self):
        run_as_background_process("background_updates", self.run_background_updates)

    async def run_background_updates(self, sleep=True):
        logger.info("Starting background schema updates")
        while True:
            if sleep:
                await self._clock.sleep(self.BACKGROUND_UPDATE_INTERVAL_MS / 1000.0)

            try:
                result = await self.do_next_background_update(
                    self.BACKGROUND_UPDATE_DURATION_MS
                )
            except Exception:
                logger.exception("Error doing update")
            else:
                if result is None:
                    logger.info(
                        "No more background updates to do."
                        " Unscheduling background update task."
) self._all_done = True return None @defer.inlineCallbacks def has_completed_background_updates(self): """Check if all the background updates have completed Returns: Deferred[bool]: True if all background updates have completed """ # if we've previously determined that there is nothing left to do, that # is easy if self._all_done: return True # obviously, if we have things in our queue, we're not done. if self._background_update_queue: return False # otherwise, check if there are updates to be run. This is important, # as we may be running on a worker which doesn't perform the bg updates # itself, but still wants to wait for them to happen. updates = yield self.db.simple_select_onecol( "background_updates", keyvalues=None, retcol="1", desc="has_completed_background_updates", ) if not updates: self._all_done = True return True return False async def has_completed_background_update(self, update_name) -> bool: """Check if the given background update has finished running. """ if self._all_done: return True if update_name in self._background_update_queue: return False update_exists = await self.db.simple_select_one_onecol( "background_updates", keyvalues={"update_name": update_name}, retcol="1", desc="has_completed_background_update", allow_none=True, ) return not update_exists async def do_next_background_update( self, desired_duration_ms: float ) -> Optional[int]: """Does some amount of work on the next queued background update Returns once some amount of work is done. Args: desired_duration_ms(float): How long we want to spend updating. Returns: None if there is no more work to do, otherwise an int """ if not self._background_update_queue: updates = await self.db.simple_select_list( "background_updates", keyvalues=None, retcols=("update_name", "depends_on"), ) in_flight = set(update["update_name"] for update in updates) for update in updates: if update["depends_on"] not in in_flight: self._background_update_queue.append(update["update_name"]) if not self._background_update_queue: # no work left to do return None # pop from the front, and add back to the back update_name = self._background_update_queue.pop(0) self._background_update_queue.append(update_name) res = await self._do_background_update(update_name, desired_duration_ms) return res async def _do_background_update( self, update_name: str, desired_duration_ms: float ) -> int: logger.info("Starting update batch on background update '%s'", update_name) update_handler = self._background_update_handlers[update_name] performance = self._background_update_performance.get(update_name) if performance is None: performance = BackgroundUpdatePerformance(update_name) self._background_update_performance[update_name] = performance items_per_ms = performance.average_items_per_ms() if items_per_ms is not None: batch_size = int(desired_duration_ms * items_per_ms) # Clamp the batch size so that we always make progress batch_size = max(batch_size, self.MINIMUM_BACKGROUND_BATCH_SIZE) else: batch_size = self.DEFAULT_BACKGROUND_BATCH_SIZE progress_json = await self.db.simple_select_one_onecol( "background_updates", keyvalues={"update_name": update_name}, retcol="progress_json", ) progress = json.loads(progress_json) time_start = self._clock.time_msec() items_updated = await update_handler(progress, batch_size) time_stop = self._clock.time_msec() duration_ms = time_stop - time_start logger.info( "Running background update %r. Processed %r items in %rms." 
" (total_rate=%r/ms, current_rate=%r/ms, total_updated=%r, batch_size=%r)", update_name, items_updated, duration_ms, performance.total_items_per_ms(), performance.average_items_per_ms(), performance.total_item_count, batch_size, ) performance.update(items_updated, duration_ms) return len(self._background_update_performance) def register_background_update_handler(self, update_name, update_handler): """Register a handler for doing a background update. The handler should take two arguments: * A dict of the current progress * An integer count of the number of items to update in this batch. The handler should return a deferred or coroutine which returns an integer count of items updated. The handler is responsible for updating the progress of the update. Args: update_name(str): The name of the update that this code handles. update_handler(function): The function that does the update. """ self._background_update_handlers[update_name] = update_handler def register_noop_background_update(self, update_name): """Register a noop handler for a background update. This is useful when we previously did a background update, but no longer wish to do the update. In this case the background update should be removed from the schema delta files, but there may still be some users who have the background update queued, so this method should also be called to clear the update. Args: update_name (str): Name of update """ @defer.inlineCallbacks def noop_update(progress, batch_size): yield self._end_background_update(update_name) return 1 self.register_background_update_handler(update_name, noop_update) def register_background_index_update( self, update_name, index_name, table, columns, where_clause=None, unique=False, psql_only=False, ): """Helper for store classes to do a background index addition To use: 1. use a schema delta file to add a background update. Example: INSERT INTO background_updates (update_name, progress_json) VALUES ('my_new_index', '{}'); 2. In the Store constructor, call this method Args: update_name (str): update_name to register for index_name (str): name of index to add table (str): table to add index to columns (list[str]): columns/expressions to include in index unique (bool): true to make a UNIQUE index psql_only: true to only create this index on psql databases (useful for virtual sqlite tables) """ def create_index_psql(conn): conn.rollback() # postgres insists on autocommit for the index conn.set_session(autocommit=True) try: c = conn.cursor() # If a previous attempt to create the index was interrupted, # we may already have a half-built index. Let's just drop it # before trying to create it again. sql = "DROP INDEX IF EXISTS %s" % (index_name,) logger.debug("[SQL] %s", sql) c.execute(sql) sql = ( "CREATE %(unique)s INDEX CONCURRENTLY %(name)s" " ON %(table)s" " (%(columns)s) %(where_clause)s" ) % { "unique": "UNIQUE" if unique else "", "name": index_name, "table": table, "columns": ", ".join(columns), "where_clause": "WHERE " + where_clause if where_clause else "", } logger.debug("[SQL] %s", sql) c.execute(sql) finally: conn.set_session(autocommit=False) def create_index_sqlite(conn): # Sqlite doesn't support concurrent creation of indexes. 
            #
            # We don't use partial indices on SQLite as it wasn't introduced
            # until 3.8, and wheezy and CentOS 7 have 3.7
            #
            # We assume that sqlite doesn't give us invalid indices; however
            # we may still end up with the index existing but the
            # background_updates not having been recorded if synapse got shut
            # down at the wrong moment - hence we use IF NOT EXISTS. (SQLite
            # has supported CREATE TABLE|INDEX IF NOT EXISTS since 3.3.0.)
            sql = (
                "CREATE %(unique)s INDEX IF NOT EXISTS %(name)s ON %(table)s"
                " (%(columns)s)"
            ) % {
                "unique": "UNIQUE" if unique else "",
                "name": index_name,
                "table": table,
                "columns": ", ".join(columns),
            }

            c = conn.cursor()
            logger.debug("[SQL] %s", sql)
            c.execute(sql)

        if isinstance(self.db.engine, engines.PostgresEngine):
            runner = create_index_psql
        elif psql_only:
            runner = None
        else:
            runner = create_index_sqlite

        @defer.inlineCallbacks
        def updater(progress, batch_size):
            if runner is not None:
                logger.info("Adding index %s to %s", index_name, table)
                yield self.db.runWithConnection(runner)
            yield self._end_background_update(update_name)
            return 1

        self.register_background_update_handler(update_name, updater)

    def start_background_update(self, update_name, progress):
        """Starts a background update running.

        Args:
            update_name: The update to set running.
            progress: The initial state of the progress of the update.

        Returns:
            A deferred that completes once the task has been added to the
            queue.
        """
        # Clear the background update queue so that we will pick up the new
        # task on the next iteration of do_background_update.
        self._background_update_queue = []
        progress_json = json.dumps(progress)

        return self.db.simple_insert(
            "background_updates",
            {"update_name": update_name, "progress_json": progress_json},
        )

    def _end_background_update(self, update_name):
        """Removes a completed background update task from the queue.

        Args:
            update_name(str): The name of the completed task to remove

        Returns:
            A deferred that completes once the task is removed.
        """
        self._background_update_queue = [
            name for name in self._background_update_queue if name != update_name
        ]
        return self.db.simple_delete_one(
            "background_updates", keyvalues={"update_name": update_name}
        )

    def _background_update_progress(self, update_name: str, progress: dict):
        """Update the progress of a background update

        Args:
            update_name: The name of the background update task
            progress: The progress of the update.
        """

        return self.db.runInteraction(
            "background_update_progress",
            self._background_update_progress_txn,
            update_name,
            progress,
        )

    def _background_update_progress_txn(self, txn, update_name, progress):
        """Update the progress of a background update

        Args:
            txn(cursor): The transaction.
            update_name(str): The name of the background update task
            progress(dict): The progress of the update.
        """

        progress_json = json.dumps(progress)

        self.db.simple_update_one_txn(
            txn,
            "background_updates",
            keyvalues={"update_name": update_name},
            updatevalues={"progress_json": progress_json},
        )
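
# A minimal usage sketch of the registration helpers above (the table, column
# and update names are illustrative; 'self.db_upd' holding a BackgroundUpdater
# is an assumption about the surrounding store class):
#
#   # schema delta: INSERT INTO background_updates (update_name, progress_json)
#   #               VALUES ('my_table_col_idx', '{}');
#   self.db_upd.register_background_index_update(
#       update_name="my_table_col_idx",
#       index_name="my_table_col_idx",
#       table="my_table",
#       columns=["col"],
#   )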
nome = input('Name: ')
a = nome.upper()
b = 'SILVA' in a  # case-insensitive check, since the input was upper-cased
print(f'Your name contains Silva: {b}')
# Copyright 2019 Atalaya Tech, Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from urllib.parse import urlparse

from simple_di import Provide, inject

from bentoml.configuration.containers import BentoMLContainer
from bentoml.exceptions import YataiRepositoryException
from bentoml.yatai.proto.repository_pb2 import BentoUri
from bentoml.yatai.repository.base_repository import BaseRepository

logger = logging.getLogger(__name__)


class GCSRepository(BaseRepository):
    @inject
    def __init__(
        self,
        base_url,
        expiration: int = Provide[
            BentoMLContainer.config.yatai.repository.gcs.expiration
        ],
    ):
        try:
            from google.cloud import storage
        except ImportError:
            raise YataiRepositoryException(
                '"google-cloud-storage" package is required for Google Cloud '
                'Storage Repository. You can install it with pip: '
                '"pip install google-cloud-storage"'
            )

        self.uri_type = BentoUri.GCS

        parse_result = urlparse(base_url)
        self.bucket = parse_result.netloc
        self.base_path = parse_result.path.lstrip('/')

        self.gcs_client = storage.Client()
        self.expiration = expiration

    def _get_object_name(self, bento_name, bento_version):
        if self.base_path:
            return "/".join([self.base_path, bento_name, bento_version]) + '.tar.gz'
        else:
            return "/".join([bento_name, bento_version]) + '.tar.gz'

    def add(self, bento_name, bento_version):
        object_name = self._get_object_name(bento_name, bento_version)

        try:
            bucket = self.gcs_client.bucket(self.bucket)
            blob = bucket.blob(object_name)
            response = blob.generate_signed_url(
                version="v4",
                expiration=self.expiration,
                method="PUT",
            )
        except Exception as e:
            raise YataiRepositoryException(
                "Not able to get pre-signed URL on GCS. Error: {}".format(e)
            )

        return BentoUri(
            type=self.uri_type,
            uri='gs://{}/{}'.format(self.bucket, object_name),
            gcs_presigned_url=response,
        )

    def get(self, bento_name, bento_version):
        # Return gcs path containing uploaded Bento files
        object_name = self._get_object_name(bento_name, bento_version)
        try:
            bucket = self.gcs_client.bucket(self.bucket)
            blob = bucket.blob(object_name)
            response = blob.generate_signed_url(
                version="v4",
                expiration=self.expiration,
                method="GET",
            )
            return response
        except Exception:  # pylint: disable=broad-except
            logger.error(
                "Failed generating presigned URL for downloading saved bundle "
                "from GCS, falling back to using the gs path and client-side "
                "credentials for downloading with google.cloud.storage"
            )
            return 'gs://{}/{}'.format(self.bucket, object_name)

    def dangerously_delete(self, bento_name, bento_version):
        # Remove gcs path containing related Bento files
        object_name = self._get_object_name(bento_name, bento_version)
        try:
            bucket = self.gcs_client.bucket(self.bucket)
            blob = bucket.blob(object_name)
            blob.delete()
        except Exception as e:
            raise YataiRepositoryException(
                "Not able to delete object on GCS. Error: {}".format(e)
            )
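
# A minimal usage sketch (assumes Google Application Default Credentials are
# configured; the bucket and bento names below are illustrative):
#
#   repo = GCSRepository('gs://my-bento-bucket/bentos')
#   uri = repo.add('IrisClassifier', '20210101120000_ABCDEF')
#   # uri.gcs_presigned_url is a signed PUT URL for uploading the bundle;
#   # repo.get(...) returns a signed GET URL (or a plain gs:// path on error).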
'use strict'; function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var _classCallCheck = _interopDefault(require('@babel/runtime/helpers/classCallCheck')); var _createClass = _interopDefault(require('@babel/runtime/helpers/createClass')); var _possibleConstructorReturn = _interopDefault(require('@babel/runtime/helpers/possibleConstructorReturn')); var _get = _interopDefault(require('@babel/runtime/helpers/get')); var _getPrototypeOf = _interopDefault(require('@babel/runtime/helpers/getPrototypeOf')); var _set = _interopDefault(require('@babel/runtime/helpers/set')); var _inherits = _interopDefault(require('@babel/runtime/helpers/inherits')); var web3Core = require('web3-core'); var web3Providers = require('web3-providers'); var Utils = require('web3-utils'); var web3Eth = require('web3-eth'); var web3Shh = require('web3-shh'); var web3Bzz = require('web3-bzz'); var web3Net = require('web3-net'); var web3EthPersonal = require('web3-eth-personal'); var version = "1.0.0-beta.52"; var Web3 = function (_AbstractWeb3Module) { _inherits(Web3, _AbstractWeb3Module); function Web3(provider, net) { var _this; var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; _classCallCheck(this, Web3); _this = _possibleConstructorReturn(this, _getPrototypeOf(Web3).call(this, provider, options, null, net)); _this.eth = new web3Eth.Eth(_this.currentProvider, net, options); _this.shh = new web3Shh.Shh(_this.currentProvider, net, options); _this.bzz = new web3Bzz.Bzz(_this.currentProvider); _this.utils = Utils; _this.version = version; return _this; } _createClass(Web3, [{ key: "setProvider", value: function setProvider(provider, net) { return _get(_getPrototypeOf(Web3.prototype), "setProvider", this).call(this, provider, net) && this.eth.setProvider(provider, net) && this.shh.setProvider(provider, net) && this.bzz.setProvider(provider); } }, { key: "defaultGasPrice", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "defaultGasPrice", value, this, true); this.eth.defaultGasPrice = value; this.shh.defaultGasPrice = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "defaultGasPrice", this); } }, { key: "defaultGas", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "defaultGas", value, this, true); this.eth.defaultGas = value; this.shh.defaultGas = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "defaultGas", this); } }, { key: "transactionBlockTimeout", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "transactionBlockTimeout", value, this, true); this.eth.transactionBlockTimeout = value; this.shh.transactionBlockTimeout = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "transactionBlockTimeout", this); } }, { key: "transactionConfirmationBlocks", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "transactionConfirmationBlocks", value, this, true); this.eth.transactionConfirmationBlocks = value; this.shh.transactionConfirmationBlocks = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "transactionConfirmationBlocks", this); } }, { key: "transactionPollingTimeout", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "transactionPollingTimeout", value, this, true); this.eth.transactionPollingTimeout = value; this.shh.transactionPollingTimeout = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), 
"transactionPollingTimeout", this); } }, { key: "defaultAccount", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "defaultAccount", value, this, true); this.eth.defaultAccount = value; this.shh.defaultAccount = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "defaultAccount", this); } }, { key: "defaultBlock", set: function set(value) { _set(_getPrototypeOf(Web3.prototype), "defaultBlock", value, this, true); this.eth.defaultBlock = value; this.shh.defaultBlock = value; } , get: function get() { return _get(_getPrototypeOf(Web3.prototype), "defaultBlock", this); } }], [{ key: "givenProvider", get: function get() { return web3Providers.ProviderDetector.detect(); } }, { key: "modules", get: function get() { var providerResolver = new web3Providers.ProvidersModuleFactory().createProviderResolver(); return { Eth: function Eth(provider, options, net) { return new web3Eth.Eth(providerResolver.resolve(provider, net), options); }, Net: function Net(provider, options, net) { return new web3Net.Network(providerResolver.resolve(provider, net), options); }, Personal: function Personal(provider, options, net) { return new web3EthPersonal.Personal(providerResolver.resolve(provider, net), options); }, Shh: function Shh(provider, options, net) { return new web3Shh.Shh(providerResolver.resolve(provider, net), options); }, Bzz: function Bzz(provider) { return new web3Bzz.Bzz(provider); } }; } }]); return Web3; }(web3Core.AbstractWeb3Module); module.exports = Web3;
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import json import os import pytest import main @pytest.fixture def client(monkeypatch): monkeypatch.chdir(os.path.dirname(main.__file__)) main.app.testing = True client = main.app.test_client() return client class Request: def __init__(self, method, form): self.method = method self.form = form def test__get_entities_from_text_expecting_none(): request = Request('GET', {}) entities, text = main._get_entities_from_text(request) assert text == main.PLACEHOLDER_TEXT assert entities == None def test__annotate_text_expecting_entities(): text = '''Beta-adrenergic agonist medicines may produce significant hypokalemia in some patients,''' annotations = [{'text': 'may', 'start_char': 34, 'end_char': 37, 'label': 'Factor'}, {'text': 'significant', 'start_char': 46, 'end_char': 57, 'label': 'Severity'}, {'text': 'hypokalemia', 'start_char': 58, 'end_char': 69, 'label': 'AdverseReaction'}] annotated_text_actual = main._annotate_text(annotations, text) annotated_text_expected = [{'text': 'Beta-adrenergic agonist medicines ', 'label': None, 'class': None}, {'text': 'may', 'label': 'Factor', 'class': 'factor'}, {'text': ' produce ', 'label': None, 'class': None}, {'text': 'significant', 'label': 'Severity', 'class': 'severity'}, {'text': ' ', 'label': None, 'class': None}, {'text': 'hypokalemia', 'label': 'AdverseReaction', 'class': 'adversereaction'}, {'text': ' in some patients,', 'label': None, 'class': None}] assert annotated_text_actual == annotated_text_expected def test__annotate_text_expecting_no_entities(): text = '''Beta-adrenergic agonist medicines may produce significant hypokalemia in some patients,''' annotations = [] annotated_text_actual = main._annotate_text(annotations, text) annotated_text_expected = [{'text': 'Beta-adrenergic agonist medicines may produce significant hypokalemia in some patients,', 'label': None, 'class': None}] assert annotated_text_actual == annotated_text_expected ''' def test_ner(client): r = client.post( '/ner', data='{"text": "Hello"}', headers={ 'Content-Type': 'application/json' }) assert r.status_code == 200 data = json.loads(r.data.decode('utf-8')) print(data) assert data['text'] == 'drug' '''
import React, { Component } from 'react';
import { DataTable } from '../../components/datatable/DataTable';
import { Column } from '../../components/column/Column';
import { ProductService } from '../service/ProductService';
import { TabView } from '../../components/tabview/TabView';
import { useLiveEditorTabs } from '../liveeditor/LiveEditor';
import { AppInlineHeader } from '../../AppInlineHeader';
import AppDemoActions from '../../AppDemoActions';

export class DataTableSortDemo extends Component {

    constructor(props) {
        super(props);

        this.state = {
            products: [],
            multiSortMeta: [{ field: 'category', order: -1 }]
        };

        this.productService = new ProductService();
        this.priceBodyTemplate = this.priceBodyTemplate.bind(this);
    }

    componentDidMount() {
        this.productService.getProductsSmall().then(data => this.setState({ products: data }));
    }

    formatCurrency(value) {
        return value.toLocaleString('en-US', { style: 'currency', currency: 'USD' });
    }

    priceBodyTemplate(rowData) {
        return this.formatCurrency(rowData.price);
    }

    render() {
        return (
            <div>
                <div className="content-section introduction">
                    <AppInlineHeader changelogText="dataTable">
                        <h1>DataTable <span>Sort</span></h1>
                        <p>Setting the sortable property on a column is enough to make it sortable. Multiple-column sorting is enabled with the sortMode property and performed by holding the metaKey while selecting columns.</p>
                    </AppInlineHeader>
                    <AppDemoActions github="datatable/DataTableSortDemo.js" />
                </div>

                <div className="content-section implementation">
                    <div className="card">
                        <h5>Single Column</h5>
                        <DataTable value={this.state.products}>
                            <Column field="code" header="Code" sortable></Column>
                            <Column field="name" header="Name" sortable></Column>
                            <Column field="category" header="Category" sortable></Column>
                            <Column field="quantity" header="Quantity" sortable></Column>
                            <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column>
                        </DataTable>
                    </div>

                    <div className="card">
                        <h5>Multiple Columns</h5>
                        <p>Use metakey to add a column to the sort selection.</p>
                        <DataTable value={this.state.products} sortMode="multiple">
                            <Column field="code" header="Code" sortable></Column>
                            <Column field="name" header="Name" sortable></Column>
                            <Column field="category" header="Category" sortable></Column>
                            <Column field="quantity" header="Quantity" sortable></Column>
                            <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column>
                        </DataTable>
                    </div>

                    <div className="card">
                        <h5>Presort</h5>
                        <DataTable value={this.state.products} sortField="category" sortOrder={-1}>
                            <Column field="code" header="Code" sortable></Column>
                            <Column field="name" header="Name" sortable></Column>
                            <Column field="category" header="Category" sortable></Column>
                            <Column field="quantity" header="Quantity" sortable></Column>
                            <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column>
                        </DataTable>
                    </div>

                    <div className="card">
                        <h5>Removable Sort</h5>
                        <DataTable value={this.state.products} removableSort>
                            <Column field="code" header="Code" sortable></Column>
                            <Column field="name" header="Name" sortable></Column>
                            <Column field="category" header="Category" sortable></Column>
                            <Column field="quantity" header="Quantity" sortable></Column>
                            <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column>
                        </DataTable>
                    </div>

                    <div className="card">
                        <h5>Sortable Disabled</h5>
                        <p>Use metakey to add a column to the multiple sort selection.</p>
                        <DataTable value={this.state.products} sortMode="multiple" removableSort multiSortMeta={this.state.multiSortMeta} onSort={(e) =>
this.setState({multiSortMeta: e.multiSortMeta})}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category (Disabled)" sortable sortableDisabled></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> </div> <DataTableSortDemoDoc></DataTableSortDemoDoc> </div> ); } } export class DataTableSortDemoDoc extends Component { constructor(props) { super(props); this.sources = { 'class': { tabName: 'Class Source', content: ` import React, { Component } from 'react'; import { DataTable } from 'primereact/datatable'; import { Column } from 'primereact/column'; import { ProductService } from '../service/ProductService'; export class DataTableSortDemo extends Component { constructor(props) { super(props); this.state = { products: [], multiSortMeta: [{ field: 'category', order: -1 }] }; this.productService = new ProductService(); this.priceBodyTemplate = this.priceBodyTemplate.bind(this); } componentDidMount() { this.productService.getProductsSmall().then(data => this.setState({ products: data })); } formatCurrency(value) { return value.toLocaleString('en-US', {style: 'currency', currency: 'USD'}); } priceBodyTemplate(rowData) { return this.formatCurrency(rowData.price); } render() { return ( <div> <div className="card"> <h5>Single Column</h5> <DataTable value={this.state.products}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Multiple Columns</h5> <p>Use metakey to add a column to the sort selection.</p> <DataTable value={this.state.products} sortMode="multiple"> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Presort</h5> <DataTable value={this.state.products} sortField="category" sortOrder={-1}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Removable Sort</h5> <DataTable value={this.state.products} removableSort> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Sortable Disabled</h5> <p>Use metakey to add a column to the multiple sort selection.</p> <DataTable value={this.state.products} sortMode="multiple" removableSort multiSortMeta={this.state.multiSortMeta} onSort={(e) => this.setState({multiSortMeta: e.multiSortMeta})}> <Column 
field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category (Disabled)" sortable sortableDisabled></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={this.priceBodyTemplate} sortable></Column> </DataTable> </div> </div> ); } } ` }, 'hooks': { tabName: 'Hooks Source', content: ` import React, { useState, useEffect } from 'react'; import { DataTable } from 'primereact/datatable'; import { Column } from 'primereact/column'; import { ProductService } from '../service/ProductService'; const DataTableSortDemo = () => { const [products, setProducts] = useState([]); const [multiSortMeta, setMultiSortMeta] = useState([{ field: 'category', order: -1 }]); const productService = new ProductService(); useEffect(() => { productService.getProductsSmall().then(data => setProducts(data)); }, []); // eslint-disable-line react-hooks/exhaustive-deps const formatCurrency = (value) => { return value.toLocaleString('en-US', {style: 'currency', currency: 'USD'}); } const priceBodyTemplate = (rowData) => { return formatCurrency(rowData.price); } return ( <div> <div className="card"> <h5>Single Column</h5> <DataTable value={products}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Multiple Columns</h5> <p>Use metakey to add a column to the sort selection.</p> <DataTable value={products} sortMode="multiple"> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Presort</h5> <DataTable value={products} sortField="category" sortOrder={-1}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Removable Sort</h5> <DataTable value={products} removableSort> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Sortable Disabled</h5> <p>Use metakey to add a column to the multiple sort selection.</p> <DataTable value={products} sortMode="multiple" removableSort multiSortMeta={multiSortMeta} onSort={(e) => setMultiSortMeta(e.multiSortMeta)}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category (Disabled)" sortable sortableDisabled></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} 
sortable></Column> </DataTable> </div> </div> ); } ` }, 'ts': { tabName: 'TS Source', content: ` import React, { useState, useEffect } from 'react'; import { DataTable } from 'primereact/datatable'; import { Column } from 'primereact/column'; import { ProductService } from '../service/ProductService'; const DataTableSortDemo = () => { const [products, setProducts] = useState([]); const [multiSortMeta, setMultiSortMeta] = useState([{ field: 'category', order: -1 }]); const productService = new ProductService(); useEffect(() => { productService.getProductsSmall().then(data => setProducts(data)); }, []); // eslint-disable-line react-hooks/exhaustive-deps const formatCurrency = (value) => { return value.toLocaleString('en-US', {style: 'currency', currency: 'USD'}); } const priceBodyTemplate = (rowData) => { return formatCurrency(rowData.price); } return ( <div> <div className="card"> <h5>Single Column</h5> <DataTable value={products}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Multiple Columns</h5> <p>Use metakey to add a column to the sort selection.</p> <DataTable value={products} sortMode="multiple"> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Presort</h5> <DataTable value={products} sortField="category" sortOrder={-1}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Removable Sort</h5> <DataTable value={products} removableSort> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Sortable Disabled</h5> <p>Use metakey to add a column to the multiple sort selection.</p> <DataTable value={products} sortMode="multiple" removableSort multiSortMeta={multiSortMeta} onSort={(e) => setMultiSortMeta(e.multiSortMeta)}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category (Disabled)" sortable sortableDisabled></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> </div> ); } ` }, 'browser': { tabName: 'Browser Source', imports: ` <script src="./ProductService.js"></script> <script src="https://unpkg.com/primereact/core/core.min.js"></script> <script src="https://unpkg.com/primereact/column/column.min.js"></script> <script 
src="https://unpkg.com/primereact/datatable/datatable.min.js"></script>`, content: ` const { useEffect, useState } = React; const { Column } = primereact.column; const { DataTable } = primereact.datatable; const DataTableSortDemo = () => { const [products, setProducts] = useState([]); const [multiSortMeta, setMultiSortMeta] = useState([{ field: 'category', order: -1 }]); const productService = new ProductService(); useEffect(() => { productService.getProductsSmall().then(data => setProducts(data)); }, []); // eslint-disable-line react-hooks/exhaustive-deps const formatCurrency = (value) => { return value.toLocaleString('en-US', {style: 'currency', currency: 'USD'}); } const priceBodyTemplate = (rowData) => { return formatCurrency(rowData.price); } return ( <div> <div className="card"> <h5>Single Column</h5> <DataTable value={products}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Multiple Columns</h5> <p>Use metakey to add a column to the sort selection.</p> <DataTable value={products} sortMode="multiple"> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Presort</h5> <DataTable value={products} sortField="category" sortOrder={-1}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Removable Sort</h5> <DataTable value={products} removableSort> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category" sortable></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> <div className="card"> <h5>Sortable Disabled</h5> <p>Use metakey to add a column to the multiple sort selection.</p> <DataTable value={products} sortMode="multiple" removableSort multiSortMeta={multiSortMeta} onSort={(e) => setMultiSortMeta(e.multiSortMeta)}> <Column field="code" header="Code" sortable></Column> <Column field="name" header="Name" sortable></Column> <Column field="category" header="Category (Disabled)" sortable sortableDisabled></Column> <Column field="quantity" header="Quantity" sortable></Column> <Column field="price" header="Price" body={priceBodyTemplate} sortable></Column> </DataTable> </div> </div> ); } ` } } } shouldComponentUpdate() { return false; } render() { return ( <div className="content-section documentation" id="app-doc"> <TabView> { useLiveEditorTabs({ name: 'DataTableSortDemo', sources: this.sources, service: 'ProductService', data: 'products-small' }) } </TabView> </div> ) } }
module.exports = { env: { browser: true, es6: true }, extends: [ 'eslint:recommended', 'plugin:@typescript-eslint/eslint-recommended', 'plugin:prettier/recommended', 'prettier', 'prettier/@typescript-eslint' ], globals: { Atomics: 'readonly', SharedArrayBuffer: 'readonly' }, parser: '@typescript-eslint/parser', parserOptions: { ecmaVersion: 2018, sourceType: 'module' }, plugins: ['@typescript-eslint', 'prettier'], rules: { indent: ['error', 4], 'linebreak-style': ['error', 'unix'], quotes: ['error', 'single'], semi: ['error', 'never'] } }
# -*- coding: utf-8 -*- # # polib documentation build configuration file, created by # sphinx-quickstart on Sat Jan 1 16:45:49 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) import polib # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'polib' copyright = u'2011, David Jean Louis <izimobil@gmail.com>' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = polib.__version__ # The full version, including alpha/beta/rc tags. release = polib.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'polibdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'polib.tex', u'polib Documentation', u'David Jean Louis \\textless{}izimobil@gmail.com\\textgreater{}', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ ('index', 'polib', u'polib Documentation', [u'David Jean Louis <izimobil@gmail.com>'], 1) ]
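As a usage note for the conf.py above: Sphinx reads this file automatically when invoked from the docs directory, so no code imports it directly. A minimal sketch of a programmatic build is below; the paths are illustrative assumptions, not part of the original file.

import subprocess

# equivalent to running `sphinx-build -b html . _build/html` from the docs dir;
# sphinx-build picks up conf.py from the source directory (".") on its own
subprocess.run(["sphinx-build", "-b", "html", ".", "_build/html"], check=True)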
import os
import sys
import glob
from multiprocessing import Pool
from pytube import YouTube
from time import sleep


class Data:
    def __init__(self, url, seqname, list_timestamps):
        self.url = url
        self.list_seqnames = []
        self.list_list_timestamps = []
        self.list_seqnames.append(seqname)
        self.list_list_timestamps.append(list_timestamps)

    def add(self, seqname, list_timestamps):
        self.list_seqnames.append(seqname)
        self.list_list_timestamps.append(list_timestamps)

    def __len__(self):
        return len(self.list_seqnames)


def process(data, seq_id, videoname, output_root):
    seqname = data.list_seqnames[seq_id]
    if not os.path.exists(output_root + seqname):
        os.makedirs(output_root + seqname)
    else:
        print("[INFO] Something Wrong, stop process")
        return True

    # convert microsecond timestamps to hh:mm:ss.mmm strings for ffmpeg -ss
    list_str_timestamps = []
    for timestamp in data.list_list_timestamps[seq_id]:
        timestamp = int(timestamp / 1000)
        str_hour = str(int(timestamp / 3600000)).zfill(2)
        str_min = str(int(int(timestamp % 3600000) / 60000)).zfill(2)
        str_sec = str(int(int(int(timestamp % 3600000) % 60000) / 1000)).zfill(2)
        str_mill = str(int(int(int(timestamp % 3600000) % 60000) % 1000)).zfill(3)
        _str_timestamp = str_hour + ":" + str_min + ":" + str_sec + "." + str_mill
        list_str_timestamps.append(_str_timestamp)

    # extract one frame per timestamp from the downloaded video
    for idx, str_timestamp in enumerate(list_str_timestamps):
        command = ('ffmpeg -ss ' + str_timestamp + ' -i ' + videoname
                   + ' -vframes 1 -f image2 ' + output_root + seqname + '/'
                   + str(data.list_list_timestamps[seq_id][idx]) + '.png')
        # print("current command is {}".format(command))
        os.system(command)

    # optional downscaling of the extracted frames (kept from the original script):
    # png_list = glob.glob(output_root + "/" + seqname + "/*.png")
    # for pngname in png_list:
    #     image = io.imread(pngname)
    #     if int(image.shape[1] / 2) < 500:
    #         break
    #     image = imresize(image, (int(image.shape[0] / 2), int(image.shape[1] / 2)), interp='bilinear')
    #     print(image.shape)
    #     io.imsave(pngname, image)
    # In my case, the same issue happened: https://github.com/skvark/opencv-python/issues/69
    # img = cv2.imread(pngname, 1)
    # if int(img.shape[1] / 2) < 500:
    #     break
    # img = cv2.resize(img, (int(img.shape[1] / 2), int(img.shape[0] / 2)))
    # cv2.imwrite(pngname, img)

    return False


def wrap_process(list_args):
    return process(*list_args)


class DataDownloader:
    def __init__(self, dataroot, mode='test'):
        print("[INFO] Loading data list ... ", end='')
        self.dataroot = dataroot
        self.list_seqnames = sorted(glob.glob(dataroot + '/cameras/*.txt'))
        self.output_root = os.path.join(dataroot, 'frames/')
        self.mode = mode
        os.makedirs(self.output_root, exist_ok=True)

        self.isDone = False
        self.list_data = []
        if not self.isDone:
            for txt_file in self.list_seqnames:
                dir_name = txt_file.split('/')[-1]
                seq_name = dir_name.split('.')[0]
                # if os.path.isdir(os.path.join(self.output_root, seq_name)):
                #     print('{} finished, skip'.format(seq_name))
                #     continue

                # extract the YouTube URL and frame timestamps from the txt file
                seq_file = open(txt_file, "r")
                lines = seq_file.readlines()
                youtube_url = ""
                list_timestamps = []
                for idx, line in enumerate(lines):
                    if idx == 0:
                        youtube_url = line.strip()
                    else:
                        timestamp = int(line.split(' ')[0])
                        list_timestamps.append(timestamp)
                seq_file.close()
                self.list_data.append(Data(youtube_url, seq_name, list_timestamps))
        # self.list_data.reverse()
        print(" Done! ")
        print("[INFO] {} movies are used in {} mode".format(len(self.list_data), self.mode))

    def Run(self):
        print("[INFO] Start downloading {} movies".format(len(self.list_data)))
        for global_count, data in enumerate(self.list_data):
            print("[INFO] Downloading {} ".format(data.url))
            videoname = 'current_' + self.mode
            try:
                # sometimes this fails because of known issues of pytube and unknown factors
                yt = YouTube(data.url)
                stream = yt.streams.filter(res='720p').first()
                stream.download(filename=videoname)
            except Exception:
                failure_log = open('failed_videos_' + self.mode + '.txt', 'a')
                for seqname in data.list_seqnames:
                    failure_log.write(seqname + '\n')
                failure_log.close()
                continue
            sleep(1)

            if len(data) == 1:  # len(data) is len(data.list_seqnames)
                process(data, 0, videoname, self.output_root)
            else:
                with Pool(processes=4) as pool:
                    pool.map(wrap_process,
                             [(data, seq_id, videoname, self.output_root)
                              for seq_id in range(len(data))])

            # remove the downloaded video
            if os.path.exists(videoname):
                os.remove(videoname)

            if self.isDone:
                return False
        return True


if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("usage: this.py [test or train]")
        quit()

    if sys.argv[1] == "test":
        mode = "test"
    elif sys.argv[1] == "train":
        mode = "train"
    else:
        print("invalid mode")
        quit()

    dataroot = os.path.join("../RealEstate10K-subset/", mode)
    print(dataroot)
    downloader = DataDownloader(dataroot, mode)
    isOK = downloader.Run()
import os
import pickle

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from tqdm import tqdm


def create_folder(path):
    if not os.path.exists(path):
        os.makedirs(path)


def save_pkl(pkl_data, save_path):
    with open(save_path, 'wb') as f:
        pickle.dump(pkl_data, f)


def get_feature(train_loader, test_loaders, color_fn, opt):
    lr = opt['lr']
    device = opt['device']

    # create model
    model = create_mnist_model(opt['model']).to(device)
    optimizer = optim.Adam(model.parameters(), lr=lr)
    scheduler = optim.lr_scheduler.StepLR(optimizer, 20, 0.1)

    # training
    train(train_loader, model, optimizer, opt, scheduler)

    # testing: accumulate over batches so features, labels, and colors for the
    # whole test set are returned (the original kept only the last batch)
    model.eval()
    features, labels, all_colors = [], [], []
    with torch.no_grad():
        corrects = 0
        for x, y in test_loaders:
            x, y = x.to(device), y.to(device)
            colors = color_fn(x)
            out, feature = model(x)
            corrects += out.max(1)[1].eq(y).sum().item()
            features.append(feature.cpu())
            labels.append(y.cpu())
            all_colors.append(colors.cpu())
        acc = corrects / len(test_loaders.dataset)
        print('Test accuracy on Colored MNIST = {:.2%}'.format(acc))

    return torch.cat(features), torch.cat(labels), torch.cat(all_colors)


def train(loader, model, optimizer, opt, scheduler=None):
    epochs = opt['epochs']
    device = opt['device']
    model.train()
    with tqdm(range(1, epochs + 1)) as pbar:
        for _ in pbar:
            losses = []
            corrects = 0
            for x, y in loader:
                x, y = x.to(device), y.to(device)
                out, feature = model(x)
                loss = F.cross_entropy(out, y)
                losses.append(loss.item())
                corrects += out.max(1)[1].eq(y).sum().item()
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            # step the LR scheduler once per epoch, after the optimizer updates
            if scheduler is not None:
                scheduler.step()
            loss = sum(losses) / len(losses)
            acc = 100 * corrects / len(loader.dataset)
            pbar.set_postfix(loss='%.3f' % loss, acc='%.2f%%' % acc)
    return loss, acc


class LeNet(nn.Module):
    def __init__(self, in_channels, out_dims):
        super(LeNet, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, 6, 5)
        self.conv2 = nn.Conv2d(6, 16, 5)
        self.fc1 = nn.Linear(16 * 4 * 4, 120)
        self.fc2 = nn.Linear(120, 84)
        self.out = nn.Linear(84, out_dims)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.max_pool2d(x, 2)
        x = F.relu(self.conv2(x))
        x = F.max_pool2d(x, 2)
        x = x.view(x.size(0), -1)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.out(x), x


class MLP(nn.Module):
    def __init__(self, in_dim, hidden_dims, out_dim):
        super().__init__()
        layers = []
        for dim in hidden_dims:
            layers.append(nn.Linear(in_dim, dim))
            layers.append(nn.ReLU())
            in_dim = dim
        self.layers = nn.Sequential(*layers)
        self.out = nn.Linear(in_dim, out_dim)

    def forward(self, x):
        if len(x.size()) > 2:
            x = x.view(x.size(0), -1)
        x = self.layers(x)
        # return features alongside logits to match LeNet's interface,
        # since callers unpack "out, feature = model(x)"
        return self.out(x), x


def create_mnist_model(model_name):
    if model_name == 'lenet':
        return LeNet(3, 10)
    elif model_name == 'mlp':
        return MLP(784 * 3, [300, 100], 10)
    else:
        raise ValueError('Model not supported')
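A hypothetical usage sketch for get_feature() above: the loaders, the color function, and the option values are illustrative assumptions, not part of the original file. It assumes torch, get_feature, and save_pkl are in scope from the module above.

from torch.utils.data import DataLoader, TensorDataset

def dummy_color_fn(x):
    # stand-in for a real color-extraction function on Colored MNIST batches
    return x.mean(dim=(2, 3))

opt = {
    'epochs': 5,
    'lr': 1e-3,
    'device': 'cuda' if torch.cuda.is_available() else 'cpu',
    'model': 'lenet',
}

# toy tensors shaped like 3-channel 28x28 MNIST; replace with the real dataset
train_set = TensorDataset(torch.randn(256, 3, 28, 28), torch.randint(0, 10, (256,)))
test_set = TensorDataset(torch.randn(64, 3, 28, 28), torch.randint(0, 10, (64,)))
train_loader = DataLoader(train_set, batch_size=32, shuffle=True)
test_loader = DataLoader(test_set, batch_size=32)

features, labels, colors = get_feature(train_loader, test_loader, dummy_color_fn, opt)
save_pkl({'features': features, 'labels': labels, 'colors': colors}, 'features.pkl')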
// Copyright (c) 2018 Aurigma Inc. All rights reserved. // Licensed under the MIT License. See LICENSE file in the project root for full license information. // Type.registerNamespace("Aurigma.GraphicsMill"); Aurigma.GraphicsMill.UpdateStatus = function () { /// <summary>Specifies a current bitmap status.</summary> /// <field name="ready" type="Number" integer="true" static="true"><summary>The remote scripting method has been completed (or was not run yet), and you can freely get return value or exception details.</summary></field> /// <field name="refresh" type="Number" integer="true" static="true"><summary>The control updates a portion of image it displays (e.g. when user zoomed or scrolled it). The bitmap state is not changed while status is "refresh". </summary></field> /// <field name="busy" type="Number" integer="true" static="true"><summary>The remote scripting method is running (the bitmap state is changing). </summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.UpdateStatus.prototype = { ready: 0, refresh: 1, busy: 2 }; Aurigma.GraphicsMill.UpdateStatus.registerEnum("Aurigma.GraphicsMill.UpdateStatus"); Aurigma.GraphicsMill.ScrollBarsStyle = function () { /// <summary>Specifies values which specify when to display scroll bars at the control.</summary> /// <field name="always" type="Number" integer="true" static="true"><summary>Scroll bars are always displayed regardless of the control content dimensions. If control content is too small, scroll bars are disabled.</summary></field> /// <field name="auto" type="Number" integer="true" static="true"><summary>Scroll bars are displayed when control content is too large to fit the control client area. When content is too small, scroll bars are hidden.</summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.ScrollBarsStyle.prototype = { always: 0, auto: 1 }; Aurigma.GraphicsMill.ScrollBarsStyle.registerEnum("Aurigma.GraphicsMill.ScrollBarsStyle"); Aurigma.GraphicsMill.ZoomQuality = function () { /// <summary>Specifies zoom quality of the displayed image.</summary> /// <remarks>Zoom with higher quality requires more resources on the server-side than lower quality zoom.</remarks> /// <field name="low" type="Number" integer="true" static="true"><summary>Low quality.</summary></field> /// <field name="medium" type="Number" integer="true" static="true"><summary>Medium quality.</summary></field> /// <field name="high" type="Number" integer="true" static="true"><summary>High quality.</summary></field> /// <field name="shrinkHighStretchLow" type="Number" integer="true" static="true"><summary>This mode enables server-side high quality resize in the case when zoom value is lower than 1.0, otherwise the image is resized on the client.</summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.ZoomQuality.prototype = { low: 0, medium: 1, high: 2, shrinkHighStretchLow: 3 }; Aurigma.GraphicsMill.ZoomQuality.registerEnum("Aurigma.GraphicsMill.ZoomQuality"); Aurigma.GraphicsMill.ZoomMode = function () { /// <summary>Specifies zoom modes of the displayed image.</summary> /// <field name="none" type="Number" integer="true" static="true"><summary>Zoom modifier is specified only manually. Use <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoom" /> property. Also, you can use some zooming navigator controls.</summary></field> /// <field name="bestFit" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that entire image could fit the control. 
If image is smaller than the control client area, the image is stretched to occupy as much area as possible.</summary></field> /// <field name="bestFitShrinkOnly" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that entire image could fit the control. If image is smaller than the control client area, zooming modifier is set to 1 (i.e. no zoom).</summary></field> /// <field name="fitToWidth" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that the image width would be the same as the control client area width. If the image width is smaller than the control client area width, image is stretched.</summary></field> /// <field name="fitToHeight" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that the image height would be the same as the control client area height. If the image height is smaller than the control client area height, image is stretched.</summary></field> /// <field name="zoomControl" type="Number" integer="true" static="true"><summary>The control is resized to have the same client area as the image dimensions.</summary></field> /// <field name="fitToWidthShrinkOnly" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that the image width would be the same as the control client area width. If image width is smaller than the control client area, zooming modifier is set to 1 (i.e. no zoom).</summary></field> /// <field name="fitToHeightShrinkOnly" type="Number" integer="true" static="true"><summary>Zoom modifier is calculated automatically so that the image height would be the same as the control client area height. If image height is smaller than the control client area, zooming modifier is set to 1 (i.e. 
no zoom).</summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.ZoomMode.prototype = { none: 0, bestFit: 1, bestFitShrinkOnly: 2, fitToWidth: 3, fitToHeight: 4, zoomControl: 5, fitToWidthShrinkOnly: 6, fitToHeightShrinkOnly: 7 }; Aurigma.GraphicsMill.ZoomMode.registerEnum("Aurigma.GraphicsMill.ZoomMode"); Aurigma.GraphicsMill.ViewportAlignment = function () { /// <summary>Specifies possible values for alignment of viewport.</summary> /// <field name="centerBottom" type="Number" integer="true" static="true"><summary>Viewport is positioned in the middle of the bottom side.</summary></field> /// <field name="centerCenter" type="Number" integer="true" static="true"><summary>Viewport is positioned in the center of the viewer.</summary></field> /// <field name="centerTop" type="Number" integer="true" static="true"><summary>Viewport is positioned in the middle of the top side.</summary></field> /// <field name="leftBottom" type="Number" integer="true" static="true"><summary>Viewport is positioned in the left bottom corner.</summary></field> /// <field name="leftCenter" type="Number" integer="true" static="true"><summary>Viewport is positioned in the middle of the left side.</summary></field> /// <field name="leftTop" type="Number" integer="true" static="true"><summary>Viewport is positioned in the left top corner.</summary></field> /// <field name="rightBottom" type="Number" integer="true" static="true"><summary>Viewport is positioned in the right bottom corner.</summary></field> /// <field name="rightCenter" type="Number" integer="true" static="true"><summary>Viewport is positioned in the middle of the right side.</summary></field> /// <field name="rightTop" type="Number" integer="true" static="true"><summary>Viewport is positioned in the right top corner.</summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.ViewportAlignment.prototype = { centerBottom: 5, centerCenter: 4, centerTop: 3, leftBottom: 2, leftCenter: 1, leftTop: 0, rightBottom: 8, rightCenter: 7, rightTop: 6 }; Aurigma.GraphicsMill.ViewportAlignment.registerEnum("Aurigma.GraphicsMill.ViewportAlignment"); Aurigma.GraphicsMill.JqueryMode = function () { /// <summary>Specifies possible values for source of JQuery library.</summary> /// <field name="BuiltIn" type="Number" integer="true" static="true"><summary>Use JQuery library included in control.</summary></field> /// <field name="External" type="Number" integer="true" static="true"><summary>Use window.JQuery</summary></field> throw Error.notImplemented(); }; Aurigma.GraphicsMill.JqueryMode.prototype = { BuiltIn: 0, External: 1 }; Aurigma.GraphicsMill.JqueryMode.registerEnum("Aurigma.GraphicsMill.JqueryMode"); Aurigma.GraphicsMill.ViewerClientSideOptions = function (viewer) { /// <summary>Exposes properties which configure automatic postback for individual events of the <see cref="T:J:Aurigma.GraphicsMill.BaseViewer" /> control.</summary> /// <remarks><para>Instances of the <see cref="T:J:Aurigma.GraphicsMill.ViewerClientSideOptions" /> class are returned by the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.clientSideOptions">BaseViewer.clientSideOptions</see> property.</para><para>Each property of this class enables or disables automatic postback for appropriate event. 
However if the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack">BaseViewer.autoPostBack</see> value is <b>true</b>, automatic postback occurs regardless of these properties values.</para></remarks> /// <seealso cref="T:Aurigma.Aurigma.GraphicsMill.AjaxControls.ViewerClientSideOptions" /> /// <constructor><exclude /></constructor> this._v = viewer; }; Aurigma.GraphicsMill.ViewerClientSideOptions.prototype = { get_postBackOnWorkspaceChanged: function () { /// <value type="Boolean">The <see cref="T:J:Boolean" /> value which turns automatic postback for the <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceChanged">BaseViewer.workspaceChanged</see> event on.</value> /// <summary>Gets/sets a value which turns automatic postback for the <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceChanged">BaseViewer.workspaceChanged</see> event on.</summary> /// <remarks><para>If <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack">BaseViewer.autoPostBack</see> is <b>true</b>, <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceChanged">BaseViewer.workspaceChanged</see> event fires regardless of the value of this property.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.ViewerClientSideOptions.PostBackOnWorkspaceChanged">ViewerClientSideOptions.PostBackOnWorkspaceChanged</see> server-side member.</para></remarks> return this._v._clientSideOptions$postBackOnWorkspaceChanged; }, set_postBackOnWorkspaceChanged: function (v) { this._v._clientSideOptions$postBackOnWorkspaceChanged = v; }, get_postBackOnWorkspaceClick: function () { /// <value type="Boolean">The <see cref="T:J:Boolean" /> value which turns automatic postback for the <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceClick">workspaceClick</see> event on.</value> /// <summary>Gets/sets a value which turns automatic postback for the <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceClick">workspaceClick</see> event on.</summary> /// <remarks><para>If <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack">BaseViewer.autoPostBack</see> is <b>true</b>, <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceClick">workspaceClick</see> event fires regardless of the value of this property.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.ViewerClientSideOptions.PostBackOnWorkspaceClick">ViewerClientSideOptions.PostBackOnWorkspaceClick</see> server-side member.</para></remarks> return this._v._clientSideOptions$postBackOnWorkspaceClick; }, set_postBackOnWorkspaceClick: function (v) { this._v._clientSideOptions$postBackOnWorkspaceClick = v; } }; Aurigma.GraphicsMill.ViewerClientSideOptions.registerClass("Aurigma.GraphicsMill.ViewerClientSideOptions"); Aurigma.GraphicsMill.BaseViewer = function (element) { /// <summary>This client-side class corresponds to the <see cref="T:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer" /> server-side control and gives an opportunity to obtain access to its primary members in JavaScript.</summary> /// <remarks><para>All the client-side classes intended to display some content in a browser are inherited from this class.</para><para><see cref="T:J:Aurigma.GraphicsMill.BaseViewer" /> is an abstract class which implements common functionality for content viewer controls. 
You cannot instantiate objects of this class directly.</para></remarks> /// <seealso cref="T:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer" /> /// <constructor><exclude /></constructor> Aurigma.GraphicsMill.BaseViewer.initializeBase(this, [element]); //Begin of the fields populated during $create call this._autoPostBack = false; this._backColor = "#FFFFFF"; this._clientSideOptions$postBackOnWorkspaceChanged = false; this._clientSideOptions$postBackOnWorkspaceClick = false; this._maxZoom = 16; this._minZoom = 0.1; this._navigator = ""; this._rubberband = ""; this._screenXDpi = 72; this._screenYDpi = 72; this._scrollBarsStyle = Aurigma.GraphicsMill.ScrollBarsStyle.always; this._scrollBarWidth = 17; this._scrollingPosition = new Aurigma.GraphicsMill.PointF(0, 0); this._viewportAlignment = Aurigma.GraphicsMill.ViewportAlignment.leftTop; this._jqueryMode = Aurigma.GraphicsMill.JqueryMode.BuiltIn; this._zoom = 1; this._zoomMode = Aurigma.GraphicsMill.ZoomMode.none, this._zoomQuality = Aurigma.GraphicsMill.ZoomQuality.shrinkHighStretchLow, this._bestFitWhiteSpacePc = 0, this._stateFieldId = ""; this._needUpdateViewport = false; this._msTouchActionInitialValue = ""; //Postback functions and fields this._workspaceClickPostBack = null; this._workspaceChangedPostBack = null; this._callback = null; this._workspaceChanged = false, this._workspaceClick = false, this._workspaceClickArgs = new Sys.UI.Point(0, 0); this._toolTip = ""; this._accessKey = ""; this._tabIndex = ""; this._borderWidth = 0; //End of the fields populated during $create call this._callbackArgs = ""; this._callbackContext = 0; this._status = Aurigma.GraphicsMill.UpdateStatus.ready; this._exceptionDescription = ""; this._returnValue = ""; this._clientSideOptions = new Aurigma.GraphicsMill.ViewerClientSideOptions(this); this._delayedRefreshTimeout = 1000; this._refreshTimer = null; this._contentElements = []; //Specify whether we need to refresh the image. this._needToRefresh = false; this._rulerEnabled = false; this._rulerWidth = 13; this._rulerScale = 1; // to translate points to inches for example. 
this._rulerOffsetX = 0; this._rulerOffsetY = 0; this._rulersOnScrollDelegate = null; this._rulersOnZoomDelegate = null; this._activeAjax = 0; this._pinchZoomEnabled = true; this._holderBounds = null; this._jquery = window.jQuery; this._contentCtxDimension = {}; this._viewportLocation = {}; this._jHolderElement = null; this._touchFlags = {}; this._rulers = {}; this._bodyCursor = null; }; Aurigma.GraphicsMill.BaseViewer.prototype = { //-------------------------------------------------------------------------- //Private //-------------------------------------------------------------------------- //-------------------------------------------------------------------------- //Methods //-------------------------------------------------------------------------- get_contentWidth: function () { /// <summary>Gets a value that represents the width of the control's content taking into account its horizontal resolution and zoom value.</summary> /// <value type="Number">The width of the control's content.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ContentWidth">BaseViewer.ContentWidth</see> server-side member.</para></remarks> return this._holderBounds.width; }, get_width: function () { /// <summary>Gets the width (in pixels) of the control area.</summary> /// <value type="Number">The width of the control area.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Width">BaseViewer.Width</see> server-side member.</para></remarks> return this._holderBounds.offsetWidth; }, get_contentHeight: function () { /// <summary>Gets a value that represents the height of the control's content taking into account its vertical resolution and zoom value.</summary> /// <value type="Number">The height of the control's content.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ContentHeight">BaseViewer.ContentHeight</see> server-side member.</para></remarks> return this._holderBounds.height; }, get_height: function () { /// <summary>Gets the height (in pixels) of the control area.</summary> /// <value type="Number">The height of the control area.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Height">BaseViewer.Height</see> server-side member.</para></remarks> return this._holderBounds.offsetHeight; }, notifySizeChanged: function () { this._onResize(); }, _render: function () { var wsize = "width:" + this.get_contentWidth() + "px;height:" + this.get_contentHeight() + "px;"; var sb = new Sys.StringBuilder(); sb.append("\u003cdiv id=\"cvContent\" style=\"position:absolute; overflow: hidden;"); if (!this.get_hasContent || this.get_hasContent()) sb.append(wsize); else sb.append("display:none;"); var vl = this._getViewportLocation(); sb.append("left:" + vl.x + "px;top:" + vl.y + "px"); sb.append("\"\u003e"); this._renderContent(sb); // close content. 
sb.append("\u003c/div\u003e"); var layerIDs = ["cvRubberband", "cvNavigator"]; for (var i = 0; i < layerIDs.length; i++) { var layerID = layerIDs[i]; sb.append("\u003cdiv id=\""); sb.append(layerID); sb.append("\""); sb.append(" style=\" "); if (!this.get_hasContent || this.get_hasContent()) sb.append(wsize); sb.append("position:absolute;left:0px;top:0px\"\u003e\u003c/div\u003e"); } sb.append("\u003c/div\u003e"); return sb.toString(); }, _clearElement: function (el) { while (el.firstChild) el.removeChild(el.firstChild); }, _rulersOnScroll: function () { if (!this._touchFlags.pinchStarted) this._updateRulersStyle(); }, _rulersOnZoom: function () { if (!this._touchFlags.pinchStarted) { this._drawRulers(); this._updateRulersStyle(); } }, _rulersOnMouseMove: function (o, e) { // used to draw slide bars just when somebody move mouse over content but don't hold some object // and don't use setCapture if (this.get_rulerEnabled()) { var topSlideBar = this._rulers.topSlideBar; var leftSlideBar = this._rulers.leftSlideBar; if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) { if (!Aurigma.GraphicsMill.Utils.Platform.IsTouchIE()) return; else if (e.originalEvent.pointerType != e.originalEvent.MSPOINTER_TYPE_MOUSE) { topSlideBar.style.left = "0px"; leftSlideBar.style.top = "0px"; return; } } var vl = this._getViewportLocation(); var contentOffset = this._getElementPageCoord(this._contentCtx); var pageX = -1, pageY = -1; if (!isNaN(e.pageX) && !isNaN(e.pageY)) { pageX = e.pageX; pageY = e.pageY; } else if (e.originalEvent) { if (e.originalEvent.touches) { pageX = e.originalEvent.touches[0].pageX; pageY = e.originalEvent.touches[0].pageY; } else { pageX = e.originalEvent.pageX; pageY = e.originalEvent.pageY; } } var x = vl.x + pageX - contentOffset.left - this._scrollingPosition.x; var y = vl.y + pageY - contentOffset.top - this._scrollingPosition.y; topSlideBar.style.left = x + "px"; leftSlideBar.style.top = y + "px"; } }, _initializeRulers: function () { // create Rulers. 
var el = this.get_element().parentNode; var doc = el.ownerDocument; var rulersStyle = { "z-index": "2", "-webkit-transform": "translate3d(0px, 0px, 0px)", "-moz-transform": "translate3d(0px, 0px, 0px)" //"transform": "translate3d(0px, 0px, 0px)" }; var topRuller = doc.createElement("div"); this._jquery(topRuller).css(rulersStyle); var fullTopRuller = doc.createElement("div"); var leftRuller = doc.createElement("div"); this._jquery(leftRuller).css(rulersStyle); var fullLeftRuller = doc.createElement("div"); var whiteRect = doc.createElement("div"); this._jquery(whiteRect).css("z-index", "2"); var topSlideBar = doc.createElement("div"); var leftSlideBar = doc.createElement("div"); this._rulers.topRuller = topRuller; this._rulers.leftRuller = leftRuller; this._rulers.fullTopRuller = fullTopRuller; this._rulers.fullLeftRuller = fullLeftRuller; this._rulers.whiteRect = whiteRect; this._rulers.topSlideBar = topSlideBar; this._rulers.leftSlideBar = leftSlideBar; var id = this.get_element().id; topRuller.id = id + "_TopRuler"; fullTopRuller.id = id + "_FullTopRuler"; leftRuller.id = id + "_LeftRuler"; fullLeftRuller.id = id + "_FullLeftRuler"; whiteRect.id = id + "_WhiteRect"; topSlideBar.id = id + "_TopSlideBar"; leftSlideBar.id = id + "_LeftSlideBar"; fullTopRuller = topRuller.appendChild(fullTopRuller); fullLeftRuller = leftRuller.appendChild(fullLeftRuller); topSlideBar = topRuller.appendChild(topSlideBar); leftSlideBar = leftRuller.appendChild(leftSlideBar); topRuller = el.appendChild(topRuller); leftRuller = el.appendChild(leftRuller); whiteRect = el.appendChild(whiteRect); topRuller.style.position = fullTopRuller.style.position = leftRuller.style.position = fullLeftRuller.style.position = whiteRect.style.position = topSlideBar.style.position = leftSlideBar.style.position = "absolute"; leftSlideBar.style.width = topSlideBar.style.height = this.get_rulerWidth() + "px"; leftSlideBar.style.height = topSlideBar.style.width = "1px"; leftSlideBar.style.backgroundColor = topSlideBar.style.backgroundColor = "#ff0000"; leftSlideBar.style.overflow = topSlideBar.style.overflow = "hidden"; leftSlideBar.style.webkitTransform = topSlideBar.style.webkitTransform = "translate3d(0, 0, 0)"; topRuller.style.overflow = leftRuller.style.overflow = whiteRect.style.overflow = "hidden"; topRuller.style.visibility = leftRuller.style.visibility = whiteRect.style.visibility = "hidden"; whiteRect.style.backgroundColor = "#909090"; el.style.position = "relative"; this._drawRulers(); this._updateRulersStyle(); if (!this._rulersOnScrollDelegate) { this._rulersOnScrollDelegate = Function.createDelegate(this, this._rulersOnScroll); this.add_scrolled(this._rulersOnScrollDelegate); } if (!this._rulersOnZoomDelegate) { this._rulersOnZoomDelegate = Function.createDelegate(this, this._rulersOnZoom); this.add_zoomed(this._rulersOnZoomDelegate); } this.add_mouseMove(Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._rulersOnMouseMove)); }, _disposeRulers: function () { if (this._rulersOnScrollDelegate) { this.remove_scrolled(this._rulersOnScrollDelegate); this._rulersOnScrollDelegate = null; } if (this._rulersOnZoomDelegate) { this.remove_zoomed(this._rulersOnZoomDelegate); this._rulersOnZoomDelegate = null; } }, _updateRulersStyle: function () { // Set ruler style. 
var leftRuller = this._rulers.leftRuller; var topRuller = this._rulers.topRuller; var fullLeftRuller = this._rulers.fullLeftRuller; var fullTopRuller = this._rulers.fullTopRuller; var whiteRect = this._rulers.whiteRect; var width = this.get_width(); var hight = this.get_height(); var contentWidth = this.get_contentWidth(); var contentHeight = this.get_contentHeight(); var scrollBarWidth = this.get_scrollBarWidth(); var leftRullerStyle = leftRuller.style; var topRullerStyle = topRuller.style; var fullTopRullerStyle = fullTopRuller.style; var fullLeftRullerStyle = fullLeftRuller.style; var whiteRectStyle = whiteRect.style; if (!this._rulerEnabled) { topRullerStyle.width = topRullerStyle.height = leftRullerStyle.width = leftRullerStyle.height = topRullerStyle.borderWidth = leftRullerStyle.borderWidth = whiteRectStyle.width = whiteRectStyle.height = "0px"; leftRullerStyle.visibility = topRullerStyle.visibility = whiteRectStyle.visibility = "hidden"; } else { leftRullerStyle.visibility = topRullerStyle.visibility = whiteRectStyle.visibility = "inherit"; leftRullerStyle.backgroundColor = topRullerStyle.backgroundColor = "#ffffff"; leftRullerStyle.borderRight = topRullerStyle.borderBottom = "1px solid black"; var sbAlways = this.get_scrollBarsStyle() == Aurigma.GraphicsMill.ScrollBarsStyle.always; var sbAuto = this.get_scrollBarsStyle() == Aurigma.GraphicsMill.ScrollBarsStyle.auto; var isThisScrollBar = sbAlways || (sbAuto && (contentHeight > hight - this.get_rulerWidth())); var isThatScrollBar = sbAlways || (sbAuto && (contentWidth > width - this.get_rulerWidth())); isThisScrollBar = isThisScrollBar || (isThatScrollBar && sbAuto && (contentHeight > hight - this.get_rulerWidth() - scrollBarWidth)); isThatScrollBar = isThatScrollBar || (isThisScrollBar && sbAuto && (contentWidth > width - this.get_rulerWidth() - scrollBarWidth)); var rw = isThisScrollBar ? width - scrollBarWidth : width; var rh = isThatScrollBar ? hight - scrollBarWidth : hight; topRullerStyle.width = rw + "px"; leftRullerStyle.height = rh + "px"; fullTopRullerStyle.height = topRullerStyle.height = whiteRect.style.height = this._rulerWidth + "px"; fullLeftRullerStyle.width = leftRullerStyle.width = whiteRect.style.width = this._rulerWidth + "px"; // to get clear scroll we need rulers positioned in top-left corner. 
topRullerStyle.top = leftRullerStyle.top = topRullerStyle.left = leftRullerStyle.left = whiteRect.style.top = whiteRect.style.left = "0px"; var sp = this.get_scrollingPosition(); fullTopRullerStyle.left = -sp.x + "px"; fullLeftRullerStyle.top = -sp.y + "px"; } }, _drawRulers: function () { if (this._rulerEnabled) { var doc = this.get_element().parentNode.ownerDocument; var fullLeftRuler = this._rulers.fullLeftRuller; var fullTopRuler = this._rulers.fullTopRuller; var axes = [ { controlLength: this.get_width(), contentLength: this.get_contentWidth(), viewportLocation: this._getViewportLocation().x, factor: this.get_zoom() * this.get_actualSizeHorizontalScale(), ruler: fullTopRuler }, { controlLength: this.get_height(), contentLength: this.get_contentHeight(), viewportLocation: this._getViewportLocation().y, factor: this.get_zoom() * this.get_actualSizeHorizontalScale(), ruler: fullLeftRuler }]; var rulerOffsets = [this.get_rulerOffsetX(), this.get_rulerOffsetY()]; var sw = this.get_scrollBarWidth(); var sbAlways = this.get_scrollBarsStyle() == Aurigma.GraphicsMill.ScrollBarsStyle.always; var sbAuto = this.get_scrollBarsStyle() == Aurigma.GraphicsMill.ScrollBarsStyle.auto; var rulerScale = this.get_rulerScale(); for (var i = 0; i < axes.length; i++) { axes[i].origin = axes[i].viewportLocation + rulerOffsets[i] * axes[i].factor; var isThisScrollBar = sbAlways || (sbAuto && (axes[1 - i].contentLength > axes[1 - i].controlLength - this.get_rulerWidth())); var isThatScrollBar = sbAlways || (sbAuto && (axes[i].contentLength > axes[i].controlLength - this.get_rulerWidth())); isThisScrollBar = isThisScrollBar || (isThatScrollBar && sbAuto && (axes[1 - i].contentLength > axes[1 - i].controlLength - this.get_rulerWidth() - sw)); isThatScrollBar = isThatScrollBar || (isThisScrollBar && sbAuto && (axes[i].contentLength > axes[i].controlLength - this.get_rulerWidth() - sw)); if (isThatScrollBar) axes[i].rulerPixelLength = Math.max(axes[i].contentLength + this.get_rulerWidth(), axes[i].controlLength - (isThisScrollBar ? sw : 0)); else axes[i].rulerPixelLength = (isThisScrollBar) ? axes[i].controlLength - sw : axes[i].controlLength; axes[i].startWorkspaceLocation = -axes[i].origin; axes[i].endWorkspaceLocation = axes[i].startWorkspaceLocation + axes[i].rulerPixelLength; axes[i].startWorkspaceLocation /= axes[i].factor; axes[i].endWorkspaceLocation /= axes[i].factor; axes[i].startWorkspaceLocation *= rulerScale; axes[i].endWorkspaceLocation *= rulerScale; // generate division. var maxCutLength = 80; // pixels; var minCutLength = 4; // pixels; var currentDivision = 1; var tmp = (maxCutLength / axes[i].factor) * rulerScale; var size = 1; while (tmp > 10) { tmp /= 10; size *= 10; } if (tmp > 5) { size = 5 * size; currentDivision = 5; } else if (tmp > 2) { size = 2 * size; currentDivision = 2; } // generate first fragmentation (with numbers); var cuts = [{ location: 0, index: 0 }]; var cur = 0; while (cur < axes[i].endWorkspaceLocation) { cur += size; cuts.push({ location: cur, index: 0 }); } cur = 0; while (cur > axes[i].startWorkspaceLocation) { cur -= size; cuts.push({ location: cur, index: 0 }); } // clear elements. this._clearElement(axes[i].ruler); // draw text. var j; for (j = 0; j < cuts.length; j++) { var txt = doc.createElement("span"); var labelText = Math.abs(Math.round(cuts[j].location)).toString(); txt.innerHTML = (i == 1) ? 
labelText.split('').join('<br/>') : labelText; txt.style.position = "absolute"; txt.style.fontSize = "9px"; txt.style.margin = "0px"; txt.style.padding = "0px"; txt.style.fontFamily = "Tahoma, Verdana, Arial;"; txt.style.backgroundColor = "#ffffff"; var offset = Math.round(cuts[j].location * axes[i].factor / rulerScale + axes[i].origin); txt.style.top = (i == 0) ? "-1px" : offset + 2 + "px"; txt.style.left = (i == 1) ? "2px" : offset + 2 + "px"; axes[i].ruler.appendChild(txt); } // create other cuts. var currentIndex = 1; var divs = { 1: { newDiv: 5, fractions: 2 }, 2: { newDiv: 1, fractions: 2 }, 5: { newDiv: 1, fractions: 5 } }; var c = divs[currentDivision]; var newSize = size / c.fractions; while (newSize * axes[i].factor / rulerScale > minCutLength) { var l = cuts.length - 1; while (l >= 0) { for (var k = 0; k < c.fractions - 1; k++) cuts.push({ index: currentIndex, location: cuts[l].location + newSize * (k + 1) }); l--; } currentIndex++; c = divs[c.newDiv]; newSize = newSize / c.fractions; } var j; for (j = 0; j < cuts.length; j++) { var cut = doc.createElement("div"); cut.style.position = "absolute"; cut.style.overflow = "hidden"; cut.style.backgroundColor = "#000000"; cut.style.padding = "0px"; cut.style.margin = "0px"; var offset = Math.round(cuts[j].location * axes[i].factor / rulerScale + axes[i].origin/* + this.get_rulerWidth()*/); var rw = this.get_rulerWidth(); var cutWidth = Math.max(Math.ceil(rw / Math.pow(2, cuts[j].index)), 2); cut.style.width = (i == 0) ? "1px" : cutWidth + "px"; cut.style.height = (i == 1) ? "1px" : cutWidth + "px"; cut.style.top = (i == 0) ? (rw - cutWidth) + "px" : offset + "px"; cut.style.left = (i == 1) ? (rw - cutWidth) + "px" : offset + "px"; cut.style.webkitTransform = "translate3d(0, 0, 0)"; axes[i].ruler.appendChild(cut); } } } }, _generateCssWithPrefixes: function (prefixes, value) { var retVal = {}; for (var i in prefixes) retVal[prefixes[i] + "transition"] = value; return retVal; }, _getPageZoom: function () { return screen.deviceXDPI / screen.logicalXDPI; }, _getActualScrollPosition: function () { /// <returns type="Sys.UI.Point" /> var holder = this.get_element(); var scrollLeft = holder.scrollLeft, scrollTop = holder.scrollTop; return new Aurigma.GraphicsMill.PointF(scrollLeft, scrollTop); }, _getViewportLocation: function () { /// <returns type="Sys.UI.Point" /> if (this.get_zoomMode() == Aurigma.GraphicsMill.ZoomMode.zoomControl) return new Sys.UI.Point(0, 0); var rullerWidth = this.get_rulerEnabled() ? 
this.get_rulerWidth() : 0; var elementBounds = this._holderBounds; var viewAreaWidth = elementBounds.width - rullerWidth; var viewAreaHeight = elementBounds.height - rullerWidth; var contentWidth = this.get_contentWidth(); var contentHeight = this.get_contentHeight(); var x, y; switch (this._viewportAlignment) { case Aurigma.GraphicsMill.ViewportAlignment.centerBottom: case Aurigma.GraphicsMill.ViewportAlignment.centerCenter: case Aurigma.GraphicsMill.ViewportAlignment.centerTop: x = Math.floor((viewAreaWidth - contentWidth) / 2); break; case Aurigma.GraphicsMill.ViewportAlignment.leftBottom: case Aurigma.GraphicsMill.ViewportAlignment.leftCenter: case Aurigma.GraphicsMill.ViewportAlignment.leftTop: x = 0; break; case Aurigma.GraphicsMill.ViewportAlignment.rightBottom: case Aurigma.GraphicsMill.ViewportAlignment.rightCenter: case Aurigma.GraphicsMill.ViewportAlignment.rightTop: x = viewAreaWidth - contentWidth; break; } ; switch (this._viewportAlignment) { case Aurigma.GraphicsMill.ViewportAlignment.centerCenter: case Aurigma.GraphicsMill.ViewportAlignment.leftCenter: case Aurigma.GraphicsMill.ViewportAlignment.rightCenter: y = Math.floor((viewAreaHeight - contentHeight) / 2); break; case Aurigma.GraphicsMill.ViewportAlignment.centerTop: case Aurigma.GraphicsMill.ViewportAlignment.leftTop: case Aurigma.GraphicsMill.ViewportAlignment.rightTop: y = 0; break; case Aurigma.GraphicsMill.ViewportAlignment.centerBottom: case Aurigma.GraphicsMill.ViewportAlignment.leftBottom: case Aurigma.GraphicsMill.ViewportAlignment.rightBottom: y = viewAreaHeight - contentHeight; break; } ; return new Sys.UI.Point(Math.max(rullerWidth, x + rullerWidth), Math.max(rullerWidth, y + rullerWidth)); }, ignoreDocumentClickOnce: function () { this._ignoreDocumentClickOnce = true; }, _onDocumentClick: function (e) { if (this._ignoreDocumentClickOnce === true) { this._ignoreDocumentClickOnce = false; return; } this._raiseEvent("documentClick", e); }, _onClick: function (e) { this._raiseEvent("click", e); }, _onMouseDown: function (e) { this._raiseEvent("mouseDown", e); }, _onMouseMove: function (e) { this._raiseEvent("mouseMove", e); }, _onMouseUp: function (e) { this._raiseEvent("mouseUp", e); }, _useWorkspaceCoords: function (e) { this._fixJqueryEvent(e); var pt = this.pageToWorkspacePoint(e.pageX, e.pageY); e.x = pt.x; e.y = pt.y; }, _fixJqueryEvent: function (e) { //add pageX/Y and clientX/Y to jquery event var originalEvent = e.originalEvent; if (typeof e.type == "string" && e.type.indexOf("touch") !== -1)//touch event { var touch = originalEvent.touches.length > 0 ? 
originalEvent.touches[0] : originalEvent.changedTouches[0]; e.pageX = touch.pageX; e.pageY = touch.pageY; e.clientX = touch.clientX; e.clientY = touch.clientY; } else { if (isNaN(e.pageX) || isNaN(e.pageY)) { e.pageX = originalEvent.pageX; e.pageY = originalEvent.pageY; } if (isNaN(e.clientX) || isNaN(e.clientY)) { e.clientX = originalEvent.clientX; e.clientY = originalEvent.clientY; } } }, _onWorkspaceDoubleClick: function (e) { this._useWorkspaceCoords(e); var opt = { x: e.x, y: e.y }; this._raiseEvent("workspaceDoubleClick", opt); }, _onWorkspaceClick: function (e) { this._useWorkspaceCoords(e); var opt = { x: e.x, y: e.y }; this._raiseEvent("workspaceClick", opt); this._workspaceClickArgs = new Aurigma.GraphicsMill.PointF(opt.x, opt.y); if (this.get_autoPostBack() || this.get_clientSideOptions().get_postBackOnWorkspaceClick()) this._workspaceClickPostBack(); else this._workspaceClick = true; }, _onWorkspaceMouseDown: function (e) { if (this._isManyTouches(e)) return; this._useWorkspaceCoords(e); this._raiseEvent("workspaceMouseDown", e); // Enable capture to handle mouse move events outside control if (!Aurigma.GraphicsMill.Utils.Platform.IsTouchIE()) { this._contentCtx.setCapture(); } }, _onWorkspaceMouseUp: function (e) { if (this._isManyTouches(e)) return; this._useWorkspaceCoords(e); this._raiseEvent("workspaceMouseUp", e); // Disable capture this._contentCtx.releaseCapture(); }, _onWorkspaceMouseMove: function (e) { if (this._isManyTouches(e)) { //prevent browser pinch e.preventDefault(); return; } this._useWorkspaceCoords(e); this._raiseEvent("workspaceMouseMove", e); }, _saveState: function () { var s = new Object(); this._serializeState(s); var state = Sys.Serialization.JavaScriptSerializer.serialize(s); $get(this._stateFieldId).value = state; }, _callbackSuccess: function (message, context) { /// <param name="name" type="String" /> /// <param name="context" type="String" /> if (context == this._callbackContext) { eval(message); this._callbackComplete(); if (this._needToRefresh) { this._updateViewport(); this._refresh(); } } }, _callbackError: function (message, context) { /// <param name="name" type="String" /> /// <param name="context" type="String" /> this._exceptionDescription = message; this._callbackComplete(); }, _callbackComplete: function () { this._activeAjax--; //Change the viewer status. 
this._status = Aurigma.GraphicsMill.UpdateStatus.ready; this._raiseEvent("statusChanged"); }, //-------------------------------------------------------------------------- //Protected //-------------------------------------------------------------------------- //-------------------------------------------------------------------------- //Method //-------------------------------------------------------------------------- /// <protected /> _raiseEvent: function (name, args) { /// <param name="name" type="String" /> /// <param name="args" /> var h = this.get_events().getHandler(name); if (h) { if (args == undefined) args = Sys.EventArgs.Empty; h(this, args); } }, /// <protected /> _makeInactive: function (domElement) { /// <param name="e" domElement="true" /> domElement.ondrag = function () { return false; }; domElement.unselectable = "on"; }, /// <protected /> _makeInactiveAll: function () { var el = this._contentElements; for (var i = 0; i < el.length; i++) this._makeInactive(el[i]); }, /// <protected /> _resizeContentElements: function () { var contentWidth = this.get_contentWidth(); var contentHeight = this.get_contentHeight(); var el = this._contentElements; for (var i = 0; i < el.length; i++) { el[i].style.width = contentWidth + "px"; el[i].style.height = contentHeight + "px"; if (el[i].tagName == "IMG") { el[i].width = contentWidth; el[i].height = contentHeight; } } this._contentCtxDimension.width = contentWidth; this._contentCtxDimension.height = contentHeight; this._holderBounds = this._getElementBounds(this.get_element()); }, _getElementBounds: function (element) { //when the element is placed in the hidden element ("display: none" style) //then clientWidth, clientHeight, offsetWidth, offsetHeight, offsetTop, offsetLeft are 0 var hiddenElements = []; var jElement = this._jquery(element); //remove display:none style from parent elements while (jElement.length > 0 && jElement[0] != document) { if (jElement.css("display") == "none") { hiddenElements.push({ element: jElement, display: jElement[0].style.display, visibility: jElement[0].style.visibility }); jElement[0].style.visibility = "hidden"; jElement[0].style.display = "block"; } jElement = jElement.parent(); } //get element properties var width = element.clientWidth; var height = element.clientHeight; var offsetHeight = element.offsetHeight; var offsetWidth = element.offsetWidth; var offsetTop = element.offsetTop; var offsetLeft = element.offsetLeft; //restore display:none style for (var i = 0; i < hiddenElements.length; i++) { var hiddenElementBound = hiddenElements[i]; hiddenElementBound.element[0].style.visibility = hiddenElementBound.visibility; hiddenElementBound.element[0].style.display = hiddenElementBound.display; } return { "width": width, "height": height, "offsetWidth": offsetWidth, "offsetHeight": offsetHeight, "offsetTop": offsetTop, "offsetLeft": offsetLeft }; }, _isManyTouches: function (jEvent) { if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) { if (window.navigator.pointerEnabled) { if (!this._touchFlags.multiTouch && jEvent.originalEvent.isPrimary === false) this._touchFlags.multiTouch = true; if (this._touchFlags.multiTouch) return true; } else { var originalEvent = jEvent.originalEvent; if (typeof originalEvent.touches === "object" && originalEvent.touches !== null && typeof originalEvent.touches.length === "number" && originalEvent.touches.length > 1) { return true; } } } return false; }, /// <protected /> _onTouch: function (e) { this._touchFlags.startZoom = this.get_zoom(); this._touchFlags.multiTouch 
= e.gesture.touches.length > 1; }, /// <protected /> _onPinch: function (e, skipScrollToGestureCenter) { e.gesture.preventDefault(); e.preventDefault(); this._touchFlags.multiTouch = e.gesture.touches.length > 1; if (!this._touchFlags.pinchStarted) { this._touchFlags.pinchStarted = true; this._raiseEvent("pinchStart"); } if (this.get_pinchZoomEnabled()) { var zoom = this._touchFlags.startZoom * e.gesture.scale; var zoomParams = {}; if (skipScrollToGestureCenter) zoomParams.skipZoomToCenter = true; else { zoomParams.centerPageX = e.gesture.center.pageX; zoomParams.centerPageY = e.gesture.center.pageY; } this.set_zoom(zoom, zoomParams); } }, /// <protected /> _onRelease: function (e) { this._touchFlags.multiTouch = false; if (this._touchFlags.pinchStarted) { this._touchFlags.pinchStarted = false; this._raiseEvent("pinchStop"); } }, /// <protected /> initialize: function () { /// <exclude /> var style = document.createElement("style"); style.id = "aurigmaStyles"; style.type = "text/css"; style.innerHTML = ".aurigmaNoSelect {" + "-webkit-user-select: none;" + "-moz-user-select: none;" + "-ms-user-select: none;" + "user-select: none;" + "}"; document.getElementsByTagName("head")[0].appendChild(style); if (this._jqueryMode === Aurigma.GraphicsMill.JqueryMode.BuiltIn) { this._jquery = Aurigma207b12f3449a482cb956bf29e1ebc1c9.jQuery; } else { if (typeof window.jQuery !== "function") throw new Error("jQuery not found"); this._jquery = window.jQuery; } Aurigma.GraphicsMill.BaseViewer.callBaseMethod(this, 'initialize'); var holderElement = this.get_element(); this._holderBounds = this._getElementBounds(holderElement); this._jHolderElement = this._jquery(holderElement); holderElement.innerHTML = this._render(); // Change border style. var border = this.get_element().parentNode; border.style.borderWidth = this._borderWidth + "px"; this._makeInactive(holderElement); var contentElement = this._jquery(holderElement).find("#cvContent"); this._contentElements.push(this._contentCtx = contentElement[0]); this._contentElements.push(this._rubberbandCtx = this._jquery(holderElement).find("#cvRubberband")[0]); this._contentElements.push(this._navigatorCtx = this._jquery(holderElement).find("#cvNavigator")[0]); this._addSetCapture(holderElement); //Init scrolling position var sp = this._scrollingPosition; this._scrollInitialized = true; holderElement.scrollLeft = sp.x; holderElement.scrollTop = sp.y; this._initializeRulers(); var pointerStartEvents = "mousedown"; var pointerMoveEvents = "mousemove"; var pointerUpEvents = "mouseup"; var pointerCancel = ""; //gesture handling if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) { this._jHolderElement.hammer() .on("touch", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onTouch)) .on("pinch", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onPinch)) .on("release", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onRelease)); if (window.navigator.pointerEnabled) { pointerStartEvents = "pointerdown"; pointerMoveEvents = "pointermove"; pointerUpEvents = "pointerup"; pointerCancel = "pointercancel"; /* DOM pointer-event names are lowercase; the original "pointerCancel" would never match */ this._jHolderElement.css("touch-action", this._msTouchActionInitialValue); } else { pointerStartEvents = "mousedown touchstart"; pointerMoveEvents = "mousemove touchmove"; pointerUpEvents = "mouseup touchend"; pointerCancel = "touchcancel"; } } //mouse/touch events handling this._jquery(document).on("click", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onDocumentClick)); this._jHolderElement .on("contextmenu", function (e)
{ e.preventDefault(); }) .on("click", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onClick)) .on(pointerStartEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onMouseDown)) .on(pointerMoveEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onMouseMove)) .on(pointerUpEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onMouseUp)) .on(pointerCancel, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onMouseUp)) .on("scroll", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onScroll)); var jqContentCtx = this._jquery(this._contentCtx); jqContentCtx .on("click", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceClick)) .on("dblclick", Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceDoubleClick)) .on(pointerStartEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceMouseDown)) .on(pointerMoveEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceMouseMove)) .on(pointerUpEvents, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceMouseUp)) .on(pointerCancel, Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, this._onWorkspaceMouseUp)); this._contentCtxDimension = { width: this.get_contentWidth(), height: this.get_contentHeight() }; this._updateViewport(); this._updateViewportAlignment(); this._updateRulersStyle(); this.add_pinchStart(Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, function () { this._rulers.topRuller.style.opacity = this._rulers.leftRuller.style.opacity = 0.1; })); this.add_pinchStop(Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, function () { this._rulersOnZoom(); this._rulers.topRuller.style.opacity = this._rulers.leftRuller.style.opacity = 1; })); setInterval(Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, function () { var bounds = this._getElementBounds(this.get_element()); if (bounds.width > 0 && bounds.width !== this._holderBounds.width || bounds.height > 0 && bounds.height !== this._holderBounds.height) { this._holderBounds = bounds; this._onResize(); } }), 500); }, /// <protected /> _onScroll: function (e) { if (this._scrollInitialized) { this._scrollInitialized = false; return; } this._scrollingPosition = this._getActualScrollPosition(); //Raise a client event. this._raiseEvent("scrolled"); }, /// <protected /> _onResize: function (e) { this._updateViewport(); this._updateViewportAlignment(); this._updateRulersStyle(); this._scrollingPosition = this._getActualScrollPosition(); this._raiseEvent("onresize"); }, /// <protected /> _updateViewport: function () { if (this.get_hasContent && !this.get_hasContent()) return; var mode = this.get_zoomMode(); //Update zoom and or the control size if (mode == Aurigma.GraphicsMill.ZoomMode.zoomControl) { var scrollBarWidth = (this.get_scrollBarsStyle() == Aurigma.GraphicsMill.ScrollBarsStyle.auto) ? 
0 : this.get_scrollBarWidth(); var el = this.get_element(); var parent = el.parentNode; var rw = this.get_rulerEnabled() ? this.get_rulerWidth() : 0; el.style.width = Math.round(this.get_contentWidth() + scrollBarWidth + rw) + "px"; el.style.height = Math.round(this.get_contentHeight() + scrollBarWidth + rw) + "px"; parent.style.width = Math.round(this.get_contentWidth() + scrollBarWidth + rw) + "px"; parent.style.height = Math.round(this.get_contentHeight() + scrollBarWidth + rw) + "px"; } else if (mode != Aurigma.GraphicsMill.ZoomMode.none) this._zoom = this.calculateZoomByZoomMode(mode); this._resizeContentElements(); var vl = this._getViewportLocation(); this._contentCtx.style.left = vl.x + "px"; this._contentCtx.style.top = vl.y + "px"; this._viewportLocation = vl; //Raise the client event this._raiseEvent("zoomed"); }, _addSetCapture: function (holderElement) { //polyfill for setCapture/releaseCapture var thisViewer = this; if (window.HTMLElement) { var element = window.HTMLElement.prototype; var capture = "click mousedown mouseup mousemove mouseover mouseout"; if (!element.setCapture) { element.setCapture = function (retargetToElement, wnd) { if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) return; wnd = wnd || window; var doc = wnd.document; //This implementation is incorrect in the general case because holderElement inherits the selection prevention, //but there are currently no use cases sensitive to it. doc.querySelector("body").classList.add("aurigmaNoSelect"); var self = this; if (!this._capture) { this._capture = function (e) { var eventPath = Aurigma.GraphicsMill.Utils.getEventPath(e.originalEvent); if (eventPath.indexOf(self) !== -1) return; var event = document.createEvent("MouseEvents"); var dx = 0, dy = 0; event.initMouseEvent(e.type, e.bubbles, e.cancelable, e.view, e.detail, e.screenX, e.screenY, e.clientX + dx, e.clientY + dy, e.ctrlKey, e.altKey, e.shiftKey, e.metaKey, e.button, e.relatedTarget); self.dispatchEvent(event); }; thisViewer._jquery(doc).on(capture, this._capture); } }; element.releaseCapture = function (wind) { if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) return; var wnd = (wind) ? wind : window; var doc = wnd.document; doc.querySelector("body").classList.remove("aurigmaNoSelect"); if (this._stopPropagation) thisViewer._jquery(holderElement).off(capture, this._stopPropagation); if (this._capture) thisViewer._jquery(wnd.document).off(capture, this._capture); this._capture = null; }; } } }, _getElementPageCoord: function (domElement) { return this._jquery(domElement).offset(); }, _zoomToPagePoint: function (zoom, pageX, pageY) { var controlPt = this.pageToControlPoint(pageX, pageY); var workspacePt = this.pageToWorkspacePoint(pageX, pageY); this._setZoom(zoom); var contentPt = this.workspaceToContentPoint(workspacePt); var rulerWidth = this.get_rulerEnabled() ?
this._rulerWidth : 0; var scroll = { x: Math.round(contentPt.x - controlPt.x + rulerWidth), y: Math.round(contentPt.y - controlPt.y + rulerWidth) }; this.set_scrollingPosition(scroll); }, _updateViewportAlignment: function () { var vl = this._getViewportLocation(); this._contentCtx.style.left = vl.x + "px"; this._contentCtx.style.top = vl.y + "px"; }, /// <protected /> _serializeState: function (state) { state.ClientSideOptions_PostBackOnWorkspaceChanged = this._clientSideOptions$postBackOnWorkspaceChanged; state.ClientSideOptions_PostBackOnWorkspaceClick = this._clientSideOptions$postBackOnWorkspaceClick; state.ScrollingPosition = this._scrollingPosition.toPoint(); state.ScrollingSize = this.get_scrollingSize(); state.ScrollBarsStyle = this._scrollBarsStyle; state.ScrollBarWidth = this._scrollBarWidth; state.ViewportAlignment = this._viewportAlignment; state.Zoom = this._zoom; state.ZoomMode = this._zoomMode; state.Navigator = this._navigator; state.Rubberband = this._rubberband; state.WorkspaceChanged = this._workspaceChanged; state.WorkspaceClick = this._workspaceClick; state.WorkspaceClickArgs = this._workspaceClickArgs; state.RulerEnabled = this._rulerEnabled; state.RulerWidth = this._rulerWidth; state.RulerScale = this._rulerScale; state.RulerOffsetX = this._rulerOffsetX; state.RulerOffsetY = this._rulerOffsetY; }, //-------------------------------------------------------------------------- //Public //-------------------------------------------------------------------------- //-------------------------------------------------------------------------- //Properties //-------------------------------------------------------------------------- get_scrollingSize: function () { /// <value type="Sys.UI.Point">A scroll bar length.</value> /// <summary>Gets a scroll bar length (in other words, the right-bottom point of the image fragment which is out of the visible area).</summary> var holder = this.get_element(); var w = holder.scrollWidth - holder.clientWidth; var h = holder.scrollHeight - holder.clientHeight; if (w < 0) w = 0; if (h < 0) h = 0; return new Sys.UI.Point(Math.round(w), Math.round(h)); }, get_contentCtx: function () { /// <value domElement="true" /> /// <exclude /> return this._contentCtx; }, get_rubberbandCtx: function () { /// <value domElement="true" /> /// <exclude /> return this._rubberbandCtx; }, get_navigatorCtx: function () { /// <value domElement="true" /> /// <exclude /> return this._navigatorCtx; }, get_autoPostBack: function () { /// <value type="Boolean">The value which is <b>true</b> when a postback to the server automatically occurs whenever the user navigates the content in the <see cref="T:J:Aurigma.GraphicsMill.BaseViewer" />, <b>false</b> otherwise.</value> /// <summary>Gets/sets a value indicating whether a postback to the server automatically occurs when the user zooms or scrolls the content.</summary> /// <remarks><para>If you want to disable automatic postback for certain events (e.g. <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.zoomed" />) and enable it for other ones (e.g. <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceClick" />), you can use the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.clientSideOptions" /> property. It exposes a boolean property for each event. 
</para><para>If <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack" /> is <b>true</b>, postback occurs regardless of values of properties of the <see cref="T:J:Aurigma.GraphicsMill.ViewerClientSideOptions" /> object returned by the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.clientSideOptions" /> property.</para><para>Default value is <b>false</b>.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.AutoPostBack">BaseViewer.AutoPostBack</see> server-side member.</para></remarks> return this._autoPostBack; }, set_autoPostBack: function (v) { this._autoPostBack = v; }, get_borderWidth: function () { /// <value type="Number" integer="true">The value which represents the width (in pixels) of the control border.</value> /// <summary>Gets the width (in pixels) of the control border.</summary> /// <remarks>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.BorderWidth">BaseViewer.BorderWidth</see> server-side member.</remarks> return this._borderWidth; }, get_clientSideOptions: function () { /// <value type="Aurigma.GraphicsMill.ViewerClientSideOptions"><see cref="T:J:Aurigma.GraphicsMill.ViewerClientSideOptions" /> class instance which provides access to properties which configure automatic postback for individual events.</value> /// <summary>Gets values which configure automatic postback for individual events.</summary> /// <remarks><para>Automatic postback can be enabled for all events with the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack" /> property. However, if you want to disable automatic postback for certain events (e.g. <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.zoomed" />) and enable it for the other ones (e.g. <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.workspaceClick" />), you can use this property. It exposes a boolean property for each event. </para><para>This property makes sense only if <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.autoPostBack" /> is <b>false</b>. If it is <b>true</b>, postback occurs regardless of values of properties of the <see cref="T:J:Aurigma.GraphicsMill.ViewerClientSideOptions" /> object.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ClientSideOptions">BaseViewer.ClientSideOptions</see> server-side member.</para></remarks> return this._clientSideOptions; }, set_cursor: function (v, onBody) { /// <value type="Aurigma.GraphicsMill.Cursor" /> this._contentCtx.style.cursor = Aurigma.GraphicsMill.Utils.cursorToCss(v); if (window != null) { v = onBody === true ? v : Aurigma.GraphicsMill.Cursor.defaultCursor; if (v === this._bodyCursor) return; window.document.querySelector("body").style.cursor = Aurigma.GraphicsMill.Utils.cursorToCss(v); this._bodyCursor = v; } }, get_delayedRefreshTimeout: function () { /// <value type="Number" integer="true">The value which represents amount of milliseconds to wait before delayed refresh will be invoked.</value> /// <summary>Amount of milliseconds to wait before delayed refresh will be invoked.</summary> return this._delayedRefreshTimeout; }, set_delayedRefreshTimeout: function (v) { this._delayedRefreshTimeout = v; }, get_maxZoom: function () { /// <value type="Number">The number that specifies the maximum allowed zoom value.</value> /// <summary>Gets the maximum allowed zoom value.</summary> /// <remarks><para>Zoom values are measured in percents/100. It means that value = 1 specifies 100% zoom (i.e.
actual size), value = 10 means 1000% zoom (10x), value = 0.5 means 50% zoom (half), etc.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.MaxZoom">BaseViewer.MaxZoom</see> server-side member. </para></remarks> return this._maxZoom > 0 ? this._maxZoom : 2.5; }, set_maxZoom: function (value) { this._maxZoom = value; }, get_minZoom: function () { /// <value type="Number">The number that specifies the minimum allowed zoom value.</value> /// <summary>Gets the minimum allowed zoom value.</summary> /// <remarks><para>Zoom values are measured in percents/100. It means that value = 1 specifies 100% zoom (i.e. actual size), value = 10 means 1000% zoom (10x), value = 0.5 means 50% zoom (half), etc. </para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.MinZoom">BaseViewer.MinZoom</see> server-side member. </para></remarks> return this._minZoom > 0 ? this._minZoom : 0.001; }, set_minZoom: function (value) { this._minZoom = value; }, get_navigator: function () { /// <value type="String">The value which contains an ID of the navigator control you need to attach.</value> /// <summary>Gets/sets the navigator control ID (i.e. value stored in the attribute <b>id</b> of the tag that inserts the control).</summary> /// <remarks><para>The following navigator controls are available:</para><list type="bullet"><item><term><see cref="T:J:Aurigma.GraphicsMill.ZoomInNavigator" /></term><description>left button clicks zoom the image in.</description></item><item><term><see cref="T:J:Aurigma.GraphicsMill.ZoomOutNavigator" /></term><description>left button clicks zoom the image out.</description></item><item><term><see cref="T:J:Aurigma.GraphicsMill.ZoomRectangleNavigator" /></term><description>user stretches the rectangle by mouse and when mouse button is released, it zooms selected rectangle in.</description></item><item><term><see cref="T:J:Aurigma.GraphicsMill.PanNavigator" /></term><description>when user presses the mouse button down and moves the mouse, the image is panned until user releases the button.</description></item></list><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Navigator">BaseViewer.Navigator</see> server-side member.</para></remarks> return this._navigator; }, set_navigator: function (v) { //Disconnect a navigator if any connected. var nid = this._navigator; var n = nid ? $find(nid) : null; if (n) n.disconnect(); //Connect a new navigator if (v && v.get_id) v = v.get_id(); this._navigator = v; if (v) { n = $find(v); if (n) { // A little hack. if (!this._initialized) this.initialize(); n.connect(this.get_element().id); } } //Update the rubberband var rid = this.get_rubberband(); if (rid) { var r = $find(rid); if (r) r.update(); } }, get_rubberband: function () { /// <value type="String">The value which contains an ID of the rubberband control you need to attach.</value> /// <summary>Gets/sets the rubberband control ID (i.e.
value stored in the attribute <b>id</b> of the tag that inserts the control).</summary> /// <remarks><para>Only the <see cref="T:J:Aurigma.GraphicsMill.RectangleRubberband" /> control is available.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Rubberband">BaseViewer.Rubberband</see> server-side member.</para></remarks> return this._rubberband; }, set_rubberband: function (v) { //Disconnect a rubberband if any connected var rid = this._rubberband; var rubberband = rid ? $find(rid) : null; if (rubberband) { this._jHolderElement.css("-ms-touch-action", "pan-x pan-y"); rubberband.disconnect(); } //Connect a new rubberband if (v && v.get_id) v = v.get_id(); this._rubberband = v; if (v) { rubberband = $find(v); if (rubberband) { // A little hack. if (!this._initialized) this.initialize(); //hack for touch devices if (Aurigma.GraphicsMill.Utils.Platform.IsTouchDevice()) rubberband.set_erasable(false); this._jHolderElement.css("-ms-touch-action", "none"); rubberband.connect(this.get_element().id); } } }, get_screenXDpi: function () { /// <value type="Number">The value representing horizontal resolution in DPI used to show content in the browser.</value> /// <summary>Gets a value representing horizontal resolution in DPI used to show content in the browser.</summary> /// <remarks><para>If the <see cref="P:J:Aurigma.GraphicsMill.BitmapViewer.scaleToActualSize" /> property is set to <b>true</b> the value of the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.screenXDpi" /> property is used to scale content width to its actual size. </para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ScreenXDpi">BaseViewer.ScreenXDpi</see> server-side member.</para></remarks> /// <seealso cref="P:J:Aurigma.GraphicsMill.BaseViewer.screenYDpi" /> /// <seealso cref="P:J:Aurigma.GraphicsMill.BitmapViewer.scaleToActualSize" /> return this._screenXDpi; }, get_screenYDpi: function () { /// <value type="Number">The value representing vertical resolution in DPI used to show content in the browser.</value> /// <summary>Gets a value representing vertical resolution in DPI used to show content in the browser.</summary> /// <remarks><para>If the <see cref="P:J:Aurigma.GraphicsMill.BitmapViewer.scaleToActualSize" /> property is set to <b>true</b> the value of the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.screenYDpi" /> property is used to scale content height to its actual size. 
</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ScreenYDpi">BaseViewer.ScreenYDpi</see> server-side member.</para></remarks> /// <seealso cref="P:J:Aurigma.GraphicsMill.BaseViewer.screenXDpi" /> /// <seealso cref="P:J:Aurigma.GraphicsMill.BitmapViewer.scaleToActualSize" /> return this._screenYDpi; }, get_scrollBarsStyle: function () { /// <value type="Aurigma.GraphicsMill.ScrollBarsStyle">The <see cref="T:J:Aurigma.GraphicsMill.ScrollBarsStyle" /> enumeration member that specifies when to display scroll bars.</value> /// <summary>Gets a value that specifies whether to display scroll bars and whether to hide them automatically when the displayed content is less than the control size.</summary> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ScrollBarsStyle">BaseViewer.ScrollBarsStyle</see> server-side member.</para></remarks> return this._scrollBarsStyle; }, get_scrollBarWidth: function () { /// <value type="Number" integer="true">The value that represents a scroll bar width (in pixels) in calculations.</value> /// <summary>Gets a value that represents a scroll bar width (in pixels) in calculations.</summary> /// <remarks><para>Since there is no simple way to determine the scroll bar width from the JavaScript (taking into account different platform, accessibility modes, etc) the estimated value is specified by this property.</para> /// <para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ScrollBarWidth">BaseViewer.ScrollBarWidth</see> server-side member.</para></remarks> return this._scrollBarWidth; }, get_scrollingPosition: function () { /// <value type="Sys.UI.Point">The point that stores the position of the scroll bars.</value> /// <summary>Gets/sets the position of the scroll bars.</summary> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ScrollingPosition">BaseViewer.ScrollingPosition</see> server-side member.</para></remarks> this._scrollingPosition = this._getActualScrollPosition(); return this._scrollingPosition.toPoint(); }, set_scrollingPosition: function (value) { var jHolder = this._jquery(this.get_element()); jHolder.scrollLeft(value.x).scrollTop(value.y); var pt = this.get_scrollingPosition(); this._scrollingPosition = new Aurigma.GraphicsMill.PointF(pt.x, pt.y); this._raiseEvent("scrolled"); }, get_viewportAlignment: function () { /// <value type="Aurigma.GraphicsMill.ViewportAlignment">The <see cref="T:J:Aurigma.GraphicsMill.ViewportAlignment" /> enumeration member that specifies content alignment in the control.</value> /// <summary>Gets/sets a value that specifies content alignment in the control.</summary> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ViewportAlignment">BaseViewer.ViewportAlignment</see> server-side member.</para></remarks> return this._viewportAlignment; }, set_viewportAlignment: function (v) { this._viewportAlignment = v; this._updateViewportAlignment(); this._drawRulers(); this._updateRulersStyle(); }, get_zoom: function () { /// <value type="Number">The number that specifies the current zoom value.</value> /// <summary>Gets/sets the current zoom value.</summary> /// <remarks><para>Zoom values are measured in percents/100. It means that value = 1 specifies 100% zoom (i.e. 
actual size), value = 10 means 1000% zoom (10x), value = 0.5 means 50% zoom (half), etc.</para><note>If automatic zoom mode is used (i.e. <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoomMode" /> property is not <see cref="F:J:Aurigma.GraphicsMill.ZoomMode.none" />) the value of this property will be ignored.</note><para>Default value is 1.</para><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Zoom">BaseViewer.Zoom</see> server-side member.</para></remarks> /// <seealso cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoomMode" /> return this._zoom; }, set_zoom: function (zoom, params) { var zoomCenterX, zoomCenterY; if (!params) { var pageCoords = this._getElementPageCoord(this.get_element()); var rulerWidth = this.get_rulerEnabled() ? this._rulerWidth : 0; var viewportWidth = this._holderBounds.width - rulerWidth, viewportHeight = this._holderBounds.height - rulerWidth; zoomCenterX = Math.round(pageCoords.left + rulerWidth + viewportWidth / 2); zoomCenterY = Math.round(pageCoords.top + rulerWidth + viewportHeight / 2); } else if (params.skipZoomToCenter) { this._setZoom(zoom); return; } else { zoomCenterX = params.centerPageX; zoomCenterY = params.centerPageY; } this._zoomToPagePoint(zoom, zoomCenterX, zoomCenterY); }, _setZoom: function (value) { if (this._zoom == value && (this._zoomMode == Aurigma.GraphicsMill.ZoomMode.zoomControl || this._zoomMode == Aurigma.GraphicsMill.ZoomMode.none)) return; this._zoom = Math.min(Math.max(value, this.get_minZoom()), this.get_maxZoom()); if (this._zoomMode != Aurigma.GraphicsMill.ZoomMode.zoomControl) this._zoomMode = Aurigma.GraphicsMill.ZoomMode.none; this._updateViewport(); }, get_pinchZoomEnabled: function () { return this._pinchZoomEnabled; }, set_pinchZoomEnabled: function (v) { this._pinchZoomEnabled = v; }, get_rulerEnabled: function () { /// <summary>Gets or sets a value indicating whether to show the ruler.</summary> /// <value type="Boolean"><strong>true</strong> if the ruler is shown; otherwise <strong>false</strong>.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.RulerEnabled">BaseViewer.RulerEnabled</see> server-side member.</para></remarks> return this._rulerEnabled; }, set_rulerEnabled: function (v) { var displayStatus = v ? 
"block" : "none"; this._rulers.fullLeftRuller.style.display = displayStatus; this._rulers.fullTopRuller.style.display = displayStatus; this._rulers.leftRuller.style.display = displayStatus; this._rulers.topRuller.style.display = displayStatus; this._rulers.whiteRect.style.display = displayStatus; this._rulerEnabled = v; this.set_zoomMode(this.get_zoomMode()); }, get_rulerScale: function () { /// <summary>Gets or sets the ruler scale.</summary> /// <value type="Number">The the ruler scale.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.RulerScale">BaseViewer.RulerScale</see> server-side member.</para></remarks> return this._rulerScale; }, set_rulerScale: function (v) { if (v <= 0) throw new Error('Ruler scale should be greater 0.'); this._rulerScale = v; this._updateViewport(); }, get_rulerOffsetX: function () { /// <summary>Gets or sets the ruler offset on x-axis.</summary> /// <value type="Number">The the ruler offset.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.RulerOffsetX">BaseViewer.RulerOffsetX</see> server-side member.</para></remarks> return this._rulerOffsetX; }, set_rulerOffsetX: function (v) { this._rulerOffsetX = v; this._updateViewport(); }, get_rulerOffsetY: function () { /// <summary>Gets or sets the ruler offset on y-axis.</summary> /// <value type="Number">The the ruler offset.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.RulerOffsetY">BaseViewer.RulerOffsetY</see> server-side member.</para></remarks> return this._rulerOffsetY; }, set_rulerOffsetY: function (v) { this._rulerOffsetY = v; this._updateViewport(); }, get_rulerWidth: function () { /// <summary>Gets or sets the ruler width.</summary> /// <value type="Number">The the ruler width.</value> /// <remarks><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.RulerWidth">BaseViewer.RulerWidth</see> server-side member.</para></remarks> return this._rulerWidth; }, set_rulerWidth: function (v) { this._rulerWidth = v; this._updateViewport(); }, get_zoomMode: function () { /// <value type="Aurigma.GraphicsMill.ZoomMode"><see cref="T:J:Aurigma.GraphicsMill.ZoomMode" /> enumeration member that specifies the zooming behavior.</value> /// <summary>Gets/sets a value that specifies content zoom mode of the control (automatic or manual).</summary> /// <remarks><para>In manual zoom mode (i.e. 
if the value of this property is <see cref="F:J:Aurigma.GraphicsMill.ZoomMode.none" />) user can change the content zoom either by <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoom" /> property or using some zooming <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.navigator">navigator</see>.</para><para>If automatic zoom (except <see cref="F:J:Aurigma.GraphicsMill.ZoomMode.zoomControl" />) mode is used and you attempt to change <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoom" /> value manually, <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoomMode" /> property will be set to <see cref="F:J:Aurigma.GraphicsMill.ZoomMode.none" />.</para><note>When you set some zooming navigator into the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.navigator" /> property, this property is reset to <see cref="F:J:Aurigma.GraphicsMill.ZoomMode.none" />.</note><para>This property corresponds to <see cref="P:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ZoomMode">BaseViewer.ZoomMode</see> server-side member.</para></remarks> /// <seealso cref="P:J:Aurigma.GraphicsMill.BaseViewer.zoom" /> return this._zoomMode; }, set_zoomMode: function (v) { this._zoomMode = v; this._updateViewport(); }, get_bestFitWhiteSpacePc: function () { /// <summary>Gets or sets the white space between the content's maximal dimension and the canvas.</summary> /// <value type="Number">The white space in percent (from 1 to 99).</value> return this._bestFitWhiteSpacePc * 100; }, set_bestFitWhiteSpacePc: function (v) { if ((typeof v == "number" && !isNaN(v)) && v >= 0 && v < 99) this._bestFitWhiteSpacePc = v / 100; this.set_zoomMode(this.get_zoomMode()); }, get_zoomQuality: function () { /// <value type="Aurigma.GraphicsMill.ZoomQuality"><see cref="T:J:Aurigma.GraphicsMill.ZoomQuality" /> enumeration member which specifies which resize algorithm to use.</value> /// <summary>Gets a value that specifies a zoom quality.</summary> /// <remarks><para>The content can be zoomed with different quality (<see cref="F:J:Aurigma.GraphicsMill.ZoomQuality.high" />, <see cref="F:J:Aurigma.GraphicsMill.ZoomQuality.medium" />, <see cref="F:J:Aurigma.GraphicsMill.ZoomQuality.low" /> and <see cref="F:J:Aurigma.GraphicsMill.ZoomQuality.shrinkHighStretchLow" />). If it is zoomed with low quality, the performance is higher, and vice versa.</para></remarks> return this._zoomQuality; }, get_status: function () { /// <value type="Aurigma.GraphicsMill.UpdateStatus">The value which represents current status of Viewer control.</value> /// <summary>Gets a current Viewer status.</summary> /// <remarks><para>It can be one of the following values:</para><list type="bullet"><item><term><see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.ready" /></term><description> The remote scripting method has been completed (or was not run yet), and you can freely get return value or exception details.</description></item><item><term><see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.busy" /></term><description> The remote scripting method is running (the viewer state is changing).</description></item><item><term><see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.refresh" /></term><description> The control updates a portion of content it displays (e.g.
when user zoomed or scrolled it).</description></item></list></remarks> return this._status; }, get_exceptionDescription: function () { /// <value type="String">The value which represents the description of the exception thrown while calling a remote scripting method.</value> /// <summary>When a remote scripting method fails, this method returns the exception description.</summary> /// <remarks><para>If the method succeeded, an empty string is returned.</para><para>To determine when the remote method is completed, use <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.statusChanged" /> event.</para></remarks> return this._exceptionDescription; }, get_returnValue: function () { /// <value>The value returned by remote scripting method.</value> /// <summary>When a remote scripting method is completed, this method returns its return value.</summary> /// <remarks><para>To determine when the remote method is completed, use <see cref="E:J:Aurigma.GraphicsMill.BaseViewer.statusChanged" /> event.</para></remarks> return this._returnValue; }, //-------------------------------------------------------------------------- //Methods //-------------------------------------------------------------------------- calculateZoomByZoomMode: function (zoomMode) { //Workspace width/height var cw = this.get_workspaceWidth() * this.get_screenXDpi() / 72; var ch = this.get_workspaceHeight() * this.get_screenYDpi() / 72; var rw = this.get_rulerEnabled() ? this.get_rulerWidth() : 0; var bounds = this._holderBounds; var viewPortTotalRectangle = { width: bounds.width - rw, height: bounds.height - rw }; //Content width/height var scw = cw; var sch = ch; //The horizontal zoom without the scroll bars var hzwsb = viewPortTotalRectangle.width / scw; //The vertical zoom without the scroll bars var vzwsb = viewPortTotalRectangle.height / sch; //Scroll bar width var s = this.get_scrollBarWidth(); //The horizontal zoom with the scroll bars var hz = (viewPortTotalRectangle.width - s) / scw; //The vertical zoom with the scroll bars var vz = (viewPortTotalRectangle.height - s) / sch; var sbAlways = (this._scrollBarsStyle == Aurigma.GraphicsMill.ScrollBarsStyle.always); //Zoom var zoom = this.get_zoom(); switch (zoomMode) { case Aurigma.GraphicsMill.ZoomMode.bestFit: if (this._bestFitWhiteSpacePc > 0) { hzwsb = (viewPortTotalRectangle.width - (viewPortTotalRectangle.width * this._bestFitWhiteSpacePc)) / scw; vzwsb = (viewPortTotalRectangle.height - (viewPortTotalRectangle.height * this._bestFitWhiteSpacePc)) / sch; hz = (viewPortTotalRectangle.width - s - (viewPortTotalRectangle.width - s) * this._bestFitWhiteSpacePc) / scw; vz = (viewPortTotalRectangle.height - s - (viewPortTotalRectangle.height - s) * this._bestFitWhiteSpacePc) / sch; } zoom = sbAlways ? Math.min(hz, vz) : Math.min(hzwsb, vzwsb); break; case Aurigma.GraphicsMill.ZoomMode.bestFitShrinkOnly: zoom = sbAlways ? Math.min(hz, vz) : Math.min(hzwsb, vzwsb); zoom = Math.min(1, zoom); break; case Aurigma.GraphicsMill.ZoomMode.fitToHeight: if (sbAlways) zoom = vz; else zoom = (Math.round(vzwsb * scw) <= viewPortTotalRectangle.width) ? vzwsb : vz; break; case Aurigma.GraphicsMill.ZoomMode.fitToHeightShrinkOnly: if (sbAlways) zoom = Math.min(1, vz); else { // We should use Math.min here instead of using it later. // For example, if we have vzwsb a little more than 1 and vz a little less. // With Math.min we get 1 as the result; without it, vz. zoom = (Math.round(Math.min(1, vzwsb) * scw) <= viewPortTotalRectangle.width) ?
Math.min(1, vzwsb) : Math.min(1, vz); } break; case Aurigma.GraphicsMill.ZoomMode.fitToWidth: if (sbAlways) zoom = hz; else zoom = (Math.round(hzwsb * sch) <= viewPortTotalRectangle.height) ? hzwsb : hz; break; case Aurigma.GraphicsMill.ZoomMode.fitToWidthShrinkOnly: if (sbAlways) zoom = Math.min(1, hz); else zoom = (Math.round(Math.min(1, hzwsb) * sch) <= viewPortTotalRectangle.height) ? Math.min(1, hzwsb) : Math.min(1, hz); break; } return Math.min(Math.max(zoom, this._minZoom), this._maxZoom); }, clearRenderCtx: function (ctx) { /// <param name="ctx" domElement="true" /> /// <exclude /> while (ctx.childNodes.length > 0) ctx.removeChild(ctx.childNodes[0]); }, workspaceToContentPoint: function (point) { return this.controlToContentPoint(this.workspaceToControlPoint(point)); }, controlToContentPoint: function (point) { /// <param name="point" type="Sys.UI.Point"></param> /// <returns type="Sys.UI.Point"></returns> /// <exclude /> var vl = this._getViewportLocation(); var sp = this._getActualScrollPosition(); var pt = new Aurigma.GraphicsMill.PointF(0, 0); pt.x = point.x - vl.x + sp.x; pt.y = point.y - vl.y + sp.y; return pt.round(); }, controlToPagePoint: function (point) { /// <param name="point" type="Sys.UI.Point"></param> /// <returns type="Sys.UI.Point"></returns> /// <exclude /> var pageCoords = this._getElementPageCoord(this.get_element()); var pageX = point.x + pageCoords.left; var pageY = point.y + pageCoords.top; return new Sys.UI.Point(Math.round(pageX), Math.round(pageY)); }, contentToControlPoint: function (point) { /// <param name="point" type="Sys.UI.Point"></param> /// <returns type="Sys.UI.Point"></returns> /// <exclude /> var vl = this._getViewportLocation(); var sp = this._getActualScrollPosition(); var pt = new Aurigma.GraphicsMill.PointF(0, 0); pt.x = point.x + vl.x - sp.x; pt.y = point.y + vl.y - sp.y; return pt.round(); }, controlToContentRectangle: function (rect) { /// <param name="rect" type="Aurigma.GraphicsMill.Rectangle"></param> /// <returns type="Aurigma.GraphicsMill.Rectangle"></returns> /// <exclude /> var pt1 = new Aurigma.GraphicsMill.PointF(rect.x, rect.y); var pt2 = new Aurigma.GraphicsMill.PointF(rect.x + rect.width, rect.y + rect.height); pt1 = this.controlToContentPoint(pt1); pt2 = this.controlToContentPoint(pt2); return new Aurigma.GraphicsMill.Rectangle(pt1.x, pt1.y, pt2.x - pt1.x, pt2.y - pt1.y); }, contentToControlRectangle: function (rect) { /// <param name="rect" type="Aurigma.GraphicsMill.Rectangle"></param> /// <returns type="Aurigma.GraphicsMill.Rectangle"></returns> /// <exclude /> var pt1 = new Aurigma.GraphicsMill.PointF(rect.x, rect.y); var pt2 = new Aurigma.GraphicsMill.PointF(rect.x + rect.width, rect.y + rect.height); pt1 = this.contentToControlPoint(pt1); pt2 = this.contentToControlPoint(pt2); return new Aurigma.GraphicsMill.Rectangle(pt1.x, pt1.y, pt2.x - pt1.x, pt2.y - pt1.y); }, workspaceToControlPoint: function (point) { /// <summary>Translates coordinates from the workspace-related coordinate system to the control-related one.</summary> /// <param name="point" type="Sys.UI.Point">Coordinates in the workspace coordinate system.</param> /// <returns type="Aurigma.GraphicsMill.PointF">Coordinates in the control coordinate system.</returns> /// <remarks><para>The <see cref="T:J:Aurigma.GraphicsMill.BitmapViewer" /> allows to handle two coordinate systems: workspace-related and control-related and provides the <see cref="M:J:Aurigma.GraphicsMill.BaseViewer.workspaceToControlPoint" /> method to translate the point from the 
workspace-related coordinate system to the control-related one.</para><para>The workspace-related coordinate system represents logical coordinates of the image loaded in the control and allows to work with it regardless of zoom, scroll or alignment. The control-related one is used to measure parameters of standard control events (e.g. position of the mouse pointer).</para><para>This method corresponds to <see cref="M:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.WorkspaceToControl(System.Drawing.PointF)">BaseViewer.WorkspaceToControl(System.Drawing.PointF)</see> server-side member.</para></remarks> /// <seealso cref="M:J:Aurigma.GraphicsMill.BaseViewer.controlToWorkspacePoint" /> var z = this.get_zoom(); var pt = new Sys.UI.Point(Math.round(point.x * z * this._screenXDpi / 72), Math.round(point.y * z * this._screenYDpi / 72)); return this.contentToControlPoint(pt).toPoint(); }, controlToWorkspacePoint: function (point) { /// <summary>Translates coordinates from the control-related coordinate system to the workspace-related one.</summary> /// <param name="point" type="Aurigma.GraphicsMill.PointF">Coordinates in the control coordinate system.</param> /// <returns type="Sys.UI.Point">Coordinates in the workspace coordinate system.</returns> /// <remarks><para>The <see cref="T:J:Aurigma.GraphicsMill.BitmapViewer" /> allows to handle two coordinate systems: workspace-related and control-related and provides the <see cref="M:J:Aurigma.GraphicsMill.BaseViewer.controlToWorkspacePoint" /> method to translate the point from the control-related coordinate system to the workspace-related one.</para><para>The workspace-related coordinate system represents logical coordinates of the image loaded in the control and allows to work with it regardless of zoom, scroll or alignment. The control-related one is used to measure parameters of standard control events (e.g. position of the mouse pointer).</para><para>This method corresponds to <see cref="M:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ControlToWorkspace(System.Drawing.PointF)">BaseViewer.ControlToWorkspace(System.Drawing.PointF)</see> server-side member.</para></remarks> /// <seealso cref="M:J:Aurigma.GraphicsMill.BaseViewer.workspaceToControlPoint" /> var z = this.get_zoom(); var pt = this.controlToContentPoint(point); return new Aurigma.GraphicsMill.PointF(pt.x / (z * this._screenXDpi / 72), pt.y / (z * this._screenYDpi / 72)); }, workspaceToControlRectangle: function (rect) { /// <summary>Translates rectangle coordinates from the workspace-related coordinate system to the control-related one.</summary> /// <param name="rect" type="Aurigma.GraphicsMill.Rectangle">Rectangle coordinates in the workspace coordinate system.</param> /// <returns type="Aurigma.GraphicsMill.Rectangle">Rectangle coordinates in the control coordinate system.</returns> /// <remarks><para>The <see cref="T:J:Aurigma.GraphicsMill.BitmapViewer" /> allows to handle two coordinate systems: workspace-related and control-related and provides the <see cref="M:J:Aurigma.GraphicsMill.BaseViewer.workspaceToControlRectangle" /> method to translate the rectangle from the workspace-related coordinate system to the control-related one.</para><para>The workspace-related coordinate system represents logical coordinates of the image loaded in the control and allows to work with it regardless of zoom, scroll or alignment. The control-related one is used to measure parameters of standard control events (e.g. 
position of the mouse pointer).</para><para>This method corresponds to <see cref="M:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.WorkspaceToControl(System.Drawing.RectangleF)">BaseViewer.WorkspaceToControl(System.Drawing.RectangleF)</see> server-side member.</para></remarks> /// <seealso cref="M:J:Aurigma.GraphicsMill.BaseViewer.controlToWorkspaceRectangle" /> var z = this.get_zoom(); var hs = this._screenXDpi / 72 * z; var vs = this._screenYDpi / 72 * z; var newRect = new Aurigma.GraphicsMill.Rectangle(Math.round(rect.x * hs), Math.round(rect.y * vs), Math.round(rect.width * hs), Math.round(rect.height * vs)); return this.contentToControlRectangle(newRect); }, pageToControlPoint: function (pageX, pageY) { /// <summary>Translates coordinates from the page-related coordinate system to the control-related one.</summary> /// <param name="pageX" type="Number">The X coordinate in the page coordinate system.</param> /// <param name="pageY" type="Number">The Y coordinate in the page coordinate system.</param> /// <returns type="Sys.UI.Point">Coordinates in the control coordinate system.</returns> var pageCoords = this._getElementPageCoord(this.get_element()); var holderX = pageX - pageCoords.left; var holderY = pageY - pageCoords.top; return new Sys.UI.Point(Math.round(holderX), Math.round(holderY)); }, pageToWorkspacePoint: function (pageX, pageY) { /// <summary>Translates coordinates from the page-related coordinate system to the workspace-related one.</summary> /// <param name="pageX" type="Number">The X coordinate in the page coordinate system.</param> /// <param name="pageY" type="Number">The Y coordinate in the page coordinate system.</param> /// <returns type="Sys.UI.Point">Coordinates in the workspace coordinate system.</returns> return this.controlToWorkspacePoint(this.pageToControlPoint(pageX, pageY)); }, controlToWorkspaceRectangle: function (rect) { /// <summary>Translates rectangle coordinates from the control-related coordinate system to the workspace-related one.</summary> /// <param name="rect" type="Aurigma.GraphicsMill.Rectangle">Rectangle coordinates in the control coordinate system.</param> /// <returns type="Aurigma.GraphicsMill.Rectangle">Rectangle coordinates in the workspace coordinate system.</returns> /// <remarks><para>The <see cref="T:J:Aurigma.GraphicsMill.BitmapViewer" /> allows to handle two coordinate systems: workspace-related and control-related and provides the <see cref="M:J:Aurigma.GraphicsMill.BaseViewer.controlToWorkspaceRectangle" /> method to translate the rectangle from the control-related coordinate system to the workspace-related one.</para><para>The workspace-related coordinate system represents logical coordinates of the image loaded in the control and allows to work with it regardless of zoom, scroll or alignment. The control-related one is used to measure parameters of standard control events (e.g.
position of the mouse pointer).</para><para>This method corresponds to <see cref="M:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.ControlToWorkspace(System.Drawing.RectangleF)">BaseViewer.ControlToWorkspace(System.Drawing.RectangleF)</see> server-side member.</para></remarks> /// <seealso cref="M:J:Aurigma.GraphicsMill.BaseViewer.workspaceToControlRectangle" /> var z = this.get_zoom(); var hs = 1 / (z * this._screenXDpi / 72); var vs = 1 / (z * this._screenYDpi / 72); var newRect = this.controlToContentRectangle(rect); return new Aurigma.GraphicsMill.Rectangle(newRect.x * hs, newRect.y * vs, newRect.width * hs, newRect.height * vs); }, invokeRemoteMethod: function (name, args) { /// <param name="name" type="String">The name of the remote method which should be run on the server.</param> /// <param name="args" type="Array" mayBeNull="true">The array of arguments. The first element of the array is passed into the first argument, the second element - into the second one, etc. Number of array items should be the same as a number of arguments.</param> /// <returns type="Boolean">The <see cref="T:J:Boolean" /> value which specifies whether the method was run successfully.</returns> /// <summary>Runs the specified remote method on the server.</summary> if (this._status == Aurigma.GraphicsMill.UpdateStatus.busy) return false; this._status = (name == "__Refresh") ? Aurigma.GraphicsMill.UpdateStatus.refresh : Aurigma.GraphicsMill.UpdateStatus.busy; this._raiseEvent("statusChanged"); this._callbackContext++; this._activeAjax++; this._callbackArgs = Sys.Serialization.JavaScriptSerializer.serialize( [name, args]); // A little HACK :) // We call an undocumented function from ASP.NET. this._raiseInvokingCallbackRequest(); this._saveState(); __theFormPostData = ""; __theFormPostCollection = new Array(); WebForm_InitCallback(); this._callback(); return true; }, abort: function () { /// <summary>Cancels all remote methods.</summary> if (this._status == Aurigma.GraphicsMill.UpdateStatus.busy) { this._callbackContext++; this._exceptionDescription = ""; this._status = Aurigma.GraphicsMill.UpdateStatus.ready; } }, dispose: function () { /// <summary>Releases all resources.</summary> /// <remarks>This method corresponds to <see cref="M:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Dispose" /> server-side member.</remarks> this._disposeRulers(); Sys.Application.removeComponent(this); }, delayedRefresh: function () { /// <summary>Indicates that the control needs to be refreshed.
The actual refreshing is applied when the user does not update the control state (such as scroll position, zoom value) in one second.</summary> if (this._refreshTimer) window.clearTimeout(this._refreshTimer); var onTimeout = Aurigma.GraphicsMill.Utils.createBoundedWrapper(this, function () { this._refreshTimer = null; this.refresh(); }); this._refreshTimer = window.setTimeout(onTimeout, this._delayedRefreshTimeout); }, refresh: function () { /// <summary>Refreshes the control immediately.</summary> if (this._status == Aurigma.GraphicsMill.UpdateStatus.busy) this._needToRefresh = true; else { this._needToRefresh = false; this._refresh(); } }, //-------------------------------------------------------------------------- //Events //-------------------------------------------------------------------------- _raiseInvokingCallbackRequest: function () { var handler = this.get_events().getHandler("invokingCallbackRequest"); if (handler) handler(this); }, add_onResize: function (h) { this.get_events().addHandler("onresize", h); }, remove_onResize: function (h) { this.get_events().removeHandler("onresize", h); }, add_invokingCallbackRequest: function (h) { /// <param name="h" type="Function" /> /// <summary>Occurs before the callback is initiated by this control.</summary> this.get_events().addHandler("invokingCallbackRequest", h); }, remove_invokingCallbackRequest: function (h) { this.get_events().removeHandler("invokingCallbackRequest", h); }, add_workspaceChanged: function (h) { /// <param name="h" type="Function" /> /// <summary>Occurs when the content of the associated workspace is replaced.</summary> /// <remarks><para>This event corresponds to <see cref="E:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.WorkspaceChanged">BaseViewer.WorkspaceChanged</see> server-side member.</para></remarks> this.get_events().addHandler("workspaceChanged", h); }, remove_workspaceChanged: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceChanged", h); }, add_scrolled: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when scroll position of the control is changed.</summary> /// <remarks><para>This event corresponds to <see cref="E:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Scrolled">BaseViewer.Scrolled</see> server-side member.</para></remarks> this.get_events().addHandler("scrolled", h); }, remove_scrolled: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("scrolled", h); }, add_statusChanged: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when status (see <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.status" /> property) is updated. </summary> /// <remarks><para>Using this method you can determine when the remote scripting method was stopped. To do it, this event handler should analyze the value returned with the <see cref="P:J:Aurigma.GraphicsMill.BaseViewer.status"/> property. If it is <see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.ready" />, the remote method has been completed. If it is <see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.busy"/>, the remote method has been started. If it is <see cref="F:J:Aurigma.GraphicsMill.UpdateStatus.refresh"/>, the bitmap is not modified, but the control is downloading a portion of the image (e.g. 
when user zoomed or scrolled content).</para></remarks> this.get_events().addHandler("statusChanged", h); }, remove_statusChanged: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("statusChanged", h); }, add_pinchStart: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when zooming of the content by a pinch gesture starts.</summary> this.get_events().addHandler("pinchStart", h); }, remove_pinchStart: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("pinchStart", h); }, add_pinchStop: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when zooming of the content by a pinch gesture stops.</summary> this.get_events().addHandler("pinchStop", h); }, remove_pinchStop: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("pinchStop", h); }, add_zoomed: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the content is zoomed in the control.</summary> /// <remarks><para>This event corresponds to <see cref="E:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.Zoomed">BaseViewer.Zoomed</see> server-side member.</para></remarks> this.get_events().addHandler("zoomed", h); }, remove_zoomed: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("zoomed", h); }, add_click: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the control is clicked.</summary> this.get_events().addHandler("click", h); }, remove_click: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("click", h); }, add_mouseDown: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the user clicks the control with either mouse button.
</summary> this.get_events().addHandler("mouseDown", h); }, remove_mouseDown: function (h, d) { /// <param name="h" type="Function" /> this.get_events().removeHandler("mouseDown", h); }, add_mouseMove: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the user moves the mouse over the control.</summary> this.get_events().addHandler("mouseMove", h); }, remove_mouseMove: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("mouseMove", h); }, add_mouseUp: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the user releases a mouse button while the mouse is over the control.</summary> this.get_events().addHandler("mouseUp", h); }, remove_mouseUp: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("mouseUp", h); }, add_workspaceClick: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the content displayed in the control is clicked.</summary> /// <remarks><para>Position of the mouse pointer is measured in the coordinates of the workspace and can be used to determine a point of the content selected by mouse click.</para><para>This event corresponds to <see cref="E:Aurigma.Aurigma.GraphicsMill.AjaxControls.BaseViewer.WorkspaceClick">BaseViewer.WorkspaceClick</see> server-side member.</para></remarks> this.get_events().addHandler("workspaceClick", h); }, remove_workspaceClick: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceClick", h); }, add_workspaceDoubleClick: function (h) { /// <param name="h" type="Function" /> /// <summary>Occurs when a mouse button is double clicked in the content displayed by this viewer control.</summary> /// <remarks>Position of the mouse pointer is measured in the coordinates of the workspace and can be used to determine a point of the content selected by mouse click.</remarks> this.get_events().addHandler("workspaceDoubleClick", h); }, remove_workspaceDoubleClick: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceDoubleClick", h); }, add_workspaceMouseDown: function (h) { /// <param name="h" type="Function" /> /// <summary>Occurs when a mouse button is clicked in the content displayed by this viewer control.</summary> /// <remarks>Position of the mouse pointer is measured in the coordinates of the workspace and can be used to determine a point of the content selected by mouse click.</remarks> this.get_events().addHandler("workspaceMouseDown", h); }, remove_workspaceMouseDown: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceMouseDown", h); }, add_workspaceMouseMove: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the user moves the mouse over the content of control.</summary> this.get_events().addHandler("workspaceMouseMove", h); }, remove_workspaceMouseMove: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceMouseMove", h); }, add_workspaceMouseUp: function (h) { /// <param name="h" type="Function" /> /// <summary>Fires when the user releases a mouse button while the mouse is over the content of control.</summary> this.get_events().addHandler("workspaceMouseUp", h); }, remove_workspaceMouseUp: function (h) { /// <param name="h" type="Function" /> this.get_events().removeHandler("workspaceMouseUp", h); } }; Aurigma.GraphicsMill.BaseViewer.registerClass("Aurigma.GraphicsMill.BaseViewer", Sys.UI.Control, 
Sys.IDisposable); // WORKAROUND for ASP.NET 2.0 client script bug. // To allow a ClientCallback inside the OnComplete function of another ClientCallback, the following code was applied. // This code is used in invokeRemoteMethod of BaseViewer. // More details: http://forums.microsoft.com/MSDN/ShowPost.aspx?PostID=705049&SiteID=1 Sys.Application.add_load(function (e, t) { if (t.get_isPartialLoad()) return; WebForm_CallbackComplete = function () { // SyncFix: the original version uses "i" as global thereby resulting in javascript errors when "i" is used elsewhere in consuming pages for (var i = 0; i < __pendingCallbacks.length; i++) { var callbackObject = __pendingCallbacks[i]; if (callbackObject && callbackObject.xmlRequest && (callbackObject.xmlRequest.readyState == 4)) { // the callback should be executed after releasing all resources // associated with this request. // Originally if the callback gets executed here and the callback // routine makes another ASP.NET ajax request then the pending slots and // pending callbacks array gets messed up since the slot is not released // before the next ASP.NET request comes. // FIX: This statement has been moved below // WebForm_ExecuteCallback(callbackObject); if (!__pendingCallbacks[i].async) __synchronousCallBackIndex = -1; __pendingCallbacks[i] = null; var callbackFrameID = "__CALLBACKFRAME" + i; var xmlRequestFrame = document.getElementById(callbackFrameID); if (xmlRequestFrame) xmlRequestFrame.parentNode.removeChild(xmlRequestFrame); // SyncFix: the following statement has been moved down from above; WebForm_ExecuteCallback(callbackObject); } } }; }); if (typeof (Sys) !== 'undefined') Sys.Application.notifyScriptLoaded();
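// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the control's
// source): driving the BaseViewer client API defined above from page script.
// The control id "viewer1" is a hypothetical placeholder; $find() is the
// standard MS AJAX component locator already used in set_navigator above.
Sys.Application.add_load(function () {
    var viewer = $find("viewer1");
    if (!viewer) return;

    // Zoom values are fractions of 100%: 1 = 100%, 0.5 = 50%, 10 = 1000%.
    viewer.set_zoomMode(Aurigma.GraphicsMill.ZoomMode.none);
    viewer.set_zoom(0.5);

    // Zoom towards a specific page point -- the same params object shape
    // that _onPinch passes to set_zoom.
    viewer.set_zoom(2, { centerPageX: 400, centerPageY: 300 });

    // Round-trip a point between the workspace and control coordinate systems.
    var controlPt = viewer.workspaceToControlPoint(new Sys.UI.Point(10, 10));
    var workspacePt = viewer.controlToWorkspacePoint(controlPt);

    // Subscribe to the "zoomed" event raised by _updateViewport.
    viewer.add_zoomed(function (sender) {
        console.log("zoom is now", sender.get_zoom());
    });
});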
var SAMPLE_DATA = { simple: [ new labella.Node(1,50), new labella.Node(2,50), new labella.Node(3,50), new labella.Node(3,50), new labella.Node(3,50), new labella.Node(304,50), new labella.Node(454,50), new labella.Node(454,50), new labella.Node(454,50), new labella.Node(804,50), new labella.Node(804,70), new labella.Node(804,50), new labella.Node(804,50), new labella.Node(854,50), new labella.Node(854,50) ] };
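// Illustrative sketch (editor's addition): SAMPLE_DATA.simple above is a list
// of labella.js nodes created as new labella.Node(idealPosition, width).
// Assuming the standard labella.js API, a Force layout spreads the labels out
// so they no longer overlap:
var force = new labella.Force({ minPos: 0, maxPos: 960 })
    .nodes(SAMPLE_DATA.simple)
    .compute();
// After compute(), each node carries a collision-free currentPos alongside
// its original idealPos.
force.nodes().forEach(function (node) {
    console.log(node.idealPos, '->', node.currentPos);
});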
import{r as s,h as t,H as e}from"./p-b054f48f.js";const i=class{constructor(t){s(this,t),this.name="",this.value="",this.labelName="",this.checked=!1}render(){return t(e,{class:"mx-switch"},t("label",{class:"relative inline-flex flex-nowrap align-center items-center cursor-pointer text-sm w-36 h-14"},t("input",{class:"absolute h-0 w-0 opacity-0",type:"checkbox",name:this.name,checked:this.checked}),t("span",{class:"slider round"}),t("div",{class:"ml-48 inline-block whitespace-nowrap","data-testid":"labelName"},this.labelName)))}};export{i as mx_switch}
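// Illustrative sketch (editor's addition): the minified Stencil bundle above
// defines an <mx-switch> web component with name, value, labelName and
// checked props. Assuming the component's loader has been registered on the
// page, it can be created and configured from script:
const toggle = document.createElement('mx-switch');
toggle.name = 'notifications';
toggle.labelName = 'Enable notifications';
toggle.checked = true;
document.body.appendChild(toggle);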
# -*- coding: utf-8 -*-

"""
Family entity class implementation

"""

__author__ = 'Samir Adrik'
__email__ = 'samir.adrik@gmail.com'

from typing import Union

from source.util import Assertor, Tracking

from .entity import Entity
from .female import Female
from .male import Male


class Family(Entity):
    """
    Family entity class

    """

    @Tracking
    def validate_family_members(self, family_members: list):
        """
        Validate the family_members object. In this implementation a Family has the
        following characteristics:

        - Needs to be a list
        - All objects in the list must be either of class Male or Female

        Parameters
        ----------
        family_members  : list
                          list of Male or Female objects

        """
        Assertor.assert_data_types([family_members], [list])
        for family_member in family_members:
            Assertor.assert_data_types([family_member], [(Male, Female)])

    @Tracking
    def validate_income(self, income: Union[int, float, str]):
        """
        Method for validating that income is non-negative.

        Parameters
        ----------
        income      : int, float, str
                      income to be validated

        """
        Assertor.assert_data_types([income], [(int, float, str)])
        Assertor.assert_non_negative([income], msg="Only non-negative 'income' accepted")

    @Tracking
    def validating_cars(self, cars: Union[int, str]):
        """
        Method for validating that the number of cars is non-negative.

        Parameters
        ----------
        cars        : int, str
                      number of cars to be validated

        """
        Assertor.assert_data_types([cars], [(int, str)])
        Assertor.assert_non_negative([cars], msg="Only non-negative 'numbers of cars'")

    def __init__(self, family_members: list = None, income: Union[int, float, str] = 0,
                 cars: Union[int, str] = 0):
        """
        Constructor / Instantiate the class

        Parameters
        ----------
        family_members  : list
                          list of Person (Male or Female) instances
        income          : int, float, str
                          gross yearly income
        cars            : int, str
                          number of cars in the family

        """
        super().__init__()
        self.validate_family_members(family_members)
        self.validate_income(income)
        self.validating_cars(cars)

        self._familie_medlemmer = family_members
        self._inntekt = str(income)
        self._antall_biler = str(cars)

    @property
    def familie_medlemmer(self):
        """
        family_members getter

        Returns
        -------
        out     : list
                  all active family_members

        """
        return self._familie_medlemmer

    @familie_medlemmer.setter
    def familie_medlemmer(self, new_members: list):
        """
        family_members setter

        Parameters
        ----------
        new_members     : list
                          a list of family_members, i.e. of person (Male or Female)
                          objects to append to family

        """
        self.validate_family_members(new_members)
        self._familie_medlemmer = new_members

    @Tracking
    def add_family_members(self, family_members: (list, Male, Female)):
        """
        Append a list of Male or Female members, or a single member, to family_members.

        Parameters
        ----------
        family_members  : list, Male, Female
                          family member(s) to be appended

        """
        if isinstance(family_members, list):
            # NB: the original implementation wrapped these assertions in all(),
            # which short-circuits on the first falsy return value and thereby
            # skipped validation of every member after the first
            for member in family_members:
                Assertor.assert_data_types([member], [(Male, Female)])
            self._familie_medlemmer.extend(family_members)
        else:
            Assertor.assert_data_types([family_members], [(Male, Female)])
            self._familie_medlemmer.extend([family_members])

    @property
    def inntekt(self):
        """
        income getter

        Returns
        -------
        out     : str
                  current gross yearly income (stored as str)

        """
        return self._inntekt

    @inntekt.setter
    def inntekt(self, income: (int, float, str)):
        """
        income setter

        Parameters
        ----------
        income      : int, float, str
                      new gross yearly income

        """
        self.validate_income(income)
        self._inntekt = str(income)

    @property
    def antall_biler(self):
        """
        cars getter

        Returns
        -------
        out     : str
                  number of cars in the family

        """
        return self._antall_biler

    @antall_biler.setter
    def antall_biler(self, cars: (int, str)):
        """
        cars setter

        Parameters
        ----------
        cars    : int, str
                  new number of cars to set in family

        """
        self.validating_cars(cars)
        self._antall_biler = str(cars)

    @Tracking
    def sifo_properties(self):
        """
        Return all active sifo compatible properties and values in a dictionary.

        Returns
        -------
        out     : dict
                  dictionary of all active properties

        """
        # the last two instance attributes set in __init__ are _inntekt and _antall_biler
        properties = dict(list(self.__dict__.items())[-2:])
        for i, family_member in enumerate(self.familie_medlemmer):
            for name, prop in family_member.__dict__.items():
                if "_id" not in name:
                    properties.update({name + str(i): prop})
        return {name[1:]: value for name, value in properties.items()}

    @staticmethod
    def rules():
        """
        Names of all rules in this entity.

        Returns
        -------
        out     : str
                  comma separated names of all rules in entity

        """
        return ", ".join(
            ['non_negative_income', 'non_negative_cars', 'kindergarten_criteria',
             'sfo_criteria', 'pregnant_criteria'])
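A minimal usage sketch of the Family entity follows; the import path and the Male/Female constructor arguments are assumptions made for illustration (those classes live elsewhere in this package), so treat this as a sketch rather than the canonical API.

    # Usage sketch -- import path and Male/Female signatures are assumed, not verified
    from source.domain import Family, Male, Female  # hypothetical import path

    members = [Male(age=45), Female(age=43)]        # hypothetical constructor arguments
    family = Family(family_members=members, income=550000, cars=1)

    family.add_family_members(Female(age=13))       # a single member is also accepted
    print(family.inntekt)                           # '550000' -- income is stored as str
    print(family.sifo_properties())                 # dict of sifo-compatible properties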
import gym
import numpy as np
import pytest

from stable_baselines3 import A2C, DDPG, DQN, PPO, SAC, TD3
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy


class DummyMultiDiscreteSpace(gym.Env):
    def __init__(self, nvec):
        super(DummyMultiDiscreteSpace, self).__init__()
        self.observation_space = gym.spaces.MultiDiscrete(nvec)
        self.action_space = gym.spaces.Box(low=-1, high=1, shape=(2,), dtype=np.float32)

    def reset(self):
        return self.observation_space.sample()

    def step(self, action):
        return self.observation_space.sample(), 0.0, False, {}


class DummyMultiBinary(gym.Env):
    def __init__(self, n):
        super(DummyMultiBinary, self).__init__()
        self.observation_space = gym.spaces.MultiBinary(n)
        self.action_space = gym.spaces.Box(low=-1, high=1, shape=(2,), dtype=np.float32)

    def reset(self):
        return self.observation_space.sample()

    def step(self, action):
        return self.observation_space.sample(), 0.0, False, {}


@pytest.mark.parametrize("model_class", [SAC, TD3, DQN])
@pytest.mark.parametrize("env", [DummyMultiDiscreteSpace([4, 3]), DummyMultiBinary(8)])
def test_identity_spaces(model_class, env):
    """
    Additional tests for DQN/SAC/TD3 to check observation space support
    for MultiDiscrete and MultiBinary.
    """
    # DQN only supports discrete actions
    if model_class == DQN:
        env.action_space = gym.spaces.Discrete(4)

    env = gym.wrappers.TimeLimit(env, max_episode_steps=100)

    model = model_class("MlpPolicy", env, gamma=0.5, seed=1, policy_kwargs=dict(net_arch=[64]))
    model.learn(total_timesteps=500)

    evaluate_policy(model, env, n_eval_episodes=5, warn=False)


@pytest.mark.parametrize("model_class", [A2C, DDPG, DQN, PPO, SAC, TD3])
@pytest.mark.parametrize("env", ["Pendulum-v0", "CartPole-v1"])
def test_action_spaces(model_class, env):
    if model_class in [SAC, DDPG, TD3]:
        supported_action_space = env == "Pendulum-v0"
    elif model_class == DQN:
        supported_action_space = env == "CartPole-v1"
    elif model_class in [A2C, PPO]:
        supported_action_space = True

    if supported_action_space:
        model_class("MlpPolicy", env)
    else:
        with pytest.raises(AssertionError):
            model_class("MlpPolicy", env)


@pytest.mark.parametrize("model_class", [A2C, PPO, DQN])
@pytest.mark.parametrize("env", ["Taxi-v3"])
def test_discrete_obs_space(model_class, env):
    env = make_vec_env(env, n_envs=2, seed=0)
    if model_class == DQN:
        kwargs = dict(buffer_size=1000, learning_starts=100)
    else:
        kwargs = dict(n_steps=256)
    model_class("MlpPolicy", env, **kwargs).learn(256)
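The dummy environments above can also be exercised outside pytest; this is a minimal smoke-test sketch mirroring test_identity_spaces, with an illustrative (reduced) training budget:

    # Standalone sketch: same pattern as test_identity_spaces, tiny budget
    import gym
    from stable_baselines3 import SAC

    env = gym.wrappers.TimeLimit(DummyMultiDiscreteSpace([4, 3]), max_episode_steps=100)
    model = SAC("MlpPolicy", env, gamma=0.5, seed=1, policy_kwargs=dict(net_arch=[64]))
    model.learn(total_timesteps=250)   # a smoke test, not real training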
// @ts-nocheck import path from 'path'; import alias from '@rollup/plugin-alias'; import multiInput from 'rollup-plugin-multi-input'; import babel from '@rollup/plugin-babel'; import ts from 'rollup-plugin-ts'; import { defineConfig } from 'rollup'; import { nodeResolve } from '@rollup/plugin-node-resolve'; import { Addon } from '@embroider/addon-dev/rollup'; import packageJson from '../package.json'; const addon = new Addon(); const extensions = ['.js', '.ts', '.hbs']; const USE_TS_PLUGIN = process.env.TS; const transpilation = [ // Instruct rollup how to resolve ts and hbs imports // (importing a template-only component, for example) nodeResolve({ resolveOnly: ['./'], extensions }), // Allow top-level imports (what folks are used to from v1 addons) // During the build, anything referencing a top-level import will be // replaced with a relative import. // DANGER: it's somewhat easy to cause circular references with this tool alias({ entries: [ { find: '#types', replacement: path.resolve('src', '-private', 'types.ts'), }, { find: packageJson.name, replacement: path.resolve('src'), }, { find: `${packageJson.name}/(.*)`, replacement: path.resolve('src/$1'), }, ], }), // This babel config should *not* apply presets or compile away ES modules. // It exists only to provide development niceties for you, like automatic // template colocation. // See `babel.config.json` for the actual Babel configuration! ...[ !USE_TS_PLUGIN ? // when using decorators, a common chunk is created "rollupPluginBabelHelpers" // for the decorating wrapping functions // babel({ babelHelpers: 'bundled', extensions }) : // this plugin forces you to rely on 'tslib' // ts({ // can be changed to swc or other transpilers later // but we need the ember plugins converted first // (template compilation and co-location) transpiler: 'babel', browserslist: ['last 2 firefox versions', 'last 2 chrome versions'], // setting this true greatly improves performance, but // at the cost of safety. transpileOnly: false, tsconfig: { fileName: 'tsconfig.json', hook: (config) => ({ ...config, declaration: true }), }, }), ], // Follow the V2 Addon rules about dependencies. Your code can import from // `dependencies` and `peerDependencies` as well as standard Ember-provided // package names. addon.dependencies(), // Ensure that standalone .hbs files are properly integrated as Javascript. addon.hbs(), // addons are allowed to contain imports of .css files, which we want rollup // to leave alone and keep in the published output. // addon.keepAssets(['**/*.css']), ]; // these should be JS, even though the authored format is TS // Unfortunately, your local project layout has to kind of match what classic ember expects // so that all the app-re-exports can be properly generated const globallyAvailable = ['components/**/*.js', 'instance-initializers/*.js', 'helpers/**/*.js']; export default defineConfig({ external: ['tslib'], input: ['src/**/*{js,hbs,ts}'], output: { sourcemap: true, format: 'es', dir: 'dist', }, plugins: [ multiInput(), ...transpilation, // These are the modules that users should be able to import from your // addon. Anything not listed here may get optimized away. addon.publicEntrypoints([...globallyAvailable]), // These are the modules that should get reexported into the traditional // "app" tree. Things in here should also be in publicEntrypoints above, but // not everything in publicEntrypoints necessarily needs to go here. addon.appReexports([...globallyAvailable]), addon.clean(), ], });
import os import platform from collections import OrderedDict from conans.client import tools from conans.client.build.compiler_flags import architecture_flag, parallel_compiler_cl_flag from conans.client.build.cppstd_flags import cppstd_from_settings, cppstd_flag_new as cppstd_flag from conans.client.tools import cross_building from conans.client.tools.apple import is_apple_os from conans.client.tools.oss import get_cross_building_settings from conans.errors import ConanException from conans.model.build_info import DEFAULT_BIN, DEFAULT_INCLUDE, DEFAULT_LIB, DEFAULT_SHARE from conans.util.env_reader import get_env from conans.util.log import logger verbose_definition_name = "CMAKE_VERBOSE_MAKEFILE" cmake_install_prefix_var_name = "CMAKE_INSTALL_PREFIX" runtime_definition_var_name = "CONAN_LINK_RUNTIME" cmake_in_local_cache_var_name = "CONAN_IN_LOCAL_CACHE" def get_toolset(settings, generator): compiler = settings.get_safe("compiler") compiler_base = settings.get_safe("compiler.base") if compiler == "Visual Studio": subs_toolset = settings.get_safe("compiler.toolset") if subs_toolset: return subs_toolset elif compiler == "intel" and compiler_base == "Visual Studio" and "Visual" in generator: compiler_version = settings.get_safe("compiler.version") if compiler_version: compiler_version = compiler_version if "." in compiler_version else \ "%s.0" % compiler_version return "Intel C++ Compiler " + compiler_version return None def get_generator(conanfile): # Returns the name of the generator to be used by CMake if "CONAN_CMAKE_GENERATOR" in os.environ: return os.environ["CONAN_CMAKE_GENERATOR"] compiler = conanfile.settings.get_safe("compiler") compiler_base = conanfile.settings.get_safe("compiler.base") arch = conanfile.settings.get_safe("arch") compiler_version = conanfile.settings.get_safe("compiler.version") compiler_base_version = conanfile.settings.get_safe("compiler.base.version") os_build, _, _, _ = get_cross_building_settings(conanfile) if not compiler or not compiler_version or not arch: if os_build == "Windows": logger.warning("CMake generator could not be deduced from settings") return None return "Unix Makefiles" if compiler == "Visual Studio" or compiler_base == "Visual Studio": version = compiler_base_version or compiler_version _visuals = {'8': '8 2005', '9': '9 2008', '10': '10 2010', '11': '11 2012', '12': '12 2013', '14': '14 2015', '15': '15 2017', '16': '16 2019'}.get(version, "UnknownVersion %s" % version) base = "Visual Studio %s" % _visuals return base # The generator depends on the build machine, not the target if os_build == "Windows" and compiler != "qcc": return "MinGW Makefiles" # it is valid only under Windows return "Unix Makefiles" def get_generator_platform(settings, generator): # Returns the generator platform to be used by CMake if "CONAN_CMAKE_GENERATOR_PLATFORM" in os.environ: return os.environ["CONAN_CMAKE_GENERATOR_PLATFORM"] compiler = settings.get_safe("compiler") compiler_base = settings.get_safe("compiler.base") arch = settings.get_safe("arch") if settings.get_safe("os") == "WindowsCE": return settings.get_safe("os.platform") if (compiler == "Visual Studio" or compiler_base == "Visual Studio") and \ generator and "Visual" in generator: return {"x86": "Win32", "x86_64": "x64", "armv7": "ARM", "armv8": "ARM64"}.get(arch) return None def is_multi_configuration(generator): if not generator: return False return "Visual" in generator or "Xcode" in generator def is_toolset_supported(generator): # 
https://cmake.org/cmake/help/v3.14/variable/CMAKE_GENERATOR_TOOLSET.html if not generator: return False return "Visual" in generator or "Xcode" in generator or "Green Hills MULTI" in generator def is_generator_platform_supported(generator): # https://cmake.org/cmake/help/v3.14/variable/CMAKE_GENERATOR_PLATFORM.html if not generator: return False return "Visual" in generator or "Green Hills MULTI" in generator def verbose_definition(value): return {verbose_definition_name: "ON" if value else "OFF"} def in_local_cache_definition(value): return {cmake_in_local_cache_var_name: "ON" if value else "OFF"} def runtime_definition(runtime): return {runtime_definition_var_name: "/%s" % runtime} if runtime else {} def build_type_definition(new_build_type, old_build_type, generator, output): if new_build_type and new_build_type != old_build_type: output.warn("Forced CMake build type ('%s') different from the settings build type ('%s')" % (new_build_type, old_build_type)) build_type = new_build_type or old_build_type if build_type and not is_multi_configuration(generator): return {"CMAKE_BUILD_TYPE": build_type} return {} class CMakeDefinitionsBuilder(object): def __init__(self, conanfile, cmake_system_name=True, make_program=None, parallel=True, generator=None, set_cmake_flags=False, forced_build_type=None, output=None): self._conanfile = conanfile self._forced_cmake_system_name = cmake_system_name self._make_program = make_program self._parallel = parallel self._generator = generator self._set_cmake_flags = set_cmake_flags self._forced_build_type = forced_build_type self._output = output def _ss(self, setname): """safe setting""" return self._conanfile.settings.get_safe(setname) def _get_cpp_standard_vars(self): cppstd = cppstd_from_settings(self._conanfile.settings) if not cppstd: return {} definitions = {} if cppstd.startswith("gnu"): definitions["CONAN_CMAKE_CXX_STANDARD"] = cppstd[3:] definitions["CONAN_CMAKE_CXX_EXTENSIONS"] = "ON" else: definitions["CONAN_CMAKE_CXX_STANDARD"] = cppstd definitions["CONAN_CMAKE_CXX_EXTENSIONS"] = "OFF" definitions["CONAN_STD_CXX_FLAG"] = cppstd_flag(self._conanfile.settings) return definitions def _cmake_cross_build_defines(self): os_ = self._ss("os") arch = self._ss("arch") os_ver_str = "os.api_level" if os_ == "Android" else "os.version" op_system_version = self._ss(os_ver_str) env_sn = get_env("CONAN_CMAKE_SYSTEM_NAME", "") env_sn = {"False": False, "True": True, "": None}.get(env_sn, env_sn) cmake_system_name = env_sn or self._forced_cmake_system_name os_build, _, _, _ = get_cross_building_settings(self._conanfile) compiler = self._ss("compiler") libcxx = self._ss("compiler.libcxx") definitions = OrderedDict() os_ver = get_env("CONAN_CMAKE_SYSTEM_VERSION", op_system_version) toolchain_file = get_env("CONAN_CMAKE_TOOLCHAIN_FILE", "") if toolchain_file != "": logger.info("Setting Cross build toolchain file: %s" % toolchain_file) definitions["CMAKE_TOOLCHAIN_FILE"] = toolchain_file return definitions if cmake_system_name is False: return definitions # System name and system version if cmake_system_name is not True: # String not empty definitions["CMAKE_SYSTEM_NAME"] = cmake_system_name else: # detect if we are cross building and the system name and version if cross_building(self._conanfile): # We are cross building if os_ != os_build: if os_: # the_os is the host (regular setting) definitions["CMAKE_SYSTEM_NAME"] = {"iOS": "Darwin", "tvOS": "Darwin", "watchOS": "Darwin", "Neutrino": "QNX"}.get(os_, os_) else: definitions["CMAKE_SYSTEM_NAME"] = "Generic" if 
os_ver: definitions["CMAKE_SYSTEM_VERSION"] = os_ver if is_apple_os(os_): definitions["CMAKE_OSX_DEPLOYMENT_TARGET"] = os_ver # system processor cmake_system_processor = os.getenv("CONAN_CMAKE_SYSTEM_PROCESSOR") if cmake_system_processor: definitions["CMAKE_SYSTEM_PROCESSOR"] = cmake_system_processor if definitions: # If enabled cross compile for env_var in ["CONAN_CMAKE_FIND_ROOT_PATH", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE"]: value = os.getenv(env_var) if value: definitions[env_var] = value if self._conanfile and self._conanfile.deps_cpp_info.sysroot: sysroot_path = self._conanfile.deps_cpp_info.sysroot else: sysroot_path = os.getenv("CONAN_CMAKE_FIND_ROOT_PATH", None) if sysroot_path: # Needs to be set here, can't be managed in the cmake generator, CMake needs # to know about the sysroot before any other thing definitions["CMAKE_SYSROOT"] = sysroot_path.replace("\\", "/") # Adjust Android stuff if str(os_) == "Android" and definitions["CMAKE_SYSTEM_NAME"] == "Android": arch_abi_settings = tools.to_android_abi(arch) if arch_abi_settings: definitions["CMAKE_ANDROID_ARCH_ABI"] = arch_abi_settings definitions["ANDROID_ABI"] = arch_abi_settings conan_cmake_android_ndk = os.getenv("CONAN_CMAKE_ANDROID_NDK") if conan_cmake_android_ndk: definitions["ANDROID_NDK"] = conan_cmake_android_ndk definitions["ANDROID_PLATFORM"] = "android-%s" % op_system_version definitions["ANDROID_TOOLCHAIN"] = compiler # More details about supported stdc++ libraries here: # https://developer.android.com/ndk/guides/cpp-support.html if libcxx: definitions["ANDROID_STL"] = libcxx else: definitions["ANDROID_STL"] = 'none' logger.info("Setting Cross build flags: %s" % ", ".join(["%s=%s" % (k, v) for k, v in definitions.items()])) return definitions def _get_make_program_definition(self): make_program = os.getenv("CONAN_MAKE_PROGRAM") or self._make_program if make_program: if not tools.which(make_program): self._output.warn("The specified make program '%s' cannot be found and will be " "ignored" % make_program) else: self._output.info("Using '%s' as CMAKE_MAKE_PROGRAM" % make_program) return {"CMAKE_MAKE_PROGRAM": make_program} return {} def get_definitions(self): compiler = self._ss("compiler") compiler_base = self._ss("compiler.base") compiler_version = self._ss("compiler.version") arch = self._ss("arch") os_ = self._ss("os") libcxx = self._ss("compiler.libcxx") runtime = self._ss("compiler.runtime") build_type = self._ss("build_type") definitions = OrderedDict() definitions.update(runtime_definition(runtime)) definitions.update(build_type_definition(self._forced_build_type, build_type, self._generator, self._output)) if tools.is_apple_os(os_): definitions["CMAKE_OSX_ARCHITECTURES"] = tools.to_apple_arch(arch) # xcrun is only available on macOS, otherwise it's cross-compiling and it needs to be # set within CMake toolchain if platform.system() == "Darwin": definitions["CMAKE_OSX_SYSROOT"] = tools.XCRun(self._conanfile.settings).sdk_path definitions.update(self._cmake_cross_build_defines()) definitions.update(self._get_cpp_standard_vars()) definitions.update(in_local_cache_definition(self._conanfile.in_local_cache)) if compiler: definitions["CONAN_COMPILER"] = compiler if compiler_version: definitions["CONAN_COMPILER_VERSION"] = str(compiler_version) # C, CXX, LINK FLAGS if compiler == "Visual Studio" or compiler_base == "Visual Studio": if self._parallel: flag = parallel_compiler_cl_flag(output=self._output) 
definitions['CONAN_CXX_FLAGS'] = flag definitions['CONAN_C_FLAGS'] = flag else: # arch_flag is only set for non Visual Studio arch_flag = architecture_flag(self._conanfile.settings) if arch_flag: definitions['CONAN_CXX_FLAGS'] = arch_flag definitions['CONAN_SHARED_LINKER_FLAGS'] = arch_flag definitions['CONAN_C_FLAGS'] = arch_flag if self._set_cmake_flags: definitions['CMAKE_CXX_FLAGS'] = arch_flag definitions['CMAKE_SHARED_LINKER_FLAGS'] = arch_flag definitions['CMAKE_C_FLAGS'] = arch_flag if libcxx: definitions["CONAN_LIBCXX"] = libcxx # Shared library try: definitions["BUILD_SHARED_LIBS"] = "ON" if self._conanfile.options.shared else "OFF" except ConanException: pass # Install to package folder try: if self._conanfile.package_folder: definitions["CMAKE_INSTALL_PREFIX"] = self._conanfile.package_folder definitions["CMAKE_INSTALL_BINDIR"] = DEFAULT_BIN definitions["CMAKE_INSTALL_SBINDIR"] = DEFAULT_BIN definitions["CMAKE_INSTALL_LIBEXECDIR"] = DEFAULT_BIN definitions["CMAKE_INSTALL_LIBDIR"] = DEFAULT_LIB definitions["CMAKE_INSTALL_INCLUDEDIR"] = DEFAULT_INCLUDE definitions["CMAKE_INSTALL_OLDINCLUDEDIR"] = DEFAULT_INCLUDE definitions["CMAKE_INSTALL_DATAROOTDIR"] = DEFAULT_SHARE except AttributeError: pass # fpic if not str(os_).startswith("Windows"): fpic = self._conanfile.options.get_safe("fPIC") if fpic is not None: shared = self._conanfile.options.get_safe("shared") fpic_value = "ON" if (fpic or shared) else "OFF" definitions["CONAN_CMAKE_POSITION_INDEPENDENT_CODE"] = fpic_value # Adjust automatically the module path in case the conanfile is using the # cmake_find_package or cmake_find_package_multi install_folder = self._conanfile.install_folder.replace("\\", "/") if "cmake_find_package" in self._conanfile.generators: definitions["CMAKE_MODULE_PATH"] = install_folder if "cmake_find_package_multi" in self._conanfile.generators: # The cmake_find_package_multi only works with targets and generates XXXConfig.cmake # that require the prefix path and the module path definitions["CMAKE_PREFIX_PATH"] = install_folder definitions["CMAKE_MODULE_PATH"] = install_folder definitions.update(self._get_make_program_definition()) # Disable CMake export registry #3070 (CMake installing modules in user home's) definitions["CMAKE_EXPORT_NO_PACKAGE_REGISTRY"] = "ON" return definitions
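The small module-level helpers defined earlier in this file each return a plain dict, so they compose via dict.update(); a quick sketch (the runtime value is illustrative):

    # Sketch: composing the helper outputs into a definitions dict
    defs = {}
    defs.update(verbose_definition(True))           # {'CMAKE_VERBOSE_MAKEFILE': 'ON'}
    defs.update(in_local_cache_definition(False))   # {'CONAN_IN_LOCAL_CACHE': 'OFF'}
    defs.update(runtime_definition('MD'))           # {'CONAN_LINK_RUNTIME': '/MD'}
    assert is_multi_configuration('Visual Studio 16 2019')   # multi-config generator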
from copy import deepcopy
from typing import List

import numpy as np

# todo remove boilerplate duplications
# todo comments
# todo logging
# todo naming
from deeppavlov.models.go_bot.nlu.dto.nlu_response import NLUResponse
from deeppavlov.models.go_bot.policy.dto.digitized_policy_features import DigitizedPolicyFeatures
from deeppavlov.models.go_bot.tracker.dto.dst_knowledge import DSTKnowledge


class UtteranceFeatures:
    """
    the DTO-like class storing the training features of a single utterance of a dialog
    (to feed the GO-bot policy model)
    """
    action_mask: np.ndarray
    attn_key: np.ndarray
    tokens_embeddings_padded: np.ndarray
    features: np.ndarray

    def __init__(self,
                 nlu_response: NLUResponse,
                 tracker_knowledge: DSTKnowledge,
                 features: DigitizedPolicyFeatures):
        self.action_mask = features.action_mask
        self.attn_key = features.attn_key

        tokens_vectorized = nlu_response.tokens_vectorized  # todo proper oop
        self.tokens_embeddings_padded = tokens_vectorized.tokens_embeddings_padded

        self.features = features.concat_feats


class UtteranceTarget:
    """
    the DTO-like class storing the training target of a single utterance of a dialog
    (to feed the GO-bot policy model)
    """
    action_id: int

    def __init__(self, action_id):
        self.action_id = action_id


class UtteranceDataEntry:
    """
    the DTO-like class storing both the training features and target of a single
    utterance of a dialog (to feed the GO-bot policy model)
    """
    features: UtteranceFeatures
    target: UtteranceTarget

    def __init__(self, features, target):
        self.features = features
        self.target = target

    @staticmethod
    def from_features_and_target(features: UtteranceFeatures, target: UtteranceTarget):
        return UtteranceDataEntry(deepcopy(features), deepcopy(target))

    @staticmethod
    def from_features(features: UtteranceFeatures):
        return UtteranceDataEntry(deepcopy(features), UtteranceTarget(None))


class DialogueFeatures:
    """
    the DTO-like class storing the training features of a dialog
    (to feed the GO-bot policy model)
    """
    action_masks: List[np.ndarray]
    attn_keys: List[np.ndarray]
    tokens_embeddings_paddeds: List[np.ndarray]
    featuress: List[np.ndarray]

    def __init__(self):
        self.action_masks = []
        self.attn_keys = []
        self.tokens_embeddings_paddeds = []
        self.featuress = []

    def append(self, utterance_features: UtteranceFeatures):
        self.action_masks.append(utterance_features.action_mask)
        self.attn_keys.append(utterance_features.attn_key)
        self.tokens_embeddings_paddeds.append(utterance_features.tokens_embeddings_padded)
        self.featuress.append(utterance_features.features)

    def __len__(self):
        return len(self.featuress)


class DialogueTargets:
    """
    the DTO-like class storing the training targets of a dialog
    (to feed the GO-bot policy model)
    """
    action_ids: List[int]

    def __init__(self):
        self.action_ids = []

    def append(self, utterance_target: UtteranceTarget):
        self.action_ids.append(utterance_target.action_id)

    def __len__(self):
        return len(self.action_ids)


class DialogueDataEntry:
    """
    the DTO-like class storing both the training features and targets of a dialog
    (to feed the GO-bot policy model)
    """
    features: DialogueFeatures
    targets: DialogueTargets

    def __init__(self):
        self.features = DialogueFeatures()
        self.targets = DialogueTargets()

    def append(self, utterance_features: UtteranceDataEntry):
        self.features.append(utterance_features.features)
        self.targets.append(utterance_features.target)

    def __len__(self):
        return len(self.features)


class PaddedDialogueFeatures(DialogueFeatures):
    """
    the DTO-like class storing the **padded to some specified length** training features
    of a dialog (to feed the GO-bot policy model)
    """
    padded_dialogue_length_mask: List[int]

    def __init__(self, dialogue_features: DialogueFeatures, sequence_length):
        super().__init__()

        padding_length = sequence_length - len(dialogue_features)

        self.padded_dialogue_length_mask = [1] * len(dialogue_features) + [0] * padding_length

        self.action_masks = dialogue_features.action_masks + \
            [np.zeros_like(dialogue_features.action_masks[0])] * padding_length

        self.attn_keys = dialogue_features.attn_keys + \
            [np.zeros_like(dialogue_features.attn_keys[0])] * padding_length

        self.tokens_embeddings_paddeds = dialogue_features.tokens_embeddings_paddeds + \
            [np.zeros_like(dialogue_features.tokens_embeddings_paddeds[0])] * padding_length

        self.featuress = dialogue_features.featuress + \
            [np.zeros_like(dialogue_features.featuress[0])] * padding_length


class PaddedDialogueTargets(DialogueTargets):
    """
    the DTO-like class storing the **padded to some specified length** training targets
    of a dialog (to feed the GO-bot policy model)
    """

    def __init__(self, dialogue_targets: DialogueTargets, sequence_length):
        super().__init__()

        padding_length = sequence_length - len(dialogue_targets)
        self.action_ids = dialogue_targets.action_ids + [0] * padding_length


class PaddedDialogueDataEntry(DialogueDataEntry):
    """
    the DTO-like class storing both the **padded to some specified length** training
    features and targets of a dialog (to feed the GO-bot policy model)
    """
    features: PaddedDialogueFeatures
    targets: PaddedDialogueTargets

    def __init__(self, dialogue_data_entry: DialogueDataEntry, sequence_length):
        super().__init__()

        self.features = PaddedDialogueFeatures(dialogue_data_entry.features, sequence_length)
        self.targets = PaddedDialogueTargets(dialogue_data_entry.targets, sequence_length)


class BatchDialoguesFeatures:
    """
    the DTO-like class storing the training features of a batch of dialogues
    (to feed the GO-bot policy model)
    """
    b_action_masks: List[List[np.ndarray]]
    b_attn_keys: List[List[np.ndarray]]
    b_tokens_embeddings_paddeds: List[List[np.ndarray]]
    b_featuress: List[List[np.ndarray]]
    b_padded_dialogue_length_mask: List[List[int]]
    max_dialogue_length: int

    def __init__(self, max_dialogue_length):
        self.b_action_masks = []
        self.b_attn_keys = []
        self.b_tokens_embeddings_paddeds = []
        self.b_featuress = []
        self.b_padded_dialogue_length_mask = []
        self.max_dialogue_length = max_dialogue_length

    def append(self, padded_dialogue_features: PaddedDialogueFeatures):
        self.b_action_masks.append(padded_dialogue_features.action_masks)
        self.b_attn_keys.append(padded_dialogue_features.attn_keys)
        self.b_tokens_embeddings_paddeds.append(padded_dialogue_features.tokens_embeddings_paddeds)
        self.b_featuress.append(padded_dialogue_features.featuress)
        self.b_padded_dialogue_length_mask.append(padded_dialogue_features.padded_dialogue_length_mask)

    def __len__(self):
        return len(self.b_featuress)


class BatchDialoguesTargets:
    """
    the DTO-like class storing the training targets of a batch of dialogues
    (to feed the GO-bot policy model)
    """
    b_action_ids: List[List[int]]
    max_dialogue_length: int

    def __init__(self, max_dialogue_length):
        self.b_action_ids = []
        self.max_dialogue_length = max_dialogue_length

    def append(self, padded_dialogue_targets: PaddedDialogueTargets):
        self.b_action_ids.append(padded_dialogue_targets.action_ids)

    def __len__(self):
        return len(self.b_action_ids)


class BatchDialoguesDataset:
    """
    the DTO-like class storing both the training features and targets of a batch of
    dialogues (to feed the GO-bot policy model)
    Handles the dialogues padding.
    """
    features: BatchDialoguesFeatures
    targets: BatchDialoguesTargets

    def __init__(self, max_dialogue_length):
        self.features = BatchDialoguesFeatures(max_dialogue_length)
        self.targets = BatchDialoguesTargets(max_dialogue_length)
        self.max_dialogue_length = max_dialogue_length

    def append(self, dialogue_features: DialogueDataEntry):
        padded_dialogue_features = PaddedDialogueDataEntry(dialogue_features, self.max_dialogue_length)
        self.features.append(padded_dialogue_features.features)
        self.targets.append(padded_dialogue_features.targets)

    def __len__(self):
        return len(self.features)
var thumbUp = document.getElementsByClassName("fa-thumbs-up");
var trash = document.getElementsByClassName("fa-trash");

Array.from(thumbUp).forEach(function(element) {
  element.addEventListener('click', function() {
    element.classList.add("complete");
    const name = this.parentNode.parentNode.childNodes[1].innerText;
    const msg = this.parentNode.parentNode.childNodes[3].innerText;
    const customer = this.parentNode.parentNode.childNodes[5].innerText;
    // renamed from "thumbUp" to avoid shadowing the NodeList declared above
    const thumbUpCount = parseFloat(this.parentNode.parentNode.childNodes[7].innerText);
    // read from the row but currently unused
    const size = this.parentNode.parentNode.childNodes[9].innerText;
    const cream = this.parentNode.parentNode.childNodes[11].innerText;
    const milk = this.parentNode.parentNode.childNodes[13].innerText;
    const sugar = this.parentNode.parentNode.childNodes[15].innerText;
    fetch('messages', {
      method: 'put',
      headers: {'Content-Type': 'application/json'},
      body: JSON.stringify({
        'name': name,
        'msg': msg,
        'thumbUp': thumbUpCount,
        'customer': customer
      })
    })
    .then(response => {
      if (response.ok) return response.json();
    })
    .then(data => {
      window.location.reload();
    });
  });
});

Array.from(trash).forEach(function(element) {
  element.addEventListener('click', function() {
    const name = this.parentNode.parentNode.childNodes[1].innerText;
    const msg = this.parentNode.parentNode.childNodes[3].innerText;
    fetch('messages', {
      method: 'delete',
      headers: {'Content-Type': 'application/json'},
      body: JSON.stringify({'name': name, 'msg': msg})
    }).then(function(response) {
      window.location.reload();
    });
  });
});
/* Licensed under the Apache License, Version 2.0 (the "License") http://www.apache.org/licenses/LICENSE-2.0 */ const WB_AREA_SEL = '.room-block .wb-block'; const WBA_WB_SEL = '.room-block .wb-block .wb-tab-content'; const VIDWIN_SEL = '.video.user-video'; const VID_SEL = '.video-container[id!=user-video]'; const CAM_ACTIVITY = 'VIDEO'; const MIC_ACTIVITY = 'AUDIO'; const SCREEN_ACTIVITY = 'SCREEN'; const REC_ACTIVITY = 'RECORD'; var VideoUtil = (function() { const self = {}; function _getVid(uid) { return 'video' + uid; } function _isSharing(sd) { return !!sd && 'SCREEN' === sd.type && sd.activities.includes(SCREEN_ACTIVITY); } function _isRecording(sd) { return !!sd && 'SCREEN' === sd.type && sd.activities.includes(REC_ACTIVITY); } function _hasMic(sd) { return !sd || sd.activities.includes(MIC_ACTIVITY); } function _hasCam(sd) { return !sd || sd.activities.includes(CAM_ACTIVITY); } function _hasVideo(sd) { return _hasCam(sd) || _isSharing(sd) || _isRecording(sd); } function _getRects(sel, excl) { const list = [], elems = $(sel); for (let i = 0; i < elems.length; ++i) { if (excl !== $(elems[i]).attr('aria-describedby')) { list.push(_getRect(elems[i])); } } return list; } function _getRect(e) { const win = $(e), winoff = win.offset(); return {left: winoff.left , top: winoff.top , right: winoff.left + win.width() , bottom: winoff.top + win.height()}; } function _container() { const a = $(WB_AREA_SEL); const c = a.find('.wb-area .tabs .wb-tab-content'); return c.length > 0 ? $(WBA_WB_SEL) : a; } function __processTopToBottom(area, rectNew, list) { const offsetX = 20 , offsetY = 10; let minY = area.bottom, posFound; do { posFound = true; for (let i = 0; i < list.length; ++i) { const rect = list[i]; minY = Math.min(minY, rect.bottom); if (rectNew.left < rect.right && rectNew.right > rect.left && rectNew.top < rect.bottom && rectNew.bottom > rect.top) { rectNew.left = rect.right + offsetX; posFound = false; } if (rectNew.right >= area.right) { rectNew.left = area.left; rectNew.top = Math.max(minY, rectNew.top) + offsetY; posFound = false; } if (rectNew.bottom >= area.bottom) { rectNew.top = area.top; posFound = true; break; } } } while (!posFound); return {left: rectNew.left, top: rectNew.top}; } function __processEqualsBottomToTop(area, rectNew, list) { const offsetX = 20 , offsetY = 10; rectNew.bottom = area.bottom; let minY = area.bottom, posFound; do { posFound = true; for (let i = 0; i < list.length; ++i) { const rect = list[i]; minY = Math.min(minY, rect.top); if (rectNew.left < rect.right && rectNew.right > rect.left && rectNew.top < rect.bottom && rectNew.bottom > rect.top) { rectNew.left = rect.right + offsetX; posFound = false; } if (rectNew.right >= area.right) { rectNew.left = area.left; rectNew.bottom = Math.min(minY, rectNew.top) - offsetY; posFound = false; } if (rectNew.top <= area.top) { rectNew.top = area.top; posFound = true; break; } } } while (!posFound); return {left: rectNew.left, top: rectNew.top}; } function _getPos(list, w, h, _processor) { if (Room.getOptions().interview) { return {left: 0, top: 0}; } const wba = _container() , woffset = wba.offset() , area = {left: woffset.left, top: woffset.top, right: woffset.left + wba.width(), bottom: woffset.top + wba.height()} , rectNew = { _left: area.left , _top: area.top , _right: area.left + w , _bottom: area.top + h , get left() { return this._left; } , set left(l) { this._left = l; this._right = l + w; } , get right() { return this._right; } , get top() { return this._top; } , set top(t) { this._top = t; this._bottom = t 
+ h; } , set bottom(b) { this._bottom = b; this._top = b - h; } , get bottom() { return this._bottom; } }; const processor = _processor || __processTopToBottom; return processor(area, rectNew, list); } function _arrange() { const list = []; $(VIDWIN_SEL).each(function() { const v = $(this); v.css(_getPos(list, v.width(), v.height())); list.push(_getRect(v)); }); } function _arrangeResize() { const list = []; function __getDialog(_v) { return $(_v).find('.video-container.ui-dialog-content'); } $(VIDWIN_SEL).toArray().sort((v1, v2) => { const c1 = __getDialog(v1).data().stream() , c2 = __getDialog(v2).data().stream(); return c2.level - c1.level || c1.user.displayName.localeCompare(c2.user.displayName); }).forEach(_v => { const v = $(_v); __getDialog(v) .dialog('option', 'width', 120) .dialog('option', 'height', 90); v.css(_getPos(list, v.width(), v.height(), __processEqualsBottomToTop)); list.push(_getRect(v)); }); } function _cleanStream(stream) { if (!!stream) { stream.getTracks().forEach(track => track.stop()); } } function _cleanPeer(peer) { if (!!peer) { peer.cleaned = true; try { const pc = peer.peerConnection; if (!!pc) { pc.getSenders().forEach(sender => { try { if (sender.track) { sender.track.stop(); } } catch(e) { OmUtil.log('Failed to clean sender' + e); } }); pc.getReceivers().forEach(receiver => { try { if (receiver.track) { receiver.track.stop(); } } catch(e) { OmUtil.log('Failed to clean receiver' + e); } }); pc.onconnectionstatechange = null; pc.ontrack = null; pc.onremovetrack = null; pc.onremovestream = null; pc.onicecandidate = null; pc.oniceconnectionstatechange = null; pc.onsignalingstatechange = null; pc.onicegatheringstatechange = null; pc.onnegotiationneeded = null; } peer.dispose(); peer.removeAllListeners('icecandidate'); delete peer.generateOffer; delete peer.processAnswer; delete peer.processOffer; delete peer.addIceCandidate; } catch(e) { //no-op } } } function _isChrome(_b) { const b = _b || kurentoUtils.WebRtcPeer.browser; return b.name === 'Chrome' || b.name === 'Chromium'; } function _isEdge(_b) { const b = _b || kurentoUtils.WebRtcPeer.browser; return b.name === 'Edge' && "MSGestureEvent" in window; } function _isEdgeChromium(_b) { const b = _b || kurentoUtils.WebRtcPeer.browser; return b.name === 'Edge' && !("MSGestureEvent" in window); } function _setPos(v, pos) { if (v.dialog('instance')) { v.dialog('widget').css(pos); } } function _askPermission(callback) { const perm = $('#ask-permission'); if (undefined === perm.dialog('instance')) { perm.data('callbacks', []).dialog({ appendTo: '.room-block .room-container' , autoOpen: true , buttons: [ { text: perm.data('btn-ok') , click: function() { while (perm.data('callbacks').length > 0) { perm.data('callbacks').pop()(); } $(this).dialog('close'); } } ] }); } else if (!perm.dialog('isOpen')) { perm.dialog('open') } perm.data('callbacks').push(callback); } function _disconnect(node) { try { node.disconnect(); //this one can throw } catch (e) { //no-op } } function _sharingSupported() { const b = kurentoUtils.WebRtcPeer.browser; return (b.name === 'Edge' && b.major > 16) || (b.name === 'Firefox') || (b.name === 'Opera') || (b.name === 'Yandex') || _isChrome(b) || _isEdgeChromium(b) || (b.name === 'Mozilla' && b.major > 4); } function _highlight(el, clazz, count) { if (!el || el.length < 1 || el.hasClass('disabled') || count < 0) { return; } el.addClass(clazz).delay(2000).queue(function(next) { el.removeClass(clazz).delay(2000).queue(function(next1) { _highlight(el, clazz, --count); next1(); }); next(); }); } 
self.getVid = _getVid; self.isSharing = _isSharing; self.isRecording = _isRecording; self.hasMic = _hasMic; self.hasCam = _hasCam; self.hasVideo = _hasVideo; self.getRects = _getRects; self.getPos = _getPos; self.container = _container; self.arrange = _arrange; self.arrangeResize = _arrangeResize; self.cleanStream = _cleanStream; self.cleanPeer = _cleanPeer; self.addIceServers = function(opts, m) { if (m && m.iceServers && m.iceServers.length > 0) { opts.configuration = {iceServers: m.iceServers}; } return opts; }; self.isEdge = _isEdge; self.isEdgeChromium = _isEdgeChromium; self.isChrome = _isChrome; self.setPos = _setPos; self.askPermission = _askPermission; self.disconnect = _disconnect; self.sharingSupported = _sharingSupported; self.highlight = _highlight; return self; })(); var Volume = (function() { let video, vol, drop, slider, handleEl, hideTimer = null , lastVolume = 50, muted = false; function __cancelHide() { if (hideTimer) { clearTimeout(hideTimer); hideTimer = null; } } function __hideDrop() { __cancelHide(); hideTimer = setTimeout(() => { drop.hide(); hideTimer = null; }, 3000); } function _create(_video) { video = _video; _destroy(); const uid = video.stream().uid , volId = 'volume-' + uid; vol = OmUtil.tmpl('#volume-control-stub', volId) slider = vol.find('.slider'); drop = vol.find('.dropdown-menu'); vol.on('mouseenter', function(e) { e.stopImmediatePropagation(); drop.show(); __hideDrop() }) .click(function(e) { e.stopImmediatePropagation(); OmUtil.roomAction({action: 'mute', uid: uid, mute: !muted}); _mute(!muted); drop.hide(); return false; }).dblclick(function(e) { e.stopImmediatePropagation(); return false; }); drop.on('mouseenter', function() { __cancelHide(); }); drop.on('mouseleave', function() { __hideDrop(); }); handleEl = vol.find('.handle'); slider.slider({ orientation: 'vertical' , range: 'min' , min: 0 , max: 100 , value: lastVolume , create: function() { handleEl.text($(this).slider('value')); } , slide: function(event, ui) { _handle(ui.value); } }); _handle(lastVolume); _mute(muted); return vol; } function _handle(val) { handleEl.text(val); const vidEl = video.video() , data = vidEl.data(); if (video.stream().self) { if (data.gainNode) { data.gainNode.gain.value = val / 100; } } else { vidEl[0].volume = val / 100; } const ico = vol.find('a'); if (val > 0 && ico.hasClass('volume-off')) { ico.toggleClass('volume-off volume-on'); video.handleMicStatus(true); } else if (val === 0 && ico.hasClass('volume-on')) { ico.toggleClass('volume-on volume-off'); video.handleMicStatus(false); } } function _mute(mute) { if (!slider) { return; } muted = mute; if (mute) { const val = slider.slider('option', 'value'); if (val > 0) { lastVolume = val; } slider.slider('option', 'value', 0); _handle(0); } else { slider.slider('option', 'value', lastVolume); _handle(lastVolume); } } function _destroy() { if (vol) { vol.remove(); vol = null; } } return { create: _create , handle: _handle , mute: _mute , muted: function() { return muted; } , destroy: _destroy }; });