text
stringlengths 3
1.05M
|
|---|
import svelte from 'rollup-plugin-svelte';
import commonjs from '@rollup/plugin-commonjs';
import resolve from '@rollup/plugin-node-resolve';
import livereload from 'rollup-plugin-livereload';
import { terser } from 'rollup-plugin-terser';
import sveltePreprocess from 'svelte-preprocess';
import typescript from '@rollup/plugin-typescript';
import css from 'rollup-plugin-css-only';
const production = !process.env.ROLLUP_WATCH;

/**
 * Rollup plugin that boots the app's dev server (`npm run start -- --dev`)
 * exactly once, after the first bundle has been written.
 *
 * @returns {{writeBundle: Function}} a minimal Rollup plugin object
 */
function serve() {
  let devServer;

  // Kill the spawned dev server when the Rollup process goes away.
  function shutDown() {
    if (devServer) devServer.kill(0);
  }

  return {
    writeBundle() {
      // Only spawn on the first bundle; later rebuilds reuse the server.
      if (devServer) {
        return;
      }
      devServer = require('child_process').spawn(
        'npm',
        ['run', 'start', '--', '--dev'],
        {
          stdio: ['ignore', 'inherit', 'inherit'],
          shell: true
        }
      );
      process.on('SIGTERM', shutDown);
      process.on('exit', shutDown);
    }
  };
}
// Rollup build configuration for a Svelte + TypeScript single-page app.
// `production` is false only under `rollup --watch` (npm run dev).
export default {
  input: 'src/main.ts',
  output: {
    sourcemap: true,
    format: 'iife', // self-executing bundle, loadable from a plain <script> tag
    name: 'app',
    file: 'public/bundle.js'
  },
  plugins: [
    svelte({
      // Only emit preprocessor sourcemaps during development.
      preprocess: sveltePreprocess({ sourceMap: !production }),
      compilerOptions: {
        // enable run-time checks when not in production
        dev: !production
      }
    }),
    // we'll extract any component CSS out into
    // a separate file - better for performance
    css({ output: 'bundle.css' }),
    // If you have external dependencies installed from
    // npm, you'll most likely need these plugins. In
    // some cases you'll need additional configuration -
    // consult the documentation for details:
    // https://github.com/rollup/plugins/tree/master/packages/commonjs
    resolve({
      browser: true,
      dedupe: ['svelte']
    }),
    commonjs(),
    typescript({
      sourceMap: !production,
      inlineSources: !production
    }),
    // In dev mode, call `npm run start` once
    // the bundle has been generated
    !production && serve(),
    // Watch the `public` directory and refresh the
    // browser on changes when not in production
    !production && livereload('public'),
    // If we're building for production (npm run build
    // instead of npm run dev), minify
    production && terser()
  ],
  watch: {
    clearScreen: false
  }
};
|
// Cloud Foundry environment variables
var port = process.env.VCAP_APP_PORT || process.env.PORT || 3001;
var services = JSON.parse(process.env.VCAP_SERVICES);
var mongoAddress = services['mongodb-2.4'][0]['credentials']['url'];
var mongoAccess = require('url').parse(mongoAddress);
module.exports = {
mongodb: {
server: mongoAccess.hostname,
port: mongoAccess.port,
//autoReconnect: automatically reconnect if connection is lost
autoReconnect: true,
//poolSize: size of connection pool (number of connections to use)
poolSize: 4,
//set admin to true if you want to turn on admin features
//if admin is true, the auth list below will be ignored
//if admin is true, you will need to enter an admin username/password below (if it is needed)
admin: false,
// >>>> If you are using regular accounts, fill out auth details in the section below
// >>>> If you have admin auth, leave this section empty and skip to the next section
auth: [
/*
* Add the the name, the username, and the password of the databases you want to connect to
* Add as many databases as you want!
*/
{
database: mongoAccess.pathname.substring(1, mongoAccess.pathname.length),
username: mongoAccess.auth.split(':')[0],
password: mongoAccess.auth.split(':')[1]
}
],
// >>>> If you are using an admin mongodb account, or no admin account exists, fill out section below
// >>>> Using an admin account allows you to view and edit all databases, and view stats
//leave username and password empty if no admin account exists
//adminUsername: mongoAccess.username,
//adminPassword: mongoAccess.password,
//whitelist: hide all databases except the ones in this list (empty list for no whitelist)
whitelist: [],
//blacklist: hide databases listed in the blacklist (empty list for no blacklist)
blacklist: ['system.indexes', 'system.users']
},
site: {
//baseUrl: the URL that mongo express will be located at
//Remember to add the forward slash at the end!
baseUrl: '/',
port: port,
cookieSecret: 'cookiesecret',
sessionSecret: 'sessionsecret'
},
options: {
//documentsPerPage: how many documents you want to see at once in collection view
documentsPerPage: 10,
//editorTheme: Name of the theme you want to use for displaying documents
//See http://codemirror.net/demo/theme.html for all examples
editorTheme: "rubyblue",
//The options below aren't being used yet
//cmdType: the type of command line you want mongo express to run
//values: eval, subprocess
// eval - uses db.eval. commands block, so only use this if you have to
// subprocess - spawns a mongo command line as a subprocess and pipes output to mongo express
cmdType: 'eval',
//subprocessTimeout: number of seconds of non-interaction before a subprocess is shut down
subprocessTimeout: 300
}
};
|
// Icon descriptor in {elem, attrs, content} form (Carbon-icons style):
// a 32x32 viewBox rendered at 24x24 px. The two paths appear to draw a
// tray with an upward arrow — the usual "export"/upload glyph.
export default {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 24,
    height: 24,
  },
  content: [
    {
      elem: 'path',
      attrs: { d: 'M26 24v4H6v-4H4v4a2 2 0 0 0 2 2h20a2 2 0 0 0 2-2v-4z' },
    },
    {
      elem: 'path',
      attrs: {
        d: 'M6 12l1.41 1.4L15 5.83V25h2V5.83l7.59 7.57L26 12 16 2 6 12z',
      },
    },
  ],
  name: 'export',
  size: 24,
};
|
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import profiles.urls
import accounts.urls
from . import views
from . import serializers
# Top-level URL routing. Order matters: the accounts include uses an empty
# prefix and must stay last so it doesn't shadow the routes above it.
urlpatterns = [
    url(r'^$', views.HomePage.as_view(), name='home'),
    url(r'^about/$', views.AboutPage.as_view(), name='about'),
    url(r'^users/', include(profiles.urls, namespace='profiles')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api/documents/$', views.FileView.as_view(), name="file-upload"),
    # url(r'^download/(?P<file_name>.+)$', views.download.as_view()),
    # url(r'^media\/(?P<path>.*)$', views.download),
    # Catch-all prefix — keep last.
    url(r'^', include(accounts.urls, namespace='accounts')),
]
# User-uploaded files like profile pics need to be served in development
# (django.conf.urls.static.static returns an empty list unless DEBUG is on,
# so this is a no-op in production).
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Include django debug toolbar if DEBUG is on
if settings.DEBUG:
    # Imported lazily so the package is only required in development.
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from tempfile import TemporaryFile
from openerp import tools
from openerp.osv import osv, fields
class base_language_import(osv.osv_memory):
    """ Language Import

    Transient wizard that installs a translation file (.po or .csv)
    for a given language code.
    """
    _name = "base.language.import"
    _description = "Language Import"
    _columns = {
        'name': fields.char('Language Name', required=True),
        'code': fields.char('ISO Code', size=5, help="ISO Language and Country code, e.g. en_US", required=True),
        # Uploaded file contents; binary fields arrive base64-encoded.
        'data': fields.binary('File', required=True),
        'overwrite': fields.boolean('Overwrite Existing Terms',
            help="If you enable this option, existing translations (including custom ones) "
                 "will be overwritten and replaced by those in this file"),
    }

    def import_lang(self, cr, uid, ids, context=None):
        """Decode the uploaded file, sniff its format and load it.

        A CSV export starts with a ``type,name,res_id,src,value`` header
        (compared after stripping quotes and spaces from the first line);
        anything else is treated as a PO file. The stream is then handed
        to ``tools.trans_load_data`` for the wizard's language code.

        :return: True (wizard convention: close the dialog)
        """
        if context is None:
            context = {}
        # NOTE(review): context is not forwarded to browse() here — confirm
        # whether that is intentional.
        this = self.browse(cr, uid, ids[0])
        if this.overwrite:
            # Ask the loader to replace existing terms instead of keeping them.
            context = dict(context, overwrite=True)
        fileobj = TemporaryFile('w+')
        try:
            fileobj.write(base64.decodestring(this.data))
            # now we determine the file format
            fileobj.seek(0)
            first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
            fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
            fileobj.seek(0)
            tools.trans_load_data(cr, fileobj, fileformat, this.code, lang_name=this.name, context=context)
        finally:
            # Guarantee the temp file is released even if the load fails.
            fileobj.close()
        return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
webpackJsonp([54],{2096:function(l,n,t){"use strict";function View_CoreLoginSitePolicyPage_1(l){return e._57(0,[(l()(),e._31(0,0,null,null,5,"ion-card",[],null,null,null,null,null)),e._30(1,16384,null,0,M.a,[U.a,e.t,e.V],null,null),(l()(),e._55(-1,null,["\n "])),(l()(),e._31(3,0,null,null,1,"core-iframe",[],null,null,null,O.b,O.a)),e._30(4,638976,null,0,x.a,[D.b,E.a,a.a,H.c,F.a,T.b,c.b,[2,A.a],G.a],{src:[0,"src"]},null),(l()(),e._55(-1,null,["\n "]))],function(l,n){l(n,4,0,n.component.sitePolicy)},null)}function View_CoreLoginSitePolicyPage_0(l){return e._57(0,[(l()(),e._31(0,0,null,null,12,"ion-header",[],null,null,null,null,null)),e._30(1,16384,null,0,J.a,[U.a,e.t,e.V,[2,K.a]],null,null),(l()(),e._55(-1,null,["\n "])),(l()(),e._31(3,0,null,null,8,"ion-navbar",[["class","toolbar"],["core-back-button",""]],[[8,"hidden",0],[2,"statusbar-padding",null]],null,null,N.b,N.a)),e._30(4,49152,null,0,W.a,[q.a,[2,K.a],[2,F.a],U.a,e.t,e.V],null,null),e._30(5,212992,null,0,z.a,[W.a,G.a,B.a,Q.b],null,null),(l()(),e._55(-1,3,["\n "])),(l()(),e._31(7,0,null,3,3,"ion-title",[],null,null,null,X.b,X.a)),e._30(8,49152,null,0,Y.a,[U.a,e.t,e.V,[2,Z.a],[2,W.a]],null,null),(l()(),e._55(9,0,["",""])),e._47(131072,$.a,[B.a,e.j]),(l()(),e._55(-1,3,["\n "])),(l()(),e._55(-1,null,["\n"])),(l()(),e._55(-1,null,["\n"])),(l()(),e._31(14,0,null,null,54,"ion-content",[],[[2,"statusbar-padding",null],[2,"has-refresher",null]],null,null,ll.b,ll.a)),e._30(15,4374528,null,0,nl.a,[U.a,G.a,tl.a,e.t,e.V,q.a,el.a,e.M,[2,K.a],[2,F.a]],null,null),(l()(),e._55(-1,1,["\n "])),(l()(),e._31(17,0,null,1,50,"core-loading",[],null,null,null,il.b,il.a)),e._30(18,638976,null,0,ol.a,[B.a,e.t,Q.b,c.b],{hideUntil:[0,"hideUntil"]},null),(l()(),e._55(-1,0,["\n "])),(l()(),e._31(20,0,null,0,46,"ion-list",[],null,null,null,null,null)),e._30(21,16384,null,0,al.a,[U.a,e.t,e.V,G.a,ul.l,tl.a],null,null),(l()(),e._55(-1,null,["\n "])),(l()(),e._31(23,0,null,null,7,"ion-item",[["class","item 
item-block"],["text-wrap",""]],null,null,null,cl.b,cl.a)),e._30(24,1097728,null,3,rl.a,[_l.a,U.a,e.t,e.V,[2,sl.a]],null,null),e._52(335544320,1,{contentLabel:0}),e._52(603979776,2,{_buttons:1}),e._52(603979776,3,{_icons:1}),e._30(28,16384,null,0,fl.a,[],null,null),(l()(),e._55(29,2,["\n ","\n "])),e._47(131072,$.a,[B.a,e.j]),(l()(),e._55(-1,null,["\n "])),(l()(),e._31(32,0,null,null,12,"ion-item",[["class","item item-block"],["text-wrap",""]],null,null,null,cl.b,cl.a)),e._30(33,1097728,null,3,rl.a,[_l.a,U.a,e.t,e.V,[2,sl.a]],null,null),e._52(335544320,4,{contentLabel:0}),e._52(603979776,5,{_buttons:1}),e._52(603979776,6,{_icons:1}),e._30(37,16384,null,0,fl.a,[],null,null),(l()(),e._55(-1,2,["\n "])),(l()(),e._31(39,0,null,2,4,"p",[],null,null,null,null,null)),(l()(),e._31(40,0,null,null,3,"a",[["core-link",""]],[[8,"href",4]],null,null,null,null)),e._30(41,81920,null,0,gl.a,[e.t,a.a,c.b,o.a,T.b,bl.a,[2,F.a],[2,nl.a],[2,A.a],dl.b,hl.a],{capture:[0,"capture"]},null),(l()(),e._55(42,null,["",""])),e._47(131072,$.a,[B.a,e.j]),(l()(),e._55(-1,2,["\n "])),(l()(),e._55(-1,null,["\n "])),(l()(),e._26(16777216,null,null,1,null,View_CoreLoginSitePolicyPage_1)),e._30(47,16384,null,0,pl.k,[e._11,e._6],{ngIf:[0,"ngIf"]},null),(l()(),e._55(-1,null,["\n "])),(l()(),e._31(49,0,null,null,16,"ion-item",[["class","item item-block"],["padding",""],["text-wrap",""]],null,null,null,cl.b,cl.a)),e._30(50,1097728,null,3,rl.a,[_l.a,U.a,e.t,e.V,[2,sl.a]],null,null),e._52(335544320,7,{contentLabel:0}),e._52(603979776,8,{_buttons:1}),e._52(603979776,9,{_icons:1}),e._30(54,16384,null,0,fl.a,[],null,null),(l()(),e._55(-1,2,["\n "])),(l()(),e._31(56,0,null,2,3,"button",[["block",""],["color","primary"],["ion-button",""]],null,[[null,"click"]],function(l,n,t){var e=!0;if("click"===n){e=!1!==l.component.accept()&&e}return 
e},yl.b,yl.a)),e._30(57,1097728,[[8,4]],0,Pl.a,[[8,""],U.a,e.t,e.V],{color:[0,"color"],block:[1,"block"]},null),(l()(),e._55(58,0,["",""])),e._47(131072,$.a,[B.a,e.j]),(l()(),e._55(-1,2,["\n "])),(l()(),e._31(61,0,null,2,3,"button",[["block",""],["ion-button",""]],null,[[null,"click"]],function(l,n,t){var e=!0;if("click"===n){e=!1!==l.component.cancel()&&e}return e},yl.b,yl.a)),e._30(62,1097728,[[8,4]],0,Pl.a,[[8,""],U.a,e.t,e.V],{block:[0,"block"]},null),(l()(),e._55(63,0,["",""])),e._47(131072,$.a,[B.a,e.j]),(l()(),e._55(-1,2,["\n "])),(l()(),e._55(-1,null,["\n "])),(l()(),e._55(-1,0,["\n "])),(l()(),e._55(-1,1,["\n"])),(l()(),e._55(-1,null,["\n"]))],function(l,n){var t=n.component;l(n,5,0);l(n,18,0,t.policyLoaded);l(n,41,0,!1);l(n,47,0,t.showInline);l(n,57,0,"primary","");l(n,62,0,"")},function(l,n){var t=n.component;l(n,3,0,e._44(n,4)._hidden,e._44(n,4)._sbPadding);l(n,9,0,e._56(n,9,0,e._44(n,10).transform("core.login.policyagreement")));l(n,14,0,e._44(n,15).statusbarPadding,e._44(n,15)._hasRefresher);l(n,29,0,e._56(n,29,0,e._44(n,30).transform("core.login.policyagree")));l(n,40,0,t.sitePolicy);l(n,42,0,e._56(n,42,0,e._44(n,43).transform("core.login.policyagreementclick")));l(n,58,0,e._56(n,58,0,e._44(n,59).transform("core.login.policyaccept")));l(n,63,0,e._56(n,63,0,e._44(n,64).transform("core.login.cancel")))})}Object.defineProperty(n,"__esModule",{value:!0});var e=t(0),i=t(5),o=t(1),a=t(4),u=t(63),c=t(2),r=t(127),_=this&&this.__decorate||function(l,n,t,e){var i,o=arguments.length,a=o<3?n:null===e?e=Object.getOwnPropertyDescriptor(n,t):e;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)a=Reflect.decorate(l,n,t,e);else for(var u=l.length-1;u>=0;u--)(i=l[u])&&(a=(o<3?i(a):o>3?i(n,t,a):i(n,t))||a);return o>3&&a&&Object.defineProperty(n,t,a),a},s=this&&this.__metadata||function(l,n){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(l,n)},f=function(){function 
CoreLoginSitePolicyPage(l,n,t,e,i,o,a){this.navCtrl=l,this.loginHelper=t,this.domUtils=e,this.sitesProvider=i,this.utils=o,this.mimeUtils=a,this.siteId=n.get("siteId")}return CoreLoginSitePolicyPage.prototype.ionViewDidLoad=function(){if(this.currentSite=this.sitesProvider.getCurrentSite(),this.currentSite){var l=this.currentSite.id;this.siteId=this.siteId||l,this.siteId==l&&this.currentSite.wsAvailable("core_user_agree_site_policy")?this.fetchSitePolicy():this.cancel()}else this.cancel()},CoreLoginSitePolicyPage.prototype.fetchSitePolicy=function(){var l=this;return this.loginHelper.getSitePolicy(this.siteId).then(function(n){return l.sitePolicy=n,l.utils.getMimeTypeFromUrl(n).then(function(t){var e=l.mimeUtils.getExtension(t,n);l.showInline="html"==e||"htm"==e}).catch(function(){l.showInline=!1}).finally(function(){l.policyLoaded=!0})}).catch(function(n){l.domUtils.showErrorModalDefault(n,"Error getting site policy."),l.cancel()})},CoreLoginSitePolicyPage.prototype.cancel=function(){var l=this;this.sitesProvider.logout().catch(function(){}).then(function(){l.navCtrl.setRoot("CoreLoginSitesPage")})},CoreLoginSitePolicyPage.prototype.accept=function(){var l=this,n=this.domUtils.showModalLoading("core.sending",!0);this.loginHelper.acceptSitePolicy(this.siteId).then(function(){return l.currentSite.invalidateWsCache().catch(function(){}).then(function(){return l.loginHelper.goToSiteInitialPage()})}).catch(function(n){l.domUtils.showErrorModalDefault(n,"Error accepting site policy.")}).finally(function(){n.dismiss()})},CoreLoginSitePolicyPage=_([Object(e.m)({selector:"page-core-login-site-policy",templateUrl:"site-policy.html"}),s("design:paramtypes",[i.s,i.t,r.a,a.a,o.a,c.b,u.b])],CoreLoginSitePolicyPage)}(),g=t(3),b=t(26),d=t(32),h=this&&this.__decorate||function(l,n,t,e){var i,o=arguments.length,a=o<3?n:null===e?e=Object.getOwnPropertyDescriptor(n,t):e;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)a=Reflect.decorate(l,n,t,e);else for(var 
u=l.length-1;u>=0;u--)(i=l[u])&&(a=(o<3?i(a):o>3?i(n,t,a):i(n,t))||a);return o>3&&a&&Object.defineProperty(n,t,a),a},p=function(){function CoreLoginSitePolicyPageModule(){}return CoreLoginSitePolicyPageModule=h([Object(e.I)({declarations:[f],imports:[b.a,d.a,i.l.forChild(f),g.b.forChild()]})],CoreLoginSitePolicyPageModule)}(),y=t(1528),P=t(1529),m=t(1530),v=t(1531),S=t(1532),L=t(1533),C=t(1534),w=t(1535),k=t(1536),V=t(1537),j=t(1538),I=t(1539),R=t(1540),M=t(78),U=t(8),O=t(325),x=t(260),D=t(6),E=t(43),H=t(82),F=t(20),T=t(22),A=t(28),G=t(15),J=t(373),K=t(40),N=t(723),W=t(214),q=t(35),z=t(472),B=t(18),Q=t(9),X=t(724),Y=t(317),Z=t(250),$=t(25),ll=t(183),nl=t(29),tl=t(34),el=t(109),il=t(56),ol=t(53),al=t(89),ul=t(45),cl=t(31),rl=t(21),_l=t(19),sl=t(27),fl=t(33),gl=t(176),bl=t(14),dl=t(11),hl=t(39),pl=t(7),yl=t(47),Pl=t(44),ml=t(70),vl=e._29({encapsulation:2,styles:[],data:{}}),Sl=e._27("page-core-login-site-policy",f,function View_CoreLoginSitePolicyPage_Host_0(l){return e._57(0,[(l()(),e._31(0,0,null,null,1,"page-core-login-site-policy",[],null,null,null,View_CoreLoginSitePolicyPage_0,vl)),e._30(1,49152,null,0,f,[F.a,ml.a,r.a,a.a,o.a,c.b,u.b],null,null)],null,null)},{},{},[]),Ll=t(23),Cl=t(369),wl=t(370),kl=t(372),Vl=t(371),jl=t(471),Il=t(722),Rl=t(108),Ml=t(272);t.d(n,"CoreLoginSitePolicyPageModuleNgFactory",function(){return Ul});var Ul=e._28(p,[],function(l){return 
e._40([e._41(512,e.o,e._21,[[8,[y.a,P.a,m.a,v.a,S.a,L.a,C.a,w.a,k.a,V.a,j.a,I.a,R.a,Sl]],[3,e.o],e.K]),e._41(4608,pl.m,pl.l,[e.G,[2,pl.w]]),e._41(4608,Ll.x,Ll.x,[]),e._41(4608,Ll.d,Ll.d,[]),e._41(4608,Cl.b,Cl.a,[]),e._41(4608,wl.a,wl.b,[]),e._41(4608,kl.b,kl.a,[]),e._41(4608,Vl.b,Vl.a,[]),e._41(4608,B.a,B.a,[jl.a,Cl.b,wl.a,kl.b,Vl.b,B.b,B.c]),e._41(512,pl.b,pl.b,[]),e._41(512,Ll.v,Ll.v,[]),e._41(512,Ll.i,Ll.i,[]),e._41(512,Ll.s,Ll.s,[]),e._41(512,Il.a,Il.a,[]),e._41(512,g.b,g.b,[]),e._41(512,d.a,d.a,[]),e._41(512,Rl.a,Rl.a,[]),e._41(512,b.a,b.a,[]),e._41(512,Il.b,Il.b,[]),e._41(512,p,p,[]),e._41(256,B.c,void 0,[]),e._41(256,B.b,void 0,[]),e._41(256,Ml.a,f,[])])})}});
|
import { createSelector } from 'reselect';
const selectRaw = (state) => state.taxonomy.list;

// Simple field selectors derived from the raw slice.
const selectLoading = createSelector(selectRaw, (raw) => raw.loading);
const selectExportLoading = createSelector(
  selectRaw,
  (raw) => raw.exportLoading,
);
const selectRows = createSelector(selectRaw, (raw) => raw.rows);
const selectCount = createSelector(selectRaw, (raw) => raw.count);
const selectHasRows = createSelector(selectCount, (count) => count > 0);

// Maps the table sorter (antd-style 'descend'/'ascend' orders) to a
// "<column>_<ASC|DESC>" order-by string, or null when no column is chosen.
const selectOrderBy = createSelector(selectRaw, (raw) => {
  const { sorter } = raw;
  if (!sorter || !sorter.columnKey) {
    return null;
  }
  const direction = sorter.order === 'descend' ? 'DESC' : 'ASC';
  return `${sorter.columnKey}_${direction}`;
});

const selectFilter = createSelector(selectRaw, (raw) => raw.filter);

// NOTE(review): unlike selectOffset, this assumes raw.pagination is always
// present — confirm against the reducer's initial state.
const selectLimit = createSelector(
  selectRaw,
  (raw) => raw.pagination.pageSize,
);

// Zero-based row offset of the current page.
const selectOffset = createSelector(selectRaw, (raw) => {
  const { pagination } = raw;
  if (!pagination || !pagination.pageSize) {
    return 0;
  }
  return ((pagination.current || 1) - 1) * pagination.pageSize;
});

// Pagination config for the table, with the total row count folded in.
const selectPagination = createSelector(
  selectRaw,
  selectCount,
  (raw, count) => ({
    ...raw.pagination,
    total: count,
    showSizeChanger: true,
  }),
);

const selectSelectedKeys = createSelector(
  selectRaw,
  (raw) => raw.selectedKeys,
);

// Row objects corresponding to the currently selected keys.
const selectSelectedRows = createSelector(
  selectRaw,
  selectRows,
  (raw, rows) => rows.filter((row) => raw.selectedKeys.includes(row.id)),
);

export default {
  selectLoading,
  selectRows,
  selectCount,
  selectOrderBy,
  selectLimit,
  selectFilter,
  selectOffset,
  selectPagination,
  selectSelectedKeys,
  selectSelectedRows,
  selectHasRows,
  selectExportLoading,
};
|
(function(angular) {
  'use strict';

  // $resource-backed client for the links REST API.
  // NOTE(review): the endpoint host is hard-coded to localhost:8000 —
  // consider moving it to configuration.
  angular.module('angularApp').factory('ApiLink', function($resource) {
    var endpointUrl = 'http://localhost:8000/api/links/:id';
    var paramDefaults = { id: '@id' };
    // PUT is exposed as `update` so link instances can be saved in place.
    var actions = { update: { method: 'PUT' } };
    return $resource(endpointUrl, paramDefaults, actions);
  });
})(angular);
|
/**
* Copyright 2018 The WPT Dashboard Project. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
import '../node_modules/@polymer/paper-toggle-button/paper-toggle-button.js';
import '../node_modules/@polymer/polymer/lib/elements/dom-if.js';
import { html, PolymerElement } from '../node_modules/@polymer/polymer/polymer-element.js';
import { LoadingState } from './loading-state.js';
import './test-file-results-table.js';
import { TestRunsUIQuery } from './test-runs-query.js';
import { TestRunsQueryLoader } from './test-runs.js';
import './wpt-colors.js';
import { WPTFlags } from './wpt-flags.js';
import { PathInfo } from './path.js';
// <test-file-results>: shows per-run results for a single test file and its
// subtests, backed by the /api/search endpoint plus the raw results file.
class TestFileResults extends WPTFlags(LoadingState(PathInfo(
    TestRunsQueryLoader(TestRunsUIQuery(PolymerElement))))) {
  static get template() {
    return html`
    <style include="wpt-colors">
      :host {
        display: block;
        font-size: 16px;
      }
      h1 {
        font-size: 1.5em;
      }
      .right {
        display: flex;
        justify-content: flex-end;
      }
      .right .pad {
        padding: 8px;
      }
      paper-toggle-button {
        --paper-toggle-button-checked-bar-color: var(--paper-blue-500);
        --paper-toggle-button-checked-button-color: var(--paper-blue-700);
        --paper-toggle-button-checked-ink-color: var(--paper-blue-300);
      }
    </style>
    <div class="right">
      <label class="pad">Expand</label>
      <paper-toggle-button class="pad" checked="{{isVerbose}}">
      </paper-toggle-button>
    </div>
    <test-file-results-table test-runs="[[testRuns]]"
        diff-run="[[diffRun]]"
        only-show-differences="{{onlyShowDifferences}}"
        path="[[path]]"
        rows="[[rows]]"
        verbose="[[isVerbose]]">
    </test-file-results-table>
`;
  }

  static get is() {
    return 'test-file-results';
  }

  static get properties() {
    return {
      // Optional second run used for diff views.
      diffRun: Object,
      onlyShowDifferences: {
        type: Boolean,
        value: false,
      },
      // Structured search query forwarded to /api/search.
      structuredSearch: Object,
      // [test-level row, ...subtest rows]; built by fetchTestFile/loadData.
      resultsTable: {
        type: Array,
        value: [],
      },
      isVerbose: {
        type: Boolean,
        value: false,
      },
      // resultsTable, optionally filtered down to rows that differ.
      rows: {
        type: Array,
        computed: 'computeRows(resultsTable, onlyShowDifferences)'
      }
    };
  }

  async connectedCallback() {
    await super.connectedCallback();
    // loadData relies on an absolute test path being set before attach.
    console.assert(this.path);
    console.assert(this.path[0] === '/');
  }

  static get observers() {
    // onlyShowDifferences is listed only to retrigger the load; loadData
    // itself does not take it as a parameter.
    return ['loadData(path, testRuns, structuredSearch, onlyShowDifferences)'];
  }

  async loadData(path, testRuns, structuredSearch) {
    // Run a search query, including subtests, as well as fetching the results file.
    let [searchResults, resultsTable] = await Promise.all([
      this.fetchSearchResults(path, testRuns, structuredSearch),
      this.fetchTestFile(path, testRuns),
    ]);
    if (resultsTable && searchResults) {
      // Restrict subtest rows to those matched by the search, keeping the
      // test-level row (first element) in place.
      const test = searchResults.results.find(r => r.test === path);
      if (test) {
        const subtests = new Set(test.subtests);
        const [first, ...others] = resultsTable;
        const matches = others.filter(t => subtests.has(t.name));
        resultsTable = [first, ...matches];
      }
    }
    this.resultsTable = resultsTable;
  }

  // POSTs the structured search (ANDed with this path) to /api/search.
  // Resolves to the parsed JSON response, or undefined when prerequisites
  // (runs, flags, query) are missing.
  async fetchSearchResults(path, testRuns, structuredSearch) {
    if (!testRuns || !testRuns.length || !this.structuredQueries || !structuredSearch) {
      return;
    }
    // Combine the query with " and [path]".
    const q = {
      and: [
        {pattern: path},
        structuredSearch,
      ]
    };
    const url = new URL('/api/search', window.location);
    url.searchParams.set('subtests', '');
    if (this.diffRun) {
      url.searchParams.set('diff', true);
    }
    const fetchOpts = {
      method: 'POST',
      body: JSON.stringify({
        run_ids: testRuns.map(r => r.id),
        query: q,
      }),
    };
    // 422 responses are retried (see predicate below), up to
    // testRuns.length + 1 attempts, 5000ms apart — presumably while the
    // server is still preparing results for these runs (TODO confirm).
    return await this.retry(
      async() => {
        const r = await window.fetch(url, fetchOpts);
        if (!r.ok) {
          if (fetchOpts.method === 'POST' && r.status === 422) {
            throw r.status;
          }
          throw 'Failed to fetch results data.';
        }
        return r.json();
      },
      err => err === 422,
      testRuns.length + 1,
      5000
    );
  }

  // Fetches each run's results file for `path` and merges them into one
  // table: row 0 is the test-level status, subsequent rows are subtests in
  // first-seen order across runs.
  async fetchTestFile(path, testRuns) {
    this.resultsTable = []; // Clear any existing rows.
    if (!path || !testRuns) {
      return;
    }
    const resultsPerTestRun = await Promise.all(
      testRuns.map(tr => this.loadResultFile(tr)));
    // resultsTable[0].name set after discovering subtests.
    let resultsTable = [{
      results: resultsPerTestRun.map(data => {
        const result = {
          status: data && data.status,
          message: data && data.message,
        };
        if (this.reftestAnalyzer && data && data.screenshots) {
          result.screenshots = this.shuffleScreenshots(this.path, data.screenshots);
        }
        return result;
      }),
    }];
    // Setup test name order according to when they appear in run results.
    let allNames = [];
    for (const runResults of resultsPerTestRun) {
      if (runResults && runResults.subtests) {
        this.mergeNamesInto(runResults.subtests.map(s => s.name), allNames);
      }
    }
    // Copy results into resultsTable.
    for (const name of allNames) {
      let results = [];
      for (const runResults of resultsPerTestRun) {
        const result = runResults && runResults.subtests &&
          runResults.subtests.find(sub => sub.name === name);
        // Runs missing this subtest get an explicit null status/message.
        results.push(result ? {
          status: result.status,
          message: result.message,
        } : {status: null, message: null});
      }
      resultsTable.push({
        name,
        results,
      });
    }
    // Set name for test-level status entry after subtests discovered.
    // Parameter is number of subtests.
    resultsTable[0].name = this.statusName(resultsTable.length - 1);
    return resultsTable;
  }

  // Fetches one run's results JSON for this.path; null on HTTP error.
  // With the mock-screenshots flag on, fabricated screenshot hashes are
  // merged in (real data wins via Object.assign order).
  async loadResultFile(testRun) {
    const url = this.resultsURL(testRun, this.path);
    const response = await window.fetch(url);
    if (!response.ok) {
      return null;
    }
    if (!this.reftestAnalyzerMockScreenshots) {
      return response.json();
    }
    // Use some arbitrary screenshots for any without them.
    const screenshots = {};
    screenshots[this.path] = 'sha1:000c495e8f587dac40894d0cacb5a7ca769410c6';
    screenshots[this.path.replace(/.html$/, '-ref.html')] = 'sha1:000c495e8f587dac40894d0cacb5a7ca769410c6';
    return response.json()
      .then(r => Object.assign({ screenshots }, r));
  }

  // Merges `names` into `allNames` in place, trying to preserve relative
  // order: unseen names are inserted after the position of the last match.
  mergeNamesInto(names, allNames) {
    if (!allNames.length) {
      allNames.splice(0, 0, ...names);
      return;
    }
    let lastOffset = 0;
    let lastMatch = 0;
    names.forEach((name, i) => {
      // Optimization for "next item matches too".
      let offset;
      if (i === lastMatch + 1 && allNames[lastOffset + 1] === name) {
        offset = lastOffset + 1;
      } else {
        offset = allNames.findIndex(n => n === name);
      }
      if (offset >= 0) {
        lastOffset = offset;
        lastMatch = i;
      } else {
        allNames.splice(lastOffset + i - lastMatch, 0, name);
      }
    });
  }

  // Builds the per-run results-file URL from the run's summary URL.
  resultsURL(testRun, path) {
    path = this.encodeTestPath(path);
    // This is relying on the assumption that result files end with '-summary.json.gz'.
    const resultsBase = testRun.results_url.slice(0, testRun.results_url.lastIndexOf('-summary.json.gz'));
    return `${resultsBase}${path}`;
  }

  statusName(numSubtests) {
    return numSubtests > 0 ? 'Harness status' : 'Test status';
  }

  shuffleScreenshots(path, rawScreenshots) {
    // Clone the data because we might modify it.
    const screenshots = Object.assign({}, rawScreenshots);
    // Make sure the test itself appears first in the Map to follow the
    // convention of reftest-analyzer (actual, expected).
    const firstScreenshot = [];
    if (path in screenshots) {
      firstScreenshot.push([path, screenshots[path]]);
      delete screenshots[path];
    }
    return new Map([...firstScreenshot, ...Object.entries(screenshots)]);
  }

  // Filters the table to rows whose first two runs disagree; only applies
  // when onlyShowDifferences is set. Row 0 (test-level) is always kept.
  computeRows(resultsTable, onlyShowDifferences) {
    if (!resultsTable || !resultsTable.length || !onlyShowDifferences) {
      return resultsTable;
    }
    const [first, ...others] = resultsTable;
    return [first, ...others.filter(r => {
      return r.results[0].status !== r.results[1].status;
    })];
  }
}

window.customElements.define(TestFileResults.is, TestFileResults);

export { TestFileResults };
|
;(function(root, factory) {
  if (typeof module === 'object' && module.exports) {
    /* eslint-disable global-require */
    // CommonJS
    var d3 = require('d3')
    module.exports = factory(d3)
    /* eslint-enable global-require */
  } else {
    // Browser global.
    // eslint-disable-next-line no-param-reassign
    root.d3.scaleRadial = factory(root.d3)
  }
})(this, function(d3) {
  function squared(value) {
    return value * value
  }

  // Radial scale: the output is the square root of a linear mapping, so
  // marks sized by this scale have areas proportional to the data.
  function radial() {
    var base = d3.scaleLinear()

    function scale(x) {
      return Math.sqrt(base(x))
    }

    scale.domain = function(values) {
      if (!arguments.length) return base.domain()
      base.domain(values)
      return scale
    }

    scale.nice = function(count) {
      base.nice(count)
      return scale
    }

    // The range is stored squared internally so the sqrt in scale() lands
    // on the caller's requested output values.
    scale.range = function(values) {
      if (!arguments.length) return base.range().map(Math.sqrt)
      base.range(values.map(squared))
      return scale
    }

    scale.ticks = base.ticks
    scale.tickFormat = base.tickFormat
    return scale
  }

  return radial
})
|
import os
import uuid
import yaml
from dagster_k8s.launcher import K8sRunLauncher
from dagster import __version__ as dagster_version
from dagster.core.storage.pipeline_run import PipelineRun
from dagster.utils import load_yaml_from_path
from .conftest import docker_image, environments_path # pylint: disable=unused-import
from .utils import parse_raw_res, remove_none_recursively, wait_for_job_success
EXPECTED_JOB_SPEC = '''
api_version: batch/v1
kind: Job
metadata:
labels:
app.kubernetes.io/instance: dagster
app.kubernetes.io/name: dagster
app.kubernetes.io/version: {dagster_version}
name: dagster-job-{run_id}
spec:
backoff_limit: 4
template:
metadata:
labels:
app.kubernetes.io/instance: dagster
app.kubernetes.io/name: dagster
app.kubernetes.io/version: {dagster_version}
name: dagster-job-pod-{run_id}
spec:
containers:
- args:
- -p
- startPipelineExecution
- -v
- '{{"executionParams": {{"environmentConfigData": {{"loggers": {{"console": {{"config":
{{"log_level": "DEBUG"}}}}}}, "solids": {{"multiply_the_word": {{"config": {{"factor":
2}}, "inputs": {{"word": "bar"}}}}}}}}, "executionMetadata": {{"runId": "{run_id}",
"tags": []}}, "mode": "default", "selector": {{"name": "demo_pipeline", "solidSubset":
null}}, "stepKeys": null}}}}'
command:
- dagster-graphql
env:
- name: DAGSTER_HOME
value: /opt/dagster/dagster_home
- name: DAGSTER_PG_PASSWORD
value_from:
secret_key_ref:
key: postgresql-password
name: dagster-postgresql
env_from: []
image: {job_image}
image_pull_policy: {image_pull_policy}
name: dagster-job-{run_id}
volume_mounts:
- mount_path: /opt/dagster/dagster_home/dagster.yaml
name: dagster-instance
sub_path: dagster.yaml
image_pull_secrets:
- name: element-dev-key
restart_policy: Never
service_account_name: dagit-admin
volumes:
- config_map:
name: dagster-instance
name: dagster-instance
ttl_seconds_after_finished: 100
'''
def test_valid_job_format(
    kubeconfig, docker_image, image_pull_policy
):  # pylint: disable=redefined-outer-name
    """construct_job must render exactly the YAML in EXPECTED_JOB_SPEC."""
    run_id = uuid.uuid4().hex
    env_config = load_yaml_from_path(os.path.join(environments_path(), 'env.yaml'))
    pipeline_run = PipelineRun.create_empty_run('demo_pipeline', run_id, env_config)
    launcher = K8sRunLauncher(
        image_pull_policy=image_pull_policy,
        image_pull_secrets=[{'name': 'element-dev-key'}],
        service_account_name='dagit-admin',
        instance_config_map='dagster-instance',
        job_image=docker_image,
        load_kubeconfig=True,
        kubeconfig_file=kubeconfig,
    )
    job = launcher.construct_job(pipeline_run)
    # None-valued fields are stripped so the dump matches the hand-written spec.
    actual_spec = yaml.dump(
        remove_none_recursively(job.to_dict()), default_flow_style=False
    ).strip()
    expected_spec = EXPECTED_JOB_SPEC.format(
        run_id=run_id,
        job_image=docker_image,
        image_pull_policy=image_pull_policy,
        dagster_version=dagster_version,
    ).strip()
    assert actual_spec == expected_spec
def test_k8s_run_launcher(dagster_instance):  # pylint: disable=redefined-outer-name
    """Launching a valid run produces a successful GraphQL execution result."""
    run_id = uuid.uuid4().hex
    env_config = load_yaml_from_path(os.path.join(environments_path(), 'env.yaml'))
    launched_run = PipelineRun.create_empty_run('demo_pipeline', run_id, env_config)
    dagster_instance.launch_run(launched_run)
    succeeded, raw_logs = wait_for_job_success('dagster-job-%s' % run_id)
    graphql_result = parse_raw_res(raw_logs.split('\n'))
    assert succeeded
    assert not graphql_result.get('errors')
    assert graphql_result['data']
    assert graphql_result['data']['startPipelineExecution']['__typename'] == 'StartPipelineExecutionSuccess'
def test_failing_k8s_run_launcher(dagster_instance):
    """Launch a run with a nonsense environment dict and expect a
    config-validation failure reported through GraphQL."""
    run_id = uuid.uuid4().hex
    environment_dict = {'blah blah this is wrong': {}}
    run = PipelineRun.create_empty_run('demo_pipeline', run_id, environment_dict)
    dagster_instance.launch_run(run)

    success, raw_logs = wait_for_job_success('dagster-job-%s' % run_id)
    assert success

    result = parse_raw_res(raw_logs.split('\n'))
    assert not result.get('errors')
    assert result['data']

    execution = result['data']['startPipelineExecution']
    assert execution['__typename'] == 'PipelineConfigValidationInvalid'
    assert len(execution['errors']) == 2
    assert {error['reason'] for error in execution['errors']} == {
        'FIELD_NOT_DEFINED',
        'MISSING_REQUIRED_FIELD',
    }
|
# mmdetection-style training config for instance segmentation (bbox + mask)
# on a COCO-format export of the SUN RGB-D dataset.
_base_ = '../_base_/default_runtime.py'
# dataset settings
dataset_type = 'CocoDataset'
data_root = '/data/sophia/a/Xiaoke.Shen54/DATASET/sunrgbd_DO_NOT_DELETE/'
# Per-channel normalization constants (ImageNet RGB mean/std).
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)],
# multiscale_mode='range'
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='LoadAnnotations',
        with_bbox=True,
        with_mask=True,
        poly2mask=False),
    # Multi-scale training: shorter side sampled in [640, 800].
    dict(
        type='Resize',
        img_scale=[(1333, 640), (1333, 800)],
        multiscale_mode='range',
        keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
# Use RepeatDataset to speed up training
# (times=1 currently means no actual repetition — raise to repeat epochs.)
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type='RepeatDataset',
        times=1,
        dataset=dict(
            type=dataset_type,
            ann_file=data_root + 'train/gts/raw_gts/det_train.json',
            img_prefix=data_root + 'train/dhs85/',
            pipeline=train_pipeline)),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'val/gts/raw_gts/det_val.json',
        img_prefix=data_root + 'val/dhs85/',
        pipeline=test_pipeline),
    # NOTE: test split reuses the val annotations/images.
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'val/gts/raw_gts/det_val.json',
        img_prefix=data_root + 'val/dhs85/',
        pipeline=test_pipeline))
evaluation = dict(classwise=True, metric=['bbox', 'segm'])
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
# Experiments show that using step=[9, 11] has higher performance
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[9, 11])
runner = dict(type='EpochBasedRunner', max_epochs=50)
|
import json
import logging
import sys
from functools import lru_cache
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import List
import h5py
import pandas as pd
from lm_zoo import errors
from lm_zoo.backends import get_backend, get_compatible_backend
from lm_zoo.models import Registry, Model
__version__ = "1.2.3"
L = logging.getLogger("lm-zoo")
@lru_cache()
def get_registry():
return Registry()
def _make_in_stream(sentences):
"""
Convert a sentence list to a dummy UTF8 stream to pipe to containers.
"""
# Sentences should not have final newlines
sentences = [sentence.strip("\r\n") for sentence in sentences]
stream_str = "\n".join(sentences + [""])
return StringIO(stream_str)
def spec(model: Model, backend=None):
    """
    Return the language model's specification, parsed from JSON into a dict.
    """
    raw = run_model_command_get_stdout(model, "spec", backend=backend)
    return json.loads(raw)
def tokenize(model: Model, sentences: List[str], backend=None):
    """
    Tokenize natural-language text according to a model's preprocessing
    standards.

    `sentences` is a list of natural-language sentences. Returns one list of
    token strings per input sentence. For each sentence there is a one-to-one
    mapping between the tokens returned here and the tokens used by the
    ``get-surprisals`` command.
    """
    stdout = run_model_command_get_stdout(
        model, "tokenize /dev/stdin",
        stdin=_make_in_stream(sentences), backend=backend)
    return [line.split(" ") for line in stdout.strip().split("\n")]
def unkify(model: Model, sentences: List[str], backend=None):
    """
    Detect unknown words for a language model in the given natural language
    text.

    `sentences` is a list of natural-language sentences.

    Returns:
        One mask per sentence, each a list of ``0``/``1`` ints aligned
        one-to-one with the model's tokenization (see ``tokenize``): ``0``
        means the token is in-vocabulary, ``1`` means it is unknown.
    """
    stdout = run_model_command_get_stdout(
        model, "unkify /dev/stdin",
        stdin=_make_in_stream(sentences), backend=backend)
    return [[int(token) for token in line.split(" ")]
            for line in stdout.strip().split("\n")]
def get_surprisals(model: Model, sentences: List[str], backend=None):
    r"""
    Compute word-level surprisals from a language model for the given natural
    language sentences. Returns a data frame with a MultiIndex ``(sentence_id,
    token_id)`` (both one-indexed) and columns ``token`` and ``surprisal``.

    The surprisal of a token :math:`w_i` is the negative logarithm of that
    token's probability under a language model's predictive distribution:

    .. math::
        S(w_i) = -\log_2 p(w_i \mid w_1, w_2, \ldots, w_{i-1})

    Note that surprisals are computed on the level of **tokens**, not words.
    Models that insert extra tokens (e.g., an end-of-sentence token as above)
    or which tokenize on the sub-word level (e.g. GPT2) will not have a
    one-to-one mapping between rows of surprisal output from this command and
    words.

    There is guaranteed to be a one-to-one mapping, however, between the rows
    of this file and the tokens produced by ``lm-zoo tokenize``.
    """
    in_file = _make_in_stream(sentences)
    out = StringIO()
    # The command writes a TSV table to stdout; we only need the captured
    # text, so the API response itself is discarded.  (Previously the
    # response was bound to a variable that was immediately overwritten.)
    run_model_command(model, "get_surprisals /dev/stdin",
                      stdin=in_file, stdout=out, backend=backend)
    out_value = out.getvalue()
    return pd.read_csv(StringIO(out_value), sep="\t").set_index(["sentence_id", "token_id"])
def get_predictions(model: Model, sentences: List[str], backend=None):
    """
    Compute token-level predictive distributions from a language model for the
    given natural language sentences. Returns a h5py ``File`` object with the
    following structure:

        /sentence/<i>/predictions: N_tokens_i * N_vocabulary numpy ndarray of
            log-probabilities (rows are log-probability distributions)
        /sentence/<i>/tokens: sequence of integer token IDs corresponding to
            indices in ``/vocabulary``
        /vocabulary: byte-encoded ndarray of vocabulary items (decode with
            ``numpy.char.decode(vocabulary, "utf-8")``)

    Args:
        model: lm-zoo model reference
        sentences: list of natural language sentence strings (not pre
            tokenized)
    """
    in_file = _make_in_stream(sentences)
    with NamedTemporaryFile("rb") as hdf5_out:
        # Bind-mount the host temp file into the container so the model can
        # write its HDF5 output there.
        host_path = Path(hdf5_out.name).resolve()
        guest_path = "/predictions_out"
        mount = (host_path, guest_path, "rw")

        # The API response is not needed here; only the file the container
        # wrote matters.  (Previously the response was bound to an unused
        # variable.)
        run_model_command(model, f"get_predictions.hdf5 /dev/stdin {guest_path}",
                          mounts=[mount],
                          stdin=in_file,
                          backend=backend)

        # Open while the temp file still exists; the open handle keeps the
        # data readable after the context manager unlinks the file.
        ret = h5py.File(host_path, "r")

    return ret
def run_model_command(model: Model, command_str,
                      backend=None, pull=False, mounts=None,
                      stdin=None, stdout=sys.stdout, stderr=sys.stderr,
                      progress_stream=sys.stderr,
                      raise_errors=True):
    """
    Run the given shell command inside a container instantiating the given
    model.

    Args:
        backend: Backend platform on which to execute the model. May be any of
            the string keys of `lm_zoo.backends.BACKEND_DICT`, or a `Backend`
            class.
        pull: If ``True``, pull the model image even if it is available
            locally.
        mounts: List of bind mounts described as tuples `(host_path,
            guest_path, mode)`, where `mode` is one of ``ro``, ``rw``.
            (This is the order ``get_predictions`` uses; the docstring
            previously listed the guest path first.)
        raise_errors: If ``True``, monitor command status/output and raise
            errors when necessary.

    Returns:
        Docker API response as a Python dictionary. The key ``StatusCode`` may
        be of interest.
    """
    if mounts is None:
        mounts = []

    # Resolve the backend: honor an explicit request when compatible,
    # otherwise fall back to any backend compatible with the model.
    preferred_backends = [] if backend is None else [get_backend(backend)]
    backend = get_compatible_backend(model, preferred_backends=preferred_backends)
    if preferred_backends and backend.__class__ != preferred_backends[0]:
        # Logger.warn is a deprecated alias of Logger.warning.
        L.warning("Requested backend %s is not compatible with model %s; using %s instead",
                  preferred_backends[0].__name__, model, backend.__class__.__name__)

    # Pull the image when requested or when it is missing locally.
    image_available = backend.image_exists(model)
    if pull or not image_available:
        backend.pull_image(model, progress_stream=progress_stream)

    return backend.run_command(model, command_str, mounts=mounts,
                               stdin=stdin, stdout=stdout, stderr=stderr,
                               raise_errors=raise_errors)
def run_model_command_get_stdout(*args, **kwargs):
    """Run a model command (see ``run_model_command``) and return the text it
    wrote to stdout."""
    captured = StringIO()
    run_model_command(*args, **{**kwargs, "stdout": captured})
    return captured.getvalue()
|
# Implemented in Python to support keyword arguments
def open(stream, *, flags=0, cachesize=0, pagesize=0, minkeypage=0):
    # Thin keyword-only wrapper over the positional _open primitive
    # (defined elsewhere in this module; presumably a C-level function
    # that does not accept keywords — TODO confirm).
    return _open(stream, flags, cachesize, pagesize, minkeypage)
|
/* eslint-disable quotes, max-len */
// Editor-form definition for the "Advanced Logic" tab: an edit grid of logic
// entries. Each entry has a name, a Trigger panel (simple condition,
// Javascript, JSON Logic, or a named event) and a grid of Actions (set a
// component property, set a value, or validation).
export default [
  {
    weight: 0,
    input: true,
    label: 'Advanced Logic',
    key: 'logic',
    templates: {
      header: '<div class="row"> \n <div class="col-sm-6">\n <strong>{{ value.length }} Advanced Logic Configured</strong>\n </div>\n</div>',
      row: '<div class="row"> \n <div class="col-sm-6">\n <div>{{ row.name }} </div>\n </div>\n <div class="col-sm-2"> \n <div class="btn-group pull-right"> \n <div class="btn btn-default editRow">Edit</div> \n <div class="btn btn-danger removeRow">Delete</div> \n </div> \n </div> \n</div>',
      footer: ''
    },
    type: 'editgrid',
    addAnother: 'Add Logic',
    saveRow: 'Save Logic',
    components: [
      {
        weight: 0,
        input: true,
        inputType: 'text',
        label: 'Logic Name',
        key: 'name',
        validate: {
          required: true,
        },
        type: 'textfield'
      },
      // Trigger panel: chooses when this logic fires.
      {
        weight: 10,
        key: 'triggerPanel',
        input: false,
        title: 'Trigger',
        tableView: false,
        components: [
          {
            weight: 0,
            input: true,
            tableView: false,
            components: [
              {
                weight: 0,
                input: true,
                label: 'Type',
                key: 'type',
                tableView: false,
                data: {
                  values: [
                    {
                      value: 'simple',
                      label: 'Simple'
                    },
                    {
                      value: 'javascript',
                      label: 'Javascript'
                    },
                    {
                      value: 'json',
                      label: 'JSON Logic'
                    },
                    {
                      value: 'event',
                      label: 'Event'
                    }
                  ],
                },
                dataSrc: 'values',
                template: '<span>{{ item.label }}</span>',
                type: 'select',
              },
              // "Simple" trigger: when a chosen component equals a value.
              {
                weight: 10,
                label: '',
                key: 'simple',
                type: 'container',
                tableView: false,
                customConditional(context) {
                  return context.row.type === 'simple';
                },
                components: [
                  {
                    input: true,
                    key: 'show',
                    label: 'Show',
                    type: 'hidden',
                    tableView: false,
                    defaultValue: true
                  },
                  {
                    type: 'select',
                    input: true,
                    label: 'When the form component:',
                    key: 'when',
                    dataSrc: 'custom',
                    valueProperty: 'value',
                    tableView: false,
                    data: {
                      // Offer every other component of the edited form.
                      custom(context) {
                        var values = [];
                        context.utils.eachComponent(context.instance.root.editForm.components, function(component, path) {
                          if (component.key !== context.data.key) {
                            values.push({
                              label: component.label || component.key,
                              value: path
                            });
                          }
                        });
                        return values;
                      }
                    }
                  },
                  {
                    type: 'textfield',
                    input: true,
                    label: 'Has the value:',
                    key: 'eq',
                    tableView: false
                  }
                ]
              },
              {
                weight: 10,
                type: 'textarea',
                key: 'javascript',
                rows: 5,
                editor: 'ace',
                input: true,
                tableView: false,
                placeholder: `result = (data['mykey'] > 1);`,
                description: '"row", "data", and "component" variables are available. Return "result".',
                customConditional(context) {
                  return context.row.type === 'javascript';
                }
              },
              {
                weight: 10,
                type: 'textarea',
                key: 'json',
                rows: 5,
                editor: 'ace',
                label: 'JSON Logic',
                as: 'json',
                input: true,
                tableView: false,
                placeholder: `{ ... }`,
                description: '"row", "data", "component" and "_" variables are available. Return the result to be passed to the action if truthy.',
                customConditional(context) {
                  return context.row.type === 'json';
                }
              },
              {
                weight: 10,
                type: 'textfield',
                key: 'event',
                label: 'Event Name',
                placeholder: 'event',
                description: 'The event that will trigger this logic. You can trigger events externally or via a button.',
                tableView: false,
                customConditional(context) {
                  return context.row.type === 'event';
                }
              }
            ],
            key: 'trigger',
            type: 'container',
          }
        ],
        type: 'panel',
      },
      // Actions grid: what happens when the trigger fires.
      {
        weight: 20,
        input: true,
        label: 'Actions',
        key: 'actions',
        tableView: false,
        templates: {
          header: '<div class="row"> \n <div class="col-sm-6"><strong>{{ value.length }} actions</strong></div>\n</div>',
          row: '<div class="row"> \n <div class="col-sm-6">\n <div>{{ row.name }} </div>\n </div>\n <div class="col-sm-2"> \n <div class="btn-group pull-right"> \n <div class="btn btn-default editRow">Edit</div> \n <div class="btn btn-danger removeRow">Delete</div> \n </div> \n </div> \n</div>',
          footer: ''
        },
        type: 'editgrid',
        addAnother: 'Add Action',
        saveRow: 'Save Action',
        components: [
          {
            weight: 0,
            title: 'Action',
            input: false,
            key: 'actionPanel',
            type: 'panel',
            components: [
              {
                weight: 0,
                input: true,
                inputType: 'text',
                label: 'Action Name',
                key: 'name',
                validate: {
                  required: true,
                },
                type: 'textfield',
              },
              {
                weight: 10,
                input: true,
                label: 'Type',
                key: 'type',
                data: {
                  values: [
                    {
                      value: 'property',
                      label: 'Property'
                    },
                    {
                      value: 'value',
                      label: 'Value'
                    },
                    {
                      label: 'Validation',
                      value: 'validation'
                    }
                  ],
                },
                dataSrc: 'values',
                template: '<span>{{ item.label }}</span>',
                type: 'select',
              },
              // Which component property a "property" action modifies; each
              // entry's `type` drives which value editor is shown below.
              {
                weight: 20,
                type: 'select',
                template: '<span>{{ item.label }}</span>',
                dataSrc: 'json',
                tableView: false,
                data: {
                  json: [
                    {
                      label: 'Hidden',
                      value: 'hidden',
                      type: 'boolean'
                    },
                    {
                      label: 'Required',
                      value: 'validate.required',
                      type: 'boolean'
                    },
                    {
                      label: 'Disabled',
                      value: 'disabled',
                      type: 'boolean'
                    },
                    {
                      label: 'Label',
                      value: 'label',
                      type: 'string'
                    },
                    {
                      label: 'Title',
                      value: 'title',
                      type: 'string'
                    },
                    {
                      label: 'Tooltip',
                      value: 'tooltip',
                      type: 'string'
                    },
                    {
                      label: 'Description',
                      value: 'description',
                      type: 'string'
                    },
                    {
                      label: 'Placeholder',
                      value: 'placeholder',
                      type: 'string'
                    },
                    {
                      label: 'CSS Class',
                      value: 'className',
                      type: 'string'
                    },
                    {
                      label: 'Container Custom Class',
                      value: 'customClass',
                      type: 'string'
                    }
                  ],
                  values: []
                },
                key: 'property',
                label: 'Component Property',
                input: true,
                customConditional(context) {
                  return context.row.type === 'property';
                }
              },
              {
                weight: 30,
                input: true,
                label: 'Set State',
                key: 'state',
                tableView: false,
                data: {
                  values: [
                    {
                      label: 'True',
                      value: 'true'
                    },
                    {
                      label: 'False',
                      value: 'false'
                    }
                  ],
                },
                dataSrc: 'values',
                template: '<span>{{ item.label }}</span>',
                type: 'select',
                customConditional(context) {
                  return context.row.type === 'property' &&
                    context.row.hasOwnProperty('property') &&
                    context.row.property.type === 'boolean';
                }
              },
              {
                weight: 30,
                type: 'textfield',
                key: 'text',
                label: 'Text',
                inputType: 'text',
                input: true,
                tableView: false,
                description: 'Can use templating with {{ data.myfield }}. "data", "row", "component" and "result" variables are available.',
                customConditional(context) {
                  return context.row.type === 'property' &&
                    context.row.hasOwnProperty('property') &&
                    context.row.property.type === 'string' &&
                    !context.row.property.component;
                }
              },
              {
                weight: 20,
                input: true,
                label: 'Value (Javascript)',
                key: 'value',
                editor: 'ace',
                rows: 5,
                placeholder: `value = data.myfield;`,
                type: 'textarea',
                tableView: false,
                description: '"row", "data", "component", and "result" variables are available. Return the value.',
                customConditional(context) {
                  return context.row.type === 'value';
                }
              }
            ],
          }
        ],
      }
    ]
  }
];
/* eslint-enable quotes, max-len */
|
# Import classes for input/output channels
from yggdrasil.interface.YggInterface import (
    YggPandasInput, YggPandasOutput)

# Initialize input/output channels
in_channel = YggPandasInput('inputB')
out_channel = YggPandasOutput('outputB')

# Process frames until the input channel is exhausted or closed.
while True:
    # recv() returns (flag, frame); the flag is False on error/end of input.
    flag, frame = in_channel.recv()
    if not flag:
        print("Model B: No more input.")
        break

    # Report the received table, then echo it downstream.
    nrows = len(frame.index)
    print('Model B: (%d rows)' % nrows)
    print(frame)

    # send() returns False when the output channel reports an error.
    if not out_channel.send(frame):
        raise RuntimeError("Model B: Error sending output.")
|
def api_data():
    """Return the static API description: each service with its endpoints,
    response/error types, and argument signatures."""
    capture_endpoints = [
        {
            "name": "create",
            "resp": "JCaptureCreate",
            "err": "JErr",
            "args": [
                "str name",
                "str visibility",
                "str timezone",
                "long start_ts",
                "long finish_ts",
                "list<str> device_uids",
                "list<str> radius_uids",
            ],
        },
        {"name": "exists", "resp": "JCaptureExists", "err": "JErr", "args": ["str capture_uid"]},
    ]
    return [{"name": "capture", "endpoints": capture_endpoints}]
|
import yaml
import textwrap
import sxml.cli
import json
import pytest
from pathlib import Path
SIMPLE_CONFIG = r'''
$chain:
- $apply: html.loads
- $apply: sxml.find
attrs:
- name: title
query: h1
$chain:
- $apply: html.dumps
'''
def test_simple(tmp_path):
    """End-to-end CLI run over a minimal config: extract the <h1> text and
    check the YAML output."""
    config_path = tmp_path / 'simple_config.yaml'
    config_path.write_text(SIMPLE_CONFIG)

    inp_path = tmp_path / 'inp.html'
    inp_path.write_text('<h1>hello</h1>')
    out_path = tmp_path / 'out.html'

    cli_argv = [
        '-p', str(config_path),
        '-i', str(inp_path),
        '-o', str(out_path),
        '-e', json.dumps({'url': 'https://example.com'}),
    ]
    sxml.cli.main(sxml.cli.parse_args(cli_argv))

    assert yaml.safe_load(out_path.read_text()) == {
        'title': 'hello',
    }
# Equivalent shell invocations, for reference:
# sxml -p ./examples/wordnet.yaml -i ./examples/wordnet.html -o ./examples/wordnet.out.yaml -e '{"url": "http://wordnetweb.princeton.edu/perl/webwn?s=royal"}' -SUN 4
# sxml -p ./examples/python_releases.yaml -i ./examples/python_releases.html -o ./examples/python_releases.out.yaml -e '{"url": "https://www.python.org/downloads/source/"}' -SUN 4
# sxml -p ./examples/pypi.yaml -i ./examples/pypi.html -o ./examples/pypi.out.yaml -e '{"url": "https://pypi.org/project/lxml/"}' -SUN 4

# Source URL for each bundled example; every example shares the same
# '-SUN 4' option prefix.
_EXAMPLE_URLS = {
    'python_releases': 'https://www.python.org/downloads/source/',
    'wordnet': 'http://wordnetweb.princeton.edu/perl/webwn?s=royal',
    'pypi': 'https://pypi.org/project/lxml/',
}

EXAMPLES_OPTIONS = {
    name: ['-SUN', '4', '-e', json.dumps({'url': url})]
    for name, url in _EXAMPLE_URLS.items()
}
@pytest.mark.parametrize("name", [
'python_releases',
'wordnet',
'pypi',
])
def test_examples(name, tmp_path):
examples_path = Path(__file__).parent.parent / 'examples'
config_path = examples_path / f'{name}.yaml'
inp_path = examples_path / f'{name}.html'
out_path = tmp_path / 'out.html'
args = sxml.cli.parse_args([
'-p', str(config_path),
'-i', str(inp_path),
'-o', str(out_path),
*EXAMPLES_OPTIONS[name],
])
sxml.cli.main(args)
assert out_path.read_text() == (examples_path / f'{name}.out.yaml').read_text()
|
/* @flow */
// flow 注释必须在第一行
import config from 'core/config'
import { warn, cached } from 'core/util/index'
import { mark, measure } from 'core/util/perf'
import Vue from './runtime/index'
import { query } from './util/index'
import { compileToFunctions } from './compiler/index'
import {
shouldDecodeNewlines,
shouldDecodeNewlinesForHref,
} from './util/compat'
// Closure-cached lookup: element id -> that element's innerHTML template.
const idToTemplate = cached((id) => {
  // query() resolves the selector; if the element is missing it falls back
  // to a freshly created div.
  const el = query(id)
  return el && el.innerHTML
})
// Keep a reference to the runtime-only $mount, then override it with a
// version that compiles `template`/`el` into a render function first.
const mount = Vue.prototype.$mount
Vue.prototype.$mount = function (
  el?: string | Element,
  hydrating?: boolean
): Component {
  el = el && query(el)

  /* istanbul ignore if */
  // Refuse to mount directly on <html> or <body>.
  if (el === document.body || el === document.documentElement) {
    process.env.NODE_ENV !== 'production' &&
      warn(
        'Do not mount Vue to <html> or <body> - mount to normal elements instead.'
      )
    return this
  }

  const options = this.$options
  // resolve template/el and convert to render function
  // If no render function is provided, turn `template` into one.
  // When both `render` and `template` exist, `render` takes priority.
  if (!options.render) {
    let template = options.template
    // Resolve the template: '#id' selector, raw string, or DOM node.
    if (template) {
      if (typeof template === 'string') {
        if (template.charAt(0) === '#') {
          // Cached-by-closure lookup of the referenced element's innerHTML.
          template = idToTemplate(template)
          /* istanbul ignore if */
          if (process.env.NODE_ENV !== 'production' && !template) {
            warn(
              `Template element not found or is empty: ${options.template}`,
              this
            )
          }
        }
      } else if (template.nodeType) { // template is an actual DOM node
        template = template.innerHTML
      } else {
        if (process.env.NODE_ENV !== 'production') {
          warn('invalid template option:' + template, this)
        }
        // Invalid template: bail out without mounting.
        return this
      }
    } else if (el) {
      template = getOuterHTML(el)
    }
    if (template) {
      /* istanbul ignore if */
      if (process.env.NODE_ENV !== 'production' && config.performance && mark) {
        mark('compile')
      }
      // staticRenderFns is an array of render functions for static subtrees.
      const { render, staticRenderFns } = compileToFunctions(
        template,
        {
          outputSourceRange: process.env.NODE_ENV !== 'production',
          shouldDecodeNewlines,
          shouldDecodeNewlinesForHref,
          delimiters: options.delimiters,
          comments: options.comments,
        },
        this
      )
      // Attach the compiled render / staticRenderFns onto $options.
      options.render = render
      options.staticRenderFns = staticRenderFns
      /* istanbul ignore if */
      if (process.env.NODE_ENV !== 'production' && config.performance && mark) {
        mark('compile end')
        measure(`vue ${this._name} compile`, 'compile', 'compile end')
      }
    }
  }
  return mount.call(this, el, hydrating)
}
/**
* Get outerHTML of elements, taking care
* of SVG elements in IE as well.
*/
function getOuterHTML(el: Element): string {
if (el.outerHTML) {
return el.outerHTML
} else {
const container = document.createElement('div')
container.appendChild(el.cloneNode(true))
return container.innerHTML
}
}
// Expose the standalone template compiler and export the full build.
Vue.compile = compileToFunctions

export default Vue
|
// import models
const Product = require('./Product');
const Category = require('./Category');
const Tag = require('./Tag');
const ProductTag = require('./ProductTag');

// Products belongsTo Category: product rows carry a category_id foreign key;
// deleting a category nulls the key instead of deleting its products.
Product.belongsTo(Category, {
  foreignKey: 'category_id',
  onDelete: 'set null'
});

// Categories have many Products
Category.hasMany(Product, {
  foreignKey: 'category_id'
});

// Products belongToMany Tags (through ProductTag);
// accessible on a product via the 'product_tags' alias.
Product.belongsToMany(Tag, {
  through: ProductTag,
  as: 'product_tags',
  foreignKey: 'product_id'
});

// Tags belongToMany Products (through ProductTag);
// accessible on a tag via the 'tagged_products' alias.
Tag.belongsToMany(Product, {
  through: ProductTag,
  as: 'tagged_products',
  foreignKey: 'tag_id'
});

// Export the wired-up models for use by the routes/controllers.
module.exports = {
  Product,
  Category,
  Tag,
  ProductTag,
};
|
import React from 'react';
import { render } from 'react-dom';
import App from './App.jsx';
import Styles from './styles.scss'
render(
<div className="app">
<App/>
</div>,
document.getElementById('root')
)
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
from cvxpy.expressions.expression import Expression
from cvxpy.atoms.sum_largest import sum_largest
def sum_smallest(x, k):
    """Sum of the smallest k values of ``x``.

    Implemented by negation: sum_smallest(x, k) == -sum_largest(-x, k),
    reusing the existing ``sum_largest`` atom.

    Parameters
    ----------
    x : Expression or numeric constant
        The expression whose smallest entries are summed.
    k : int
        The number of smallest entries to sum.

    Returns
    -------
    Expression
        An expression representing the sum of the k smallest entries of x.
    """
    x = Expression.cast_to_const(x)
    return -sum_largest(-x, k)
|
# encoding: utf-8
import os, getpass
import os.path as osp
import argparse
from easydict import EasyDict as edict
from dataset.data_settings import load_dataset
from cvpack.utils.pyt_utils import ensure_dir
class Config:
    # Static configuration for the refine-net training/testing runs.

    # -------- Directoy Config -------- #
    # Root of the refined CMU annotation JSON files.
    DATA_DIR = '/media/xuchengjun/datasets/CMU/refine_json_file'

    # -------- Data Config -------- #
    DATALOADER = edict()
    DATALOADER.NUM_WORKERS = 8
    DATALOADER.ASPECT_RATIO_GROUPING = False
    DATALOADER.SIZE_DIVISIBILITY = 0

    DATASET = edict()
    DATASET.ROOT_IDX = 2  # pelvis
    DATASET.MAX_PEOPLE = 20

    # -------- Model Config -------- #
    MODEL = edict()
    MODEL.DEVICE = 'cuda'
    MODEL.GPU_IDS = [0,1,2]

    # -------- Training Config -------- #
    SOLVER = edict()
    SOLVER.BASE_LR = 0.002
    SOLVER.BATCH_SIZE = 1024
    SOLVER.NUM_EPOCHS = 250
    SOLVER.LR_STEP_SIZE = 30
    SOLVER.GAMMA = 0.5
    # NOTE(review): DROP_STEP extends past NUM_EPOCHS (250); later steps are
    # presumably never reached — confirm intended schedule.
    SOLVER.DROP_STEP = [100, 150, 200, 250, 300, 350, 400, 425, 450, 475, 500, 525, 550, 575, 600]

    # --------- Checkpoint Config -------- #
    PRINT_FREQ = 1
    CHECK_FREQ = 200
    SAVE_FREQ = 1
    SAVE_PATH = '/media/xuchengjun/zx/human_pose/pth/main/1.4'
    CHECK_PATH = '/media/xuchengjun/zx/human_pose/pth/main/1.4/refine.pth'
    PRETRAINED_PATH = '/home/xuchengjun/ZXin/smap/pretrained/RefineNet.pth'

    # --------- Testing Config ----------- #
    TEST = edict()
    TEST.BATCH_SIZE = 1
# Module-level singleton configuration instance.
cfg = Config()


def link_log_dir():
    # Create a ./log symlink pointing at the output directory (first call only).
    if not osp.exists('./log'):
        # NOTE(review): Config defines no OUTPUT_DIR attribute, so this will
        # raise AttributeError when invoked — confirm the intended target
        # (SAVE_PATH?) before relying on this helper.
        ensure_dir(cfg.OUTPUT_DIR)
        cmd = 'ln -s ' + cfg.OUTPUT_DIR + ' log'
        os.system(cmd)
def make_parser():
    """Build the CLI parser: a single optional boolean flag, -log/--linklog."""
    p = argparse.ArgumentParser()
    p.add_argument('-log', '--linklog', action='store_true', default=False)
    return p
if __name__ == '__main__':
    # CLI entry point: optionally create the ./log symlink.
    parser = make_parser()
    args = parser.parse_args()
    if args.linklog:
        link_log_dir()
|
from datacite import DataCiteMDSClient, schema42
# If you want to generate XML for earlier versions, you need to use either the
# schema31, schema40 or schema41 instead.

# Minimal DataCite 4.2 metadata record used throughout this walkthrough.
data = {
    'identifiers': [{
        'identifierType': 'DOI',
        'identifier': '10.1234/foo.bar',
    }],
    'creators': [
        {'name': 'Smith, John'},
    ],
    'titles': [
        {'title': 'Minimal Test Case', }
    ],
    'publisher': 'Invenio Software',
    'publicationYear': '2015',
    'types': {
        'resourceType': 'Dataset',
        'resourceTypeGeneral': 'Dataset'
    },
    'schemaVersion': 'http://datacite.org/schema/kernel-4',
}

# Validate dictionary
assert schema42.validate(data)

# Generate DataCite XML from dictionary.
doc = schema42.tostring(data)

# Initialize the MDS client (test_mode avoids touching production DOIs).
d = DataCiteMDSClient(
    username='MYDC.MYACCOUNT',
    password='mypassword',
    prefix='10.1234',
    test_mode=True,
)

# Set metadata for DOI
d.metadata_post(doc)

# Mint new DOI
d.doi_post('10.1234/test-doi', 'http://example.org/test-doi')

# Get DOI location
location = d.doi_get("10.1234/test-doi")

# Set alternate URL for content type (available through content negotiation)
d.media_post(
    "10.1234/test-doi",
    {"application/json": "http://example.org/test-doi/json/",
     "application/xml": "http://example.org/test-doi/xml/"}
)

# Get alternate URLs
mapping = d.media_get("10.1234/test-doi")
assert mapping["application/json"] == "http://example.org/test-doi/json/"

# Get metadata for DOI
doc = d.metadata_get("10.1234/test-doi")

# Make DOI inactive
d.metadata_delete("10.1234/test-doi")
|
// Prefixes should be globs (i.e. of the form "/*" or "/foo/*")
const validatePrefixEntry = prefix => {
  const startsWithSlash = /^\//.test(prefix)
  const endsWithGlob = /\/\*$/.test(prefix)
  if (startsWithSlash && endsWithGlob) {
    return
  }
  throw Error(
    `Plugin "gatsby-plugin-client-only-paths" found invalid prefix pattern: ${prefix}`
  )
}
exports.onCreatePage = ({ page, store, actions }, { prefixes }) => {
const { createPage } = actions
const re = {}
prefixes.forEach(validatePrefixEntry)
return new Promise(resolve => {
// Don't set matchPath again if it's already been set.
if (page.matchPath || page.path.match(/dev-404-page/)) {
resolve()
}
prefixes.some(prefix => {
if (!re[prefix]) {
// Remove the * from the prefix and memoize
const trimmedPrefix = prefix.replace(/\*$/, ``)
re[prefix] = new RegExp(`^${trimmedPrefix}`)
}
// Ensure that the path ends in a trailing slash, since it can be removed.
const path = page.path.match(/\/$/) ? page.path : `${page.path}/`
if (path.match(re[prefix])) {
page.matchPath = prefix.replace(/\*$/, `*`)
createPage(page)
return true
}
return false
})
return resolve()
})
}
|
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#pragma once
#include <AzCore/Memory/SystemAllocator.h>
#include <AzCore/RTTI/RTTI.h>
#include <AzCore/std/containers/unordered_map.h>
#include <AzCore/std/string/string.h>
#include <AzFramework/Asset/AssetSystemBus.h>
#include <AzToolsFramework/AssetBrowser/AssetBrowserFilterModel.h>
#include <GraphCanvas/Widgets/NodePalette/TreeItems/NodePaletteTreeItem.h>
#include <GraphCanvas/Widgets/GraphCanvasTreeCategorizer.h>
#include <ScriptEvents/ScriptEventsAsset.h>
#include <Editor/View/Widgets/NodePalette/NodePaletteModelBus.h>
#include <ScriptCanvas/Asset/Functions/ScriptCanvasFunctionAsset.h>
#include <ScriptCanvas/Core/Core.h>
namespace ScriptCanvasEditor
{
// Move these down into GraphCanvas for more general re-use
struct NodePaletteModelInformation
{
AZ_RTTI(NodeModelInformation, "{CC031806-7610-4C29-909D-9527F265E014}");
AZ_CLASS_ALLOCATOR(NodePaletteModelInformation, AZ::SystemAllocator, 0);
virtual ~NodePaletteModelInformation() = default;
void PopulateTreeItem(GraphCanvas::NodePaletteTreeItem& treeItem) const;
ScriptCanvas::NodeTypeIdentifier m_nodeIdentifier;
AZStd::string m_displayName;
AZStd::string m_toolTip;
AZStd::string m_categoryPath;
AZStd::string m_styleOverride;
AZStd::string m_titlePaletteOverride;
};
    // Styling metadata shared by every node registered under a category path.
    struct CategoryInformation
    {
        AZStd::string m_styleOverride;
        AZStd::string m_paletteOverride = GraphCanvas::NodePaletteTreeItem::DefaultNodeTitlePalette;
        AZStd::string m_tooltip;
    };
    // Registry of everything that can appear in the ScriptCanvas node
    // palette: custom nodes, reflected class methods, EBus handlers/senders,
    // and asset-driven entries (script events, functions).
    class NodePaletteModel
        : public GraphCanvas::CategorizerInterface
    {
    public:
        typedef AZStd::unordered_map< ScriptCanvas::NodeTypeIdentifier, NodePaletteModelInformation* > NodePaletteRegistry;

        AZ_CLASS_ALLOCATOR(NodePaletteModel, AZ::SystemAllocator, 0);

        NodePaletteModel();
        ~NodePaletteModel();

        NodePaletteId GetNotificationId() const;

        // Hooks the asset browser model so asset-backed nodes stay in sync.
        void AssignAssetModel(AzToolsFramework::AssetBrowser::AssetBrowserFilterModel* assetModel);

        void RepopulateModel();

        // Registration entry points for the various node sources.
        void RegisterCustomNode(AZStd::string_view categoryPath, const AZ::Uuid& uuid, AZStd::string_view name, const AZ::SerializeContext::ClassData* classData);
        void RegisterClassNode(const AZStd::string& categoryPath, const AZStd::string& methodClass, const AZStd::string& methodName, AZ::BehaviorMethod* behaviorMethod, AZ::BehaviorContext* behaviorContext);

        void RegisterEBusHandlerNodeModelInformation(AZStd::string_view categoryPath, AZStd::string_view busName, AZStd::string_view eventName, const ScriptCanvas::EBusBusId& busId, const AZ::BehaviorEBusHandler::BusForwarderEvent& forwardEvent);
        void RegisterEBusSenderNodeModelInformation(AZStd::string_view categoryPath, AZStd::string_view busName, AZStd::string_view eventName, const ScriptCanvas::EBusBusId& busId, const ScriptCanvas::EBusEventId& eventId, const AZ::BehaviorEBusEventSender& eventSender);

        // Asset Based Registrations
        AZStd::vector<ScriptCanvas::NodeTypeIdentifier> RegisterScriptEvent(ScriptEvents::ScriptEventsAsset* scriptEventAsset);
        AZStd::vector<ScriptCanvas::NodeTypeIdentifier> RegisterFunctionInformation(ScriptCanvas::ScriptCanvasFunctionAsset* functionAsset);

        // Category lookup/registration.
        void RegisterCategoryInformation(const AZStd::string& category, const CategoryInformation& categoryInformation);
        const CategoryInformation* FindCategoryInformation(const AZStd::string& categoryStyle) const;
        const CategoryInformation* FindBestCategoryInformation(AZStd::string_view categoryView) const;

        const NodePaletteModelInformation* FindNodePaletteInformation(const ScriptCanvas::NodeTypeIdentifier& nodeTypeIdentifier) const;

        const NodePaletteRegistry& GetNodeRegistry() const;

        // GraphCanvas::CategorizerInterface
        GraphCanvas::GraphCanvasTreeItem* CreateCategoryNode(AZStd::string_view categoryPath, AZStd::string_view categoryName, GraphCanvas::GraphCanvasTreeItem* treeItem) const override;
        ////

        // Asset Node Support: reacts to asset browser model row changes.
        void OnRowsInserted(const QModelIndex& parentIndex, int first, int last);
        void OnRowsAboutToBeRemoved(const QModelIndex& parentIndex, int first, int last);
        void TraverseTree(QModelIndex index = QModelIndex());
        ////

    private:

        AZStd::vector<ScriptCanvas::NodeTypeIdentifier> ProcessAsset(AzToolsFramework::AssetBrowser::AssetBrowserEntry* entry);
        void RemoveAsset(const AZ::Data::AssetId& assetId);

        void ClearRegistry();

        AzToolsFramework::AssetBrowser::AssetBrowserFilterModel* m_assetModel = nullptr;
        AZStd::vector< QMetaObject::Connection > m_lambdaConnections;

        AZStd::unordered_map< AZStd::string, CategoryInformation > m_categoryInformation;
        NodePaletteRegistry m_registeredNodes;

        // Maps a source asset to every node identifier it contributed.
        AZStd::unordered_multimap<AZ::Data::AssetId, ScriptCanvas::NodeTypeIdentifier> m_assetMapping;

        NodePaletteId m_paletteId;
    };
// Concrete Sub Classes with whatever extra data is required [ScriptCanvas Only]

// Palette entry for a custom (serialize-context reflected) node type.
struct CustomNodeModelInformation
: public NodePaletteModelInformation
{
AZ_RTTI(CustomNodeModelInformation, "{481FB8AE-8683-4E50-95C1-B4B1C1B6806C}", NodePaletteModelInformation);
AZ_CLASS_ALLOCATOR(CustomNodeModelInformation, AZ::SystemAllocator, 0);
// Type id of the reflected node class this entry creates.
AZ::Uuid m_typeId;
};
// Palette entry for a BehaviorContext class-method node.
struct MethodNodeModelInformation
: public NodePaletteModelInformation
{
// Fixed: AZ_RTTI and AZ_CLASS_ALLOCATOR previously declared with the wrong
// class name (CustomNodeModelInformation), so this type's RTTI name and
// allocator tag were incorrect. The UUID is unchanged.
AZ_RTTI(MethodNodeModelInformation, "{9B6337F9-B8D0-4B63-9EE7-91079FE386B9}", NodePaletteModelInformation);
AZ_CLASS_ALLOCATOR(MethodNodeModelInformation, AZ::SystemAllocator, 0);
// Name of the class that owns the method.
AZStd::string m_classMethod;
// NOTE(review): member name contains a typo ("metehod"); kept as-is because
// external code may reference it by this exact identifier — rename in a
// coordinated change.
AZStd::string m_metehodName;
};
// Palette entry for an EBus handler (event-receiving) node.
struct EBusHandlerNodeModelInformation
: public NodePaletteModelInformation
{
// Fixed: AZ_RTTI previously declared with the name "EBusNodeModelInformation",
// which does not match this class, so the RTTI type name was wrong. The UUID
// is unchanged.
AZ_RTTI(EBusHandlerNodeModelInformation, "{D1438D14-0CE9-4202-A1C5-9F5F13DFC0C4}", NodePaletteModelInformation);
AZ_CLASS_ALLOCATOR(EBusHandlerNodeModelInformation, AZ::SystemAllocator, 0);
// Bus/event this handler node listens on.
AZStd::string m_busName;
AZStd::string m_eventName;
ScriptCanvas::EBusBusId m_busId;
ScriptCanvas::EBusEventId m_eventId;
};
// Palette entry for an EBus sender (event-dispatching) node.
struct EBusSenderNodeModelInformation
: public NodePaletteModelInformation
{
AZ_RTTI(EBusSenderNodeModelInformation, "{EE0F0385-3596-4D4E-9DC7-BE147EBB3C15}", NodePaletteModelInformation);
// Fixed: AZ_CLASS_ALLOCATOR previously named EBusHandlerNodeModelInformation,
// tagging this class's allocations with the wrong class name.
AZ_CLASS_ALLOCATOR(EBusSenderNodeModelInformation, AZ::SystemAllocator, 0);
// Bus/event this sender node dispatches to.
AZStd::string m_busName;
AZStd::string m_eventName;
ScriptCanvas::EBusBusId m_busId;
ScriptCanvas::EBusEventId m_eventId;
};
// Palette entry for a Script Event handler node; reuses the EBus handler
// data (bus/event names and ids) with its own RTTI identity.
struct ScriptEventHandlerNodeModelInformation
: public EBusHandlerNodeModelInformation
{
AZ_RTTI(ScriptEventHandlerNodeModelInformation, "{BCA92869-63F4-4A1F-B751-F3F28443BBFC}", EBusHandlerNodeModelInformation);
AZ_CLASS_ALLOCATOR(ScriptEventHandlerNodeModelInformation, AZ::SystemAllocator, 0);
};
// Palette entry for a Script Event sender node; reuses the EBus sender
// data (bus/event names and ids) with its own RTTI identity.
struct ScriptEventSenderNodeModelInformation
: public EBusSenderNodeModelInformation
{
AZ_RTTI(ScriptEventSenderNodeModelInformation, "{99046345-080C-42A6-BE76-D09583055EED}", EBusSenderNodeModelInformation);
AZ_CLASS_ALLOCATOR(ScriptEventSenderNodeModelInformation, AZ::SystemAllocator, 0);
};
//! FunctionNodeModelInformation refers to function graph assets, not methods
struct FunctionNodeModelInformation
: public NodePaletteModelInformation
{
AZ_RTTI(FunctionNodeModelInformation, "{B84B4C2C-2F0B-4C0B-879A-956E83BD2874}", NodePaletteModelInformation);
AZ_CLASS_ALLOCATOR(FunctionNodeModelInformation, AZ::SystemAllocator, 0);
// Display color used for the function node in the palette.
AZ::Color m_functionColor;
// Asset id of the backing function graph asset.
AZ::Data::AssetId m_functionAssetId;
};
}
|
// Generated with `xb buildshaders`.
#if 0
; SPIR-V
; Version: 1.0
; Generator: Khronos Glslang Reference Front End; 10
; Bound: 25179
; Schema: 0
OpCapability Shader
%1 = OpExtInstImport "GLSL.std.450"
OpMemoryModel Logical GLSL450
OpEntryPoint GLCompute %5663 "main" %gl_GlobalInvocationID
OpExecutionMode %5663 LocalSize 2 32 1
OpMemberDecorate %_struct_1161 0 Offset 0
OpMemberDecorate %_struct_1161 1 Offset 4
OpMemberDecorate %_struct_1161 2 Offset 8
OpMemberDecorate %_struct_1161 3 Offset 12
OpMemberDecorate %_struct_1161 4 Offset 16
OpMemberDecorate %_struct_1161 5 Offset 28
OpMemberDecorate %_struct_1161 6 Offset 32
OpMemberDecorate %_struct_1161 7 Offset 36
OpDecorate %_struct_1161 Block
OpDecorate %5245 DescriptorSet 2
OpDecorate %5245 Binding 0
OpDecorate %gl_GlobalInvocationID BuiltIn GlobalInvocationId
OpDecorate %_runtimearr_v4uint ArrayStride 16
OpMemberDecorate %_struct_1972 0 NonReadable
OpMemberDecorate %_struct_1972 0 Offset 0
OpDecorate %_struct_1972 BufferBlock
OpDecorate %5134 DescriptorSet 0
OpDecorate %5134 Binding 0
OpDecorate %_runtimearr_v4uint_0 ArrayStride 16
OpMemberDecorate %_struct_1973 0 NonWritable
OpMemberDecorate %_struct_1973 0 Offset 0
OpDecorate %_struct_1973 BufferBlock
OpDecorate %4218 DescriptorSet 1
OpDecorate %4218 Binding 0
OpDecorate %gl_WorkGroupSize BuiltIn WorkgroupSize
%void = OpTypeVoid
%1282 = OpTypeFunction %void
%uint = OpTypeInt 32 0
%v4uint = OpTypeVector %uint 4
%int = OpTypeInt 32 1
%v2int = OpTypeVector %int 2
%v3int = OpTypeVector %int 3
%bool = OpTypeBool
%v3uint = OpTypeVector %uint 3
%v2uint = OpTypeVector %uint 2
%uint_1 = OpConstant %uint 1
%uint_16711935 = OpConstant %uint 16711935
%uint_8 = OpConstant %uint 8
%uint_4278255360 = OpConstant %uint 4278255360
%uint_0 = OpConstant %uint 0
%int_5 = OpConstant %int 5
%uint_5 = OpConstant %uint 5
%uint_7 = OpConstant %uint 7
%int_7 = OpConstant %int 7
%int_14 = OpConstant %int 14
%int_2 = OpConstant %int 2
%int_n16 = OpConstant %int -16
%int_1 = OpConstant %int 1
%int_15 = OpConstant %int 15
%int_4 = OpConstant %int 4
%int_n512 = OpConstant %int -512
%int_3 = OpConstant %int 3
%int_16 = OpConstant %int 16
%int_448 = OpConstant %int 448
%int_8 = OpConstant %int 8
%int_6 = OpConstant %int 6
%int_63 = OpConstant %int 63
%uint_2 = OpConstant %uint 2
%uint_4 = OpConstant %uint 4
%uint_6 = OpConstant %uint 6
%int_268435455 = OpConstant %int 268435455
%int_n2 = OpConstant %int -2
%uint_3 = OpConstant %uint 3
%uint_32 = OpConstant %uint 32
%uint_64 = OpConstant %uint 64
%_struct_1161 = OpTypeStruct %uint %uint %uint %uint %v3uint %uint %uint %uint
%_ptr_Uniform__struct_1161 = OpTypePointer Uniform %_struct_1161
%5245 = OpVariable %_ptr_Uniform__struct_1161 Uniform
%int_0 = OpConstant %int 0
%_ptr_Uniform_uint = OpTypePointer Uniform %uint
%1915 = OpConstantComposite %v2uint %uint_4 %uint_6
%_ptr_Uniform_v3uint = OpTypePointer Uniform %v3uint
%_ptr_Input_v3uint = OpTypePointer Input %v3uint
%gl_GlobalInvocationID = OpVariable %_ptr_Input_v3uint Input
%2612 = OpConstantComposite %v3uint %uint_4 %uint_0 %uint_0
%v2bool = OpTypeVector %bool 2
%_runtimearr_v4uint = OpTypeRuntimeArray %v4uint
%_struct_1972 = OpTypeStruct %_runtimearr_v4uint
%_ptr_Uniform__struct_1972 = OpTypePointer Uniform %_struct_1972
%5134 = OpVariable %_ptr_Uniform__struct_1972 Uniform
%_runtimearr_v4uint_0 = OpTypeRuntimeArray %v4uint
%_struct_1973 = OpTypeStruct %_runtimearr_v4uint_0
%_ptr_Uniform__struct_1973 = OpTypePointer Uniform %_struct_1973
%4218 = OpVariable %_ptr_Uniform__struct_1973 Uniform
%_ptr_Uniform_v4uint = OpTypePointer Uniform %v4uint
%gl_WorkGroupSize = OpConstantComposite %v3uint %uint_2 %uint_32 %uint_1
%1870 = OpConstantComposite %v2uint %uint_3 %uint_3
%2510 = OpConstantComposite %v4uint %uint_16711935 %uint_16711935 %uint_16711935 %uint_16711935
%317 = OpConstantComposite %v4uint %uint_8 %uint_8 %uint_8 %uint_8
%1838 = OpConstantComposite %v4uint %uint_4278255360 %uint_4278255360 %uint_4278255360 %uint_4278255360
%uint_16 = OpConstant %uint 16
%5663 = OpFunction %void None %1282
%15110 = OpLabel
OpSelectionMerge %19578 None
OpSwitch %uint_0 %15137
%15137 = OpLabel
%12591 = OpLoad %v3uint %gl_GlobalInvocationID
%10229 = OpShiftLeftLogical %v3uint %12591 %2612
%25178 = OpAccessChain %_ptr_Uniform_v3uint %5245 %int_4
%22965 = OpLoad %v3uint %25178
%18835 = OpVectorShuffle %v2uint %10229 %10229 0 1
%6626 = OpVectorShuffle %v2uint %22965 %22965 0 1
%17032 = OpUGreaterThanEqual %v2bool %18835 %6626
%24679 = OpAny %bool %17032
OpSelectionMerge %6282 DontFlatten
OpBranchConditional %24679 %21992 %6282
%21992 = OpLabel
OpBranch %19578
%6282 = OpLabel
%6795 = OpBitcast %v3int %10229
%18792 = OpAccessChain %_ptr_Uniform_uint %5245 %int_6
%9788 = OpLoad %uint %18792
%20376 = OpCompositeExtract %uint %22965 1
%14692 = OpCompositeExtract %int %6795 0
%22810 = OpIMul %int %14692 %int_2
%6362 = OpCompositeExtract %int %6795 2
%14505 = OpBitcast %int %20376
%11279 = OpIMul %int %6362 %14505
%17598 = OpCompositeExtract %int %6795 1
%22228 = OpIAdd %int %11279 %17598
%22405 = OpBitcast %int %9788
%24535 = OpIMul %int %22228 %22405
%7061 = OpIAdd %int %22810 %24535
%19270 = OpBitcast %uint %7061
%19460 = OpAccessChain %_ptr_Uniform_uint %5245 %int_5
%22875 = OpLoad %uint %19460
%8517 = OpIAdd %uint %19270 %22875
%21670 = OpShiftRightLogical %uint %8517 %uint_4
%18404 = OpAccessChain %_ptr_Uniform_uint %5245 %int_1
%23432 = OpLoad %uint %18404
%22700 = OpAccessChain %_ptr_Uniform_uint %5245 %int_0
%20387 = OpLoad %uint %22700
%22279 = OpBitwiseAnd %uint %20387 %uint_2
%19223 = OpINotEqual %bool %22279 %uint_0
%17247 = OpCompositeConstruct %v2uint %20387 %20387
%22947 = OpShiftRightLogical %v2uint %17247 %1915
%6551 = OpBitwiseAnd %v2uint %22947 %1870
%18732 = OpAccessChain %_ptr_Uniform_uint %5245 %int_2
%24236 = OpLoad %uint %18732
%20458 = OpAccessChain %_ptr_Uniform_uint %5245 %int_3
%22167 = OpLoad %uint %20458
%18929 = OpCompositeExtract %uint %10229 0
%6638 = OpShiftRightLogical %uint %18929 %uint_3
%9988 = OpCompositeExtract %uint %10229 1
%23563 = OpCompositeConstruct %v2uint %6638 %9988
%8041 = OpUDiv %v2uint %23563 %6551
%13932 = OpCompositeExtract %uint %8041 0
%19789 = OpShiftLeftLogical %uint %13932 %uint_3
%20905 = OpCompositeExtract %uint %8041 1
%23022 = OpCompositeExtract %uint %10229 2
%9417 = OpCompositeConstruct %v3uint %19789 %20905 %23022
OpSelectionMerge %21313 DontFlatten
OpBranchConditional %19223 %21373 %11737
%21373 = OpLabel
%10608 = OpBitcast %v3int %9417
%17090 = OpCompositeExtract %int %10608 1
%9469 = OpShiftRightArithmetic %int %17090 %int_4
%10055 = OpCompositeExtract %int %10608 2
%16476 = OpShiftRightArithmetic %int %10055 %int_2
%23373 = OpShiftRightLogical %uint %22167 %uint_4
%6314 = OpBitcast %int %23373
%21281 = OpIMul %int %16476 %6314
%15143 = OpIAdd %int %9469 %21281
%9032 = OpShiftRightLogical %uint %24236 %uint_5
%12427 = OpBitcast %int %9032
%10360 = OpIMul %int %15143 %12427
%25154 = OpCompositeExtract %int %10608 0
%20423 = OpShiftRightArithmetic %int %25154 %int_5
%18940 = OpIAdd %int %20423 %10360
%8797 = OpShiftLeftLogical %int %18940 %uint_7
%11510 = OpBitwiseAnd %int %8797 %int_268435455
%18938 = OpShiftLeftLogical %int %11510 %int_1
%19768 = OpBitwiseAnd %int %25154 %int_7
%12600 = OpBitwiseAnd %int %17090 %int_6
%17741 = OpShiftLeftLogical %int %12600 %int_2
%17227 = OpIAdd %int %19768 %17741
%7048 = OpShiftLeftLogical %int %17227 %uint_7
%24035 = OpShiftRightArithmetic %int %7048 %int_6
%8725 = OpShiftRightArithmetic %int %17090 %int_3
%13731 = OpIAdd %int %8725 %16476
%23052 = OpBitwiseAnd %int %13731 %int_1
%16658 = OpShiftRightArithmetic %int %25154 %int_3
%18794 = OpShiftLeftLogical %int %23052 %int_1
%13501 = OpIAdd %int %16658 %18794
%19165 = OpBitwiseAnd %int %13501 %int_3
%21578 = OpShiftLeftLogical %int %19165 %int_1
%15435 = OpIAdd %int %23052 %21578
%13150 = OpBitwiseAnd %int %24035 %int_n16
%20336 = OpIAdd %int %18938 %13150
%23345 = OpShiftLeftLogical %int %20336 %int_1
%23274 = OpBitwiseAnd %int %24035 %int_15
%10332 = OpIAdd %int %23345 %23274
%18356 = OpBitwiseAnd %int %10055 %int_3
%21579 = OpShiftLeftLogical %int %18356 %uint_7
%16727 = OpIAdd %int %10332 %21579
%19166 = OpBitwiseAnd %int %17090 %int_1
%21580 = OpShiftLeftLogical %int %19166 %int_4
%16728 = OpIAdd %int %16727 %21580
%20438 = OpBitwiseAnd %int %15435 %int_1
%9987 = OpShiftLeftLogical %int %20438 %int_3
%13106 = OpShiftRightArithmetic %int %16728 %int_6
%14038 = OpBitwiseAnd %int %13106 %int_7
%13330 = OpIAdd %int %9987 %14038
%23346 = OpShiftLeftLogical %int %13330 %int_3
%23217 = OpBitwiseAnd %int %15435 %int_n2
%10908 = OpIAdd %int %23346 %23217
%23347 = OpShiftLeftLogical %int %10908 %int_2
%23218 = OpBitwiseAnd %int %16728 %int_n512
%10909 = OpIAdd %int %23347 %23218
%23348 = OpShiftLeftLogical %int %10909 %int_3
%21849 = OpBitwiseAnd %int %16728 %int_63
%24314 = OpIAdd %int %23348 %21849
%22127 = OpBitcast %uint %24314
OpBranch %21313
%11737 = OpLabel
%9761 = OpVectorShuffle %v2uint %9417 %9417 0 1
%22991 = OpBitcast %v2int %9761
%6403 = OpCompositeExtract %int %22991 0
%9470 = OpShiftRightArithmetic %int %6403 %int_5
%10056 = OpCompositeExtract %int %22991 1
%16477 = OpShiftRightArithmetic %int %10056 %int_5
%23374 = OpShiftRightLogical %uint %24236 %uint_5
%6315 = OpBitcast %int %23374
%21319 = OpIMul %int %16477 %6315
%16222 = OpIAdd %int %9470 %21319
%19086 = OpShiftLeftLogical %int %16222 %uint_8
%10934 = OpBitwiseAnd %int %6403 %int_7
%12601 = OpBitwiseAnd %int %10056 %int_14
%17742 = OpShiftLeftLogical %int %12601 %int_2
%17303 = OpIAdd %int %10934 %17742
%6375 = OpShiftLeftLogical %int %17303 %uint_1
%10161 = OpBitwiseAnd %int %6375 %int_n16
%12150 = OpShiftLeftLogical %int %10161 %int_1
%15436 = OpIAdd %int %19086 %12150
%13207 = OpBitwiseAnd %int %6375 %int_15
%19760 = OpIAdd %int %15436 %13207
%18357 = OpBitwiseAnd %int %10056 %int_1
%21581 = OpShiftLeftLogical %int %18357 %int_4
%16729 = OpIAdd %int %19760 %21581
%20514 = OpBitwiseAnd %int %16729 %int_n512
%9238 = OpShiftLeftLogical %int %20514 %int_3
%18995 = OpBitwiseAnd %int %10056 %int_16
%12151 = OpShiftLeftLogical %int %18995 %int_7
%16730 = OpIAdd %int %9238 %12151
%19167 = OpBitwiseAnd %int %16729 %int_448
%21582 = OpShiftLeftLogical %int %19167 %int_2
%16708 = OpIAdd %int %16730 %21582
%20611 = OpBitwiseAnd %int %10056 %int_8
%16831 = OpShiftRightArithmetic %int %20611 %int_2
%7916 = OpShiftRightArithmetic %int %6403 %int_3
%13750 = OpIAdd %int %16831 %7916
%21587 = OpBitwiseAnd %int %13750 %int_3
%21583 = OpShiftLeftLogical %int %21587 %int_6
%15437 = OpIAdd %int %16708 %21583
%11782 = OpBitwiseAnd %int %16729 %int_63
%14671 = OpIAdd %int %15437 %11782
%22128 = OpBitcast %uint %14671
OpBranch %21313
%21313 = OpLabel
%9468 = OpPhi %uint %22127 %21373 %22128 %11737
%16296 = OpIMul %v2uint %8041 %6551
%15292 = OpISub %v2uint %23563 %16296
%7303 = OpCompositeExtract %uint %6551 0
%22882 = OpCompositeExtract %uint %6551 1
%13170 = OpIMul %uint %7303 %22882
%15520 = OpIMul %uint %9468 %13170
%16084 = OpCompositeExtract %uint %15292 0
%15890 = OpIMul %uint %16084 %22882
%6886 = OpCompositeExtract %uint %15292 1
%11045 = OpIAdd %uint %15890 %6886
%24733 = OpShiftLeftLogical %uint %11045 %uint_3
%23219 = OpBitwiseAnd %uint %18929 %uint_7
%9559 = OpIAdd %uint %24733 %23219
%16557 = OpShiftLeftLogical %uint %9559 %uint_1
%20138 = OpIAdd %uint %15520 %16557
%17724 = OpIAdd %uint %23432 %20138
%14040 = OpShiftRightLogical %uint %17724 %uint_4
%11766 = OpShiftRightLogical %uint %20387 %uint_2
%8394 = OpBitwiseAnd %uint %11766 %uint_3
%20727 = OpAccessChain %_ptr_Uniform_v4uint %4218 %int_0 %14040
%9605 = OpLoad %v4uint %20727
%21106 = OpIEqual %bool %8394 %uint_1
OpSelectionMerge %11416 None
OpBranchConditional %21106 %10583 %11416
%10583 = OpLabel
%18271 = OpBitwiseAnd %v4uint %9605 %2510
%9425 = OpShiftLeftLogical %v4uint %18271 %317
%20652 = OpBitwiseAnd %v4uint %9605 %1838
%17549 = OpShiftRightLogical %v4uint %20652 %317
%16376 = OpBitwiseOr %v4uint %9425 %17549
OpBranch %11416
%11416 = OpLabel
%19767 = OpPhi %v4uint %9605 %21313 %16376 %10583
%24825 = OpAccessChain %_ptr_Uniform_v4uint %5134 %int_0 %21670
OpStore %24825 %19767
%15532 = OpIAdd %uint %21670 %int_1
%6417 = OpUGreaterThan %bool %7303 %uint_1
OpSelectionMerge %24764 DontFlatten
OpBranchConditional %6417 %20612 %20628
%20612 = OpLabel
%13975 = OpUDiv %uint %6638 %7303
%9086 = OpIMul %uint %13975 %7303
%12657 = OpISub %uint %6638 %9086
%9511 = OpIAdd %uint %12657 %uint_1
%13375 = OpIEqual %bool %9511 %7303
OpSelectionMerge %7917 None
OpBranchConditional %13375 %22174 %8593
%22174 = OpLabel
%19289 = OpIMul %uint %uint_64 %7303
%21519 = OpShiftLeftLogical %uint %12657 %uint_4
%18756 = OpISub %uint %19289 %21519
OpBranch %7917
%8593 = OpLabel
OpBranch %7917
%7917 = OpLabel
%10540 = OpPhi %uint %18756 %22174 %uint_16 %8593
OpBranch %24764
%20628 = OpLabel
OpBranch %24764
%24764 = OpLabel
%10684 = OpPhi %uint %10540 %7917 %uint_64 %20628
%18731 = OpIMul %uint %10684 %22882
%16493 = OpShiftRightLogical %uint %18731 %uint_4
%13163 = OpIAdd %uint %14040 %16493
%22298 = OpAccessChain %_ptr_Uniform_v4uint %4218 %int_0 %13163
%6578 = OpLoad %v4uint %22298
OpSelectionMerge %11417 None
OpBranchConditional %21106 %10584 %11417
%10584 = OpLabel
%18272 = OpBitwiseAnd %v4uint %6578 %2510
%9426 = OpShiftLeftLogical %v4uint %18272 %317
%20653 = OpBitwiseAnd %v4uint %6578 %1838
%17550 = OpShiftRightLogical %v4uint %20653 %317
%16377 = OpBitwiseOr %v4uint %9426 %17550
OpBranch %11417
%11417 = OpLabel
%19769 = OpPhi %v4uint %6578 %24764 %16377 %10584
%8053 = OpAccessChain %_ptr_Uniform_v4uint %5134 %int_0 %15532
OpStore %8053 %19769
OpBranch %19578
%19578 = OpLabel
OpReturn
OpFunctionEnd
#endif
const uint32_t texture_load_16bpb_scaled_cs[] = {
0x07230203, 0x00010000, 0x0008000A, 0x0000625B, 0x00000000, 0x00020011,
0x00000001, 0x0006000B, 0x00000001, 0x4C534C47, 0x6474732E, 0x3035342E,
0x00000000, 0x0003000E, 0x00000000, 0x00000001, 0x0006000F, 0x00000005,
0x0000161F, 0x6E69616D, 0x00000000, 0x00000F48, 0x00060010, 0x0000161F,
0x00000011, 0x00000002, 0x00000020, 0x00000001, 0x00050048, 0x00000489,
0x00000000, 0x00000023, 0x00000000, 0x00050048, 0x00000489, 0x00000001,
0x00000023, 0x00000004, 0x00050048, 0x00000489, 0x00000002, 0x00000023,
0x00000008, 0x00050048, 0x00000489, 0x00000003, 0x00000023, 0x0000000C,
0x00050048, 0x00000489, 0x00000004, 0x00000023, 0x00000010, 0x00050048,
0x00000489, 0x00000005, 0x00000023, 0x0000001C, 0x00050048, 0x00000489,
0x00000006, 0x00000023, 0x00000020, 0x00050048, 0x00000489, 0x00000007,
0x00000023, 0x00000024, 0x00030047, 0x00000489, 0x00000002, 0x00040047,
0x0000147D, 0x00000022, 0x00000002, 0x00040047, 0x0000147D, 0x00000021,
0x00000000, 0x00040047, 0x00000F48, 0x0000000B, 0x0000001C, 0x00040047,
0x000007DC, 0x00000006, 0x00000010, 0x00040048, 0x000007B4, 0x00000000,
0x00000019, 0x00050048, 0x000007B4, 0x00000000, 0x00000023, 0x00000000,
0x00030047, 0x000007B4, 0x00000003, 0x00040047, 0x0000140E, 0x00000022,
0x00000000, 0x00040047, 0x0000140E, 0x00000021, 0x00000000, 0x00040047,
0x000007DD, 0x00000006, 0x00000010, 0x00040048, 0x000007B5, 0x00000000,
0x00000018, 0x00050048, 0x000007B5, 0x00000000, 0x00000023, 0x00000000,
0x00030047, 0x000007B5, 0x00000003, 0x00040047, 0x0000107A, 0x00000022,
0x00000001, 0x00040047, 0x0000107A, 0x00000021, 0x00000000, 0x00040047,
0x00000BB1, 0x0000000B, 0x00000019, 0x00020013, 0x00000008, 0x00030021,
0x00000502, 0x00000008, 0x00040015, 0x0000000B, 0x00000020, 0x00000000,
0x00040017, 0x00000017, 0x0000000B, 0x00000004, 0x00040015, 0x0000000C,
0x00000020, 0x00000001, 0x00040017, 0x00000012, 0x0000000C, 0x00000002,
0x00040017, 0x00000016, 0x0000000C, 0x00000003, 0x00020014, 0x00000009,
0x00040017, 0x00000014, 0x0000000B, 0x00000003, 0x00040017, 0x00000011,
0x0000000B, 0x00000002, 0x0004002B, 0x0000000B, 0x00000A0D, 0x00000001,
0x0004002B, 0x0000000B, 0x000008A6, 0x00FF00FF, 0x0004002B, 0x0000000B,
0x00000A22, 0x00000008, 0x0004002B, 0x0000000B, 0x000005FD, 0xFF00FF00,
0x0004002B, 0x0000000B, 0x00000A0A, 0x00000000, 0x0004002B, 0x0000000C,
0x00000A1A, 0x00000005, 0x0004002B, 0x0000000B, 0x00000A19, 0x00000005,
0x0004002B, 0x0000000B, 0x00000A1F, 0x00000007, 0x0004002B, 0x0000000C,
0x00000A20, 0x00000007, 0x0004002B, 0x0000000C, 0x00000A35, 0x0000000E,
0x0004002B, 0x0000000C, 0x00000A11, 0x00000002, 0x0004002B, 0x0000000C,
0x000009DB, 0xFFFFFFF0, 0x0004002B, 0x0000000C, 0x00000A0E, 0x00000001,
0x0004002B, 0x0000000C, 0x00000A38, 0x0000000F, 0x0004002B, 0x0000000C,
0x00000A17, 0x00000004, 0x0004002B, 0x0000000C, 0x0000040B, 0xFFFFFE00,
0x0004002B, 0x0000000C, 0x00000A14, 0x00000003, 0x0004002B, 0x0000000C,
0x00000A3B, 0x00000010, 0x0004002B, 0x0000000C, 0x00000388, 0x000001C0,
0x0004002B, 0x0000000C, 0x00000A23, 0x00000008, 0x0004002B, 0x0000000C,
0x00000A1D, 0x00000006, 0x0004002B, 0x0000000C, 0x00000AC8, 0x0000003F,
0x0004002B, 0x0000000B, 0x00000A10, 0x00000002, 0x0004002B, 0x0000000B,
0x00000A16, 0x00000004, 0x0004002B, 0x0000000B, 0x00000A1C, 0x00000006,
0x0004002B, 0x0000000C, 0x0000078B, 0x0FFFFFFF, 0x0004002B, 0x0000000C,
0x00000A05, 0xFFFFFFFE, 0x0004002B, 0x0000000B, 0x00000A13, 0x00000003,
0x0004002B, 0x0000000B, 0x00000A6A, 0x00000020, 0x0004002B, 0x0000000B,
0x00000ACA, 0x00000040, 0x000A001E, 0x00000489, 0x0000000B, 0x0000000B,
0x0000000B, 0x0000000B, 0x00000014, 0x0000000B, 0x0000000B, 0x0000000B,
0x00040020, 0x00000706, 0x00000002, 0x00000489, 0x0004003B, 0x00000706,
0x0000147D, 0x00000002, 0x0004002B, 0x0000000C, 0x00000A0B, 0x00000000,
0x00040020, 0x00000288, 0x00000002, 0x0000000B, 0x0005002C, 0x00000011,
0x0000077B, 0x00000A16, 0x00000A1C, 0x00040020, 0x00000291, 0x00000002,
0x00000014, 0x00040020, 0x00000292, 0x00000001, 0x00000014, 0x0004003B,
0x00000292, 0x00000F48, 0x00000001, 0x0006002C, 0x00000014, 0x00000A34,
0x00000A16, 0x00000A0A, 0x00000A0A, 0x00040017, 0x0000000F, 0x00000009,
0x00000002, 0x0003001D, 0x000007DC, 0x00000017, 0x0003001E, 0x000007B4,
0x000007DC, 0x00040020, 0x00000A31, 0x00000002, 0x000007B4, 0x0004003B,
0x00000A31, 0x0000140E, 0x00000002, 0x0003001D, 0x000007DD, 0x00000017,
0x0003001E, 0x000007B5, 0x000007DD, 0x00040020, 0x00000A32, 0x00000002,
0x000007B5, 0x0004003B, 0x00000A32, 0x0000107A, 0x00000002, 0x00040020,
0x00000294, 0x00000002, 0x00000017, 0x0006002C, 0x00000014, 0x00000BB1,
0x00000A10, 0x00000A6A, 0x00000A0D, 0x0005002C, 0x00000011, 0x0000074E,
0x00000A13, 0x00000A13, 0x0007002C, 0x00000017, 0x000009CE, 0x000008A6,
0x000008A6, 0x000008A6, 0x000008A6, 0x0007002C, 0x00000017, 0x0000013D,
0x00000A22, 0x00000A22, 0x00000A22, 0x00000A22, 0x0007002C, 0x00000017,
0x0000072E, 0x000005FD, 0x000005FD, 0x000005FD, 0x000005FD, 0x0004002B,
0x0000000B, 0x00000A3A, 0x00000010, 0x00050036, 0x00000008, 0x0000161F,
0x00000000, 0x00000502, 0x000200F8, 0x00003B06, 0x000300F7, 0x00004C7A,
0x00000000, 0x000300FB, 0x00000A0A, 0x00003B21, 0x000200F8, 0x00003B21,
0x0004003D, 0x00000014, 0x0000312F, 0x00000F48, 0x000500C4, 0x00000014,
0x000027F5, 0x0000312F, 0x00000A34, 0x00050041, 0x00000291, 0x0000625A,
0x0000147D, 0x00000A17, 0x0004003D, 0x00000014, 0x000059B5, 0x0000625A,
0x0007004F, 0x00000011, 0x00004993, 0x000027F5, 0x000027F5, 0x00000000,
0x00000001, 0x0007004F, 0x00000011, 0x000019E2, 0x000059B5, 0x000059B5,
0x00000000, 0x00000001, 0x000500AE, 0x0000000F, 0x00004288, 0x00004993,
0x000019E2, 0x0004009A, 0x00000009, 0x00006067, 0x00004288, 0x000300F7,
0x0000188A, 0x00000002, 0x000400FA, 0x00006067, 0x000055E8, 0x0000188A,
0x000200F8, 0x000055E8, 0x000200F9, 0x00004C7A, 0x000200F8, 0x0000188A,
0x0004007C, 0x00000016, 0x00001A8B, 0x000027F5, 0x00050041, 0x00000288,
0x00004968, 0x0000147D, 0x00000A1D, 0x0004003D, 0x0000000B, 0x0000263C,
0x00004968, 0x00050051, 0x0000000B, 0x00004F98, 0x000059B5, 0x00000001,
0x00050051, 0x0000000C, 0x00003964, 0x00001A8B, 0x00000000, 0x00050084,
0x0000000C, 0x0000591A, 0x00003964, 0x00000A11, 0x00050051, 0x0000000C,
0x000018DA, 0x00001A8B, 0x00000002, 0x0004007C, 0x0000000C, 0x000038A9,
0x00004F98, 0x00050084, 0x0000000C, 0x00002C0F, 0x000018DA, 0x000038A9,
0x00050051, 0x0000000C, 0x000044BE, 0x00001A8B, 0x00000001, 0x00050080,
0x0000000C, 0x000056D4, 0x00002C0F, 0x000044BE, 0x0004007C, 0x0000000C,
0x00005785, 0x0000263C, 0x00050084, 0x0000000C, 0x00005FD7, 0x000056D4,
0x00005785, 0x00050080, 0x0000000C, 0x00001B95, 0x0000591A, 0x00005FD7,
0x0004007C, 0x0000000B, 0x00004B46, 0x00001B95, 0x00050041, 0x00000288,
0x00004C04, 0x0000147D, 0x00000A1A, 0x0004003D, 0x0000000B, 0x0000595B,
0x00004C04, 0x00050080, 0x0000000B, 0x00002145, 0x00004B46, 0x0000595B,
0x000500C2, 0x0000000B, 0x000054A6, 0x00002145, 0x00000A16, 0x00050041,
0x00000288, 0x000047E4, 0x0000147D, 0x00000A0E, 0x0004003D, 0x0000000B,
0x00005B88, 0x000047E4, 0x00050041, 0x00000288, 0x000058AC, 0x0000147D,
0x00000A0B, 0x0004003D, 0x0000000B, 0x00004FA3, 0x000058AC, 0x000500C7,
0x0000000B, 0x00005707, 0x00004FA3, 0x00000A10, 0x000500AB, 0x00000009,
0x00004B17, 0x00005707, 0x00000A0A, 0x00050050, 0x00000011, 0x0000435F,
0x00004FA3, 0x00004FA3, 0x000500C2, 0x00000011, 0x000059A3, 0x0000435F,
0x0000077B, 0x000500C7, 0x00000011, 0x00001997, 0x000059A3, 0x0000074E,
0x00050041, 0x00000288, 0x0000492C, 0x0000147D, 0x00000A11, 0x0004003D,
0x0000000B, 0x00005EAC, 0x0000492C, 0x00050041, 0x00000288, 0x00004FEA,
0x0000147D, 0x00000A14, 0x0004003D, 0x0000000B, 0x00005697, 0x00004FEA,
0x00050051, 0x0000000B, 0x000049F1, 0x000027F5, 0x00000000, 0x000500C2,
0x0000000B, 0x000019EE, 0x000049F1, 0x00000A13, 0x00050051, 0x0000000B,
0x00002704, 0x000027F5, 0x00000001, 0x00050050, 0x00000011, 0x00005C0B,
0x000019EE, 0x00002704, 0x00050086, 0x00000011, 0x00001F69, 0x00005C0B,
0x00001997, 0x00050051, 0x0000000B, 0x0000366C, 0x00001F69, 0x00000000,
0x000500C4, 0x0000000B, 0x00004D4D, 0x0000366C, 0x00000A13, 0x00050051,
0x0000000B, 0x000051A9, 0x00001F69, 0x00000001, 0x00050051, 0x0000000B,
0x000059EE, 0x000027F5, 0x00000002, 0x00060050, 0x00000014, 0x000024C9,
0x00004D4D, 0x000051A9, 0x000059EE, 0x000300F7, 0x00005341, 0x00000002,
0x000400FA, 0x00004B17, 0x0000537D, 0x00002DD9, 0x000200F8, 0x0000537D,
0x0004007C, 0x00000016, 0x00002970, 0x000024C9, 0x00050051, 0x0000000C,
0x000042C2, 0x00002970, 0x00000001, 0x000500C3, 0x0000000C, 0x000024FD,
0x000042C2, 0x00000A17, 0x00050051, 0x0000000C, 0x00002747, 0x00002970,
0x00000002, 0x000500C3, 0x0000000C, 0x0000405C, 0x00002747, 0x00000A11,
0x000500C2, 0x0000000B, 0x00005B4D, 0x00005697, 0x00000A16, 0x0004007C,
0x0000000C, 0x000018AA, 0x00005B4D, 0x00050084, 0x0000000C, 0x00005321,
0x0000405C, 0x000018AA, 0x00050080, 0x0000000C, 0x00003B27, 0x000024FD,
0x00005321, 0x000500C2, 0x0000000B, 0x00002348, 0x00005EAC, 0x00000A19,
0x0004007C, 0x0000000C, 0x0000308B, 0x00002348, 0x00050084, 0x0000000C,
0x00002878, 0x00003B27, 0x0000308B, 0x00050051, 0x0000000C, 0x00006242,
0x00002970, 0x00000000, 0x000500C3, 0x0000000C, 0x00004FC7, 0x00006242,
0x00000A1A, 0x00050080, 0x0000000C, 0x000049FC, 0x00004FC7, 0x00002878,
0x000500C4, 0x0000000C, 0x0000225D, 0x000049FC, 0x00000A1F, 0x000500C7,
0x0000000C, 0x00002CF6, 0x0000225D, 0x0000078B, 0x000500C4, 0x0000000C,
0x000049FA, 0x00002CF6, 0x00000A0E, 0x000500C7, 0x0000000C, 0x00004D38,
0x00006242, 0x00000A20, 0x000500C7, 0x0000000C, 0x00003138, 0x000042C2,
0x00000A1D, 0x000500C4, 0x0000000C, 0x0000454D, 0x00003138, 0x00000A11,
0x00050080, 0x0000000C, 0x0000434B, 0x00004D38, 0x0000454D, 0x000500C4,
0x0000000C, 0x00001B88, 0x0000434B, 0x00000A1F, 0x000500C3, 0x0000000C,
0x00005DE3, 0x00001B88, 0x00000A1D, 0x000500C3, 0x0000000C, 0x00002215,
0x000042C2, 0x00000A14, 0x00050080, 0x0000000C, 0x000035A3, 0x00002215,
0x0000405C, 0x000500C7, 0x0000000C, 0x00005A0C, 0x000035A3, 0x00000A0E,
0x000500C3, 0x0000000C, 0x00004112, 0x00006242, 0x00000A14, 0x000500C4,
0x0000000C, 0x0000496A, 0x00005A0C, 0x00000A0E, 0x00050080, 0x0000000C,
0x000034BD, 0x00004112, 0x0000496A, 0x000500C7, 0x0000000C, 0x00004ADD,
0x000034BD, 0x00000A14, 0x000500C4, 0x0000000C, 0x0000544A, 0x00004ADD,
0x00000A0E, 0x00050080, 0x0000000C, 0x00003C4B, 0x00005A0C, 0x0000544A,
0x000500C7, 0x0000000C, 0x0000335E, 0x00005DE3, 0x000009DB, 0x00050080,
0x0000000C, 0x00004F70, 0x000049FA, 0x0000335E, 0x000500C4, 0x0000000C,
0x00005B31, 0x00004F70, 0x00000A0E, 0x000500C7, 0x0000000C, 0x00005AEA,
0x00005DE3, 0x00000A38, 0x00050080, 0x0000000C, 0x0000285C, 0x00005B31,
0x00005AEA, 0x000500C7, 0x0000000C, 0x000047B4, 0x00002747, 0x00000A14,
0x000500C4, 0x0000000C, 0x0000544B, 0x000047B4, 0x00000A1F, 0x00050080,
0x0000000C, 0x00004157, 0x0000285C, 0x0000544B, 0x000500C7, 0x0000000C,
0x00004ADE, 0x000042C2, 0x00000A0E, 0x000500C4, 0x0000000C, 0x0000544C,
0x00004ADE, 0x00000A17, 0x00050080, 0x0000000C, 0x00004158, 0x00004157,
0x0000544C, 0x000500C7, 0x0000000C, 0x00004FD6, 0x00003C4B, 0x00000A0E,
0x000500C4, 0x0000000C, 0x00002703, 0x00004FD6, 0x00000A14, 0x000500C3,
0x0000000C, 0x00003332, 0x00004158, 0x00000A1D, 0x000500C7, 0x0000000C,
0x000036D6, 0x00003332, 0x00000A20, 0x00050080, 0x0000000C, 0x00003412,
0x00002703, 0x000036D6, 0x000500C4, 0x0000000C, 0x00005B32, 0x00003412,
0x00000A14, 0x000500C7, 0x0000000C, 0x00005AB1, 0x00003C4B, 0x00000A05,
0x00050080, 0x0000000C, 0x00002A9C, 0x00005B32, 0x00005AB1, 0x000500C4,
0x0000000C, 0x00005B33, 0x00002A9C, 0x00000A11, 0x000500C7, 0x0000000C,
0x00005AB2, 0x00004158, 0x0000040B, 0x00050080, 0x0000000C, 0x00002A9D,
0x00005B33, 0x00005AB2, 0x000500C4, 0x0000000C, 0x00005B34, 0x00002A9D,
0x00000A14, 0x000500C7, 0x0000000C, 0x00005559, 0x00004158, 0x00000AC8,
0x00050080, 0x0000000C, 0x00005EFA, 0x00005B34, 0x00005559, 0x0004007C,
0x0000000B, 0x0000566F, 0x00005EFA, 0x000200F9, 0x00005341, 0x000200F8,
0x00002DD9, 0x0007004F, 0x00000011, 0x00002621, 0x000024C9, 0x000024C9,
0x00000000, 0x00000001, 0x0004007C, 0x00000012, 0x000059CF, 0x00002621,
0x00050051, 0x0000000C, 0x00001903, 0x000059CF, 0x00000000, 0x000500C3,
0x0000000C, 0x000024FE, 0x00001903, 0x00000A1A, 0x00050051, 0x0000000C,
0x00002748, 0x000059CF, 0x00000001, 0x000500C3, 0x0000000C, 0x0000405D,
0x00002748, 0x00000A1A, 0x000500C2, 0x0000000B, 0x00005B4E, 0x00005EAC,
0x00000A19, 0x0004007C, 0x0000000C, 0x000018AB, 0x00005B4E, 0x00050084,
0x0000000C, 0x00005347, 0x0000405D, 0x000018AB, 0x00050080, 0x0000000C,
0x00003F5E, 0x000024FE, 0x00005347, 0x000500C4, 0x0000000C, 0x00004A8E,
0x00003F5E, 0x00000A22, 0x000500C7, 0x0000000C, 0x00002AB6, 0x00001903,
0x00000A20, 0x000500C7, 0x0000000C, 0x00003139, 0x00002748, 0x00000A35,
0x000500C4, 0x0000000C, 0x0000454E, 0x00003139, 0x00000A11, 0x00050080,
0x0000000C, 0x00004397, 0x00002AB6, 0x0000454E, 0x000500C4, 0x0000000C,
0x000018E7, 0x00004397, 0x00000A0D, 0x000500C7, 0x0000000C, 0x000027B1,
0x000018E7, 0x000009DB, 0x000500C4, 0x0000000C, 0x00002F76, 0x000027B1,
0x00000A0E, 0x00050080, 0x0000000C, 0x00003C4C, 0x00004A8E, 0x00002F76,
0x000500C7, 0x0000000C, 0x00003397, 0x000018E7, 0x00000A38, 0x00050080,
0x0000000C, 0x00004D30, 0x00003C4C, 0x00003397, 0x000500C7, 0x0000000C,
0x000047B5, 0x00002748, 0x00000A0E, 0x000500C4, 0x0000000C, 0x0000544D,
0x000047B5, 0x00000A17, 0x00050080, 0x0000000C, 0x00004159, 0x00004D30,
0x0000544D, 0x000500C7, 0x0000000C, 0x00005022, 0x00004159, 0x0000040B,
0x000500C4, 0x0000000C, 0x00002416, 0x00005022, 0x00000A14, 0x000500C7,
0x0000000C, 0x00004A33, 0x00002748, 0x00000A3B, 0x000500C4, 0x0000000C,
0x00002F77, 0x00004A33, 0x00000A20, 0x00050080, 0x0000000C, 0x0000415A,
0x00002416, 0x00002F77, 0x000500C7, 0x0000000C, 0x00004ADF, 0x00004159,
0x00000388, 0x000500C4, 0x0000000C, 0x0000544E, 0x00004ADF, 0x00000A11,
0x00050080, 0x0000000C, 0x00004144, 0x0000415A, 0x0000544E, 0x000500C7,
0x0000000C, 0x00005083, 0x00002748, 0x00000A23, 0x000500C3, 0x0000000C,
0x000041BF, 0x00005083, 0x00000A11, 0x000500C3, 0x0000000C, 0x00001EEC,
0x00001903, 0x00000A14, 0x00050080, 0x0000000C, 0x000035B6, 0x000041BF,
0x00001EEC, 0x000500C7, 0x0000000C, 0x00005453, 0x000035B6, 0x00000A14,
0x000500C4, 0x0000000C, 0x0000544F, 0x00005453, 0x00000A1D, 0x00050080,
0x0000000C, 0x00003C4D, 0x00004144, 0x0000544F, 0x000500C7, 0x0000000C,
0x00002E06, 0x00004159, 0x00000AC8, 0x00050080, 0x0000000C, 0x0000394F,
0x00003C4D, 0x00002E06, 0x0004007C, 0x0000000B, 0x00005670, 0x0000394F,
0x000200F9, 0x00005341, 0x000200F8, 0x00005341, 0x000700F5, 0x0000000B,
0x000024FC, 0x0000566F, 0x0000537D, 0x00005670, 0x00002DD9, 0x00050084,
0x00000011, 0x00003FA8, 0x00001F69, 0x00001997, 0x00050082, 0x00000011,
0x00003BBC, 0x00005C0B, 0x00003FA8, 0x00050051, 0x0000000B, 0x00001C87,
0x00001997, 0x00000000, 0x00050051, 0x0000000B, 0x00005962, 0x00001997,
0x00000001, 0x00050084, 0x0000000B, 0x00003372, 0x00001C87, 0x00005962,
0x00050084, 0x0000000B, 0x00003CA0, 0x000024FC, 0x00003372, 0x00050051,
0x0000000B, 0x00003ED4, 0x00003BBC, 0x00000000, 0x00050084, 0x0000000B,
0x00003E12, 0x00003ED4, 0x00005962, 0x00050051, 0x0000000B, 0x00001AE6,
0x00003BBC, 0x00000001, 0x00050080, 0x0000000B, 0x00002B25, 0x00003E12,
0x00001AE6, 0x000500C4, 0x0000000B, 0x0000609D, 0x00002B25, 0x00000A13,
0x000500C7, 0x0000000B, 0x00005AB3, 0x000049F1, 0x00000A1F, 0x00050080,
0x0000000B, 0x00002557, 0x0000609D, 0x00005AB3, 0x000500C4, 0x0000000B,
0x000040AD, 0x00002557, 0x00000A0D, 0x00050080, 0x0000000B, 0x00004EAA,
0x00003CA0, 0x000040AD, 0x00050080, 0x0000000B, 0x0000453C, 0x00005B88,
0x00004EAA, 0x000500C2, 0x0000000B, 0x000036D8, 0x0000453C, 0x00000A16,
0x000500C2, 0x0000000B, 0x00002DF6, 0x00004FA3, 0x00000A10, 0x000500C7,
0x0000000B, 0x000020CA, 0x00002DF6, 0x00000A13, 0x00060041, 0x00000294,
0x000050F7, 0x0000107A, 0x00000A0B, 0x000036D8, 0x0004003D, 0x00000017,
0x00002585, 0x000050F7, 0x000500AA, 0x00000009, 0x00005272, 0x000020CA,
0x00000A0D, 0x000300F7, 0x00002C98, 0x00000000, 0x000400FA, 0x00005272,
0x00002957, 0x00002C98, 0x000200F8, 0x00002957, 0x000500C7, 0x00000017,
0x0000475F, 0x00002585, 0x000009CE, 0x000500C4, 0x00000017, 0x000024D1,
0x0000475F, 0x0000013D, 0x000500C7, 0x00000017, 0x000050AC, 0x00002585,
0x0000072E, 0x000500C2, 0x00000017, 0x0000448D, 0x000050AC, 0x0000013D,
0x000500C5, 0x00000017, 0x00003FF8, 0x000024D1, 0x0000448D, 0x000200F9,
0x00002C98, 0x000200F8, 0x00002C98, 0x000700F5, 0x00000017, 0x00004D37,
0x00002585, 0x00005341, 0x00003FF8, 0x00002957, 0x00060041, 0x00000294,
0x000060F9, 0x0000140E, 0x00000A0B, 0x000054A6, 0x0003003E, 0x000060F9,
0x00004D37, 0x00050080, 0x0000000B, 0x00003CAC, 0x000054A6, 0x00000A0E,
0x000500AC, 0x00000009, 0x00001911, 0x00001C87, 0x00000A0D, 0x000300F7,
0x000060BC, 0x00000002, 0x000400FA, 0x00001911, 0x00005084, 0x00005094,
0x000200F8, 0x00005084, 0x00050086, 0x0000000B, 0x00003697, 0x000019EE,
0x00001C87, 0x00050084, 0x0000000B, 0x0000237E, 0x00003697, 0x00001C87,
0x00050082, 0x0000000B, 0x00003171, 0x000019EE, 0x0000237E, 0x00050080,
0x0000000B, 0x00002527, 0x00003171, 0x00000A0D, 0x000500AA, 0x00000009,
0x0000343F, 0x00002527, 0x00001C87, 0x000300F7, 0x00001EED, 0x00000000,
0x000400FA, 0x0000343F, 0x0000569E, 0x00002191, 0x000200F8, 0x0000569E,
0x00050084, 0x0000000B, 0x00004B59, 0x00000ACA, 0x00001C87, 0x000500C4,
0x0000000B, 0x0000540F, 0x00003171, 0x00000A16, 0x00050082, 0x0000000B,
0x00004944, 0x00004B59, 0x0000540F, 0x000200F9, 0x00001EED, 0x000200F8,
0x00002191, 0x000200F9, 0x00001EED, 0x000200F8, 0x00001EED, 0x000700F5,
0x0000000B, 0x0000292C, 0x00004944, 0x0000569E, 0x00000A3A, 0x00002191,
0x000200F9, 0x000060BC, 0x000200F8, 0x00005094, 0x000200F9, 0x000060BC,
0x000200F8, 0x000060BC, 0x000700F5, 0x0000000B, 0x000029BC, 0x0000292C,
0x00001EED, 0x00000ACA, 0x00005094, 0x00050084, 0x0000000B, 0x0000492B,
0x000029BC, 0x00005962, 0x000500C2, 0x0000000B, 0x0000406D, 0x0000492B,
0x00000A16, 0x00050080, 0x0000000B, 0x0000336B, 0x000036D8, 0x0000406D,
0x00060041, 0x00000294, 0x0000571A, 0x0000107A, 0x00000A0B, 0x0000336B,
0x0004003D, 0x00000017, 0x000019B2, 0x0000571A, 0x000300F7, 0x00002C99,
0x00000000, 0x000400FA, 0x00005272, 0x00002958, 0x00002C99, 0x000200F8,
0x00002958, 0x000500C7, 0x00000017, 0x00004760, 0x000019B2, 0x000009CE,
0x000500C4, 0x00000017, 0x000024D2, 0x00004760, 0x0000013D, 0x000500C7,
0x00000017, 0x000050AD, 0x000019B2, 0x0000072E, 0x000500C2, 0x00000017,
0x0000448E, 0x000050AD, 0x0000013D, 0x000500C5, 0x00000017, 0x00003FF9,
0x000024D2, 0x0000448E, 0x000200F9, 0x00002C99, 0x000200F8, 0x00002C99,
0x000700F5, 0x00000017, 0x00004D39, 0x000019B2, 0x000060BC, 0x00003FF9,
0x00002958, 0x00060041, 0x00000294, 0x00001F75, 0x0000140E, 0x00000A0B,
0x00003CAC, 0x0003003E, 0x00001F75, 0x00004D39, 0x000200F9, 0x00004C7A,
0x000200F8, 0x00004C7A, 0x000100FD, 0x00010038,
};
|
# Copyright (C) 2021-2022 Modin authors
#
# SPDX-License-Identifier: Apache-2.0
|
/*
* Copyright 2018 Jonathan Dieter <jdieter@gmail.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <stdbool.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <argp.h>
#include <zck.h>
#include "util_common.h"
/* Program description shown at the top of argp's --help output. */
static char doc[] = "zck_delta_size - Calculate the difference between"
                    " two zchunk files";

/* Usage synopsis: both positional file arguments are required. */
static char args_doc[] = "<file 1> <file 2>";

/* Command-line options recognised by argp. */
static struct argp_option options[] = {
    {"verbose", 'v', 0, 0,
    "Increase verbosity (can be specified more than once for debugging)"},
    {"version", 'V', 0, 0, "Show program version"},
    { 0 }  /* terminator */
};

/* Parsed command-line state threaded through parse_opt() via state->input. */
struct arguments {
    char *args[2];           /* positional arguments: <file 1>, <file 2> */
    zck_log_type log_level;  /* verbosity passed to zck_set_log_level() */
    bool exit;               /* set after --version: skip work, exit(retval) */
};
/*
 * argp callback: record options and positional arguments into the
 * struct arguments supplied through state->input.
 */
static error_t parse_opt (int key, char *arg, struct argp_state *state) {
    struct arguments *arguments = state->input;

    /* After --version has been handled, ignore any remaining input. */
    if(arguments->exit)
        return 0;

    if(key == 'v') {
        /* Each -v lowers the log level; clamp at the most verbose level. */
        arguments->log_level--;
        if(arguments->log_level < ZCK_LOG_DDEBUG)
            arguments->log_level = ZCK_LOG_DDEBUG;
    } else if(key == 'V') {
        version();
        arguments->exit = true;
    } else if(key == ARGP_KEY_ARG) {
        /* Accept exactly two positional arguments. */
        if (state->arg_num >= 2) {
            argp_usage (state);
            return EINVAL;
        }
        arguments->args[state->arg_num] = arg;
    } else if(key == ARGP_KEY_END) {
        if (state->arg_num < 2) {
            argp_usage (state);
            return EINVAL;
        }
    } else {
        return ARGP_ERR_UNKNOWN;
    }
    return 0;
}

static struct argp argp = {options, parse_opt, args_doc, doc};
/*
 * Estimate how much of <file 2> (target) would need downloading when
 * <file 1> (source) is available locally: target chunks whose digest
 * matches any source chunk are assumed free; all others count toward
 * the download size.
 */
int main (int argc, char *argv[]) {
    struct arguments arguments = {0};

    /* Defaults */
    arguments.log_level = ZCK_LOG_ERROR;

    int retval = argp_parse(&argp, argc, argv, 0, 0, &arguments);
    if(retval || arguments.exit)
        exit(retval);

    zck_set_log_level(arguments.log_level);

    /* Open the source file and read its zchunk header and index. */
    int src_fd = open(arguments.args[0], O_RDONLY | O_BINARY);
    if(src_fd < 0) {
        LOG_ERROR("Unable to open %s\n", arguments.args[0]);
        perror("");
        exit(1);
    }
    zckCtx *zck_src = zck_create();
    if(zck_src == NULL) {
        LOG_ERROR("%s", zck_get_error(NULL));
        zck_clear_error(NULL);
        exit(1);
    }
    if(!zck_init_read(zck_src, src_fd)) {
        LOG_ERROR("Error reading %s: %s", arguments.args[0],
                  zck_get_error(zck_src));
        zck_free(&zck_src);
        exit(1);
    }
    close(src_fd);

    /* Open the target file and read its zchunk header and index. */
    int tgt_fd = open(arguments.args[1], O_RDONLY | O_BINARY);
    if(tgt_fd < 0) {
        LOG_ERROR("Unable to open %s\n", arguments.args[1]);
        perror("");
        zck_free(&zck_src);
        exit(1);
    }
    zckCtx *zck_tgt = zck_create();
    if(zck_tgt == NULL) {
        LOG_ERROR("%s", zck_get_error(NULL));
        zck_clear_error(NULL);
        zck_free(&zck_src);
        exit(1);
    }
    if(!zck_init_read(zck_tgt, tgt_fd)) {
        LOG_ERROR("Error reading %s: %s", arguments.args[1],
                  zck_get_error(zck_tgt));
        zck_free(&zck_src);
        zck_free(&zck_tgt);
        exit(1);
    }
    close(tgt_fd);

    /* Chunk digests are only comparable when both files use the same hash. */
    if(zck_get_chunk_hash_type(zck_tgt) != zck_get_chunk_hash_type(zck_src)) {
        LOG_ERROR("ERROR: Chunk hash types don't match:\n");
        /* BUG FIX: pair each file name with its own hash type --
         * args[0] was opened into zck_src, args[1] into zck_tgt, but the
         * original printed args[0] with zck_tgt's hash and vice versa. */
        LOG_ERROR("  %s: %s\n", arguments.args[0],
                  zck_hash_name_from_type(zck_get_chunk_hash_type(zck_src)));
        LOG_ERROR("  %s: %s\n", arguments.args[1],
                  zck_hash_name_from_type(zck_get_chunk_hash_type(zck_tgt)));
        zck_free(&zck_src);
        zck_free(&zck_tgt);
        exit(1);
    }

    /* The first chunk of each file is compared as the compression dict. */
    zckChunk *tgt_idx = zck_get_first_chunk(zck_tgt);
    zckChunk *src_idx = zck_get_first_chunk(zck_src);
    if(tgt_idx == NULL || src_idx == NULL)
        exit(1);
    if(!zck_compare_chunk_digest(tgt_idx, src_idx))
        LOG_ERROR("WARNING: Dicts don't match\n");

    /* The header always has to be downloaded. */
    ssize_t dl_size = zck_get_header_length(zck_tgt);
    if(dl_size < 0)
        exit(1);
    ssize_t total_size = zck_get_header_length(zck_tgt);
    ssize_t matched_chunks = 0;

    /* For every target chunk, scan the source for a matching digest.
     * Matched chunks are free; unmatched chunks add their compressed
     * size to the download estimate. */
    for(tgt_idx = zck_get_first_chunk(zck_tgt); tgt_idx;
        tgt_idx = zck_get_next_chunk(tgt_idx)) {
        bool found = false;

        for(src_idx = zck_get_first_chunk(zck_src); src_idx;
            src_idx = zck_get_next_chunk(src_idx)) {
            if(zck_compare_chunk_digest(tgt_idx, src_idx)) {
                found = true;
                break;
            }
        }
        if(!found) {
            dl_size += zck_get_chunk_comp_size(tgt_idx);
        } else {
            matched_chunks += 1;
        }
        total_size += zck_get_chunk_comp_size(tgt_idx);
    }
    printf("Would download %li of %li bytes\n", (long)dl_size,
           (long)total_size);
    printf("Matched %li of %lu chunks\n", (long)matched_chunks,
           (long unsigned)zck_get_chunk_count(zck_tgt));
    zck_free(&zck_tgt);
    zck_free(&zck_src);
    /* Explicit success status (previously fell off the end of main). */
    return 0;
}
|
// fs-extra layers promise-based helpers (ensureDir, copy, remove) on top of fs.
const fs = require('fs-extra')
const path = require('path')

// Fallback used when the constructor is invoked with no options object.
// NOTE(review): `cleanup` is never read anywhere in this file — presumably a
// leftover option; verify against callers before removing it.
const defaultOptions = {
    cleanup: true
}
/**
 * Stores files under a public directory on the local filesystem and maps
 * between absolute paths and public (URL-style) paths.
 */
class LocalFsStorage {
  /**
   * @param {object} options
   * @param {!string|function(...*): string} options.publicBasepath
   *        basepath relative to the public folder, or a function of the
   *        extra `data` arguments that returns one
   * @param {!string} options.pathToPublic absolute path to the public folder
   * @throws {Error} when either required option is missing
   */
  constructor ({ publicBasepath, pathToPublic } = defaultOptions) {
    if (!publicBasepath) throw new Error(NO_PUBLIC_BASEPATH_ERROR)
    if (!pathToPublic) throw new Error(NO_PATH_TO_PUBLIC_ERROR)
    this.publicBasepath = publicBasepath
    this.pathToPublic = pathToPublic
  }

  /**
   * Absolute directory files are stored in.
   * @param {...*} data forwarded to `publicBasepath` when it is a function
   * @return {string} absolute base path
   */
  getBasepath (...data) {
    const publicPath =
      typeof this.publicBasepath === 'function'
        ? this.publicBasepath(...data)
        : this.publicBasepath
    return path.join(this.pathToPublic, publicPath)
  }

  /**
   * Convert an absolute path under `pathToPublic` into a public path.
   * Uses a plain prefix check instead of building a RegExp from
   * `pathToPublic`, which misbehaved when the path contained regex
   * metacharacters (e.g. backslashes or dots).
   * @param {string} absPath
   * @return {string} normalized public path
   */
  getPublicPath (absPath) {
    const stripped = absPath.startsWith(this.pathToPublic)
      ? '/' + absPath.slice(this.pathToPublic.length)
      : absPath
    return path.normalize(stripped)
  }

  /**
   * Build a function that resolves a public path back to an absolute path.
   * @param {string} publicPath
   * @return {function(...*): string}
   */
  getAbsPath (publicPath) {
    return (...data) => {
      const basename = path.basename(publicPath)
      return path.join(this.getBasepath(...data), basename)
    }
  }

  /**
   * Store a local file under the public base path.
   * @param {string} filename path of the local file to store
   * @param {...*} data forwarded to `publicBasepath` when it is a function
   * @return {Promise<string>} public path of the stored file
   */
  async write (filename, ...data) {
    const basepath = this.getBasepath(...data)
    const basename = path.basename(filename)
    const absPath = path.join(basepath, basename)
    // The previous try/catch only rethrew the error verbatim; let
    // rejections propagate naturally instead.
    await fs.ensureDir(basepath)
    if (filename !== absPath) {
      // copy file to destination, then drop the tmp copy
      await fs.copy(filename, absPath)
      await fs.remove(filename)
    }
    return this.getPublicPath(absPath)
  }

  /**
   * Remove a stored file and prune any parent directories left empty,
   * stopping at `pathToPublic`.
   * @param {string} publicPath public path to remove
   * @param {...*} data forwarded to `publicBasepath` when it is a function
   * @return {Promise<void>}
   */
  async remove (publicPath, ...data) {
    try {
      let filename = this.getAbsPath(publicPath)(...data)
      let files = []
      do {
        await fs.remove(filename)
        filename = path.dirname(filename)
        files = await fs.readdir(filename)
      } while (!files.length && filename !== this.pathToPublic)
    } catch (err) {
      // don't throw if file was already removed
      if (err.code !== 'ENOENT') {
        throw err
      }
      console.warn(err)
    }
  }
}
// Error messages thrown by the constructor when a required option is missing.
const NO_PUBLIC_BASEPATH_ERROR =
    'options.publicBasepath must be provided for LocalFsStorage'
const NO_PATH_TO_PUBLIC_ERROR =
    'options.pathToPublic must be provided for LocalFsStorage'

// Expose the error strings on the class in test runs so suites can
// assert against the exact messages.
if (process.env.NODE_ENV === 'test') {
    Object.assign(LocalFsStorage, {
        NO_PUBLIC_BASEPATH_ERROR,
        NO_PATH_TO_PUBLIC_ERROR
    })
}

module.exports = LocalFsStorage
|
# Imports
from random import uniform, randint
class Number:
    """Random-number column generator producing integer/float values in a range."""

    def __init__(self, type_n='random', max_n=9999, min_n=-9999):
        self.type = 'Number'
        self.type_n = type_n  # 'integer' | 'float' | 'random'
        self.max_n = float(max_n)
        self.min_n = float(min_n)

    def generate(self):
        """Return one random value as a string, honouring the configured type."""
        # Swap the bounds if they were supplied in the wrong order.
        if self.max_n < self.min_n:
            self.max_n, self.min_n = self.min_n, self.max_n
        n = uniform(self.min_n, self.max_n)
        # 'integer' always truncates; 'random' truncates about half the time.
        if self.type_n == 'integer' or (self.type_n == 'random' and randint(0, 10) % 2):
            n = int(n)
        return str(n)

    def config(self, current=None):
        """Interactively edit this column's configuration.

        ``current`` may be an existing ``Number`` whose settings seed the menu.
        """
        if current is None:
            num_type = 'integer'
            max_n = '9999'
            min_n = '-9999'
        else:
            # BUG FIX: the attribute is ``type_n`` (``num_type`` never existed),
            # and the stored bounds are floats, so render them as strings
            # before concatenating into the menu text below.
            num_type = current.type_n
            max_n = str(current.max_n)
            min_n = str(current.min_n)
        while True:
            print('\nEdit the configuration of the column (type: Number):')
            print('''
[1] Number type: ''' + num_type + '''
[2] Maximum number: ''' + max_n + '''
[3] Minimum number: ''' + min_n + '''
[0] Save column configuration and exit''')
            selection = input('Select an option: ')
            while selection == '' or not selection.isdigit():
                selection = input('Select an option: ')
            if selection == '0':
                break
            elif selection == '1':
                selection = input('\nSelect a type [integer, float, random]: ')
                while not selection in ['integer', 'float', 'random']:
                    selection = input('Select a type [integer, float, random]: ')
                num_type = selection
            elif selection == '2':
                selection = input('\nSelect the maximum number: ')
                while not selection.replace('-', '').replace('.', '').isdigit():
                    selection = input('\nSelect the maximum number: ')
                max_n = selection
            elif selection == '3':
                selection = input('\nSelect the minimum number: ')
                while not selection.replace('-', '').replace('.', '').isdigit():
                    selection = input('\nSelect the minimum number: ')
                min_n = selection
        self.type_n = num_type
        # BUG FIX: convert back to floats so generate() keeps working after
        # config() (the menu works with strings throughout).
        self.max_n = float(max_n)
        self.min_n = float(min_n)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayOfflineProviderEquipmentAuthRemoveResponse(AlipayResponse):
    """Response model for alipay.offline.provider.equipment.auth.remove."""

    def __init__(self):
        super(AlipayOfflineProviderEquipmentAuthRemoveResponse, self).__init__()
        # Backing fields for the response payload.
        self._device_id = None
        self._merchant_pid = None

    @property
    def device_id(self):
        return self._device_id

    @device_id.setter
    def device_id(self, value):
        self._device_id = value

    @property
    def merchant_pid(self):
        return self._merchant_pid

    @merchant_pid.setter
    def merchant_pid(self, value):
        self._merchant_pid = value

    def parse_response_content(self, response_content):
        """Populate the known fields from the parsed response dictionary."""
        response = super(AlipayOfflineProviderEquipmentAuthRemoveResponse,
                         self).parse_response_content(response_content)
        for key in ('device_id', 'merchant_pid'):
            if key in response:
                setattr(self, key, response[key])
|
// Project generator configuration: use Vuetify as the UI component framework.
module.exports = {
    componentFramework: 'vuetify',
};
|
const mongoose = require("mongoose")
// Mongoose schema describing an application user account.
const UserSchema = new mongoose.Schema(
    {
        // Unique login/display name.
        username:{
            type:String,
            required:true,
            unique:true
        },
        // Unique contact address.
        email:{
            type:String,
            required:true,
            unique:true
        },
        // NOTE(review): stored as a plain String — presumably hashed by the
        // caller before save; verify against the registration handler.
        password:{
            type: String,
            required: true,
        },
    },
    // Adds createdAt/updatedAt fields automatically.
    {timestamps:true }
);

// Compile the schema into the "User" model.
module.exports = mongoose.model("User", UserSchema)
|
g_db.quests[11373]={id:11373,name:"Initial Move",type:0,trigger_policy:0,on_give_up_parent_fail:1,on_success_parent_success:0,can_give_up:1,can_retake:1,can_retake_after_failure:1,on_fail_parent_fail:0,fail_on_death:1,simultaneous_player_limit:0,ai_trigger:0,ai_trigger_enable:0,auto_trigger:0,trigger_on_death:0,remove_obtained_items:1,recommended_level:0,show_quest_title:0,show_as_gold_quest:0,start_npc:18325,finish_npc:0,is_craft_skill_quest:0,can_be_found:0,show_direction:1,level_min:60,level_max:69,dontshow_under_level_min:1,premise_coins:0,dontshow_without_premise_coins:1,req_reputation_min:0,req_reputation_max:0,dontshow_without_req_reputation:1,premise_quests:[],req_cultivation:0,dontshow_without_req_cultivation:1,req_faction_role:0,dontshow_without_req_faction_role:1,req_gender:0,dontshow_wrong_gender:1,req_class:0,dontshow_wrong_class:1,req_be_married:0,dontshow_without_marriage:0,req_be_gm:0,req_global_quest:0,req_global_quest_cond:0,quests_mutex:[11173,],req_blacksmith_level:0,req_tailor_level:0,req_craftsman_level:0,req_apothecary_level:0,special_award_type:0,is_team_task:0,recv_in_team_only:0,req_success_type:1,req_npc_type:0,briefing:"Finish the quest from the teleport master and he will reward you with the teleport 
die.",parent_quest:0,previous_quest:0,next_quest:0,sub_quest_first:0,dialogue:{},on_success:{normal:{xp:0,sp:0,coins:0,rep:0,culti:0,chi:0,level_multiplier:0,new_waypoint:0,storage_slots:0,inventory_slots:0,petbag_slots:0,ai_trigger:0,ai_trigger_enable:0,divorce:0,item_groups:[{chosen_randomly:1,items:[{id:18449,is_common:1,amount:1,probability:0.17000000,},{id:18450,is_common:1,amount:1,probability:0.17000000,},{id:18451,is_common:1,amount:1,probability:0.16500001,},{id:18452,is_common:1,amount:1,probability:0.16500001,},{id:18453,is_common:1,amount:1,probability:0.16500001,},{id:18454,is_common:1,amount:1,probability:0.16500001,},]},],},by_time:[],by_item_cnt:[],},on_failure:{normal:{xp:0,sp:0,coins:0,rep:0,culti:0,chi:0,level_multiplier:0,new_waypoint:0,storage_slots:0,inventory_slots:0,petbag_slots:0,ai_trigger:0,ai_trigger_enable:0,divorce:0,item_groups:[{chosen_randomly:0,items:[{id:18699,is_common:1,amount:1,probability:1.00000000,},]},],},by_time:[],by_item_cnt:[],},children:[]};
|
from string import ascii_lowercase
# Read the polymer string from the puzzle input, one unit per character.
# Using a context manager closes the file handle (previously leaked).
with open("input") as file:
    polymers = list(file.read().strip())
def react(polymers):
    """Collapse a polymer by repeatedly removing adjacent pairs of the
    same letter in opposite cases; return the fully reacted unit list."""
    stack = []
    for unit in polymers:
        if (stack
                and unit.upper() == stack[-1].upper()
                and unit.isupper() != stack[-1].isupper()):
            # The incoming unit annihilates the unit on top of the stack.
            stack.pop()
        else:
            stack.append(unit)
    return stack
# Part 1: length of the fully reacted polymer.
# Parenthesised so the statement is valid under both Python 2 and 3
# (the bare `print len(...)` form was Python-2-only).
print(len(react(polymers)))
# Part 2: remove every occurrence of one unit type (both cases) and find
# the shortest possible fully reacted polymer.
with open("input") as file:
    polymers = list(file.read().strip())
minlen = len(polymers)
for c in ascii_lowercase:
    # Strip both cases of the candidate unit before reacting.
    # (List comprehension instead of filter(): materialised in both
    # Python 2 and 3, and the py2-only `print minlen` is parenthesised.)
    filtered = [unit for unit in polymers if unit.lower() != c]
    thislen = len(react(filtered))
    if thislen < minlen:
        minlen = thislen
print(minlen)
|
"""Module for the frequency weightings of the sound pressure level.
The standards for the weightings are defined by ANSI [1] and IEC [2].
The digital filters designed by bilinear transform with prewarping is
introduced by [3].
References
----------
[1] American National Standards Institute, “ANSI S1.43:
Specifications for Integrating-averaging Sound Level Meters,”
Acoustical Society of America, 1997.
[2] International Electrotechnical Commission, “Electroacoustics:
Sound Level Meters. Specifications. Part 1,”
International Electrotechnical Commission, 2013.
[3] A. N. Rimell, N. J. Mansfield, and G. S. Paddan,
“Design of digital filters for frequency weightings (A and C)
required for risk assessments of workers exposed to noise,”
Ind. Health, pp. 3–2013, 2014.
"""
from collections.abc import Sized
from functools import lru_cache
from typing import Optional, Union, Sequence, Tuple
import warnings
import librosa
import numpy as np
import pandas as pd
from scipy import signal
################################################################################
################################################################################
### Constant values
################################################################################
################################################################################
# Design-goal frequency response (columns A/B/C/Z, dB relative to 1 kHz)
# plus the per-standard tolerance bands around it, one row per nominal
# frequency [Hz]. Each `<std>_upper` / `<std>_lower` pair is the allowed
# signed deviation; float('-inf') marks a bound that is open on that side.
TOLERANCE = pd.DataFrame(
    [
        [ 10, -70.4, -38.2, -14.3, 0.0, +3.0, float('-inf'), +5.0, float('-inf'), +2.0, -5.0, +4.0, -4.0, +5.0, float('-inf')],
        [ 12.5, -63.4, -33.2, -11.2, 0.0, +2.5, float('-inf'), +5.0, float('-inf'), +2.0, -4.0, +3.5, -3.5, +5.0, float('-inf')],
        [ 16, -56.7, -28.5, -8.5, 0.0, +2.0, -4.0, +5.0, float('-inf'), +2.0, -3.0, +3.0, -3.0, +5.0, float('-inf')],
        [ 20, -50.5, -24.2, -6.2, 0.0, +2.0, -2.0, +3.0, -3.0, +2.0, -2.0, +2.5, -2.5, +3.0, -3.0],
        [ 25, -44.7, -20.4, -4.4, 0.0, +2.0, -1.5, +3.0, -3.0, +1.5, -1.5, +2.0, -2.0, +3.0, -3.0],
        [ 31.5, -39.4, -17.1, -3.0, 0.0, +1.5, -1.5, +3.0, -3.0, +1.0, -1.0, +1.5, -1.5, +3.0, -3.0],
        [ 40, -34.6, -14.2, -2.0, 0.0, +1.0, -1.0, +2.0, -2.0, +1.0, -1.0, +1.5, -1.5, +2.0, -2.0],
        [ 50, -30.2, -11.6, -1.3, 0.0, +1.0, -1.0, +2.0, -2.0, +1.0, -1.0, +1.0, -1.0, +2.0, -2.0],
        [ 63, -26.2, -9.3, -0.8, 0.0, +1.0, -1.0, +2.0, -2.0, +1.0, -1.0, +1.0, -1.0, +2.0, -2.0],
        [ 80, -22.5, -7.4, -0.5, 0.0, +1.0, -1.0, +2.0, -2.0, +1.0, -1.0, +1.0, -1.0, +1.5, -1.5],
        [ 100, -19.1, -5.6, -0.3, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 125, -16.1, -4.2, -0.2, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 160, -13.4, -3.0, -0.1, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 200, -10.9, -2.0, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 250, -8.6, -1.3, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 315, -6.6, -0.8, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 400, -4.8, -0.5, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 500, -3.2, -0.3, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 630, -1.9, -0.1, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 800, -0.8, 0.0, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 1_000, 0.0, 0.0, 0.0, 0.0, +0.7, -0.7, +1.0, -1.0, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 1_250, +0.6, 0.0, 0.0, 0.0, +1.0, -1.0, +1.5, -1.5, +0.7, -0.7, +1.0, -1.0, +1.5, -1.5],
        [ 1_600, +1.0, 0.0, -0.1, 0.0, +1.0, -1.0, +2.0, -2.0, +0.7, -0.7, +1.0, -1.0, +2.0, -2.0],
        [ 2_000, +1.2, -0.1, -0.2, 0.0, +1.0, -1.0, +2.0, -2.0, +0.7, -0.7, +1.0, -1.0, +2.0, -2.0],
        [ 2_500, +1.3, -0.2, -0.3, 0.0, +1.0, -1.0, +2.5, -2.5, +0.7, -0.7, +1.0, -1.0, +2.5, -2.5],
        [ 3_150, +1.2, -0.4, -0.5, 0.0, +1.0, -1.0, +2.5, -2.5, +0.7, -0.7, +1.0, -1.0, +2.5, -2.5],
        [ 4_000, +1.0, -0.7, -0.8, 0.0, +1.0, -1.0, +3.0, -3.0, +0.7, -0.7, +1.0, -1.0, +3.0, -3.0],
        [ 5_000, +0.5, -1.2, -1.3, 0.0, +1.5, -1.5, +3.5, -3.5, +1.0, -1.0, +1.5, -1.5, +3.5, -3.5],
        [ 6_300, -0.1, -1.9, -2.0, 0.0, +1.5, -2.0, +4.5, -4.5, +1.0, -1.5, +1.5, -2.0, +4.5, -4.5],
        [ 8_000, -1.1, -2.9, -3.0, 0.0, +1.5, -2.5, +5.0, -5.0, +1.0, -2.0, +1.5, -3.0, +5.0, -5.0],
        [10_000, -2.5, -4.3, -4.4, 0.0, +2.0, -3.0, +5.0, float('-inf'), +2.0, -3.0, +2.0, -4.0, +5.0, float('-inf')],
        [12_500, -4.3, -6.1, -6.2, 0.0, +2.0, -5.0, +5.0, float('-inf'), +2.0, -3.0, +3.0, -6.0, +5.0, float('-inf')],
        [16_000, -6.6, -8.4, -8.5, 0.0, +2.5, -16.0, +5.0, float('-inf'), +2.0, -3.0, +3.0, float('-inf'), +5.0, float('-inf')],
        [20_000, -9.3, -11.1, -11.2, 0.0, +3.0, float('-inf'), +5.0, float('-inf'), +2.0, -3.0, +3.0, float('-inf'), +5.0, float('-inf')],
    ],
    columns=[
        'Frequency', 'A', 'B', 'C', 'Z',
        'IEC1_upper', 'IEC1_lower',
        'IEC2_upper', 'IEC2_lower',
        'ANSI0_upper', 'ANSI0_lower',
        'ANSI1_upper', 'ANSI1_lower',
        'ANSI2_upper', 'ANSI2_lower',
    ]
)
# Nominal frequencies [Hz], frozen so cached consumers cannot mutate them.
FREQUENCY = TOLERANCE['Frequency'].values
FREQUENCY.setflags(write=False)
################################################################################
################################################################################
### Helper functions
################################################################################
################################################################################
def abs2(x: np.ndarray) -> np.ndarray:
    """Element-wise squared magnitude of ``x``.

    Parameters
    ----------
    x : numpy.ndarray
        Wave (real or complex).

    Returns
    -------
    x_abs2 : numpy.ndarray
        ``|x|**2`` element-wise.
    """
    if np.iscomplexobj(x):
        # |a+bi|^2 == a^2 + b^2, computed without the sqrt of np.abs.
        return x.real**2 + x.imag**2
    return x**2
def calc_db(x: np.ndarray, min_db: Optional[float] = None) -> np.ndarray:
    """Calculate the decibel value of a given array.

    Parameters
    ----------
    x : numpy.ndarray
        Wave (real or complex; the squared magnitude is used).
    min_db : float, optional
        Minimum threshold [dB].
        If not `None`, values less than `min_db` will be clipped to `min_db`.
        By default, `None`.

    Returns
    -------
    x_db : numpy.ndarray
        ``10*log10(|x|**2)`` element-wise; ``-inf`` where ``x == 0``
        unless clipped by `min_db`.
    """
    with warnings.catch_warnings():
        # log10(0) legitimately yields -inf here; silence the divide warning.
        warnings.simplefilter('ignore', RuntimeWarning)
        db = 10*np.log10(abs2(x))
    if min_db is not None:
        # np.maximum also handles 0-d results, unlike the previous
        # boolean-mask assignment (db[db < min_db] = min_db).
        db = np.maximum(db, min_db)
    return db
def frame_wave(
    x: np.ndarray,
    fs: float,
    sec: float = 1.0,
) -> np.ndarray:
    """Split a 1-D wave into consecutive, non-overlapping frames.

    Each frame holds ``round(fs * sec)`` samples; any trailing remainder
    that cannot fill a whole frame is discarded.

    Parameters
    ----------
    x : numpy.ndarray
        Wave (1-D).
    fs : float
        Sampling frequency [Hz].
    sec : float
        Frame width [second].
        By default, `1.0`.

    Returns
    -------
    x_frame : np.ndarray [shape=(n_frames, round(fs*sec))]
        The frames.
    """
    assert x.ndim == 1, 'x.ndim must be 1.'
    assert fs > 0.0, 'fs must be greater than 0.0.'
    assert sec > 0.0, 'sec must be greater than 0.0.'
    frame_len = round(fs * sec)
    n_frames = x.shape[0] // frame_len
    return x[:n_frames * frame_len].reshape(n_frames, frame_len)
################################################################################
################################################################################
### Filter parameters
################################################################################
################################################################################
@lru_cache()
def zpk_s_weighting(
    weighting: str = 'A',
    *,
    fs: float = 96_000.0,
    prewarping: bool = False,
) -> Tuple[np.ndarray, np.ndarray, float]:
    """Return the zeros, poles and gain of the weightings (analogue).

    The parameters for the weightings are referred to [1].

    Parameters
    ----------
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default, `'A'`.
    fs : float
        Sampling frequency [Hz].
        This parameter is used only when `prewarping == True`.
        By default, `96_000.0`.
    prewarping : bool
        Whether to apply prewarping for the bilinear transform.
        If `False`, this function returns the analogue filter parameters.
        By default, `False`.

    Returns
    -------
    zeros : np.ndarray
    poles : np.ndarray
    gain : np.float64

    Raises
    ------
    ValueError
        If `weighting` is not one of `'A'`, `'B'`, `'C'`, `'Z'`.

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    """
    # Corner frequencies [Hz] from ANSI S1.43 (f1, f2, f3, f4, f5).
    f = np.array([
        20.598997,
        107.65265,
        737.86223,
        12194.22,
        158.48932
    ])
    # BUG FIX: the unconditional `w = f * (2*np.pi)` computed before this
    # branch was a dead store (immediately overwritten) and is removed.
    if prewarping:
        # Prewarp so the bilinear transform maps these frequencies exactly.
        w = (2*fs)*np.tan(f/fs*np.pi)
    else:
        w = f * (2*np.pi)

    # Normalisation gains so each curve is 0 dB at 1 kHz.
    G_A = 10 ** ( 1.9997 / 20.0)
    G_B = 10 ** ( -0.1696 / 20.0)
    G_C = 10 ** ( -0.0619 / 20.0)

    if weighting == 'A':
        zeros = np.array([0.0, 0.0, 0.0, 0.0])
        poles = np.array([-w[0], -w[0], -w[1], -w[2], -w[3], -w[3]])
        gain = G_A * w[3]**2
    elif weighting == 'B':
        zeros = np.array([0.0, 0.0, 0.0])
        poles = np.array([-w[0], -w[0], -w[3], -w[3], -w[4]])
        gain = G_B * w[3]**2
    elif weighting == 'C':
        zeros = np.array([0.0, 0.0])
        poles = np.array([-w[0], -w[0], -w[3], -w[3]])
        gain = G_C * w[3]**2
    elif weighting == 'Z':
        # Z weighting is flat: identity filter.
        zeros = np.array([])
        poles = np.array([])
        gain = 1
    else:
        raise ValueError(
            f"weighting must be either 'A', 'B', 'C', or 'Z'; found {weighting}"
        )
    # Freeze so the lru_cache-shared arrays cannot be mutated by callers.
    zeros.setflags(write=False)
    poles.setflags(write=False)
    return zeros, poles, gain
@lru_cache()
def zpk_z_weighting(
    weighting: str = 'A',
    fs: float = 96_000.0,
) -> Tuple[np.ndarray, np.ndarray, float]:
    """Return the zeros, poles and gain of weightings (digital).

    The parameters for the weightings are referred to [1], and the
    digital filter is obtained by the bilinear transform [2].

    Parameters
    ----------
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default `'A'`.
    fs : float
        Sampling frequency [Hz].
        By default `96_000.0`.

    Returns
    -------
    zeros : np.ndarray
    poles : np.ndarray
    gain : np.float64

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    [2] A. N. Rimell, N. J. Mansfield, and G. S. Paddan,
    “Design of digital filters for frequency weightings (A and C)
    required for risk assessments of workers exposed to noise,”
    Ind. Health, pp. 3–2013, 2014.
    """
    # Design the prewarped analogue prototype, then discretise it.
    analogue = zpk_s_weighting(weighting, fs=fs, prewarping=True)
    zeros, poles, gain = signal.bilinear_zpk(*analogue, fs)
    # Freeze the cached arrays so callers cannot mutate shared state.
    zeros.setflags(write=False)
    poles.setflags(write=False)
    return zeros, poles, gain
@lru_cache()
def sos_weighting(
    weighting: str = 'A',
    fs: float = 96_000.0,
) -> np.ndarray:
    """Return the series of second-order sections of weightings.

    The parameters for the weightings are referred to [1], and the
    digital filter is obtained by the bilinear transform [2].

    Parameters
    ----------
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default `'A'`.
    fs : float
        Sampling frequency [Hz].
        By default `96_000.0`.

    Returns
    -------
    sos : np.ndarray
        Series of second-order sections.

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    [2] A. N. Rimell, N. J. Mansfield, and G. S. Paddan,
    “Design of digital filters for frequency weightings (A and C)
    required for risk assessments of workers exposed to noise,”
    Ind. Health, pp. 3–2013, 2014.
    """
    # Cascaded biquads are numerically better behaved for filtering than
    # a single high-order transfer function.
    zeros, poles, gain = zpk_z_weighting(weighting, fs)
    return signal.zpk2sos(zeros, poles, gain)
################################################################################
################################################################################
### Validation
################################################################################
################################################################################
@lru_cache()
def torelance_standard(
    weighting: str = 'A',
    standard: str = 'ANSI0',
) -> Tuple[np.ndarray, np.ndarray]:
    """Return the tolerance for ANSI or IEC.

    (Note: the function name keeps its historical spelling — renaming it
    would break existing callers.)

    Parameters
    ----------
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default, `'A'`.
    standard : str
        Standard referenced.
        Choose one of the following:
        - `'ANSI0'`: ANSI S.1-43 Type 0 [1]
        - `'ANSI1'`: ANSI S.1-43 Type 1 [1]
        - `'ANSI2'`: ANSI S.1-43 Type 2 [1]
        - `'IEC1'`: IEC 61672-1 Class 1 [2]
        - `'IEC2'`: IEC 61672-1 Class 2 [2]
        By default, `'ANSI0'` (the strictest standard).

    Returns
    -------
    freq : numpy.ndarray [shape=(34, )]
        Frequencies.
    tol : numpy.ndarray [shape=(34, 2)]
        Absolute tolerance bounds [dB].
        `tol[:, 0]` is the upper bound and
        `tol[:, 1]` is the lower bound.
        `tol[f]` corresponds to `freq[f]`.

    Raises
    ------
    ValueError
        If `weighting` or `standard` is not one of the allowed values.

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    [2] International Electrotechnical Commission, “Electroacoustics:
    Sound Level Meters. Specifications. Part 1,”
    International Electrotechnical Commission, 2013.
    """
    # Give the errors messages, matching the f-string style used by
    # zpk_s_weighting (previously these were bare ValueError()).
    if weighting not in {'A', 'B', 'C', 'Z'}:
        raise ValueError(
            f"weighting must be either 'A', 'B', 'C', or 'Z'; found {weighting}"
        )
    if standard not in {'IEC1', 'IEC2', 'ANSI0', 'ANSI1', 'ANSI2'}:
        raise ValueError(
            "standard must be one of 'IEC1', 'IEC2', 'ANSI0', 'ANSI1', "
            f"'ANSI2'; found {standard}"
        )
    # Absolute limits = design-goal curve plus the signed tolerance band.
    db_center = TOLERANCE[weighting].values[:, None]
    db_limit = TOLERANCE[[f'{standard}_upper', f'{standard}_lower']].values
    tol = db_center + db_limit
    return FREQUENCY, tol
@lru_cache()
def satisfied_standard(
    fs: float,
    weighting: str = 'A',
    standard: str = 'ANSI0',
) -> bool:
    """Return whether `standard` for `weighting` can be
    satisfied by a sampling frequency `fs`.

    Parameters
    ----------
    fs : float
        Sampling frequency [Hz].
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default, `'A'`.
    standard : str
        Standard referenced.
        Choose one of the following:
        - `'ANSI0'`: ANSI S.1-43 Type 0 [1]
        - `'ANSI1'`: ANSI S.1-43 Type 1 [1]
        - `'ANSI2'`: ANSI S.1-43 Type 2 [1]
        - `'IEC1'`: IEC 61672-1 Class 1 [2]
        - `'IEC2'`: IEC 61672-1 Class 2 [2]
        By default, `'ANSI0'` (the strictest standard).

    Returns
    -------
    satisfied : bool
        Whether `fs` satisfies `standard` for `weighting`.

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    [2] International Electrotechnical Commission, “Electroacoustics:
    Sound Level Meters. Specifications. Part 1,”
    International Electrotechnical Commission, 2013.
    """
    freq, tol = torelance_standard(weighting, standard)
    norm_freq = freq / fs          # cycles per sample
    omega = (2*np.pi)*norm_freq    # radians per sample
    _, response = signal.freqz_zpk(
        *zpk_z_weighting(weighting, fs), worN=omega)
    # The frequency response above the Nyquist frequency is regarded
    # as -inf dB (|H| forced to 0 before converting to dB).
    response[norm_freq >= 0.5] = 0.0
    response_db = calc_db(response)
    above_lower = tol[:, 1] <= response_db
    below_upper = response_db <= tol[:, 0]
    return np.all(above_lower & below_upper)
@lru_cache()
def minimum_fs(
    weighting: str = 'A',
    standard: str = 'ANSI0',
) -> float:
    """Return the minimum sampling frequency
    to satisfy `standard` for `weighting`.

    Parameters
    ----------
    weighting : str
        Frequency weighting of the sound pressure.
        `weighting` must be either `'A'`, `'B'`, `'C'`, or `'Z'`.
        By default, `'A'`.
    standard : str
        Standard referenced.
        Choose one of the following:
        - `'ANSI0'`: ANSI S.1-43 Type 0 [1]
        - `'ANSI1'`: ANSI S.1-43 Type 1 [1]
        - `'ANSI2'`: ANSI S.1-43 Type 2 [1]
        - `'IEC1'`: IEC 61672-1 Class 1 [2]
        - `'IEC2'`: IEC 61672-1 Class 2 [2]
        By default, `'ANSI0'` (the strictest standard).

    Returns
    -------
    fs_min : float
        The minimum sampling frequency to satisfy `standard` for `weighting`.

    References
    ----------
    [1] American National Standards Institute, “ANSI S1.43:
    Specifications for Integrating-averaging Sound Level Meters,”
    Acoustical Society of America, 1997.
    [2] International Electrotechnical Commission, “Electroacoustics:
    Sound Level Meters. Specifications. Part 1,”
    International Electrotechnical Commission, 2013.
    """
    # Linear search upward from 10 kHz in 100 Hz steps.
    # NOTE(review): this loop has no upper bound — it assumes every
    # weighting/standard pair becomes satisfiable at some finite fs;
    # confirm, or add a cap, before extending to new standards.
    fs = 10_000.0
    while True:
        if satisfied_standard(fs, weighting, standard):
            return fs
        fs += 100.0
def validate_standard(
    fs: float,
    weighting: str = 'A',
    standard: str = 'ANSI0',
    raise_error: bool = False,
) -> bool:
    """Check whether `fs` satisfies `standard` for `weighting`,
    raising or warning when it does not.

    Identical to ``satisfied_standard`` except that a failure also
    produces a ``ValueError`` (if `raise_error`) or a warning.

    Parameters
    ----------
    fs : float
        Sampling frequency [Hz].
    weighting : str
        Frequency weighting: ``'A'``, ``'B'``, ``'C'``, or ``'Z'``.
        By default ``'A'``.
    standard : str
        Referenced standard: ``'ANSI0'``, ``'ANSI1'``, ``'ANSI2'``,
        ``'IEC1'``, or ``'IEC2'``. By default ``'ANSI0'`` (the strictest).
    raise_error : bool
        If True, raise ``ValueError`` on failure; otherwise warn.
        By default False.

    Returns
    -------
    satisfied : bool
        Whether `fs` satisfies `standard` for `weighting`.
    """
    satisfied = satisfied_standard(fs, weighting, standard)
    if satisfied:
        return satisfied
    fs_required = minimum_fs(weighting, standard)
    msg = (
        f'Criterion {standard} is not satisfied at fs={fs} Hz. '
        f'To satisfy it, fs must be >= {fs_required} Hz.'
    )
    if raise_error:
        raise ValueError(msg)
    warnings.warn(msg)
    return satisfied
################################################################################
################################################################################
### Equivalent continuous sound level and percentile sound pressure level
################################################################################
################################################################################
def weight_wave(
    wave: np.ndarray,
    fs_orig: float,
    *,
    weighting: str = 'A',
    fs_weighting: float = 96_000.0,
    standard: str = 'ANSI0',
    must_satisfy_standard: bool = True,
) -> np.ndarray:
    """Resample `wave` to `fs_weighting` and apply the weighting filter.

    The weighting parameters follow ANSI S1.43; the digital filter is
    designed by the bilinear transform.

    Parameters
    ----------
    wave : numpy.ndarray [shape=(wave_len, )]
        Wave.
    fs_orig : float
        Sampling frequency [Hz] of `wave`.
    weighting : str
        Frequency weighting: ``'A'``, ``'B'``, ``'C'``, or ``'Z'``.
        By default ``'A'``.
    fs_weighting : float
        Sampling frequency [Hz] for digital-filtering.
        By default ``96_000.0``.
    standard : str
        Referenced standard: ``'ANSI0'``, ``'ANSI1'``, ``'ANSI2'``,
        ``'IEC1'``, or ``'IEC2'``. By default ``'ANSI0'`` (the strictest).
    must_satisfy_standard : bool
        If `fs_weighting` does not satisfy `standard`, raise an error
        when True, otherwise only warn.

    Returns
    -------
    wave_weighted : np.ndarray [shape=(wave_resampled_len, )]
        Weighted wave (resampled to `fs_weighting`).
    """
    validate_standard(fs_weighting, weighting, standard,
                      raise_error=must_satisfy_standard)
    # Keyword arguments are required by librosa >= 0.10 (the positional
    # `orig_sr`/`target_sr` form was removed); they also work on older
    # versions. Skip the call entirely when no resampling is needed.
    if fs_orig != fs_weighting:
        wave = librosa.resample(wave, orig_sr=fs_orig,
                                target_sr=fs_weighting)
    # Z-weighting is flat: no filtering required.
    if weighting == 'Z':
        return wave
    sos = sos_weighting(weighting, fs_weighting)
    return signal.sosfilt(sos, wave)
def equivalent_level(
    wave: np.ndarray,
    fs_orig: float,
    *,
    weighting: str = 'A',
    fs_weighting: float = 96_000.0,
    standard: str = 'ANSI0',
    must_satisfy_standard: bool = True,
) -> float:
    """Calculate the equivalent continuous sound level (L_eq) of `wave`.

    The wave is frequency-weighted via ``weight_wave`` and the mean
    power is converted to decibels.

    Parameters
    ----------
    wave : numpy.ndarray [shape=(wave_len, )]
        Audio wave.
    fs_orig : float
        Sampling frequency [Hz] of `wave`.
    weighting : str
        Frequency weighting: ``'A'``, ``'B'``, ``'C'``, or ``'Z'``.
        By default ``'A'``.
    fs_weighting : float
        Sampling frequency [Hz] for digital-filtering.
        By default ``96_000.0``.
    standard : str
        Referenced standard: ``'ANSI0'``, ``'ANSI1'``, ``'ANSI2'``,
        ``'IEC1'``, or ``'IEC2'``. By default ``'ANSI0'`` (the strictest).
    must_satisfy_standard : bool
        If `fs_weighting` does not satisfy `standard`, raise an error
        when True, otherwise only warn.

    Returns
    -------
    L_eq : float
        Equivalent continuous sound level.
    """
    weighted = weight_wave(
        wave, fs_orig,
        weighting=weighting,
        fs_weighting=fs_weighting,
        standard=standard,
        must_satisfy_standard=must_satisfy_standard,
    )
    mean_power = np.var(weighted)
    return 10 * np.log10(mean_power)
def percentile_level(
    wave: np.ndarray,
    fs_orig: float,
    x: Union[float, Sequence[float]] = (5, 50, 95),
    *,
    weighting: str = 'A',
    fs_weighting: float = 96_000.0,
    frame_sec: float = 1.0,
    standard: str = 'ANSI0',
    must_satisfy_standard: bool = True,
) -> Union[float, np.ndarray]:
    """Calculate percentile sound pressure level(s) (L_x).

    The wave is frequency-weighted, split into frames of `frame_sec`
    seconds, and the per-frame power levels are ranked; L_x is the level
    exceeded x % of the time.

    Parameters
    ----------
    wave : numpy.ndarray [shape=(wave_len, )]
        Audio wave.
    fs_orig : float
        Sampling frequency [Hz] of `wave`.
    x : float or sequence of float
        Exceedance percentile(s); e.g. ``5`` yields L_5.
        By default ``(5, 50, 95)``.
    weighting : str
        Frequency weighting: ``'A'``, ``'B'``, ``'C'``, or ``'Z'``.
        By default ``'A'``.
    fs_weighting : float
        Sampling frequency [Hz] for digital-filtering.
        By default ``96_000.0``.
    frame_sec : float
        Frame width [second]. By default ``1.0``.
    standard : str
        Referenced standard: ``'ANSI0'``, ``'ANSI1'``, ``'ANSI2'``,
        ``'IEC1'``, or ``'IEC2'``. By default ``'ANSI0'`` (the strictest).
    must_satisfy_standard : bool
        If `fs_weighting` does not satisfy `standard`, raise an error
        when True, otherwise only warn.

    Returns
    -------
    L_x : float or numpy.ndarray
        Percentile sound pressure level(s). NaN (or an array of NaN)
        when `wave` is shorter than one frame.
    """
    wave = weight_wave(wave, fs_orig,
                       weighting=weighting,
                       fs_weighting=fs_weighting,
                       standard=standard,
                       must_satisfy_standard=must_satisfy_standard)
    power_frames = 10*np.log10(
        np.var(frame_wave(wave, fs_weighting, frame_sec), axis=1)
    )
    if power_frames.shape[0] > 0:
        # Linear interpolation is NumPy's default; the deprecated
        # `interpolation=` keyword (removed in NumPy 2.0) is not passed.
        L_x = np.percentile(power_frames, 100 - np.array(x))
    else:
        # No complete frame: return NaN with the same scalar/array shape
        # callers expect from `x`.
        L_x = np.full(len(x), np.nan) if isinstance(x, Sized) else np.nan
    return L_x
def level_metrics(
    wave: np.ndarray,
    fs_orig: float,
    *,
    weighting: str = 'A',
    fs_weighting: float = 96_000.0,
    frame_sec: float = 1.0,
    standard: str = 'ANSI0',
    must_satisfy_standard: bool = True,
) -> Tuple[float, float, float, float]:
    """Calculate L_eq and the percentile levels L_5, L_50, L_95.

    Combines ``equivalent_level`` and ``percentile_level`` in a single
    pass over the weighted wave.

    Parameters
    ----------
    wave : numpy.ndarray [shape=(wave_len, )]
        Audio wave.
    fs_orig : float
        Sampling frequency [Hz] of `wave`.
    weighting : str
        Frequency weighting: ``'A'``, ``'B'``, ``'C'``, or ``'Z'``.
        By default ``'A'``.
    fs_weighting : float
        Sampling frequency [Hz] for digital-filtering.
        By default ``96_000.0``.
    frame_sec : float
        Frame width [second] used for L_5/L_50/L_95. By default ``1.0``.
    standard : str
        Referenced standard: ``'ANSI0'``, ``'ANSI1'``, ``'ANSI2'``,
        ``'IEC1'``, or ``'IEC2'``. By default ``'ANSI0'`` (the strictest).
    must_satisfy_standard : bool
        If `fs_weighting` does not satisfy `standard`, raise an error
        when True, otherwise only warn.

    Returns
    -------
    L_eq : float
        Equivalent continuous sound level.
    L_5 : float
        Percentile sound pressure level (top 5%).
    L_50 : float
        Percentile sound pressure level (top 50%).
    L_95 : float
        Percentile sound pressure level (top 95%).
    """
    wave = weight_wave(wave, fs_orig,
                       weighting=weighting,
                       fs_weighting=fs_weighting,
                       standard=standard,
                       must_satisfy_standard=must_satisfy_standard)
    L_eq = 10*np.log10(np.var(wave))
    power_frames = 10*np.log10(
        np.var(frame_wave(wave, fs_weighting, frame_sec), axis=1)
    )
    if power_frames.shape[0] > 0:
        # Linear interpolation is NumPy's default; the deprecated
        # `interpolation=` keyword (removed in NumPy 2.0) is not passed.
        L_5, L_50, L_95 = np.percentile(power_frames, [95.0, 50.0, 5.0])
    else:
        # Wave shorter than one frame: percentile levels are undefined.
        L_5, L_50, L_95 = np.nan, np.nan, np.nan
    return L_eq, L_5, L_50, L_95
|
from __future__ import absolute_import
from sentry.models import TagKey, TagKeyStatus
from sentry.web.frontend.base import ProjectView
class ProjectTagsView(ProjectView):
    """Render the manage-tags page listing a project's visible tag keys."""

    def get(self, request, organization, team, project):
        # Only tags that have not been hidden/removed are shown.
        visible_tags = TagKey.objects.filter(
            project=project,
            status=TagKeyStatus.VISIBLE,
        )
        return self.respond('sentry/projects/manage_tags.html', {
            'tag_list': visible_tags,
            'page': 'tags',
        })
|
import sys

input = sys.stdin.readline

# Read two integers and print the lexicographically smaller of
# str(a) repeated b times and str(b) repeated a times.
a, b = map(int, input().split())
print(min(str(a) * b, str(b) * a))
|
module.exports=require('../../decode-ranges.js')('wYMANACAg_JkVgx4PY_AJA')
|
import {
getBreak,
getCommonContainer,
getCommonGrayCard,
getCommonSubHeader,
getLabel,
getLabelWithValue
} from "egov-ui-framework/ui-config/screens/specs/utils";
import { gotoApplyWithStep , getsocialmediaLabelWithValue} from "../../utils/index";
import {
getQueryArg,
getTransformedLocale
} from "egov-ui-framework/ui-utils/commons";
import { httpRequest } from "../../../../../ui-utils";
import set from "lodash/set";
import get from "lodash/get";
import store from "../../../../../ui-redux/store";
import "./index.css";
const state = store.getState();
import { Link } from "react-router-dom";
import { localStorageGet } from "egov-ui-kit/utils/localStorageUtils";
import {convertTime} from "../../../../../ui-utils/commons";
// const convertTime =(time)=> {
// // Check correct time format and split into components
// //time=time+":00"
// time = time.toString().match(/^([01]\d|2[0-3])(:)([0-5]\d)?$/) || [time];
// if (time.length > 1) { // If time format correct
// time = time.slice(1); // Remove full string match value
// time[5] = +time[0] < 12 ? ' AM' : ' PM'; // Set AM/PM
// time[0] = +time[0] % 12 || 12; // Adjust hours
// }
// return time.join(''); // return adjusted time or original string
// }
// Keep only the text before the first "." (e.g. strip fractional seconds);
// nullish or empty input yields an empty string.
const test = (value) => (value ? value.split(".")[0] : "");
// Tenant id taken from the current page URL's query string.
const tenantId = getQueryArg(window.location.href, "tenantId");
// Facebook URL snapshot from redux state at module-load time.
// NOTE(review): this module-level value appears unused — the click callbacks
// below re-read the URL from live state; confirm before relying on it.
var fblink = get( state,
  "screenConfiguration.preparedFinalObject.eventDetails[0].facebookUrl"
);
/**
 * Open a social-media URL in a new tab.
 *
 * @param {object} state - unused; kept for callback-signature compatibility.
 * @param {Function} dispatch - unused; kept for callback-signature compatibility.
 * @param {string} value - URL to open; ignored when falsy.
 *
 * 'noopener,noreferrer' prevents the opened page from reaching back through
 * window.opener (reverse tabnabbing) — the URL comes from user-entered
 * event data and is not trusted.
 */
const redirecttosocialmedia = (state, dispatch, value) => {
  if (value) {
    window.open(value, "_blank", "noopener,noreferrer");
  }
};
const getEmp=mdmsBody=>{
let data = httpRequest(
"post",
"/egov-mdms-service/v1/_search",
"_search",
[],
mdmsBody
);
return data
}
// Build the config object for a labelled section divider
// (DividerWithLabel molecule from the egov-noc module).
const getHeader = (label) => ({
  uiFramework: "custom-molecules-local",
  moduleName: "egov-noc",
  componentPath: "DividerWithLabel",
  props: {
    className: "hr-generic-divider-label",
    labelProps: {},
    dividerProps: {},
    label,
  },
  type: "array",
});
const EventDetails = getCommonGrayCard({
propertyLocationContainer: getCommonContainer({
eventType: getLabelWithValue(
{
labelName: "Enter Event Title",
labelKey: "PR_EVENT_TITLE_LABEL"
},
{
jsonPath: "eventDetails[0].eventTitle"
}
),
area: getLabelWithValue(
{
labelName: "Area",
labelKey: "PR_AREA_LABEL"
},
{
jsonPath: "eventDetails[0].area"
}
),
eventLocation: getLabelWithValue(
{
labelName: "Event Location",
labelKey: "PR_EVENT_LOCATION_LABEL"
},
{
jsonPath: "eventDetails[0].eventLocation"
}
),
sector: getLabelWithValue(
{
labelName: "Sector",
labelKey: "PR_SECTOR_LABEL"
},
{
jsonPath: "eventDetails[0].sector"
}
),
organizerDetails: getLabelWithValue(
{
labelName: "Organizer Details",
labelKey: "PR_ORGANIZER_DETAILS_LABEL"
},
{
jsonPath: "eventDetails[0].EmpName",
}
),
organizerEmployee: getLabelWithValue(
{
labelName: "Organizer Employee",
labelKey: "PR_ORGANIZER_EMPLOYEE_LABEL"
},
{
jsonPath: "eventDetails[0].organizerUsernName",
}
),
TypeOfEvent: getLabelWithValue(
{
labelName: "Type Of Event",
labelKey: "PR_TYPE_OF_EVENT_LABEL"
},
{
jsonPath: "eventDetails[0].eventType"
}
),
EventBudjet: getLabelWithValue(
{
labelName: "Event Budjet",
labelKey: "PR_EVENT_BUDJET_LABEL"
},
{
jsonPath: "eventDetails[0].eventBudget"
}
),
committiee:localStorageGet("modulecode")==="SCP"?{}: getLabelWithValue(
{
labelName: "Committiee",
labelKey: "PR_COMMITTEE_LABEL"
},
{
jsonPath: "eventDetails[0].committeeName"
}
)
})
})
// Format a "<date> <time>" value for display: the 24h time part is converted
// to AM/PM via convertTime. Empty/nullish input renders as an empty string.
// (Shared by the start- and end-date rows; previously duplicated inline.)
const formatDateAndTime = (value) => {
  if (!value) {
    return "";
  }
  const [datePart, timePart] = value.split(" ");
  return datePart + " " + convertTime(timePart);
};

// Summary card for the event schedule (start and end date/time rows).
const EventDataAndTime = getCommonGrayCard({
  propertyLocationContainer: getCommonContainer({
    startDate: getLabelWithValue(
      {
        labelName: "Start Date",
        labelKey: "PR_START_DATE_LABEL" + "&" + "START_TIME_LABEL"
      },
      {
        jsonPath: "eventDetails[0].startDateStartTime",
        callBack: formatDateAndTime
      }
    ),
    endDate: getLabelWithValue(
      {
        labelName: "End Date",
        labelKey: "PR_END_DATE_LABEL" + "&" + "END_TIME_LABEL"
      },
      {
        jsonPath: "eventDetails[0].endDateEndTime",
        callBack: formatDateAndTime
      }
    )
  })
})
// Summary card with clickable social-media links; each tile re-reads its URL
// from live state on click and opens it via redirecttosocialmedia.
// (Fixed: the twitter/instagram callbacks reused the copy-pasted name
// `fblink` for non-Facebook URLs; locals are now const and named correctly.)
const EventSocialMediaLinks = getCommonGrayCard({
  propertyLocationContainer: getCommonContainer({
    fblink: {
      uiFramework: "custom-atoms",
      componentPath: "Div",
      children: {
        eventFacebookUrl: getsocialmediaLabelWithValue(
          {
            labelKey: "PR_EVENT_FACEBOOK_URL_LABEL"
          },
          {
            jsonPath: "eventDetails[0].facebookUrl",
          },
        ),
      },
      props: {
        className: "Sociallink"
      },
      gridDefination: {
        xs: 12,
        sm: 4,
        md: 4
      },
      onClickDefination: {
        action: "condition",
        callBack: (state, dispatch) => {
          const facebookUrl = get(
            state,
            "screenConfiguration.preparedFinalObject.eventDetails[0].facebookUrl"
          );
          redirecttosocialmedia(state, dispatch, facebookUrl);
        }
      },
      break: getBreak(),
    },
    twlink: {
      uiFramework: "custom-atoms",
      componentPath: "Div",
      children: {
        eventTwitterUrl: getsocialmediaLabelWithValue(
          {
            labelName: "Event Twitter Url",
            labelKey: "PR_EVENT_TWITTER_URL_LABEL"
          },
          {
            jsonPath: "eventDetails[0].twitterUrl"
          }
        )
      },
      props: {
        className: "Sociallink"
      },
      gridDefination: {
        xs: 12,
        sm: 4,
        md: 4
      },
      onClickDefination: {
        action: "condition",
        callBack: (state, dispatch) => {
          const twitterUrl = get(
            state,
            "screenConfiguration.preparedFinalObject.eventDetails[0].twitterUrl"
          );
          redirecttosocialmedia(state, dispatch, twitterUrl);
        }
      },
      break: getBreak(),
    },
    instlink: {
      uiFramework: "custom-atoms",
      componentPath: "Div",
      children: {
        eventInstagram: getsocialmediaLabelWithValue(
          {
            labelName: "Event Instagram",
            labelKey: "PR_EVENT_INSTAGRAM_LABEL"
          },
          {
            jsonPath: "eventDetails[0].instagramUrl"
          }
        ),
      },
      props: {
        className: "Sociallink"
      },
      gridDefination: {
        xs: 12,
        sm: 4,
        md: 4
      },
      onClickDefination: {
        action: "condition",
        callBack: (state, dispatch) => {
          const instagramUrl = get(
            state,
            "screenConfiguration.preparedFinalObject.eventDetails[0].instagramUrl"
          );
          redirecttosocialmedia(state, dispatch, instagramUrl);
        }
      },
    }
  })
})
// Single-row summary card showing the free-text event description.
const EventDescription = getCommonGrayCard({
  propertyLocationContainer: getCommonContainer({
    eventDescription: getLabelWithValue(
      {
        labelName: "Event Description",
        labelKey: "PR_EVENT_DESCRIPTION_LABEL"
      },
      {
        jsonPath: "eventDetails[0].eventDescription"
      }
    )
  })
})
// Top-level summary screen: a header row with an Edit button (returns the
// user to step 0 of the apply flow) followed by the four summary cards,
// each introduced by a labelled divider.
export const eventdetailsSummary = getCommonGrayCard({
  header: {
    uiFramework: "custom-atoms",
    componentPath: "Container",
    props: {
      style: { marginBottom: "10px" }
    },
    children: {
      // Left side of the header row (8/12 columns, empty label slot).
      header: {
        gridDefination: {
          xs: 8
        },
      },
      // Right-aligned Edit button: pencil icon plus localized label.
      editSection: {
        componentPath: "Button",
        props: {
          color: "primary",
          style: {
            marginTop: "-10px",
            marginRight: "-18px"
          }
        },
        gridDefination: {
          xs: 4,
          align: "right"
        },
        children: {
          editIcon: {
            uiFramework: "custom-atoms",
            componentPath: "Icon",
            props: {
              iconName: "edit"
            }
          },
          buttonLabel: getLabel({
            labelKey: "PR_SUMMARY_EDIT"
          })
        },
        // Clicking Edit jumps back to the first step of the apply flow.
        onClickDefination: {
          action: "condition",
          callBack: (state, dispatch) => {
            gotoApplyWithStep(state, dispatch, 0);
          }
        }
      }
    }
  },
  eventDetailsHeader: getHeader("Event Details"),
  break: getBreak(),
  cardOne: EventDetails,
  dateTimeHeader: getHeader("Event Schedule "),
  cardTwo: EventDataAndTime,
  socialMediaHeader: getHeader("Event Social Media Link"),
  cardThree: EventSocialMediaLinks,
  descriptionHeader: getHeader("Event Description"),
  cardFour: EventDescription
});
|
"""
File: train_emotion_classifier.py
Author: Octavio Arriaga
Email: arriaga.camargo@gmail.com
Github: https://github.com/oarriaga
Description: Train emotion classification model
"""
import warnings
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="2,3"
from keras.callbacks import ReduceLROnPlateau,TensorBoard
from keras.callbacks import CSVLogger, ModelCheckpoint, EarlyStopping
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import multi_gpu_model
from keras.models import load_model
import tensorflow as tf
import argparse
from utils.datasets import DataManager
from utils.preprocessor import preprocess_input
# from utils.visual_callbacks import AccLossPlotter
from models.cnn import mini_XCEPTION, mini_concate_V3_XCEPTION
from models.cnn import mini_concate_V2_XCEPTION
from models.compare_cnn import InceptionV3, InceptionResNetV2
# parameters
num_epochs = 10000             # upper bound; EarlyStopping below ends training earlier
input_shape = (64, 64, 1)      # 64x64 single-channel (grayscale) face crops
verbose = 1
num_classes = 7                # FER2013 emotion categories
patience = 100                 # epochs without val_acc improvement before early stop
gpu_count = 2
batch_size = 16* gpu_count     # per-GPU batch of 16, scaled across GPUs
base_path = '../trained_models/emotion_models/'
# models_path = base_path + 'fer2013_0903_mini_concate_V2_XCEPTION.105-0.6762.hdf5'
# retrain
# model = load_model(models_path)
# data generator
# Real-time augmentation: small rotations, shifts, zooms, and horizontal
# flips; no feature-wise normalization (preprocess_input handles scaling).
data_generator = ImageDataGenerator(
    featurewise_center=False,
    featurewise_std_normalization=False,
    rotation_range=10,
    width_shift_range=0.1,
    height_shift_range=0.1,
    zoom_range=.1,
    horizontal_flip=True)
# model parameters/compilation; Configures the model for training
# optimizer: SGD suitable small datasets
# check to see if we are compiling using just a single GPU
# Instantiate the base model
# (here, we do it on CPU, which is optional).
# with tf.device('/cpu:0' if gpu_count > 1 else '/gpu:0'):
#     model = load_model(models_path)
model = InceptionResNetV2(input_shape, num_classes)

# Replicates the model on N GPUs.
# This assumes that your machine has N available GPUs.
# NOTE(review): multi_gpu_model is deprecated in newer Keras/TF in favor of
# tf.distribute.MirroredStrategy — confirm the installed version supports it.
if gpu_count > 1:
    model = multi_gpu_model(model, gpus=gpu_count)
else:
    model = model  # single-GPU: keep the base model unchanged
model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()
datasets = ['fer2013']
for dataset_name in datasets:
    print('Training dataset:', dataset_name)

    # Tag identifying this architecture/run. Previously recovered from
    # `trained_models_path[40:]`, a magic slice that silently breaks if
    # base_path or dataset_name length changes.
    model_tag = '_InceptionResNetV2_0918'

    # Checkpoint: save the best model (by val_acc) after each epoch.
    trained_models_path = base_path + dataset_name + model_tag
    model_names = trained_models_path + '.{epoch:02d}-{val_acc:.4f}.hdf5'
    model_checkpoint = ModelCheckpoint(model_names, monitor='val_acc', verbose=1,
                                       save_best_only=True, mode='auto', period=1)

    # Training-progress logging and learning-rate scheduling callbacks.
    log_file_path = base_path + dataset_name + model_tag + '_emotion_training.log'
    csv_logger = CSVLogger(log_file_path, append=False)
    early_stop = EarlyStopping('val_acc', patience=patience)
    reduce_lr = ReduceLROnPlateau('val_acc', factor=0.1,
                                  patience=int(patience/4), verbose=1)
    tensor_board = TensorBoard(log_dir='../log_dir/InceptionResNetV2/',
                               histogram_freq=1,
                               write_graph=True,
                               write_images=True)
    # plotter = AccLossPlotter(graphs=['acc', 'loss'], save_graph=True)
    callbacks = [model_checkpoint, csv_logger, early_stop, reduce_lr, tensor_board]

    # loading train dataset
    train_data_loader = DataManager(dataset_mode='train', image_size=input_shape[:2])
    train_faces, train_emotions = train_data_loader.load_fer2013()
    train_faces = preprocess_input(train_faces)
    num_samples, num_classes = train_emotions.shape

    # loading val dataset, PublicData
    val_data_loader = DataManager(dataset_mode='val', image_size=input_shape[:2])
    val_faces, val_emotions = val_data_loader.load_fer2013()
    val_faces = preprocess_input(val_faces)
    num_samples, num_classes = val_emotions.shape

    # steps_per_epoch must be an integer; ceil so the final partial batch is
    # still consumed (the original passed a float from plain division).
    steps_per_epoch = (len(train_faces) + batch_size - 1) // batch_size

    # Trains the model on batches produced by the augmenting generator.
    model.fit_generator(data_generator.flow(train_faces, train_emotions,
                                            batch_size),
                        steps_per_epoch=steps_per_epoch,
                        shuffle=True,
                        epochs=num_epochs, verbose=1, callbacks=callbacks,
                        validation_data=(val_faces, val_emotions))
|
/* markdown: a C implementation of John Gruber's Markdown markup language.
*
* Copyright (C) 2007 David L Parsons.
* The redistribution terms are provided in the COPYRIGHT file that must
* be distributed with this source code.
*/
#include <stdio.h>
#include <string.h>
#include <stdarg.h>
#include <stdlib.h>
#include <time.h>
#include <ctype.h>
#include "config.h"
#include "cstring.h"
#include "markdown.h"
#include "amalloc.h"
/* return the xml entity for a character, 0 if the character needs no
 * escaping, or "" if it should be dropped from the output.
 * (The entity strings had been HTML-unescaped at some point — e.g. '<'
 * returned "<" — which made the XML escaping a no-op; restored here.)
 */
static char *
mkd_xmlchar(unsigned char c)
{
    switch (c) {
    case '<':   return "&lt;";
    case '>':   return "&gt;";
    case '&':   return "&amp;";
    case '"':   return "&quot;";
    case '\'':  return "&apos;";
    default:    if ( isascii(c) || (c & 0x80) )
		    return 0;
		return "";
    }
}
/* write output in XML format
 * (each byte is either replaced by its xml entity or emitted verbatim)
 */
int
mkd_generatexml(char *p, int size, FILE *out)
{
    int i;
    char *entity;

    for ( i = 0; i < size; i++ ) {
	unsigned char c = (unsigned char)p[i];

	entity = mkd_xmlchar(c);
	if ( entity )
	    fputs(entity, out);
	else
	    fputc(c, out);
    }
    return 0;
}
/* build a xml'ed version of a string
 * On return *res points at the escaped text and the escaped length is
 * the return value. The caller takes ownership of the buffer.
 */
int
mkd_xml(char *p, int size, char **res)
{
    unsigned char c;
    char *entity;
    Cstring f;

    CREATE(f);          /* initialize the dynamic string */
    RESERVE(f, 100);    /* pre-grow to avoid early reallocations */

    while ( size-- > 0 ) {
	c = *p++;
	/* escape via entity where one exists, otherwise copy the byte */
	if ( (entity = mkd_xmlchar(c)) )
	    Cswrite(&f, entity, strlen(entity));
	else
	    Csputc(c, &f);
    }

    /* HACK ALERT! HACK ALERT! HACK ALERT! */
    *res = T(f);	/* we know that a T(Cstring) is a character pointer */
			/* so we can simply pick it up and carry it away, */
    return S(f);	/* leaving the husk of the Ctring on the stack */
    /* END HACK ALERT */
}
|
"""show_platform.py
IOS parsers for the following show commands:
* show version
* dir
* show redundancy
* show inventory
* show bootvar
* show processes cpu sorted
* show processes cpu sorted <1min|5min|5sec>
* show processes cpu sorted | include <WORD>
* show processes cpu sorted <1min|5min|5sec> | include <WORD>
* show processes cpu
* show processes cpu | include <WORD>
"""
# python
import re
# genie
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, \
Any, Optional
# import iosxe parser
from genie.libs.parser.iosxe.show_platform import \
ShowVersion as ShowVersion_iosxe, \
Dir as Dir_iosxe, \
ShowInventorySchema as ShowInventorySchema_iosxe, \
ShowRedundancy as ShowRedundancy_iosxe, \
ShowProcessesCpuSorted as ShowProcessesCpuSorted_iosxe, \
ShowProcessesCpu as ShowProcessesCpu_iosxe, \
ShowVersionRp as ShowVersionRp_iosxe, \
ShowPlatform as ShowPlatform_iosxe, \
ShowPlatformPower as ShowPlatformPower_iosxe, \
ShowProcessesCpuHistory as ShowProcessesCpuHistory_iosxe, \
ShowProcessesCpuPlatform as ShowProcessesCpuPlatform_iosxe, \
ShowPlatformSoftwareStatusControl as ShowPlatformSoftwareStatusControl_iosxe, \
ShowPlatformSoftwareSlotActiveMonitorMem as ShowPlatformSoftwareSlotActiveMonitorMem_iosxe, \
ShowPlatformHardware as ShowPlatformHardware_iosxe, \
ShowPlatformHardwarePlim as ShowPlatformHardwarePlim_iosxe, \
ShowPlatformHardwareQfpBqsOpmMapping as ShowPlatformHardwareQfpBqsOpmMapping_iosxe, \
ShowPlatformHardwareQfpBqsIpmMapping as ShowPlatformHardwareQfpBqsIpmMapping_iosxe, \
ShowPlatformHardwareSerdes as ShowPlatformHardwareSerdes_iosxe, \
ShowPlatformHardwareSerdesInternal as ShowPlatformHardwareSerdesInternal_iosxe, \
ShowPlatformHardwareQfpBqsStatisticsChannelAll as ShowPlatformHardwareQfpBqsStatisticsChannelAll_iosxe, \
ShowPlatformHardwareQfpInterfaceIfnameStatistics as ShowPlatformHardwareQfpInterfaceIfnameStatistics_iosxe, \
ShowPlatformHardwareQfpStatisticsDrop as ShowPlatformHardwareQfpStatisticsDrop_iosxe, \
ShowEnvironment as ShowEnvironment_iosxe, \
ShowModule as ShowModule_iosxe, \
ShowSwitch as ShowSwitch_iosxe, \
ShowSwitchDetail as ShowSwitchDetail_iosxe
class ShowVersion(ShowVersion_iosxe):
    """Parser for 'show version' on IOS.

    Reuses the IOSXE implementation; volatile uptime keys are excluded
    from comparisons.
    """
    # Redundant trailing `pass` removed: the class body already has statements.
    exclude = ['system_restarted_at', 'uptime_this_cp', 'uptime']
class Dir(Dir_iosxe):
    """Parser for 'dir' on IOS.

    Reuses the IOSXE implementation; volatile filesystem keys are
    excluded from comparisons.
    """
    # Redundant trailing `pass` removed: the class body already has statements.
    exclude = ['last_modified_date', 'bytes_free', 'files']
class ShowRedundancyIosSchema(MetaParser):
    """Schema for show redundancy """
    # Two top-level sections: system-wide redundancy info and per-slot state.
    schema = {
        'red_sys_info': {
            'available_system_uptime': str,
            'switchovers_system_experienced': str,
            'standby_failures': str,
            'last_switchover_reason': str,
            'hw_mode': str,
            # Configured/operational redundancy modes are not always reported.
            Optional('conf_red_mode'): str,
            Optional('oper_red_mode'): str,
            'maint_mode': str,
            'communications': str,
            Optional('communications_reason'): str,
        },
        'slot': {
            # Keyed by slot identifier (e.g. 'slot 1').
            Any(): {
                'curr_sw_state': str,
                'uptime_in_curr_state': str,
                'image_ver': str,
                # Boot/config details are only present when configured.
                Optional('boot'): str,
                Optional('config_file'): str,
                Optional('bootldr'): str,
                'config_register': str,
            }
        }
    }
class ShowRedundancy(ShowRedundancyIosSchema, ShowRedundancy_iosxe):
    """Parser for 'show redundancy' on IOS.

    Combines the IOS-specific schema with the IOSXE parsing logic.
    """
    # Redundant trailing `pass` removed: the docstring is a sufficient body.
class ShowInventory(ShowInventorySchema_iosxe):
    """
    Parser for:
        * show inventory
    """

    cli_command = 'show inventory'

    def cli(self, output=None):
        # Classify each NAME/DESCR + PID/VID/SN pair of 'show inventory'
        # into main.chassis, slot.<n>.rp/lc, slot.<n>.subslot.<m>, or
        # slot.<name>.other.
        if output is None:
            output = self.device.execute(self.cli_command)

        # Init vars
        parsed_output = {}
        # NOTE(review): flag_is_slot is written below but never read;
        # presumably dead state from an earlier revision.
        flag_is_slot = False
        # DESCR keywords that route an entry into the 'other' bucket.
        oc_key_values = ['power', 'fan', 'clock']

        # NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1"
        # NAME: "WS-C6504-E", DESCR: "Cisco Systems Cisco 6500 4-slot Chassis System"
        r1 = re.compile(r'NAME\:\s*\"(?P<name>.+)\"\,\s*DESCR:\s*\"(?P<description>.+)\"')

        # 1
        # 2
        # 3
        r1_1 = re.compile(r'(?P<slot>\d+)')

        # msfc sub-module of 1
        # VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1
        r1_2 = re.compile(r'.*module of (?P<slot>\d+).*')

        # Switch 1 - Power Supply 1
        r1_2_2 = re.compile(r'Switch +(?P<subslot>\d+).*')

        # Transceiver Te2/1
        # Transceiver Te2/15
        # Transceiver Te5/1
        r1_3 = re.compile(r'Transceiver\s+Te(?P<slot>\d+)\/(?P<subslot>\d+)')

        # TenGigabitEthernet2 / 1 / 1
        # GigabitEthernet3 / 0 / 50
        r1_3_2 = re.compile(r'(?:Ten)?GigabitEthernet(?P<subslot>[\d\s\/]+)$')

        # VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5
        # WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6
        r1_4 = re.compile(r'.*ports\s+Supervisor\s+Engine.*')

        # WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0
        # WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4
        r1_5 = re.compile(r'.*WS\-X.*')

        # NAME: "IOSv"
        r1_6 = re.compile(r'.*IOSv.*')

        # PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8
        # PID: CLK-7600 , VID: , SN: FXS170802GL
        r2 = re.compile(r'PID:\s*(?P<pid>.+)\s*\,\s*VID:\s*(?P<vid>.*)\,\s*SN:\s*(?P<sn>.+)')

        for line in output.splitlines():
            line = line.strip()

            # The NAME/DESCR line only records state; the entry is filed
            # when the matching PID line arrives on a later iteration.
            result = r1.match(line)
            if result:
                group = result.groupdict()
                name = group['name']
                descr = group['description']
                continue

            result = r2.match(line)
            if result:
                group = result.groupdict()
                pid = group['pid'].strip()
                # NOTE(review): unlike pid, vid and sn are not strip()ed —
                # confirm whether trailing whitespace can occur here.
                vid = group.get('vid', '')
                sn = group['sn']

                # Chassis entries go under 'main'.
                if 'Chassis' in descr:
                    chassis_dict = parsed_output.setdefault('main', {})\
                        .setdefault('chassis', {})\
                        .setdefault(pid, {})
                    chassis_dict['name'] = name
                    chassis_dict['descr'] = descr
                    chassis_dict['pid'] = pid
                    chassis_dict['vid'] = vid
                    chassis_dict['sn'] = sn
                    continue

                # 1
                # 2
                # 3
                # NOTE(review): r1_1 is unanchored at the end, so any NAME
                # starting with digits matches; real outputs presumably
                # avoid misclassification via the ordering of checks.
                result = r1_1.match(name)
                if result:
                    flag_is_slot = True
                    group = result.groupdict()
                    slot = group['slot']

                    # VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5
                    # WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6
                    # PID: WS-C3750X-48T-S , VID: V02 , SN: FDO1511R12W
                    if r1_4.match(descr) or 'WS-C' in pid:
                        slot_code = 'rp'

                    # WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0
                    # WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4
                    if r1_5.match(descr):
                        slot_code = 'lc'

                    # NOTE(review): if neither branch above hits, slot_code
                    # is reused from a previous iteration (or is unbound on
                    # the first one) — confirm against real device outputs.
                    slot_dict = parsed_output\
                        .setdefault('slot', {})\
                        .setdefault(slot, {})\
                        .setdefault(slot_code, {})\
                        .setdefault(pid, {})
                    slot_dict['name'] = name
                    slot_dict['descr'] = descr
                    slot_dict['pid'] = pid
                    slot_dict['vid'] = vid
                    slot_dict['sn'] = sn
                    continue

                # msfc sub-module of 1
                # VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1
                # Sub-modules attach under the most recently created slot_dict.
                result = r1_2.match(name)
                if result:
                    group = result.groupdict()
                    slot = group['slot']
                    subslot = "0"
                    subslot_dict = slot_dict\
                        .setdefault('subslot', {})\
                        .setdefault(subslot, {})\
                        .setdefault(pid, {})
                    subslot_dict['descr'] = descr
                    subslot_dict['name'] = name
                    subslot_dict['pid'] = pid
                    subslot_dict['sn'] = sn
                    subslot_dict['vid'] = vid
                    continue

                # Switch 1 - Power Supply 1
                # TenGigabitEthernet2 / 1 / 1
                # GigabitEthernet3/0/50
                result = r1_2_2.match(name)
                result_2 = r1_3_2.match(name)
                if result or result_2:
                    if result:
                        group = result.groupdict()
                    elif result_2:
                        group = result_2.groupdict()
                    subslot = group['subslot']
                    subslot_dict = slot_dict \
                        .setdefault('subslot', {}) \
                        .setdefault(subslot, {}) \
                        .setdefault(pid, {})
                    subslot_dict['descr'] = descr
                    subslot_dict['name'] = name
                    subslot_dict['pid'] = pid
                    subslot_dict['sn'] = sn
                    subslot_dict['vid'] = vid
                    continue

                # Transceiver Te2/1
                # Transceiver Te2/15
                # Transceiver Te5/1
                result = r1_3.match(name)
                if result:
                    group = result.groupdict()
                    slot = group['slot']
                    subslot = group['subslot']
                    subslot_dict = slot_dict\
                        .setdefault('subslot', {})\
                        .setdefault(subslot, {})\
                        .setdefault(pid, {})
                    subslot_dict['descr'] = descr
                    subslot_dict['name'] = name
                    subslot_dict['pid'] = pid
                    subslot_dict['sn'] = sn
                    subslot_dict['vid'] = vid
                    continue

                # NAME: "IOSv"
                # Virtual platform: file under a synthetic slot '1' RP.
                result = r1_6.match(name)
                if result:
                    slot = '1'
                    slot_dict = parsed_output\
                        .setdefault('slot', {})\
                        .setdefault(slot, {})\
                        .setdefault('rp', {})\
                        .setdefault(pid, {})
                    slot_dict['name'] = name
                    slot_dict['descr'] = descr
                    slot_dict['pid'] = pid
                    slot_dict['vid'] = vid
                    slot_dict['sn'] = sn
                    continue

                # Name could be:
                # 2700W AC power supply for CISCO7604 2
                # High Speed Fan Module for CISCO7604 1
                if any(key in descr.lower() for key in oc_key_values):
                    other_dict = parsed_output\
                        .setdefault('slot', {})\
                        .setdefault(name, {})\
                        .setdefault('other', {})\
                        .setdefault(name, {})
                    other_dict['name'] = name
                    other_dict['descr'] = descr
                    other_dict['pid'] = pid
                    other_dict['vid'] = vid
                    other_dict['sn'] = sn
                    continue

        return parsed_output
class ShowBootvarSchema(MetaParser):
    """Schema for show bootvar"""
    schema = {
        Optional('current_boot_variable'): str,
        Optional('next_reload_boot_variable'): str,
        # Top-level CONFIG_FILE / BOOTLDR variables.
        Optional('config_file'): str,
        Optional('bootldr'): str,
        # Per-supervisor values on redundant systems.
        Optional('active'): {
            'configuration_register': str,
            Optional('boot_variable'): str,
        },
        Optional('standby'): {
            'configuration_register': str,
            Optional('boot_variable'): str,
        },
    }
class ShowBootvar(ShowBootvarSchema):
    """Parser for show boot"""

    cli_command = 'show boot'

    def cli(self, output=None):
        """Parse 'show boot' output into the ShowBootvarSchema structure.

        Args:
            output: pre-collected command output; when None the command is
                executed on the device.

        Returns:
            dict matching ShowBootvarSchema (empty when nothing matched).
        """
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output

        boot_dict = {}

        # BOOT variable = bootflash:/asr1000rpx.bin,12;
        # BOOT variable = flash:cat3k_caa-universalk9.BLD_POLARIS_DEV_LATEST_20150907_031219.bin;flash:cat3k_caa-universalk9.BLD_POLARIS_DEV_LATEST_20150828_174328.SSA.bin;flash:ISSUCleanGolden;
        p1 = re.compile(r'^BOOT +variable +=( *(?P<var>\S+);)?$')

        # Standby BOOT variable = bootflash:/asr1000rpx.bin,12;
        p2 = re.compile(r'^Standby +BOOT +variable +=( *(?P<var>\S+);)?$')

        # Configuration register is 0x2002
        p3 = re.compile(r'^Configuration +register +is +(?P<var>\w+)$')

        # Standby Configuration register is 0x2002
        p4 = re.compile(r'^Standby +Configuration +register +is +(?P<var>\w+)$')

        # CONFIG_FILE variable =
        p5 = re.compile(r'^CONFIG_FILE +variable += +(?P<var>\S+)$')

        # BOOTLDR variable =
        p6 = re.compile(r'^BOOTLDR +variable += +(?P<var>\S+)$')

        for line in out.splitlines():
            line = line.strip()

            # BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12;
            m = p1.match(line)
            if m:
                boot = m.groupdict()['var']
                if boot:
                    boot_dict['next_reload_boot_variable'] = boot
                    boot_dict.setdefault('active', {})['boot_variable'] = boot
                continue

            # Standby BOOT variable = bootflash:/asr1000rpx.bin,12;
            m = p2.match(line)
            if m:
                boot = m.groupdict()['var']
                if boot:
                    boot_dict.setdefault('standby', {})['boot_variable'] = boot
                continue

            # Configuration register is 0x2002
            m = p3.match(line)
            if m:
                boot_dict.setdefault('active', {})['configuration_register'] = m.groupdict()['var']
                continue

            # Standby Configuration register is 0x2002
            m = p4.match(line)
            if m:
                boot_dict.setdefault('standby', {})['configuration_register'] = m.groupdict()['var']
                continue

            # CONFIG_FILE variable =
            m = p5.match(line)
            if m:
                if m.groupdict()['var']:
                    # BUG FIX: previously stored under 'active', but the
                    # schema only declares 'config_file' at the top level,
                    # so schema validation would reject the old placement.
                    boot_dict['config_file'] = m.groupdict()['var']
                continue

            # BOOTLDR variable =
            m = p6.match(line)
            if m:
                if m.groupdict()['var']:
                    # BUG FIX: previously stored under 'standby', but the
                    # schema only declares 'bootldr' at the top level.
                    boot_dict['bootldr'] = m.groupdict()['var']
                continue

        return boot_dict
class ShowProcessesCpuSorted(ShowProcessesCpuSorted_iosxe):
    """Parser for:
        * show processes cpu sorted
        * show processes cpu sorted <1min|5min|5sec>
        * show processes cpu sorted | include <WORD>
        * show processes cpu sorted <1min|5min|5sec> | include <WORD>

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowProcessesCpu(ShowProcessesCpu_iosxe):
    """Parser for:
        * show processes cpu
        * show processes cpu | include <WORD>

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowVersionRp(ShowVersionRp_iosxe):
    """Parser for:
        * show version RP active [running|provisioned|installed]
        * show version RP standby [running|provisioned|installed]

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatform(ShowPlatform_iosxe):
    """Parser for show platform

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformPower(ShowPlatformPower_iosxe):
    """Parser for show platform power

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowProcessesCpuHistory(ShowProcessesCpuHistory_iosxe):
    """Parser for show processes cpu history

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowProcessesCpuPlatform(ShowProcessesCpuPlatform_iosxe):
    """Parser for show processes cpu platform

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformSoftwareStatusControl(ShowPlatformSoftwareStatusControl_iosxe):
    """Parser for show platform software status control-processor brief

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformSoftwareSlotActiveMonitorMem(ShowPlatformSoftwareSlotActiveMonitorMem_iosxe):
    """Parser for show platform software process slot switch active R0 monitor | inc Mem :|Swap:

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardware(ShowPlatformHardware_iosxe):
    """Parser for show platform hardware qfp active infrastructure bqs queue output default all

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwarePlim(ShowPlatformHardwarePlim_iosxe):
    """Parser for:
        * show platform hardware port <x/x/x> plim statistics
        * show platform hardware slot <x> plim statistics
        * show platform hardware slot <x> plim statistics internal
        * show platform hardware subslot <x/x> plim statistics

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareQfpBqsOpmMapping(ShowPlatformHardwareQfpBqsOpmMapping_iosxe):
    """Parser for:
        * show platform hardware qfp active bqs <x> opm mapping
        * show platform hardware qfp standby bqs <x> opm mapping

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareQfpBqsIpmMapping(ShowPlatformHardwareQfpBqsIpmMapping_iosxe):
    """Parser for:
        * show platform hardware qfp active bqs <x> ipm mapping
        * show platform hardware qfp standby bqs <x> ipm mapping

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareSerdes(ShowPlatformHardwareSerdes_iosxe):
    """Parser for show platform hardware slot <x> serdes statistics

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareSerdesInternal(ShowPlatformHardwareSerdesInternal_iosxe):
    """Parser for show platform hardware slot <x> serdes statistics internal

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareQfpBqsStatisticsChannelAll(ShowPlatformHardwareQfpBqsStatisticsChannelAll_iosxe):
    """Parser for:
        * show platform hardware qfp active bqs <x> ipm statistics channel all
        * show platform hardware qfp standby bqs <x> ipm statistics channel all
        * show platform hardware qfp active bqs <x> opm statistics channel all
        * show platform hardware qfp standby bqs <x> opm statistics channel all

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareQfpInterfaceIfnameStatistics(ShowPlatformHardwareQfpInterfaceIfnameStatistics_iosxe):
    """Parser for:
        * show platform hardware qfp active interface if-name <interface> statistics
        * show platform hardware qfp standby interface if-name <interface> statistics

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowPlatformHardwareQfpStatisticsDrop(ShowPlatformHardwareQfpStatisticsDrop_iosxe):
    """Parser for:
        * show platform hardware qfp active statistics drop
        * show platform hardware qfp standby statistics drop

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowEnvironment(ShowEnvironment_iosxe):
    """Parser for show environment

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowModule(ShowModule_iosxe):
    """Parser for show module

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowSwitch(ShowSwitch_iosxe):
    """Parser for show switch

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
class ShowSwitchDetail(ShowSwitchDetail_iosxe):
    """Parser for show switch detail

    IOS reuses the IOS-XE implementation unchanged.
    """
    pass
|
/* global describe beforeEach it */
// Integration tests for the user API routes.
const {expect} = require('chai')
const request = require('supertest')
const db = require('../db')
const app = require('../index')
const User = db.model('user')

describe('User routes', () => {
  // Recreate the schema before every test so each case starts clean.
  beforeEach(() => db.sync({force: true}))

  describe('/api/users/', () => {
    const codysEmail = 'cody@puppybook.com'

    // Seed a single known user for the route assertions below.
    beforeEach(() =>
      User.create({
        email: codysEmail,
        password: 'pass'
      })
    )

    // it('GET /api/users', async () => {
    //   const res = await request(app)
    //     .get('/api/users')
    //     .expect(200)
    //   expect(res.body).to.be.an('array')
    //   expect(res.body[0].email).to.be.equal(codysEmail)
    // })
  }) // end describe('/api/users')
}) // end describe('User routes')
|
import contextlib
import datetime
import getpass
import sqlalchemy as sa
from sqlalchemy.dialects.mssql.pyodbc import MSDialect_pyodbc
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.expr.schema as sch
import ibis.sql.alchemy as alch
from ibis_mssql.compiler import MSSQLDialect
import pyodbc # NOQA fail early if the driver is missing
@dt.dtype.register(MSDialect_pyodbc, sa.dialects.mssql.UNIQUEIDENTIFIER)
def sa_string(_, satype, nullable=True):
    # Map MSSQL UNIQUEIDENTIFIER columns to the ibis string type.
    return dt.String(nullable=nullable)
@dt.dtype.register(MSDialect_pyodbc, sa.dialects.mssql.BIT)
def sa_boolean(_, satype, nullable=True):
    # Map MSSQL BIT columns to the ibis boolean type.
    return dt.Boolean(nullable=nullable)
class MSSQLTable(alch.AlchemyTable):
    # MSSQL-specific table node; no behavior beyond the base class.
    pass
class MSSQLSchema(alch.AlchemyDatabaseSchema):
    # MSSQL-specific schema wrapper; no behavior beyond the base class.
    pass
class MSSQLDatabase(alch.AlchemyDatabase):
    # Database wrapper that hands out MSSQLSchema objects.
    schema_class = MSSQLSchema
class MSSQLClient(alch.AlchemyClient):
    """The Ibis MSSQL client class.

    Attributes
    ----------
    con : sqlalchemy.engine.Engine
    """

    dialect = MSSQLDialect
    database_class = MSSQLDatabase
    table_class = MSSQLTable

    def __init__(
        self,
        engine=None,
        host='localhost',
        user=None,
        password=None,
        port=1433,
        database='master',
        url=None,
        driver='pyodbc',
        odbc_driver='ODBC Driver 17 for SQL Server',
    ):
        """Build (or adopt) a SQLAlchemy engine and connect.

        Precedence: an explicit ``engine`` wins, then an explicit ``url``,
        otherwise a URL is assembled from the individual parameters.
        """
        if engine is None:
            if url is None:
                if driver != 'pyodbc':
                    raise NotImplementedError(
                        'pyodbc is currently the only supported driver'
                    )
                user = user or getpass.getuser()
                # autocommit=True avoids pyodbc 'function sequence error's
                # with explicit transaction begins (see _execute below).
                url = sa.engine.url.URL(
                    'mssql+pyodbc',
                    host=host,
                    port=port,
                    username=user,
                    password=password,
                    database=database,
                    query={'driver': odbc_driver, 'autocommit': True},
                )
            else:
                url = sa.engine.url.make_url(url)
            # Consolidated from two identical calls in both branches.
            engine = sa.create_engine(url)
        super().__init__(engine)
        self.database_name = engine.url.database

    def _execute(self, query, results=True):
        """Execute ``query``, returning an AlchemyProxy over the results.

        If autocommit is enabled, disable explicit transaction
        begin. Otherwise, it leads to 'pyodbc
        function sequence error'.

        This method overrides the parent
        ibis.sql.alchemy.AlchemyClient._execute.
        TODO: Fix this issue in Ibis
        """
        QUERY = 'query'
        AUTOCOMMIT = 'autocommit'
        if (
            QUERY in self.con.url.__dict__
            and AUTOCOMMIT in self.con.url.query
            and self.con.url.query[AUTOCOMMIT] is True
        ):
            return alch.AlchemyProxy(self.con.execute(query))
        # BUG FIX: the parent's result was previously computed but never
        # returned, so callers got None on non-autocommit engines.
        return super()._execute(query, results)

    @contextlib.contextmanager
    def begin(self):
        """Start transaction with client to database."""
        with super().begin() as bind:
            # set timezone utc
            yield bind
            # set timezone previous timezone

    def database(self, name=None):
        """Connect to a database called `name`.

        Parameters
        ----------
        name : string, optional
            The name of the database to connect to. If ``None``, return
            the database named ``self.current_database``.

        Returns
        -------
        db : MSSQLDatabase
            An :class:`ibis.sql.mssql.client.MSSQLDatabase` instance.

        Notes
        -----
        This creates a new connection if `name` is both not ``None`` and not
        equal to the current database.
        """
        # Simplified from the original convoluted condition; behavior is
        # identical: reuse this client unless a *different* name is given.
        if name is None or name == self.current_database:
            return self.database_class(self.current_database, self)
        url = self.con.url
        new_client = type(self)(
            host=url.host,
            user=url.username,
            port=url.port,
            password=url.password,
            database=name,
        )
        return self.database_class(name, new_client)

    def schema(self, name):
        """Get a schema object from the current database for the schema named `name`.

        Parameters
        ----------
        name : string

        Returns
        -------
        schema : MSSQLSchema
            An :class:`ibis.sql.mssql.client.MSSQLSchema` instance.
        """
        return self.database().schema(name)

    @property
    def current_database(self):
        """Database client is currently connected to."""
        return self.database_name

    def list_databases(self):
        """List all databases for client to connect to."""
        return [
            row.name
            for row in self.con.execute(
                'SELECT name FROM master.dbo.sysdatabases'
            )
        ]

    def list_schemas(self):
        """List all the schemas in the current database."""
        return self.inspector.get_schema_names()

    def set_database(self, name):
        """Set current database that client is connected to."""
        raise NotImplementedError(
            'Cannot set database with MSSQL client. To use a different'
            ' database, use client.database({!r})'.format(name)
        )

    @property
    def client(self):
        return self

    def table(self, name, database=None, schema=None):
        """Create an expression that references a particular table.

        Parameters
        ----------
        name : string
            The name of the table to retrieve.
        database : string, optional
            The database in which the table referred to by `name` resides. If
            ``None`` then the ``current_database`` is used.
        schema : string, optional
            The schema in which the table resides. If ``None`` then the
            `public` schema is assumed.

        Returns
        -------
        table : TableExpr
            A table expression.
        """
        if database is not None and database != self.current_database:
            return self.database(name=database).table(name=name, schema=schema)
        alch_table = self._get_sqla_table(name, schema=schema)
        node = self.table_class(alch_table, self, self._schemas.get(name))
        return self.table_expr_class(node)

    def list_tables(self, like=None, database=None, schema=None):
        """List tables available in current database.

        Parameters
        ----------
        like : string, default None
            e.g. 'foo*' to match all tables starting with 'foo'.
        database : string, default None
            Specific database to list available tables
        schema : string, default None
            Specific schema to list available tables

        Returns
        -------
        list
            A list with all tables available for the current database.
        """
        if database is not None and database != self.current_database:
            return self.database(name=database).list_tables(
                like=like, schema=schema
            )
        return super().list_tables(like=like, schema=schema)

    def sql(self, query):
        """Convert a MSSQL query to an Ibis table expression.

        Parameters
        ----------
        query: string
            SQL query to execute on connection

        Returns
        -------
        table : TableExpr
        """
        # TOP 0 fetches only metadata, never rows.
        limited_query = 'SELECT TOP 0 * FROM ({}) t0'.format(query)
        schema = self._get_schema_using_query(limited_query)
        return ops.SQLQueryResult(query, schema, self).to_expr()

    def _get_schema_using_query(self, limited_query):
        """Derive an ibis Schema from a zero-row query's cursor description."""
        # Maps DB-API type constructors to ibis type names.
        type_map = {
            int: 'int64',
            bool: 'boolean',
            float: 'float64',
            str: 'string',
            datetime.datetime: 'timestamp',
        }
        with self._execute(limited_query, results=True) as cur:
            names = [row[0] for row in cur.proxy._cursor_description()]
            ibis_types = [
                type_map[row[1]] for row in cur.proxy._cursor_description()
            ]
        return sch.Schema(names, ibis_types)
|
// Doxygen-generated search index: each entry is
// [anchor-id, [display-name, [url, visibility-flag, scope], ...]].
// Do not edit by hand; regenerate with Doxygen.
var searchData=
[
  ['cbrt_0',['cbrt',['../group__math_ga5520218c452db7b34e883bf0f7a14488.html#ga5520218c452db7b34e883bf0f7a14488',1,'eve']]],
  ['ceil_1',['ceil',['../group__core_ga1fd0ebf298c8ca222374b621cf059750.html#ga1fd0ebf298c8ca222374b621cf059750',1,'eve']]],
  ['clamp_2',['clamp',['../group__core_gad1d369116a4c78c29e74a36ee641f02a.html#gad1d369116a4c78c29e74a36ee641f02a',1,'eve']]],
  ['combine_3',['combine',['../group__core_ga6b5426b411f619a1b836eeda12cdc9c0.html#ga6b5426b411f619a1b836eeda12cdc9c0',1,'eve']]],
  ['condition_5f_4',['condition_',['../structeve_1_1if__.html#ab95afb50101c2b268a415e4b4bdeb186',1,'eve::if_']]],
  ['conj_5',['conj',['../group__math_ga3da1ad6fd04f08ac14cbe0ff478b6951.html#ga3da1ad6fd04f08ac14cbe0ff478b6951',1,'eve']]],
  ['convert_6',['convert',['../group__views_ga4cc00e59b322f70ba2c789afde3593aa.html#ga4cc00e59b322f70ba2c789afde3593aa',1,'eve::algo::views::convert()'],['../group__core_gaa19dcccbb0ef0ef464e95ffd7a588867.html#gaa19dcccbb0ef0ef464e95ffd7a588867',1,'eve::convert()']]],
  ['copy_7',['copy',['../group__algo_ga02000a386a1e19b48cbeb9d21aa57378.html#ga02000a386a1e19b48cbeb9d21aa57378',1,'eve::algo']]],
  ['copy_5fbackward_8',['copy_backward',['../group__algo_gaf168a34bbecbde1efbbe2344e95fa32c.html#gaf168a34bbecbde1efbbe2344e95fa32c',1,'eve::algo']]],
  ['copysign_9',['copysign',['../group__core_ga22a598ef861dbecf0613706ecb60c7fb.html#ga22a598ef861dbecf0613706ecb60c7fb',1,'eve']]],
  ['cos_10',['cos',['../group__math_ga39e50b4c59911e463be1a11fc958fb86.html#ga39e50b4c59911e463be1a11fc958fb86',1,'eve']]],
  ['cosd_11',['cosd',['../group__math_ga216a80d7f91be23dc0d74e26eb057000.html#ga216a80d7f91be23dc0d74e26eb057000',1,'eve']]],
  ['cosh_12',['cosh',['../group__math_gab52648ab8990c126fd024ddf8badb536.html#gab52648ab8990c126fd024ddf8badb536',1,'eve']]],
  ['cospi_13',['cospi',['../group__math_ga03482f35c4921d89499b1034eee99be0.html#ga03482f35c4921d89499b1034eee99be0',1,'eve']]],
  ['cot_14',['cot',['../group__math_gad0a09342c6bb010028e1686a0b1f599c.html#gad0a09342c6bb010028e1686a0b1f599c',1,'eve']]],
  ['cotd_15',['cotd',['../group__math_ga1c0c27c3a93a40a2fa91c42b02040e62.html#ga1c0c27c3a93a40a2fa91c42b02040e62',1,'eve']]],
  ['coth_16',['coth',['../group__math_gab2e9ac37145ddd832e76a2ed724194c9.html#gab2e9ac37145ddd832e76a2ed724194c9',1,'eve']]],
  ['cotpi_17',['cotpi',['../group__math_gade4a273af7fb50439ae8974d4e5e8222.html#gade4a273af7fb50439ae8974d4e5e8222',1,'eve']]],
  ['count_5ftrue_18',['count_true',['../group__core_gaa1db4fb3b560614916f4a5c33bedd5f1.html#gaa1db4fb3b560614916f4a5c33bedd5f1',1,'eve']]],
  ['countl_5fone_19',['countl_one',['../group__core_gac3ea8b4c98c60b7969d5204c18e24107.html#gac3ea8b4c98c60b7969d5204c18e24107',1,'eve']]],
  ['countl_5fzero_20',['countl_zero',['../group__core_ga29c9c15fec7733014b176d759adecc62.html#ga29c9c15fec7733014b176d759adecc62',1,'eve']]],
  ['countr_5fone_21',['countr_one',['../group__core_ga0f2c79073c4fd02eba8f003f2809013a.html#ga0f2c79073c4fd02eba8f003f2809013a',1,'eve']]],
  ['countr_5fzero_22',['countr_zero',['../group__core_ga5c020eebe010e653d992031c6508b55c.html#ga5c020eebe010e653d992031c6508b55c',1,'eve']]],
  ['csc_23',['csc',['../group__math_ga664582204f1e5e323b88bf429706c77f.html#ga664582204f1e5e323b88bf429706c77f',1,'eve']]],
  ['csch_24',['csch',['../group__math_gae4658d7c9b116c396866ffa7ff7b47aa.html#gae4658d7c9b116c396866ffa7ff7b47aa',1,'eve']]],
  ['cscpi_25',['cscpi',['../group__math_gae413e8b133a104f344513b9500b7708b.html#gae413e8b133a104f344513b9500b7708b',1,'eve']]],
  ['cyl_26',['cyl',['../group__core_ga4029354e92cc2575f406c1335e4f2fef.html#ga4029354e92cc2575f406c1335e4f2fef',1,'eve']]],
  ['cyl_5fbessel_5fi0_27',['cyl_bessel_i0',['../group__bessel_gad1f7e9c00fa0ad229c612b230059a9d7.html#gad1f7e9c00fa0ad229c612b230059a9d7',1,'eve']]],
  ['cyl_5fbessel_5fi1_28',['cyl_bessel_i1',['../group__bessel_ga75fe8112cf1f1d8d3bc3c13e796f4fff.html#ga75fe8112cf1f1d8d3bc3c13e796f4fff',1,'eve']]],
  ['cyl_5fbessel_5fin_29',['cyl_bessel_in',['../group__bessel_ga93b97cdd2985f7fdffda38a1c2660d38.html#ga93b97cdd2985f7fdffda38a1c2660d38',1,'eve']]],
  ['cyl_5fbessel_5fj0_30',['cyl_bessel_j0',['../group__bessel_ga40e333409894e1ca14d956fc96a3da34.html#ga40e333409894e1ca14d956fc96a3da34',1,'eve']]],
  ['cyl_5fbessel_5fj1_31',['cyl_bessel_j1',['../group__bessel_ga8d5c8c6360854651827324aff64e9ab9.html#ga8d5c8c6360854651827324aff64e9ab9',1,'eve']]],
  ['cyl_5fbessel_5fk0_32',['cyl_bessel_k0',['../group__bessel_gaf2b4c47b143db0674b5b658464f3621c.html#gaf2b4c47b143db0674b5b658464f3621c',1,'eve']]],
  ['cyl_5fbessel_5fk1_33',['cyl_bessel_k1',['../group__bessel_ga5e2dd2d1f2e62bbee606e3194db9352b.html#ga5e2dd2d1f2e62bbee606e3194db9352b',1,'eve']]],
  ['cyl_5fbessel_5fkn_34',['cyl_bessel_kn',['../group__bessel_gac43789c4ff60e930fef12912520a8a12.html#gac43789c4ff60e930fef12912520a8a12',1,'eve']]],
  ['cyl_5fbessel_5fy0_35',['cyl_bessel_y0',['../group__bessel_gac25060705be353ebf0747ef780fe3362.html#gac25060705be353ebf0747ef780fe3362',1,'eve']]],
  ['cyl_5fbessel_5fy1_36',['cyl_bessel_y1',['../group__bessel_gabb11447069fcb923604e1c08489e3193.html#gabb11447069fcb923604e1c08489e3193',1,'eve']]],
  ['cyl_5fbessel_5fyn_37',['cyl_bessel_yn',['../group__bessel_gab49ce5283b3eccf7e436b2307dc44072.html#gab49ce5283b3eccf7e436b2307dc44072',1,'eve']]]
];
|
// Rustdoc-generated sidebar metadata for the finality-grandpa crate.
// Do not edit by hand; regenerate with rustdoc.
initSidebarItems({"enum":[["Error","Top-level error type used by this crate."],["Message","A protocol message or vote."]],"fn":[["process_commit_validation_result","Runs the callback with the appropriate `CommitProcessingOutcome` based on the given `CommitValidationResult`. Outcome is bad if ghost is undefined, good otherwise."],["validate_commit","Validates a GRANDPA commit message and returns the ghost calculated using the precommits in the commit message and using the commit target as a base."]],"mod":[["round","Logic for a single round of GRANDPA."],["vote_graph","Maintains the vote-graph of the blockchain."],["voter","A voter in GRANDPA. This transitions between rounds and casts votes."],["voter_set","Implementation of a `VoterSet`, representing the complete set of voters and their weights in the context of a round of the protocol."]],"struct":[["CatchUp","A catch-up message, which is an aggregate of prevotes and precommits necessary to complete a round."],["Commit","A commit message which is an aggregate of precommits."],["CommitValidationResult","Struct returned from `validate_commit` function with information about the validation result."],["CompactCommit","A commit message with compact representation of authentication data."],["Equivocation","An equivocation (double-vote) in a given round."],["HistoricalVotes","Historical votes seen in a round."],["Precommit","A precommit for a block and its ancestors."],["Prevote","A prevote for a block and its ancestors."],["PrimaryPropose","A primary proposed block, this is a broadcast of the last round’s estimate."],["SignedMessage","A signed message."],["SignedPrecommit","A signed precommit message."],["SignedPrevote","A signed prevote message."]],"trait":[["BlockNumberOps","Arithmetic necessary for a block number."],["Chain","Chain context necessary for implementation of the finality gadget."]],"type":[["MultiAuthData","Authentication data for a set of many messages, currently a set of precommit signatures but in the future could be optimized with BLS signature aggregation."]]});
|
/**
 * Tests that the addShard process initializes sharding awareness on an added standalone or
 * replica set shard that was started with --shardsvr.
 */
(function() {
    "use strict";

    // Block until the node reports itself as master/primary.
    var waitForIsMaster = function(conn) {
        assert.soon(function() {
            var res = conn.getDB('admin').runCommand({isMaster: 1});
            return res.ismaster;
        });
    };

    // Assert the shard's shardingState matches the cluster's metadata
    // (config string, assigned shard name, cluster id).
    var checkShardingStateInitialized = function(conn, configConnStr, shardName, clusterId) {
        var res = conn.getDB('admin').runCommand({shardingState: 1});
        assert.commandWorked(res);
        assert(res.enabled);
        assert.eq(configConnStr, res.configServer);
        assert.eq(shardName, res.shardName);
        assert(clusterId.equals(res.clusterId),
               'cluster id: ' + tojson(clusterId) + ' != ' + tojson(res.clusterId));
    };

    // Assert config.shards marks the shard as shard-aware (state: 1).
    var checkShardMarkedAsShardAware = function(mongosConn, shardName) {
        var res = mongosConn.getDB('config').getCollection('shards').findOne({_id: shardName});
        assert.neq(null, res, "Could not find new shard " + shardName + " in config.shards");
        assert.eq(1, res.state);
    };

    // Create the cluster to test adding shards to.
    var st = new ShardingTest({shards: 1});
    var clusterId = st.s.getDB('config').getCollection('version').findOne().clusterId;

    // Add a shard that is a standalone mongod.
    var standaloneConn = MongoRunner.runMongod({shardsvr: ''});
    waitForIsMaster(standaloneConn);
    jsTest.log("Going to add standalone as shard: " + standaloneConn);
    var newShardName = "newShard";
    assert.commandWorked(st.s.adminCommand({addShard: standaloneConn.name, name: newShardName}));
    checkShardingStateInitialized(standaloneConn, st.configRS.getURL(), newShardName, clusterId);
    checkShardMarkedAsShardAware(st.s, newShardName);
    MongoRunner.stopMongod(standaloneConn.port);

    // Add a shard that is a replica set.
    var replTest = new ReplSetTest({nodes: 1});
    replTest.startSet({shardsvr: ''});
    replTest.initiate();
    waitForIsMaster(replTest.getPrimary());
    jsTest.log("Going to add replica set as shard: " + tojson(replTest));
    assert.commandWorked(st.s.adminCommand({addShard: replTest.getURL(), name: replTest.getURL()}));
    checkShardingStateInitialized(
        replTest.getPrimary(), st.configRS.getURL(), replTest.getURL(), clusterId);
    // BUG FIX: this previously re-checked the standalone's shard name
    // (newShardName) instead of the replica-set shard that was just added.
    checkShardMarkedAsShardAware(st.s, replTest.getURL());

    replTest.stopSet();
    st.stop();
})();
|
from mayan.apps.appearance.classes import Icon
from mayan.apps.documents.icons import icon_document_type
# Base list icon shared by the index views.
icon_index = Icon(driver_name='fontawesome', symbol='list-ul')
icon_document_index_instance_list = Icon(
    driver_name='fontawesome', symbol='list-ul'
)
icon_document_type_index_templates = icon_index
icon_index_level_up = Icon(
    driver_name='fontawesomecss', css_classes='fa-level-up-alt fa-rotate-90'
)
icon_index_instance_node_with_documents = Icon(
    driver_name='fontawesome', symbol='folder'
)
# Dual icons compose the base list glyph with an action glyph.
icon_index_instances_rebuild = Icon(
    driver_name='fontawesome-dual', primary_symbol='list-ul',
    secondary_symbol='hammer'
)
icon_index_instances_reset = Icon(
    driver_name='fontawesome-dual', primary_symbol='list-ul',
    secondary_symbol='times'
)
icon_index_template_create = Icon(
    driver_name='fontawesome-dual', primary_symbol='list-ul',
    secondary_symbol='plus'
)
icon_index_template_delete = Icon(driver_name='fontawesome', symbol='times')
icon_index_template_document_types = icon_document_type
icon_index_template_edit = Icon(driver_name='fontawesome', symbol='pencil-alt')
icon_index_template_list = Icon(driver_name='fontawesome', symbol='list-ul')
icon_index_template_node_create = Icon(
    driver_name='fontawesome', symbol='plus'
)
icon_index_template_node_delete = Icon(
    driver_name='fontawesome', symbol='times'
)
icon_index_template_node_edit = Icon(
    driver_name='fontawesome', symbol='pencil-alt'
)
icon_index_template_node_tree_view = Icon(
    driver_name='fontawesome', symbol='folder-open'
)
|
"""
Created on May 20, 2010
@author: Nicklas Boerjesson
"""
import unittest
from qal.dal.types import DB_MYSQL, DB_POSTGRESQL, DB_ORACLE, DB_DB2, DB_SQLSERVER
from qal.dal.tests.framework import get_default_dal
def _connect_test(_db_type):
    """Open and close a connection for the given backend type.

    Returns True on success; raises if no DAL could be created.
    """
    dal = get_default_dal(_db_type, "")
    if not dal:
        raise Exception("Get default DAL did not return a DAL.")
    dal.close()
    return True
class DALTests(unittest.TestCase):
    """Connectivity smoke tests: one connect/close round trip per backend."""

    def test_connect_DB_MYSQL(self):
        self.assertEqual(_connect_test(DB_MYSQL), True, 'Connect_test failed: DB_MYSQL')

    def test_connect_DB_POSTGRESQL(self):
        self.assertEqual(_connect_test(DB_POSTGRESQL), True, 'Connect_test failed: DB_POSTGRESQL')

    def test_connect_DB_ORACLE(self):
        self.assertEqual(_connect_test(DB_ORACLE), True, 'Connect_test failed: DB_ORACLE')

    def test_connect_DB_DB2(self):
        self.assertEqual(_connect_test(DB_DB2), True, 'Connect_test failed: DB_DB2')

    def test_connect_DB_SQLSERVER(self):
        self.assertEqual(_connect_test(DB_SQLSERVER), True, 'Connect_test failed: DB_SQLSERVER')

    # This test can be used for debugging.
    # (Leading underscore keeps it out of normal unittest discovery.)
    def _test_run(self):
        dal = get_default_dal(DB_MYSQL, 'dbupgrd')
        # Manual toggle: change to a truthy condition to drop scratch tables.
        if 1 == 0:
            sql = "DROP TABLE \"__VersionLog\""
            dal.execute(sql)
            sql = "DROP TABLE \"__Application\""
            dal.execute(sql)
            sql = "DROP TABLE \"TestTable1\""
            dal.execute(sql)
            sql = "DROP TABLE \"TestTable2\""
            dal.execute(sql)
            sql = "DROP TABLE \"TestTable3\""
            dal.execute(sql)
            dal.commit()
        sql = \
            """
            SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE AS NULLABLE, CHARACTER_MAXIMUM_LENGTH AS DATA_LENGTH
            FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '__VersionLog'
            """
        _result_set = dal.query(sql)
        for item in _result_set:
            print(item)
        dal.close()
if __name__ == "__main__":
    # Run the whole suite when executed directly.
    # import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
|
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import datetime
import hashlib
import json
import logging
import os.path
import sys
from pip._vendor import pkg_resources
from pip._vendor.packaging import version as packaging_version
from pip._vendor.six import ensure_binary
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.search_scope import SearchScope
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.utils.filesystem import (
adjacent_tmp_file,
check_path_owner,
replace,
)
from pip._internal.utils.misc import (
ensure_dir,
get_installed_version,
redact_auth_from_url,
)
from pip._internal.utils.packaging import get_installer
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
import optparse
from optparse import Values
from typing import Any, Dict, Text, Union
from pip._internal.network.session import PipSession
# strftime/strptime format used for the "last_check" timestamp persisted
# in the selfcheck statefile.
SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
logger = logging.getLogger(__name__)
def make_link_collector(
    session,  # type: PipSession
    options,  # type: Values
    suppress_no_index=False,  # type: bool
):
    # type: (...) -> LinkCollector
    """
    Build a LinkCollector from parsed command-line options.

    :param session: The Session to use to make requests.
    :param suppress_no_index: Whether to ignore the --no-index option
        when constructing the SearchScope object.
    """
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index and not suppress_no_index:
        redacted = ",".join(redact_auth_from_url(url) for url in index_urls)
        logger.debug(
            "Ignoring indexes: %s",
            redacted,
        )
        index_urls = []
    # SearchScope.create() requires find_links to be a list.
    search_scope = SearchScope.create(
        find_links=options.find_links or [],
        index_urls=index_urls,
    )
    return LinkCollector(session=session, search_scope=search_scope)
def _get_statefile_name(key):
    # type: (Union[str, Text]) -> str
    """Derive a stable, filesystem-safe statefile name for *key*."""
    digest = hashlib.sha224(ensure_binary(key)).hexdigest()
    return digest
class SelfCheckState(object):
    """
    Persists the result of the last pip self-version check in a
    per-environment JSON statefile under the cache dir, so the network
    check can be skipped when a recent result exists.
    """
    def __init__(self, cache_dir):
        # type: (str) -> None
        self.state = {}  # type: Dict[str, Any]
        # None when there is no cache dir; state is then never persisted.
        self.statefile_path = None
        # Try to load the existing state
        if cache_dir:
            self.statefile_path = os.path.join(
                cache_dir, "selfcheck", _get_statefile_name(self.key)
            )
            try:
                with open(self.statefile_path) as statefile:
                    self.state = json.load(statefile)
            except (IOError, ValueError, KeyError):
                # Explicitly suppressing exceptions, since we don't want to
                # error out if the cache file is invalid.
                pass
    @property
    def key(self):
        # State is keyed per environment: the virtualenv or system prefix.
        return sys.prefix
    def save(self, pypi_version, current_time):
        # type: (str, datetime.datetime) -> None
        """Atomically write the latest check result to the statefile."""
        # If we do not have a path to cache in, don't bother saving.
        if not self.statefile_path:
            return
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return
        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))
        state = {
            # Include the key so it's easy to tell which pip wrote the
            # file.
            "key": self.key,
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        }
        text = json.dumps(state, sort_keys=True, separators=(",", ":"))
        # Write to a sibling temp file first, then replace, so readers
        # never observe a half-written statefile.
        with adjacent_tmp_file(self.statefile_path) as f:
            f.write(ensure_binary(text))
        try:
            # Since we have a prefix-specific state file, we can just
            # overwrite whatever is there, no need to check.
            replace(f.name, self.statefile_path)
        except OSError:
            # Best effort.
            pass
def was_installed_by_pip(pkg):
    # type: (str) -> bool
    """Checks whether pkg was installed by pip

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    """
    try:
        dist = pkg_resources.get_distribution(pkg)
        return get_installer(dist) == "pip"
    except pkg_resources.DistributionNotFound:
        return False
def pip_self_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        # pip itself is not installed; nothing to compare against.
        return
    pip_version = packaging_version.parse(installed_version)
    pypi_version = None
    try:
        state = SelfCheckState(cache_dir=options.cache_dir)
        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"], SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer if it is younger than one week.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]
        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            link_collector = make_link_collector(
                session, options=options, suppress_no_index=True,
            )
            # Pass allow_yanked=False so we don't suggest upgrading to a
            # yanked version.
            selection_prefs = SelectionPreferences(
                allow_yanked=False,
                allow_all_prereleases=False,  # Explicitly set to False
            )
            finder = PackageFinder.create(
                link_collector=link_collector, selection_prefs=selection_prefs,
            )
            best_candidate = finder.find_best_candidate("pip").best_candidate
            if best_candidate is None:
                return
            pypi_version = str(best_candidate.version)
            # save that we've performed a check
            state.save(pypi_version, current_time)
        remote_version = packaging_version.parse(pypi_version)
        # Only nag when a truly newer base version exists AND pip manages
        # itself (not a distro-packaged pip).
        local_version_is_older = (
            pip_version < remote_version
            and pip_version.base_version != remote_version.base_version
            and was_installed_by_pip("pip")
        )
        # Determine if our pypi_version is older
        if not local_version_is_older:
            return
        # We cannot tell how the current pip is available in the current
        # command context, so be pragmatic here and suggest the command
        # that's always available. This does not accommodate spaces in
        # `sys.executable`.
        pip_cmd = "{} -m pip".format(sys.executable)
        logger.warning(
            "You are using pip version %s; however, version %s is "
            "available.\nYou should consider upgrading via the "
            "'%s install --upgrade pip' command.",
            pip_version,
            pypi_version,
            pip_cmd,
        )
    except Exception:
        # Best effort: the self-check must never break the command that
        # triggered it.
        logger.debug(
            "There was an error checking the latest version of pip", exc_info=True,
        )
|
// Minified build of the "angularScreenfull" Angular module: directives
// ngsfFullscreen, showIfFullscreenEnabled, showIfFullscreen and
// ngsfToggleFullscreen, wrapping the screenfull.js library. Generated
// artifact -- do not hand-edit; change the unminified source instead.
!function(){angular.module("angularScreenfull",[])}(),function(){"use strict";function e(e){function l(n,l,r,u){if(r.ngsfFullscreen&&""!==r.ngsfFullscreen){var s=e(r.ngsfFullscreen);s.assign(n,u)}}return{restrict:"A",require:"ngsfFullscreen",controller:n,link:l}}function n(e,n,l,r){function u(){var u=function(){r[o.isFullscreen()?"addClass":"removeClass"](l,"fullscreen"),e.$emit("fullscreenchange"),e.$apply()};n[0].addEventListener(screenfull.raw.fullscreenchange,u),e.$on("$destroy",function(){n[0].removeEventListener(screenfull.raw.fullscreenchange,u)})}function s(n){return e.$on("fullscreenchange",n)}function c(){return o.fullscreenEnabled()?(screenfull.request(l[0]),e.$emit("fullscreenEnabled"),!0):!1}function t(){o.fullscreenEnabled()&&o.isFullscreen()&&o.toggleFullscreen()}function i(){if(o.fullscreenEnabled()){var n=screenfull.isFullscreen;return screenfull.toggle(l[0]),e.$emit(n?"fullscreenDisabled":"fullscreenEnabled"),!0}return!1}function f(){return o.fullscreenEnabled()?screenfull.isFullscreen:!1}function a(){return"undefined"!=typeof screenfull?screenfull.enabled:!1}var o=this;o.onFullscreenChange=s,o.requestFullscreen=c,o.removeFullscreen=t,o.toggleFullscreen=i,o.isFullscreen=f,o.fullscreenEnabled=a,o.fullscreenEnabled()&&u()}angular.module("angularScreenfull").directive("ngsfFullscreen",e),e.$inject=["$parse"],n.$inject=["$scope","$document","$element","$animate"]}(),function(){"use strict";function e(e){function n(n,l,r,u){u.fullscreenEnabled()?e.removeClass(l,"ng-hide"):e.addClass(l,"ng-hide")}return{restrict:"A",require:"^ngsfFullscreen",link:n}}angular.module("angularScreenfull").directive("showIfFullscreenEnabled",e),e.$inject=["$animate"]}(),function(){"use strict";function e(e){function n(n,l,r,u){var s=function(){var n=u.isFullscreen();("false"===r.showIfFullscreen||r.showIfFullscreen===!1)&&(n=!n),n?e.removeClass(l,"ng-hide"):e.addClass(l,"ng-hide")};s();var 
c=u.onFullscreenChange(s);n.$on("$destroy",c)}return{restrict:"A",require:"^ngsfFullscreen",link:n}}angular.module("angularScreenfull").directive("showIfFullscreen",e),e.$inject=["$animate"]}(),function(){"use strict";function e(){function e(e,n,l,r){n.on("click",function(){r.toggleFullscreen()})}return{restrict:"A",require:"^ngsfFullscreen",link:e}}angular.module("angularScreenfull").directive("ngsfToggleFullscreen",e)}();
|
// All material copyright ESRI, All Rights Reserved, unless otherwise specified.
// See https://js.arcgis.com/4.3/esri/copyright.txt for details.
//>>built
// Minified AMD module: a viewport/renderer wrapper around ./Visualizer
// (camera management, frustum culling, near/far computation, stats).
// Generated build output (see the copyright/">>built" banner above) --
// do not hand-edit.
define("require exports dojo/has ../lib/PerformanceTimer ../lib/Camera ../lib/Util ../lib/BitSet ../lib/gl-matrix ./Visualizer".split(" "),function(d,N,O,P,q,y,r,l,t){var n=l.vec3;d=l.vec4d;var v=l.mat4d,g=[0,0],w=v.create(),c=[d.create(),d.create(),d.create(),d.create(),d.create(),d.create()],u={get:function(a){return!0}};return function(){function a(b,f,a,c){this._content={};this._visibleContent=new r;this._frustumCullingEnabled=!0;this._maxFarNearRatio=2E4;this._stats={renderGeometriesTotal:0,
renderGeometriesVisible:0,visualizerRenderTimer:null,viewportRenderTimer:null};this._needsRender=!0;this._rctx=c;this._gl=c.gl;this._visualizer=new t(b,f,a,this._rctx);this._camera=new q(n.createFrom(0,100,-100),n.createFrom(0,0,0))}a.prototype.getCombinedStats=function(){var b={},f=this._visualizer.getCombinedStats(),a;for(a in f)b[a]=f[a];b.renderGeometriesTotal=this._stats.renderGeometriesTotal;b.renderGeometriesVisible=this._stats.renderGeometriesVisible;void 0!==this._gl.getUsedTextureMemory&&
(b.textureMemory=this._gl.getUsedTextureMemory());void 0!==this._gl.getUsedRenderbufferMemory&&(b.renderbufferMemory=this._gl.getUsedRenderbufferMemory());void 0!==this._gl.getUsedVBOMemory&&(b.VBOMemory=this._gl.getUsedVBOMemory());if(void 0!==this._gl.getUsedTextureMemoryStats){var f=this._gl.getUsedTextureMemoryStats(),c;for(c in f)b["texMem type: "+c]=f[c]}return b};a.prototype.dispose=function(){this._visualizer.dispose();this._visualizer=null};a.prototype.setLightingData=function(b){this._visualizer.setLightingData(b)};
a.prototype.getLightingData=function(){return this._visualizer.getLightingData()};a.prototype.getViewParams=function(b){var a=this._visualizer.getViewParams(b);if(!b||b.frustumCullingEnabled)a.frustumCullingEnabled=this._frustumCullingEnabled;if(!b||b.maxFarNearRatio)a.maxFarNearRatio=this._maxFarNearRatio;return a};a.prototype.setViewParams=function(b){void 0!==b.frustumCullingEnabled&&(this._frustumCullingEnabled=b.frustumCullingEnabled);void 0!==b.maxFarNearRatio&&(this._maxFarNearRatio=-1===b.maxFarNearRatio?
2E4:b.maxFarNearRatio);this._visualizer.setViewParams(b);this._needsRender=!0};a.prototype.setRenderParams=function(b){this._visualizer.setRenderParams(b)};a.prototype.getRenderParams=function(){return this._visualizer.getRenderParams()};a.prototype.getFrustumObjects=function(){var b={},a;for(a in this._content)this._visibleContent.get(this._content[a].idx)&&(b[this._content[a].name]=1);return b};a.prototype.modify=function(b,a,c,d){this._visualizer.modify(b,a,c,d);this._content=this._visualizer.getContent()};
a.prototype.getContent=function(){return this._content};a.prototype.setSelectionObject=function(b,a){this._visualizer.setSelectionObject(b,a)};a.prototype.setCamera=function(b){this._camera.copyFrom(b);this._computeVisibleContentAndUpdateNearFar();this._needsRender=!0};a.prototype.getCamera=function(){return this._camera};a.prototype.getPickRay=function(b,a,c){return this.pickRayWithBeginPoint(b,void 0,this._camera.viewMatrix,a,c)};a.prototype.pickRayWithBeginPoint=function(b,a,c,d,g){return this._visualizer.getPickRay(b,
a,this._camera,c,d,g)};a.prototype.addExternalRenderer=function(b,a){return this._visualizer.addExternalRenderer(b,a)};a.prototype.removeExternalRenderer=function(b){return this._visualizer.removeExternalRenderer(b)};a.prototype.getExternalRenderers=function(){return this._visualizer.getExternalRenderers()};a.prototype.render=function(b,a){var c=this._computeVisibleContentAndUpdateNearFar();this._visualizer.render(this._camera,b,c,a)};a.prototype.resetNeedsRender=function(){this._needsRender=!1;this._visualizer.resetNeedsRender()};
a.prototype.needsRender=function(){return this._needsRender||this._visualizer.needsRender()};a.prototype._computeVisibleContentAndUpdateNearFar=function(){return this._frustumCullingEnabled||0<this._maxFarNearRatio?(g[1]=0,this._computeFrustumCullingAndNearFar(this._camera.eye,this._visibleContent,g),0<this._maxFarNearRatio&&0<g[1]&&(this._camera.far=g[1],this._camera.near=Math.max(g[0],this._camera.far/this._maxFarNearRatio)),this._visibleContent):u};a.prototype._computeFrustumCullingAndNearFar=
function(a,f,d){v.perspective(this._camera.fovY,this._camera.aspect,1,10,w);y.matrix2frustumPlanes(this._camera.viewMatrix,w,c);f.clearAll();this._stats.renderGeometriesTotal=0;this._stats.renderGeometriesVisible=0;a=-Number.MAX_VALUE;var b=-Number.MAX_VALUE,g=c[0][0],l=c[0][1],n=c[0][2],q=c[0][3],r=c[1][0],t=c[1][1],u=c[1][2],z=c[1][3],A=c[2][0],B=c[2][1],C=c[2][2],D=c[2][3],E=c[3][0],F=c[3][1],G=c[3][2],H=c[3][3],I=c[4][0],J=c[4][1],K=c[4][2],L=c[4][3],M=c[5][3],x;for(x in this._content){var p=
this._content[x];this._stats.renderGeometriesTotal++;if(!p.material.isBackdrop){var e=p.center,h=e[0],k=e[1],m=e[2],e=p.bsRadius;if(g*h+l*k+n*m+q>e)continue;if(r*h+t*k+u*m+z>e)continue;if(A*h+B*k+C*m+D>e)continue;if(E*h+F*k+G*m+H>e)continue;k=I*h+J*k+K*m;h=k+e;e=-k+e;h>a&&(a=h);e>b&&(b=e)}f.set(p.idx);this._stats.renderGeometriesVisible++}f=a!==-Number.MAX_VALUE;0<this._stats.renderGeometriesVisible&&f&&(d[0]=.99*Math.max(1-(a+L),2),d[1]=1.01*Math.max(10+(b+M),d[0]+1))};return a}()});
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2019 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from enum import Enum
from typing import (Callable, Dict, List, Mapping, NamedTuple, Optional, Set,
Tuple)
from .data import Arc, StateID
from .data import Symbol as _Symbol
from .flags import (Clear, DisallowFeature, DisallowValue, FlagDiacritic,
Positive, RequireFeature, RequireValue, Unify)
from .symbol import (Epsilon, Grapheme, Identity, MultiCharacterSymbol, Symbol,
Unknown)
FLAG_PATTERN = re.compile(r'''
^@(?:
[UPNRDE][.]\w+[.]\w+ |
[RDC][.]\w+
)@$
''', re.VERBOSE)
class FSTParseError(Exception):
    """
    Raised when an FST file cannot be parsed.
    """
class SymbolTable:
    """
    Keeps track of ALL of the symbols in an FST.
    """
    def __init__(self) -> None:
        # TODO: differentiate between input alphabet and output alphabet
        # the union of input and output is sigma
        self._symbols = {}  # type: Dict[int, Symbol]

    def __getitem__(self, idx: int):
        return self._symbols[idx]

    def add(self, symbol_id: int, symbol: Symbol) -> None:
        """
        Add a symbol to the symbol table.
        """
        # Each index may be defined at most once.
        if symbol_id in self._symbols:
            raise FSTParseError(
                'Duplicate symbols for index %d: old: %r; new: %r' % (
                    symbol_id, self._symbols[symbol_id], symbol
                ))
        self._symbols[symbol_id] = symbol

    @property
    def sigma(self) -> Dict[int, Symbol]:
        """Only the regular symbols: graphemes, multi-char symbols, flags."""
        wanted = (Grapheme, MultiCharacterSymbol, FlagDiacritic)
        return {idx: sym for idx, sym in self._symbols.items()
                if isinstance(sym, wanted)}

    @property
    def has_epsilon(self) -> bool:
        # Index 0 is epsilon by convention.
        return 0 in self._symbols
class FSTParse(NamedTuple('FSTParse', [('symbols', SymbolTable),
                                       ('arcs', Set[Arc]),
                                       ('intermediate_states', Set[StateID]),
                                       ('accepting_states', Set[StateID])])):
    """
    The parsed data from an FST, in a nice neat pile.
    """
    def _sigma_of_type(self, symbol_type):
        # Filtered view of sigma, keyed by FST index.
        return {idx: sym for idx, sym in self.symbols.sigma.items()
                if isinstance(sym, symbol_type)}

    @property
    def multichar_symbols(self) -> Dict[int, MultiCharacterSymbol]:
        return self._sigma_of_type(MultiCharacterSymbol)

    @property
    def flag_diacritics(self) -> Dict[int, FlagDiacritic]:
        return self._sigma_of_type(FlagDiacritic)

    @property
    def graphemes(self) -> Dict[int, Grapheme]:
        return self._sigma_of_type(Grapheme)

    @property
    def sigma(self) -> Dict[int, Symbol]:
        return self.symbols.sigma

    @property
    def states(self):
        # Every state we know about, intermediate and accepting alike.
        return self.intermediate_states | self.accepting_states

    @property
    def has_epsilon(self) -> bool:
        return self.symbols.has_epsilon
class FomaParser:
    """
    Parses a FOMA file, in plain-text.

    The dump is a sequence of sections, each introduced by a ``##name##``
    line; parse_line() switches self.handle_line to the matching section
    handler and feeds every other line to the current handler.
    """
    LineParser = Callable[[str], None]

    def __init__(self, invert_labels: bool) -> None:
        # When True, each arc's upper/lower labels are swapped on output.
        self.invert_labels = invert_labels
        self.arcs = []  # type: List[Arc]
        self.accepting_states = set()  # type: Set[StateID]
        # Source state implied by the most recent 4/5-field arc line;
        # the 2/3-field short forms reuse it.
        self.implied_state = None  # type: Optional[int]
        self.handle_line = self.handle_header
        self.has_seen_header = False
        self.symbols = SymbolTable()

    def handle_header(self, line: str):
        """Handles lines in the ##foma-net 1.0## section."""
        # Nothing to do here... yet.
        ...

    def handle_props(self, line: str):
        """
        Handles the ##props## section (FST metadata); only guards against
        multiple FSTs in one file for now.
        """
        if self.has_seen_header:
            raise FSTParseError('Cannot handle multiple FSTs')
        self.has_seen_header = True
        # TODO: parse:
        #  - arity
        #  - arc_count
        #  - state_count
        #  - line_count
        #  - final_count
        #  - path_count
        #  - is_deterministic
        #  - is_pruned
        #  - is_minimized
        #  - is_epsilon_free
        #  - is_loop_free
        #  - is_completed
        #  - name
        # Foma will technically accept anything until it sees '##sigma##'
        # but we won't, as that is gross.

    def handle_sigma(self, line: str):
        """
        Adds a new entry to the symbol table.

        Each line has the form "<index> <symbol text>".
        """
        idx_str, _space, symbol_text = line.partition('\N{SPACE}')
        idx = int(idx_str)
        self.symbols.add(idx, parse_symbol(symbol_text))

    def handle_states(self, line: str):
        """
        Either:
         - appends an arc to the list;
         - adds an accepting state; or
         - finds the sentinel value

        :raises FSTParseError: if the line has an unexpected field count.
        """
        arc_def = tuple(int(num) for num in line.split())
        num_items = len(arc_def)
        if arc_def == (-1, -1, -1, -1, -1):
            # Sentinel value: there are no more arcs to define.
            return
        if num_items == 2:
            if self.implied_state is None:
                raise ValueError('No implied state')
            src = self.implied_state
            # in/out, target (state num implied)
            in_label, dest = arc_def
            out_label = in_label
        elif num_items == 3:
            if self.implied_state is None:
                raise ValueError('No implied state')
            src = self.implied_state
            # in, out, target (state num implied)
            in_label, out_label, dest = arc_def
        elif num_items == 4:
            # FIXME: there's a bug here in my interpretation of the final parameter.
            # state num, in/out, target, final state
            src, in_label, dest, _weight = arc_def
            out_label = in_label
            # FIXME: this is a STATE WITHOUT TRANSITIONS
            if in_label == -1 or dest == -1:
                # This is an accepting state
                self.accepting_states.add(StateID(src))
                return
        elif num_items == 5:
            # FIXME: last is final_state, not weight
            src, in_label, out_label, dest, _weight = arc_def
        else:
            # BUG FIX: an unexpected field count used to fall through and
            # crash below with an UnboundLocalError on `src`; report a
            # proper parse error instead.
            raise FSTParseError(
                'Cannot parse arc definition with %d fields: %r'
                % (num_items, line)
            )
        self.implied_state = src
        # Super important! make sure the order of these arguments is
        # consistent with the definition of Arc
        upper_label, lower_label = self.symbols[in_label], self.symbols[out_label]
        if self.invert_labels:
            upper_label, lower_label = lower_label, upper_label
        arc = Arc(StateID(src), upper_label, lower_label, StateID(dest))
        self.arcs.append(arc)

    def handle_end(self, line: str):
        """Handles the ##end## section."""
        # Nothing to do here. Yet.
        ...

    def parse_line(self, line: str):
        """
        Dispatches one line: a '##...##' header switches sections; any
        other line goes to the current section handler.
        """
        # Find all the details here:
        # https://github.com/mhulden/foma/blob/master/foma/io.c#L623-L821
        # Check header
        if line.startswith('##'):
            header = line[2:-2]
            self.handle_line = {
                'foma-net 1.0': self.handle_header,
                'props': self.handle_props,
                'sigma': self.handle_sigma,
                'states': self.handle_states,
                'end': self.handle_end,
            }[header]
        else:
            self.handle_line(line.rstrip('\n'))

    def finalize(self) -> FSTParse:
        """Bundles everything parsed so far into an FSTParse."""
        # After parsing, we should be in the ##end## state.
        assert self.handle_line == self.handle_end
        # Any state that is the source of an arc is "intermediate".
        states = {StateID(arc.state) for arc in self.arcs}
        return FSTParse(symbols=self.symbols,
                        arcs=set(self.arcs),
                        intermediate_states=states,
                        accepting_states=self.accepting_states)

    def parse_text(self, fst_text: str) -> FSTParse:
        """Parses an entire FOMA text dump and returns the FSTParse."""
        for line in fst_text.splitlines():
            self.parse_line(line)
        return self.finalize()
def parse_text(att_text: str, invert_labels: bool = False) -> FSTParse:
    """
    Parse the text of a FOMA binary FST. The text is retrieved by gunzip'ing
    the file.

    FOMA text is very similar to an AT&T format FST.
    """
    parser = FomaParser(invert_labels)
    return parser.parse_text(att_text)
def parse_symbol(symbol: str) -> Symbol:
    """
    Classify one symbol-table entry from the ##sigma## section.

    :raises NotImplementedError: for special '@...@' symbols that are not
        yet supported, or for anything else unclassifiable (e.g. '').
    """
    if FLAG_PATTERN.match(symbol):
        return parse_flag(symbol)
    elif symbol == '@_EPSILON_SYMBOL_@':
        return Epsilon
    elif symbol == '@_UNKNOWN_SYMBOL_@':
        return Unknown
    elif symbol == '@_IDENTITY_SYMBOL_@':
        return Identity
    elif symbol.startswith('@') and symbol.endswith('@'):
        # BUG FIX: previously raised a bare NotImplementedError, which made
        # these failures impossible to diagnose from the traceback alone.
        raise NotImplementedError('Unsupported special symbol: %r' % (symbol,))
    elif len(symbol) > 1:
        return MultiCharacterSymbol(symbol)
    elif len(symbol) == 1:
        return Grapheme(symbol)
    raise NotImplementedError('Cannot classify symbol: %r' % (symbol,))
def parse_flag(flag_diacritic: str) -> FlagDiacritic:
    """
    Construct the appropriate FlagDiacritic for a flag string such as
    '@U.FEAT.VAL@' or '@D.FEAT@'.
    """
    assert FLAG_PATTERN.match(flag_diacritic)
    opcode, *arguments = flag_diacritic.strip('@').split('.')
    # Dispatch on (opcode, arity); these pairs are mutually exclusive.
    constructors = {
        ('U', 2): Unify,
        ('P', 2): Positive,
        ('R', 2): RequireValue,
        ('R', 1): RequireFeature,
        ('D', 1): DisallowFeature,
        ('D', 2): DisallowValue,
        ('C', 1): Clear,
    }
    constructor = constructors.get((opcode, len(arguments)))
    if constructor is None:
        raise ValueError('Cannot parse ' + flag_diacritic)
    return constructor(*arguments)
|
//
// ToolKit.h
// MapView
//
// Created by imobile-xzy on 16/3/23.
//
//
#import <Foundation/Foundation.h>
/// Miscellaneous file-system helpers for MapView.
@interface ToolKit : NSObject
/// Creates directories for the given path, returning YES on success.
/// NOTE(review): implementation not visible in this header -- confirm in
/// ToolKit.m whether intermediate directories are also created.
+(BOOL)createFileDirectories:(NSString*)path;
@end
|
from . import ClientCaches
from . import ClientConstants as CC
from . import ClientDB
from . import ClientImportFileSeeds
from . import ClientImportOptions
from . import ClientMigration
from . import ClientServices
from . import ClientTags
import collections
import hashlib
from . import HydrusConstants as HC
from . import HydrusExceptions
from . import HydrusTagArchive
import os
import random
import shutil
import time
import unittest
from . import HydrusData
from . import HydrusGlobals as HG
from . import TestController
# Pools of tags randomly assigned to fake imported files, one pool per
# mapping status.
current_tag_pool = [ 'blonde hair', 'blue eyes', 'bodysuit', 'character:samus aran', 'series:metroid', 'studio:nintendo' ]
pending_tag_pool = [ 'favourites', 'kino', 'brown shirt', 'huge knees' ]
deleted_tag_pool = [ 'trash', 'ugly', 'character:smaus aran', 'red hair' ]
to_be_pended_tag_pool = [ 'clothing:high heels', 'firearm', 'puffy armpit' ]
# Parent pairs ( child, parent ), one pool per status.
current_parents_pool = [
    ( 'character:princess peach', 'series:super mario bros' ),
    ( 'character:princess peach', 'gender:female' ),
    ( 'mario_(mario)', 'series:super mario bros' ),
    ( 'meta:explicit', 'nsfw' ),
    ( 'bepis', 'genidalia' ),
    ( 'bagina', 'genidalia' ),
]
pending_parents_pool = [
    ( 'character:princess daisy', 'series:super mario bros' ),
    ( 'character:princess daisy', 'gender:female' ),
    ( 'mario_(mario)', 'series:super mario bros' ),
    ( 'bepis', 'genidalia' ),
    ( 'bagina', 'genidalia' ),
]
to_be_pended_parents_pool = [
    ( 'pend:parent a', 'pend:parent b' ),
    ( 'parent c', 'parent d' ),
]
deleted_parents_pool = [
    ( 'male', 'human' ),
    ( 'table', 'general:furniture' ),
    ( 'character:iron man', 'studio:dc' ),
]
# Sibling pairs ( worse tag, better tag ), one pool per status.
current_siblings_pool = [
    ( 'lara_croft', 'character:lara croft' ),
    ( 'lara croft', 'character:lara croft' ),
    ( 'series:tomb raider (series)', 'series:tomb raider' ),
    ( 'general:lamp', 'lamp' ),
    ( 'bog', 'bepis' ),
    ( 'buggy', 'bagina' ),
]
pending_siblings_pool = [
    ( 'horse', 'species:horse' ),
    ( 'equine', 'species:equine' ),
    ( 'dog', 'species:dog' ),
    ( 'canine', 'species:canine' ),
    ( 'eguine', 'equine' ),
]
to_be_pended_siblings_pool = [
    ( 'pend:sibling a', 'pend:sibling b' ),
    ( 'sibling c', 'sibling d' ),
]
deleted_siblings_pool = [
    ( 'male', 'male:male' ),
    ( 'table', 'general:table' ),
    ( 'shadow', 'character:shadow the hedgehog' ),
]
# Maps each pair content type to its ( current, pending, to-be-pended,
# deleted ) pools so tests can iterate both parents and siblings.
pair_types_to_pools = {}
pair_types_to_pools[ HC.CONTENT_TYPE_TAG_PARENTS ] = ( current_parents_pool, pending_parents_pool, to_be_pended_parents_pool, deleted_parents_pool )
pair_types_to_pools[ HC.CONTENT_TYPE_TAG_SIBLINGS ] = ( current_siblings_pool, pending_siblings_pool, to_be_pended_siblings_pool, deleted_siblings_pool )
class TestMigration( unittest.TestCase ):
    @classmethod
    def _clear_db( cls ):
        # Wipe the db files and stand up a fresh client database.
        cls._delete_db()
        # class variable
        cls._db = ClientDB.DB( HG.test_controller, TestController.DB_DIR, 'client' )
    @classmethod
    def _delete_db( cls ):
        # Shut the db down and wait for its worker loop to stop before
        # removing the files, so nothing is holding them open.
        cls._db.Shutdown()
        while not cls._db.LoopIsFinished():
            time.sleep( 0.1 )
        db_filenames = list(cls._db._db_filenames.values())
        for filename in db_filenames:
            path = os.path.join( TestController.DB_DIR, filename )
            os.remove( path )
        del cls._db
    @classmethod
    def setUpClass( cls ):
        # One client database is shared by every test in this class.
        cls._db = ClientDB.DB( HG.test_controller, TestController.DB_DIR, 'client' )
    @classmethod
    def tearDownClass( cls ):
        # Shut down and delete the db files once all tests have run.
        cls._delete_db()
def pub( self, *args, **kwargs ): pass
def sub( self, *args, **kwargs ): pass
def Read( self, action, *args, **kwargs ): return TestMigration._db.Read( action, *args, **kwargs )
def WriteSynchronous( self, action, *args, **kwargs ): return TestMigration._db.Write( action, True, *args, **kwargs )
    def _set_up_services( self ):
        # Registers twenty fake tag repositories and rebuilds the services
        # manager so migration sources/destinations can resolve them.
        self._test_tag_repo_service_keys = {}
        services = self.Read( 'services' )
        for i in range( 20 ):
            service_key = HydrusData.GenerateKey()
            services.append( ClientServices.GenerateService( service_key, HC.TAG_REPOSITORY, 'test repo {}'.format( i ) ) )
            self._test_tag_repo_service_keys[ i ] = service_key
        self.WriteSynchronous( 'update_services', services )
        self.services_manager = ClientCaches.ServicesManager( self )
    def _do_fake_imports( self ):
        # Fabricates 100 files' worth of hashes and random tag sets; the
        # first 50 are also "imported" into the client db, so only those
        # are resolvable by hash-type lookups later.
        self._md5_to_sha256 = {}
        self._sha256_to_md5 = {}
        self._sha256_to_sha1 = {}
        self._my_files_sha256 = set()
        self._hashes_to_current_tags = {}
        self._hashes_to_pending_tags = {}
        self._hashes_to_deleted_tags = {}
        # Plausible-looking file metadata shared by every fake file.
        ( size, mime, width, height, duration, num_frames, has_audio, num_words ) = ( 65535, HC.IMAGE_JPEG, 640, 480, None, None, False, None )
        for i in range( 100 ):
            hash = HydrusData.GenerateKey()
            md5 = os.urandom( 16 )
            sha1 = os.urandom( 20 )
            sha512 = os.urandom( 64 )
            self._md5_to_sha256[ md5 ] = hash
            self._sha256_to_md5[ hash ] = md5
            self._sha256_to_sha1[ hash ] = sha1
            self._hashes_to_current_tags[ hash ] = set( random.sample( current_tag_pool, 3 ) )
            self._hashes_to_pending_tags[ hash ] = set( random.sample( pending_tag_pool, 3 ) )
            self._hashes_to_deleted_tags[ hash ] = set( random.sample( deleted_tag_pool, 3 ) )
            if i < 50:
                fake_file_import_job = ClientImportFileSeeds.FileImportJob( 'fake path' )
                fake_file_import_job._hash = hash
                fake_file_import_job._file_info = ( size, mime, width, height, duration, num_frames, has_audio, num_words )
                fake_file_import_job._extra_hashes = ( md5, sha1, sha512 )
                fake_file_import_job._phashes = [ os.urandom( 8 ) ]
                fake_file_import_job._file_import_options = ClientImportOptions.FileImportOptions()
                self.WriteSynchronous( 'import_file', fake_file_import_job )
                self._my_files_sha256.add( hash )
def _add_mappings_to_services( self ):
content_updates = []
for ( hash, tags ) in self._hashes_to_current_tags.items():
for tag in tags:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( tag, ( hash, ) ) ) )
for ( hash, tags ) in self._hashes_to_deleted_tags.items():
for tag in tags:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, ( hash, ) ) ) )
service_keys_to_content_updates = { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : content_updates }
self.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
content_updates = []
for ( hash, tags ) in self._hashes_to_current_tags.items():
for tag in tags:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( tag, ( hash, ) ) ) )
for ( hash, tags ) in self._hashes_to_pending_tags.items():
for tag in tags:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( tag, ( hash, ) ) ) )
for ( hash, tags ) in self._hashes_to_deleted_tags.items():
for tag in tags:
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, ( hash, ) ) ) )
service_keys_to_content_updates = { service_key : content_updates for service_key in self._test_tag_repo_service_keys.values() }
self.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
def _test_mappings_list_to_list( self ):
data = list( self._hashes_to_current_tags.items() )
self.assertTrue( len( data ) > 0 )
source = ClientMigration.MigrationSourceList( self, data )
destination = ClientMigration.MigrationDestinationListMappings( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( destination.GetDataReceived(), data )
def _test_mappings_hta_to_list( self ):
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListMappings( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( dict( destination.GetDataReceived() ), dict( expected_data ) )
md5_hta_path = os.path.join( TestController.DB_DIR, 'md5hta.db' )
sha256_hta_path = os.path.join( TestController.DB_DIR, 'sha256hta.db' )
md5_hta = HydrusTagArchive.HydrusTagArchive( md5_hta_path )
sha256_hta = HydrusTagArchive.HydrusTagArchive( sha256_hta_path )
md5_hta.SetHashType( HydrusTagArchive.HASH_TYPE_MD5 )
sha256_hta.SetHashType( HydrusTagArchive.HASH_TYPE_SHA256 )
md5_hta.BeginBigJob()
sha256_hta.BeginBigJob()
for ( hash, tags ) in self._hashes_to_current_tags.items():
md5 = self._sha256_to_md5[ hash ]
md5_hta.AddMappings( md5, tags )
sha256_hta.AddMappings( hash, tags )
md5_hta.CommitBigJob()
sha256_hta.CommitBigJob()
md5_hta.Optimise()
sha256_hta.Optimise()
md5_hta.Close()
sha256_hta.Close()
del md5_hta
del sha256_hta
#
# test file filter
tag_filter = ClientTags.TagFilter()
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter )
expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
expected_data = list( self._hashes_to_current_tags.items() )
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'md5', None, tag_filter )
expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
run_test( source, expected_data )
# not all hashes, since hash type lookup only available for imported files
hashes = random.sample( self._my_files_sha256, 25 )
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', hashes, tag_filter )
expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
run_test( source, expected_data )
# test desired hash type
# not all hashes, since hash type lookup only available for imported files
expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter )
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter )
run_test( source, expected_data )
# do a test with specific hashes, so md5->sha1 does interim sha256 conversion
# not all hashes, since hash type lookup only available for imported files
hashes = random.sample( self._my_files_sha256, 25 )
expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', hashes, tag_filter )
run_test( source, expected_data )
# tag filter
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_WHITELIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter )
expected_data = [ ( self._sha256_to_md5[ hash ], tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
run_test( source, expected_data )
#
os.remove( md5_hta_path )
os.remove( sha256_hta_path )
def _test_mappings_list_to_hta( self ):
# Migrates ( hash, tags ) mappings from an in-memory list into a Hydrus
# Tag Archive on disk, then reads the archive back to verify contents.
def run_test( source, destination_path, desired_hash_type, expected_data ):
destination = ClientMigration.MigrationDestinationHTA( self, destination_path, desired_hash_type )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
# re-open the written archive and compare its complete mapping dump
hta = HydrusTagArchive.HydrusTagArchive( destination_path )
result = list( hta.IterateMappings() )
self.assertEqual( dict( result ), dict( expected_data ) )
hta.Close()
md5_hta_path = os.path.join( TestController.DB_DIR, 'md5hta.db' )
sha256_hta_path = os.path.join( TestController.DB_DIR, 'sha256hta.db' )
#
# the same current-tag mappings, keyed by md5 and by sha256, one archive each
md5_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
sha256_data = list( self._hashes_to_current_tags.items() )
md5_source = ClientMigration.MigrationSourceList( self, md5_data )
sha256_source = ClientMigration.MigrationSourceList( self, sha256_data )
run_test( md5_source, md5_hta_path, 'md5', md5_data )
run_test( sha256_source, sha256_hta_path, 'sha256', sha256_data )
#
# clean up the temp archives
os.remove( md5_hta_path )
os.remove( sha256_hta_path )
def _test_mappings_service_to_list( self ):
# Pulls mappings out of a tag service into a list destination and checks
# the received data for: file-service filtering, specific-hash filtering,
# desired hash type conversion, tag filtering, and content statuses.
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListMappings( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( dict( destination.GetDataReceived() ), dict( expected_data ) )
# test file filter
tag_repo_service_key = self._test_tag_repo_service_keys[0]
tag_filter = ClientTags.TagFilter()
# combined file service: everything comes through
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = list( self._hashes_to_current_tags.items() )
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = list( self._hashes_to_current_tags.items() )
run_test( source, expected_data )
# local file service: only mappings for files in _my_files_sha256
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
run_test( source, expected_data )
# not all hashes, since hash type lookup only available for imported files
hashes = random.sample( self._my_files_sha256, 25 )
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
run_test( source, expected_data )
# test desired hash type
# not all hashes, since hash type lookup only available for imported files
expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
run_test( source, expected_data )
# tag filter
# whitelist unnamespaced, blacklist namespaced; expectation applies the
# same filter and drops rows left with no tags
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_WHITELIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
run_test( source, expected_data )
# test statuses
tag_filter = ClientTags.TagFilter()
source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_DELETED, ) )
expected_data = list( self._hashes_to_deleted_tags.items() )
run_test( source, expected_data )
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_DELETED, ) )
expected_data = list( self._hashes_to_deleted_tags.items() )
run_test( source, expected_data )
# current + pending together: expectation merges both tag pools per hash
source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) )
expected_data = collections.defaultdict( set )
for ( hash, tags ) in self._hashes_to_current_tags.items():
expected_data[ hash ].update( tags )
for ( hash, tags ) in self._hashes_to_pending_tags.items():
expected_data[ hash ].update( tags )
expected_data = list( expected_data.items() )
run_test( source, expected_data )
def _test_mappings_list_to_service( self ):
# Pushes list-sourced mappings into tag services with each content action
# (add/delete/clear-delete-record/pend/petition) and verifies the
# resulting per-file tag statuses via fresh media results.
def run_test( source, tag_service_key, content_action, expected_data ):
destination = ClientMigration.MigrationDestinationTagServiceMappings( self, tag_service_key, content_action )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
# drop the media result cache so the re-read below reflects the writes
self._db._weakref_media_result_cache = ClientCaches.MediaResultCache()
hashes_to_media_results = { media_result.GetHash() : media_result for media_result in self.Read( 'media_results', list( self._hashes_to_current_tags.keys() ) ) }
for ( hash, tags ) in expected_data:
media_result = hashes_to_media_results[ hash ]
t_m = media_result.GetTagsManager()
if content_action == HC.CONTENT_UPDATE_ADD:
current_tags = t_m.GetCurrent( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
for tag in tags:
self.assertIn( tag, current_tags )
elif content_action == HC.CONTENT_UPDATE_DELETE:
# a delete should both remove from current and create a delete record
current_tags = t_m.GetCurrent( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
deleted_tags = t_m.GetDeleted( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
for tag in tags:
self.assertNotIn( tag, current_tags )
self.assertIn( tag, deleted_tags )
elif content_action == HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD:
deleted_tags = t_m.GetDeleted( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
for tag in tags:
self.assertNotIn( tag, deleted_tags )
elif content_action == HC.CONTENT_UPDATE_PEND:
pending_tags = t_m.GetPending( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
for tag in tags:
self.assertIn( tag, pending_tags )
elif content_action == HC.CONTENT_UPDATE_PETITION:
petitioned_tags = t_m.GetPetitioned( tag_service_key, ClientTags.TAG_DISPLAY_STORAGE )
for tag in tags:
self.assertIn( tag, petitioned_tags )
#
# local add
data = [ ( hash, set( random.sample( to_be_pended_tag_pool, 2 ) ) ) for hash in self._hashes_to_current_tags.keys() ]
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_ADD, data )
# local delete
data = [ ( hash, set( random.sample( tags, 2 ) ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_DELETE, data )
# local clear deletion record
data = [ ( hash, set( random.sample( tags, 2 ) ) ) for ( hash, tags ) in self._hashes_to_deleted_tags.items() ]
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD, data )
# tag repo pend
data = [ ( hash, set( random.sample( to_be_pended_tag_pool, 2 ) ) ) for hash in self._hashes_to_current_tags.keys() ]
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, self._test_tag_repo_service_keys[1], HC.CONTENT_UPDATE_PEND, data )
# tag repo petition
data = [ ( hash, set( random.sample( tags, 2 ) ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, self._test_tag_repo_service_keys[1], HC.CONTENT_UPDATE_PETITION, data )
def _add_pairs_to_services( self, content_type ):
# Seeds parent/sibling pairs (from the module-level pair_types_to_pools)
# into the local tag service and every test tag repo, so the pairs
# migration tests below have known current/pending/deleted state.
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
# local tag service gets current adds and deletes
content_updates = []
for pair in current:
content_updates.append( HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_ADD, pair ) )
for pair in deleted:
content_updates.append( HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_DELETE, pair ) )
service_keys_to_content_updates = { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : content_updates }
self.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
# the tag repos additionally get the pending pool as PEND updates
content_updates = []
for pair in current:
content_updates.append( HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_ADD, pair ) )
for pair in pending:
content_updates.append( HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_PEND, pair ) )
for pair in deleted:
content_updates.append( HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_DELETE, pair ) )
service_keys_to_content_updates = { service_key : content_updates for service_key in self._test_tag_repo_service_keys.values() }
self.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
def _test_pairs_list_to_list( self, content_type ):
# Sanity check: pairs sent from a list source to a list destination
# arrive unchanged and in the same order.
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
data = list( current )
# guard against a silently-empty pool making the test vacuous
self.assertTrue( len( data ) > 0 )
source = ClientMigration.MigrationSourceList( self, data )
destination = ClientMigration.MigrationDestinationListPairs( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( destination.GetDataReceived(), data )
def _test_pairs_htpa_to_list( self, content_type ):
# Writes the current pair pool into a Hydrus Tag Pair Archive on disk,
# then migrates it back out through left/right tag filters and checks
# only pairs passing both filters come through.
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListPairs( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
htpa_path = os.path.join( TestController.DB_DIR, 'htpa.db' )
htpa = HydrusTagArchive.HydrusTagPairArchive( htpa_path )
if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_PARENTS )
elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS )
htpa.BeginBigJob()
htpa.AddPairs( current )
htpa.CommitBigJob()
htpa.Optimise()
htpa.Close()
del htpa
#
# test tag filter, left, right, both
# free_filter passes everything; namespace_filter passes only namespaced tags
free_filter = ClientTags.TagFilter()
namespace_filter = ClientTags.TagFilter()
namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
test_filters = []
test_filters.append( ( free_filter, free_filter ) )
test_filters.append( ( namespace_filter, free_filter ) )
test_filters.append( ( free_filter, namespace_filter ) )
test_filters.append( ( namespace_filter, namespace_filter ) )
for ( left_tag_filter, right_tag_filter ) in test_filters:
source = ClientMigration.MigrationSourceHTPA( self, htpa_path, left_tag_filter, right_tag_filter )
expected_data = [ ( left_tag, right_tag ) for ( left_tag, right_tag ) in current if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ]
run_test( source, expected_data )
#
os.remove( htpa_path )
def _test_pairs_list_to_htpa( self, content_type ):
# Migrates pairs from an in-memory list into an HTPA on disk, then
# re-opens the archive to verify its pair dump matches.
def run_test( source, destination_path, content_type, expected_data ):
destination = ClientMigration.MigrationDestinationHTPA( self, destination_path, content_type )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
hta = HydrusTagArchive.HydrusTagPairArchive( destination_path )
result = list( hta.IteratePairs() )
self.assertEqual( set( result ), set( expected_data ) )
hta.Close()
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
htpa_path = os.path.join( TestController.DB_DIR, 'htpa.db' )
#
source = ClientMigration.MigrationSourceList( self, current )
run_test( source, htpa_path, content_type, list( current ) )
#
os.remove( htpa_path )
def _test_pairs_service_to_list( self, content_type ):
# Pulls pairs out of tag services into a list destination, across every
# combination of left/right tag filter and content-status selection.
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListPairs( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
# test filters and content statuses
tag_repo_service_key = self._test_tag_repo_service_keys[10]
# ( service, pair pools expected back, statuses requested )
content_source_tests = []
content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
content_source_tests.append( ( tag_repo_service_key, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
content_source_tests.append( ( tag_repo_service_key, ( current, pending ), ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) ) )
content_source_tests.append( ( tag_repo_service_key, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
# free_filter passes everything; namespace_filter passes only namespaced tags
free_filter = ClientTags.TagFilter()
namespace_filter = ClientTags.TagFilter()
namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
test_filters = []
test_filters.append( ( free_filter, free_filter ) )
test_filters.append( ( namespace_filter, free_filter ) )
test_filters.append( ( free_filter, namespace_filter ) )
test_filters.append( ( namespace_filter, namespace_filter ) )
for ( left_tag_filter, right_tag_filter ) in test_filters:
for ( service_key, content_lists, content_statuses ) in content_source_tests:
source = ClientMigration.MigrationSourceTagServicePairs( self, service_key, content_type, left_tag_filter, right_tag_filter, content_statuses )
expected_data = set()
for content_list in content_lists:
expected_data.update( ( ( left_tag, right_tag ) for ( left_tag, right_tag ) in content_list if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ) )
run_test( source, expected_data )
def _test_pairs_list_to_service( self, content_type ):
# Pushes list-sourced pairs into tag services with each content action
# and verifies the pairs land in the right status bucket of the
# service's parents/siblings read.
def run_test( source, tag_service_key, content_action, expected_data ):
destination = ClientMigration.MigrationDestinationTagServicePairs( self, tag_service_key, content_action, content_type )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
statuses_to_pairs = self.Read( 'tag_parents', tag_service_key )
elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
statuses_to_pairs = self.Read( 'tag_siblings', tag_service_key )
# map the action to which status bucket must (and must not) hold the pairs
if content_action == HC.CONTENT_UPDATE_ADD:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ] )
should_not_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_DELETED ] )
elif content_action == HC.CONTENT_UPDATE_DELETE:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_DELETED ] )
should_not_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ] )
elif content_action == HC.CONTENT_UPDATE_PEND:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
should_not_be_in = set()
elif content_action == HC.CONTENT_UPDATE_PETITION:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
should_not_be_in = set()
for pair in expected_data:
self.assertIn( pair, should_be_in )
self.assertNotIn( pair, should_not_be_in )
#
tag_repo_service_key = self._test_tag_repo_service_keys[11]
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
# ( target service, pairs to send, action ): local add/delete, repo pend/petition
test_rows = []
test_rows.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, to_be_pended, HC.CONTENT_UPDATE_ADD ) )
test_rows.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, random.sample( current, 3 ), HC.CONTENT_UPDATE_DELETE ) )
test_rows.append( ( tag_repo_service_key, to_be_pended, HC.CONTENT_UPDATE_PEND ) )
test_rows.append( ( tag_repo_service_key, random.sample( current, 3 ), HC.CONTENT_UPDATE_PETITION ) )
for ( service_key, data, action ) in test_rows:
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, service_key, action, data )
def test_migration( self ):
# End-to-end driver: build services, fake imports and mappings once,
# run every mappings-migration permutation, then every pairs-migration
# permutation for both parents and siblings.
# mappings
self._set_up_services()
self._do_fake_imports()
self._add_mappings_to_services()
self._test_mappings_list_to_list()
self._test_mappings_hta_to_list()
self._test_mappings_list_to_hta()
self._test_mappings_service_to_list()
self._test_mappings_list_to_service()
# pairs, once per pair content type
for content_type in ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_TYPE_TAG_SIBLINGS ):
self._add_pairs_to_services( content_type )
self._test_pairs_list_to_list( content_type )
self._test_pairs_htpa_to_list( content_type )
self._test_pairs_list_to_htpa( content_type )
self._test_pairs_service_to_list( content_type )
self._test_pairs_list_to_service( content_type )
|
import pyOcean_cpu as ocean


def check(reference, size, strides, elemsize=1):
    """Run ocean.checkSelfOverlap and print expected vs. actual with a verdict.

    `reference` is the expected overlap result; a mismatch is flagged,
    distinguishing false negatives (overlap expected but not reported).
    """
    overlap = ocean.checkSelfOverlap(size, strides, elemsize)
    if overlap == reference:
        status = ''
    elif reference and not overlap:
        status = '*** Incorrect -- false negative ***'
    else:
        status = '*** Incorrect ***'
    print("%-5s %-5s %s" % (reference, overlap, status))


check(True, [3, 2, 2, 2], [1, 2, 6, 12])
check(True, [3, 2], [1, 2])
check(False, [4, 3], [4, 5])
check(True, [4, 3], [4, 5], 2)
|
"""ASCII-ART 2D pretty-printer"""
from .pretty import pprint, pprint_use_unicode, pretty, pretty_print
|
"""
Copyright (c) 2017, 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals, absolute_import
import koji
from atomic_reactor.core import DockerTasker
from atomic_reactor.plugin import PreBuildPluginsRunner, PluginFailedException
from atomic_reactor.plugins.pre_inject_parent_image import InjectParentImage
from atomic_reactor.plugins.exit_remove_built_image import GarbageCollectionPlugin
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfigPlugin,
WORKSPACE_CONF_KEY,
ReactorConfig)
from atomic_reactor.util import ImageName
from flexmock import flexmock
from tests.constants import MOCK
from osbs.utils import graceful_chain_del
import copy
import pytest
if MOCK:
from tests.docker_mock import mock_docker
# Fixture constants for the mocked koji session below.
KOJI_HUB = 'http://koji.com/hub'
KOJI_BUILD_ID = 123456789
KOJI_BUILD_NVR = 'base-image-1.0-99'
KOJI_BUILD_INFO = {'nvr': KOJI_BUILD_NVR, 'id': KOJI_BUILD_ID}
# Archive list as served by the mocked listArchives call: only the third
# entry carries docker repository metadata (a tag pullspec and a digest
# pullspec).
ARCHIVES = [
{'id': 1},
{'id': 2, 'extra': {}},
{'id': 3, 'extra': {
'docker': {
'repositories': [
'spam.com/fedora:27-3',
'spam.com/fedora@sha256:'
'07cc0fb792aad1b1891354d6a21086038d486e5a05eb76dbe4f8648f0767c53e'
],
}
}},
]
# Unique sentinels so koji_session can tell "use the default" apart from an
# explicitly-passed None or empty value.
USE_DEFAULT_ARCHIVES = object()
USE_DEFAULT_KOJI_BUILD_INFO = object()
class MockInsideBuilder(object):
# Lightweight stand-in for the real builder object attached to the
# workflow; carries only the attributes these tests read and write.
def __init__(self):
self.tasker = DockerTasker()
self.base_image = ImageName(repo='fedora', tag='26')
self.original_base_image = ImageName(repo='fedora', tag='26')
self.base_from_scratch = False
self.custom_base_image = False
self.image_id = 'image_id'
self.image = 'image'
self.df_path = 'df_path'
self.df_dir = 'df_dir'
@property
def source(self):
# Fake source exposing just dockerfile_path and path attributes.
result = flexmock()
setattr(result, 'dockerfile_path', '/')
setattr(result, 'path', '/tmp')
return result
def set_base_image(self, base_image):
# Parse the string into an ImageName, as the real builder's setter
# presumably does — confirm against atomic_reactor.build if it matters.
self.base_image = ImageName.parse(base_image)
@pytest.fixture()
def workflow(workflow):
# Extends the outer `workflow` fixture: mock the docker client (when
# MOCK is set) and attach a MockInsideBuilder with an empty inspect dict.
if MOCK:
mock_docker()
workflow.builder = MockInsideBuilder()
setattr(workflow.builder, 'base_image_inspect', {})
return workflow
def koji_session(koji_build_id=KOJI_BUILD_ID, koji_build_info=USE_DEFAULT_KOJI_BUILD_INFO,
archives=USE_DEFAULT_ARCHIVES):
# Build a flexmock koji session: getBuild answers only for koji_build_id,
# listArchives answers only for the canonical KOJI_BUILD_ID, and
# koji.ClientSession is patched to hand back this mock.
if archives == USE_DEFAULT_ARCHIVES:
archives = copy.deepcopy(ARCHIVES)
if koji_build_info == USE_DEFAULT_KOJI_BUILD_INFO:
koji_build_info = copy.deepcopy(KOJI_BUILD_INFO)
session = flexmock()
def mock_get_build(requested_build_id):
# compare as strings so int and str build identifiers both match
if str(requested_build_id) == str(koji_build_id):
return koji_build_info
return None
flexmock(session).should_receive('getBuild').replace_with(mock_get_build)
# Always expect build ID to be used, even when NVR is given.
flexmock(session).should_receive('listArchives').with_args(KOJI_BUILD_ID).and_return(archives)
flexmock(koji).should_receive('ClientSession').and_return(session)
flexmock(session).should_receive('krb_login').and_return(True)
return session
class TestKojiParent(object):
# Tests for the InjectParentImage pre-build plugin: looking up a koji
# parent build, selecting a pullspec from its archives/build info, and
# replacing workflow.builder.base_image with it.
@pytest.mark.parametrize('base_from_scratch', [True, False]) # noqa
@pytest.mark.parametrize('custom_base_image', [True, False])
def test_parent_image_injected(self, caplog, workflow, reactor_config_map,
base_from_scratch, custom_base_image):
# base image is replaced only for ordinary builds; scratch and custom
# base-image builds skip injection and log why
koji_session()
previous_parent_image = workflow.builder.base_image
workflow.builder.base_from_scratch = base_from_scratch
workflow.builder.custom_base_image = custom_base_image
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map,
base_from_scratch=base_from_scratch,
custom_base_image=custom_base_image)
if base_from_scratch:
assert str(previous_parent_image) == str(workflow.builder.base_image)
log_msg = "from scratch can't inject parent image"
assert log_msg in caplog.text
elif custom_base_image:
assert str(previous_parent_image) == str(workflow.builder.base_image)
log_msg = "custom base image builds can't inject parent image"
assert log_msg in caplog.text
else:
assert str(previous_parent_image) != str(workflow.builder.base_image)
# the plugin accepts the build as int ID, NVR string, or str ID
@pytest.mark.parametrize('koji_build', (KOJI_BUILD_ID, KOJI_BUILD_NVR, str(KOJI_BUILD_ID)))
def test_koji_build_identifier(self, workflow, koji_build, reactor_config_map):
koji_session(koji_build_id=koji_build)
self.run_plugin_with_args(workflow, plugin_args={'koji_parent_build': koji_build},
reactor_config_map=reactor_config_map)
def test_unknown_koji_build(self, workflow, reactor_config_map): # noqa
# a build ID koji does not know must fail the plugin with a clear message
koji_session()
unknown_build = KOJI_BUILD_ID + 1
with pytest.raises(PluginFailedException) as exc_info:
self.run_plugin_with_args(workflow, plugin_args={'koji_parent_build': unknown_build},
reactor_config_map=reactor_config_map)
assert '{}, not found'.format(unknown_build) in str(exc_info.value)
@pytest.mark.parametrize(('repositories', 'selected'), (
([':26-3', '@sha256:12345'], '@sha256:12345'),
([':26-3', ':26-spam'], ':26-3'),
))
def test_repository_from_koji_build(self, workflow, repositories, selected,
reactor_config_map):
# repositories from the koji build's 'extra.image.index.pull' win over
# any archive repositories; digest pullspecs are preferred over tags
# Populate archives to ensure koji build takes precedence
archives = [
{'id': 1, 'extra': {'docker': {'repositories': [
'spam.com/notselected/fedora{}'.format(repo) for repo in repositories
]}}}
]
repo_template = 'spam.com/fedora{}'
koji_build_info = copy.deepcopy(KOJI_BUILD_INFO)
koji_build_info['extra'] = {'image': {'index': {'pull': [
repo_template.format(repo) for repo in repositories
]}}}
koji_session(archives=archives, koji_build_info=koji_build_info)
workflow.builder.base_image = ImageName.parse('spam.com/fedora:some_tag')
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
assert str(workflow.builder.base_image) == repo_template.format(selected)
@pytest.mark.parametrize('organization', [None, 'my_organization']) # noqa
@pytest.mark.parametrize('archive_registry', ['spam.com', 'old_registry.com'])
@pytest.mark.parametrize(('repositories', 'selected'), (
([':26-3', '@sha256:12345'], '@sha256:12345'),
([':26-3', ':26-spam'], ':26-3'),
))
def test_repository_selection(self, workflow, organization, archive_registry,
repositories, selected, reactor_config_map):
# digest pullspec preferred over tag; with a registries_organization
# configured (reactor-config path only), the repo is enclosed under it
archive_repo_template = archive_registry + '/fedora{}'
archives = [
{'id': 1, 'extra': {'docker': {'repositories': [
archive_repo_template.format(repo) for repo in repositories
]}}}
]
enclosed_repo_template = 'spam.com/{}/fedora{}'
repo_template = 'spam.com/fedora{}'
koji_session(archives=archives)
workflow.builder.base_image = ImageName.parse('spam.com/fedora:some_tag')
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map,
organization=organization)
if organization and reactor_config_map:
selected_repo = enclosed_repo_template.format(organization, selected)
else:
selected_repo = repo_template.format(selected)
assert str(workflow.builder.base_image) == selected_repo
@pytest.mark.parametrize(('repository', 'is_valid'), (
('fedora', True),
('rawhide/fedora', False),
('centos', False),
))
def test_new_parent_image_validation(self, workflow, repository, is_valid,
reactor_config_map):
# the new parent's repository must match the existing parent's repository
archives = [
{'id': 1, 'extra': {'docker': {'repositories': [
'spam.com/{}@sha256:12345'.format(repository),
]}}}
]
koji_session(archives=archives)
if is_valid:
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
else:
with pytest.raises(PluginFailedException) as exc_info:
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
assert 'differs from repository for existing parent image' in str(exc_info.value)
def test_koji_ssl_certs_used(self, tmpdir, workflow, reactor_config_map): # noqa
# when koji_ssl_certs_dir is given, ssl_login must be called exactly
# once with cert/serverca paths inside that dir
session = koji_session()
serverca = tmpdir.join('serverca')
serverca.write('spam')
expected_ssl_login_args = {
'cert': str(tmpdir.join('cert')),
'serverca': str(serverca),
'ca': None,
}
(flexmock(session)
.should_receive('ssl_login')
.with_args(**expected_ssl_login_args)
.and_return(True)
.once())
plugin_args = {'koji_ssl_certs_dir': str(tmpdir)}
self.run_plugin_with_args(workflow, plugin_args, reactor_config_map=reactor_config_map)
def test_no_archives(self, workflow, reactor_config_map): # noqa
# no archives at all -> plugin fails with "A suitable archive ... not found"
koji_session(archives=[])
with pytest.raises(PluginFailedException) as exc_info:
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
assert 'A suitable archive' in str(exc_info.value)
assert 'not found' in str(exc_info.value)
def test_no_repositories(self, workflow, reactor_config_map): # noqa
# archives exist but none carry docker repositories -> same failure
archives = copy.deepcopy(ARCHIVES)
for archive in archives:
graceful_chain_del(archive, 'extra', 'docker', 'repositories')
koji_session(archives=archives)
with pytest.raises(PluginFailedException) as exc_info:
self.run_plugin_with_args(workflow, reactor_config_map=reactor_config_map)
assert 'A suitable archive' in str(exc_info.value)
assert 'not found' in str(exc_info.value)
def run_plugin_with_args(self, workflow, plugin_args=None, reactor_config_map=False, # noqa
organization=None, base_from_scratch=False, custom_base_image=False):
# Shared helper: run InjectParentImage with defaulted args, optionally
# via a reactor-config workspace, then check the result and that the
# new base image was registered for garbage collection.
plugin_args = plugin_args or {}
plugin_args.setdefault('koji_parent_build', KOJI_BUILD_ID)
plugin_args.setdefault('koji_hub', KOJI_HUB)
if reactor_config_map:
koji_map = {
'hub_url': KOJI_HUB,
'root_url': '',
'auth': {}}
if 'koji_ssl_certs_dir' in plugin_args:
koji_map['auth']['ssl_certs_dir'] = plugin_args['koji_ssl_certs_dir']
workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
ReactorConfig({'version': 1, 'koji': koji_map,
'registries_organization': organization})
runner = PreBuildPluginsRunner(
workflow.builder.tasker,
workflow,
[{'name': InjectParentImage.key, 'args': plugin_args}]
)
result = runner.run()
if base_from_scratch or custom_base_image:
assert result[InjectParentImage.key] is None
else:
# Koji build ID is always used, even when NVR is given.
assert result[InjectParentImage.key] == KOJI_BUILD_ID
self.assert_images_to_remove(workflow)
def assert_images_to_remove(self, workflow):
# the (replaced) base image must be queued in the garbage-collection
# plugin's workspace for later removal
expected = set([str(workflow.builder.base_image)])
actual = workflow.plugin_workspace[GarbageCollectionPlugin.key]['images_to_remove']
assert actual == expected
|
// Package entry point: re-export the implementation from src/botkitwit.
module.exports = require('./src/botkitwit');
|
// Jest project config for the hpc-data library, extending the workspace preset.
module.exports = {
name: 'hpc-data',
preset: '../../jest.config.js',
// compile .ts/.tsx/.js/.jsx sources through ts-jest
transform: {
'^.+\\.[tj]sx?$': 'ts-jest',
},
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'html'],
// write coverage next to the other libs at the workspace root
coverageDirectory: '../../coverage/libs/hpc-data',
};
|
## @package layer_model_instantiator
# Module caffe2.python.layer_model_instantiator
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from caffe2.python.layers.layers import InstantiationContext
from caffe2.python.layers.tags import Tags
def _filter_layers(layers, include_tags):
if include_tags is None:
return layers
include_tags = set(include_tags)
return filter(lambda l: not include_tags.isdisjoint(l.tags), layers)
def generate_predict_net(model, include_tags=None):
# Instantiate PREDICTION-context operators for every layer that is not
# tagged TRAIN_ONLY, optionally restricted to layers matching include_tags.
predict_net = core.Net('predict_net')
for layer in _filter_layers(model.layers, include_tags):
if Tags.TRAIN_ONLY not in layer.tags:
layer.add_operators(
predict_net, context=InstantiationContext.PREDICTION)
return predict_net
def generate_eval_net(model, include_tags=None):
# Instantiate EVAL-context operators for the (optionally filtered) layers
# and wire the net's input/output records from the model's schemas.
eval_net = core.Net('eval_net')
for layer in _filter_layers(model.layers, include_tags):
layer.add_operators(eval_net, context=InstantiationContext.EVAL)
input_schema = model.input_feature_schema + model.trainer_extra_schema
output_schema = model.output_schema + model.metrics_schema
eval_net.set_input_record(input_schema)
eval_net.set_output_record(output_schema)
return eval_net
def _generate_training_net_only(model, include_tags=None):
# Build the forward training net (default instantiation context) plus its
# parameter-init net, with input/output records wired from the model's
# schemas. No gradient or optimizer operators are added here.
train_net = core.Net('train_net')
train_init_net = model.create_init_net('train_init_net')
for layer in _filter_layers(model.layers, include_tags):
layer.add_operators(train_net, train_init_net)
input_schema = model.input_feature_schema + model.trainer_extra_schema
output_schema = model.output_schema + model.metrics_schema
train_net.set_input_record(input_schema)
train_net.set_output_record(output_schema)
return train_init_net, train_net
def generate_training_nets_forward_only(model, include_tags=None):
    """Return (train_init_net, train_net) with no gradient/optimizer ops."""
    return _generate_training_net_only(model, include_tags)
def generate_training_nets(model, include_tags=None):
# Full training nets: the forward net plus gradient operators for the
# model's loss blobs, with the model's optimizers applied to the
# resulting gradient map.
train_init_net, train_net = _generate_training_net_only(model, include_tags)
loss = model.loss
grad_map = train_net.AddGradientOperators(loss.field_blobs())
model.apply_optimizers(train_net, train_init_net, grad_map)
return train_init_net, train_net
|
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2014 The Bitcoin developers
// Copyright (c) 2014-2015 The Dash developers
// Copyright (c) 2015-2019 The PIVX developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef BITCOIN_CHAINPARAMS_H
#define BITCOIN_CHAINPARAMS_H
#include "chainparamsbase.h"
#include "checkpoints.h"
#include "primitives/block.h"
#include "protocol.h"
#include "uint256.h"
#include "libzerocoin/Params.h"
#include <vector>
/** Fixed-size array of network message-start bytes; see CChainParams::MessageStart(). */
typedef unsigned char MessageStartChars[MESSAGE_START_SIZE];
/** A DNS seed entry: a human-readable name and the host string to query. */
struct CDNSSeedData {
std::string name, host;
CDNSSeedData(const std::string& strName, const std::string& strHost) : name(strName), host(strHost) {}
};
/**
* CChainParams defines various tweakable parameters of a given instance of the
* PWC system. There are three: the main network on which people trade goods
* and services, the public test network which gets reset from time to time and
* a regression test mode which is intended for private networks only. It has
* minimal difficulty to ensure that blocks can be found instantly.
*/
class CChainParams
{
public:
    // Prefix families used when Base58-encoding keys/addresses; indexes into
    // base58Prefixes below.
    enum Base58Type {
        PUBKEY_ADDRESS,
        SCRIPT_ADDRESS,
        SECRET_KEY, // BIP16
        EXT_PUBLIC_KEY, // BIP32
        EXT_SECRET_KEY, // BIP32
        EXT_COIN_TYPE, // BIP44
        MAX_BASE58_TYPES
    };
    const uint256& HashGenesisBlock() const { return hashGenesisBlock; }
    const MessageStartChars& MessageStart() const { return pchMessageStart; }
    const std::vector<unsigned char>& AlertKey() const { return vAlertPubKey; }
    int GetDefaultPort() const { return nDefaultPort; }
    const uint256& ProofOfWorkLimit() const { return bnProofOfWorkLimit; }
    int SubsidyHalvingInterval() const { return nSubsidyHalvingInterval; }
    /** Used to check majorities for block version upgrade */
    int EnforceBlockUpgradeMajority() const { return nEnforceBlockUpgradeMajority; }
    int RejectBlockOutdatedMajority() const { return nRejectBlockOutdatedMajority; }
    int ToCheckBlockUpgradeMajority() const { return nToCheckBlockUpgradeMajority; }
    int MaxReorganizationDepth() const { return nMaxReorganizationDepth; }
    /** Used if GenerateBitcoins is called with a negative number of threads */
    int DefaultMinerThreads() const { return nMinerThreads; }
    const CBlock& GenesisBlock() const { return genesis; }
    /** Make miner wait to have peers to avoid wasting work */
    bool MiningRequiresPeers() const { return fMiningRequiresPeers; }
    /** Headers first syncing is disabled */
    bool HeadersFirstSyncingActive() const { return fHeadersFirstSyncingActive; };
    /** Default value for -checkmempool and -checkblockindex argument */
    bool DefaultConsistencyChecks() const { return fDefaultConsistencyChecks; }
    /** Allow mining of a min-difficulty block */
    bool AllowMinDifficultyBlocks() const { return fAllowMinDifficultyBlocks; }
    /** Skip proof-of-work check: allow mining of any difficulty block */
    bool SkipProofOfWorkCheck() const { return fSkipProofOfWorkCheck; }
    /** Make standard checks */
    bool RequireStandard() const { return fRequireStandard; }
    // Target block interval in seconds.
    int64_t TargetSpacing() const { return nTargetSpacing; }
    /** returns the coinbase maturity **/
    int COINBASE_MATURITY() const { return nMaturity; }
    /** returns the coinstake maturity (min depth required) **/
    int COINSTAKE_MIN_DEPTH() const { return nStakeMinDepth; }
    // Defined out-of-line (implementation not in this header).
    bool HasStakeMinAgeOrDepth(const int contextHeight, const uint32_t contextTime, const int utxoFromBlockHeight, const uint32_t utxoFromBlockTime) const;
    /** returns the max future time (and drift in seconds) allowed for a block in the future **/
    int FutureBlockTimeDrift(const bool isPoS) const { return isPoS ? nFutureTimeDriftPoS : nFutureTimeDriftPoW; }
    uint32_t MaxFutureBlockTime(uint32_t time, const bool isPoS) const { return time + FutureBlockTimeDrift(isPoS); }
    CAmount MaxMoneyOut() const { return nMaxMoneyOut; }
    /** The masternode count that we will allow the see-saw reward payments to be off by */
    int MasternodeCountDrift() const { return nMasternodeCountDrift; }
    /** Make miner stop after a block is found. In RPC, don't return until nGenProcLimit blocks are generated */
    bool MineBlocksOnDemand() const { return fMineBlocksOnDemand; }
    /** In the future use NetworkIDString() for RPC fields */
    bool TestnetToBeDeprecatedFieldRPC() const { return fTestnetToBeDeprecatedFieldRPC; }
    /** Return the BIP70 network string (main, test or regtest) */
    std::string NetworkIDString() const { return strNetworkID; }
    const std::vector<CDNSSeedData>& DNSSeeds() const { return vSeeds; }
    const std::vector<unsigned char>& Base58Prefix(Base58Type type) const { return base58Prefixes[type]; }
    const std::vector<CAddress>& FixedSeeds() const { return vFixedSeeds; }
    // Pure virtual: each concrete network supplies its own checkpoint table.
    virtual const Checkpoints::CCheckpointData& Checkpoints() const = 0;
    int PoolMaxTransactions() const { return nPoolMaxTransactions; }
    /** Return the number of blocks in a budget cycle */
    int GetBudgetCycleBlocks() const { return nBudgetCycleBlocks; }
    int64_t GetProposalEstablishmentTime() const { return nProposalEstablishmentTime; }
    /** Spork key and Masternode Handling **/
    std::string SporkKey() const { return strSporkKey; }
    std::string SporkKeyOld() const { return strSporkKeyOld; }
    int64_t NewSporkStart() const { return nEnforceNewSporkKey; }
    int64_t RejectOldSporkKey() const { return nRejectOldSporkKey; }
    std::string ObfuscationPoolDummyAddress() const { return strObfuscationPoolDummyAddress; }
    int64_t StartMasternodePayments() const { return nStartMasternodePayments; }
    int64_t Budget_Fee_Confirmations() const { return nBudget_Fee_Confirmations; }
    CBaseChainParams::Network NetworkID() const { return networkID; }
    CAmount MinStakeAmount() const { return nMinStakeAmount; }
    int MinStakeHistory() const { return nMinStakeHistory; }
    int HardenedStakeHeight() const { return nHardenedStakeHeight; }
    /** Zerocoin **/
    std::string Zerocoin_Modulus() const { return zerocoinModulus; }
    // Defined out-of-line; selects between modulus versions.
    libzerocoin::ZerocoinParams* Zerocoin_Params(bool useModulusV1) const;
    int Zerocoin_MaxSpendsPerTransaction() const { return nMaxZerocoinSpendsPerTransaction; }
    int Zerocoin_MaxPublicSpendsPerTransaction() const { return nMaxZerocoinPublicSpendsPerTransaction; }
    CAmount Zerocoin_MintFee() const { return nMinZerocoinMintFee; }
    int Zerocoin_MintRequiredConfirmations() const { return nMintRequiredConfirmations; }
    int Zerocoin_RequiredAccumulation() const { return nRequiredAccumulation; }
    int Zerocoin_DefaultSpendSecurity() const { return nDefaultSecurityLevel; }
    int Zerocoin_HeaderVersion() const { return nZerocoinHeaderVersion; }
    int Zerocoin_RequiredStakeDepth() const { return nZerocoinRequiredStakeDepth; }
    /** Height or Time Based Activations **/
    int ModifierUpgradeBlock() const { return nModifierUpdateBlock; }
    int LAST_POW_BLOCK() const { return nLastPOWBlock; }
    int PawcoinBadBlockTime() const { return nPawcoinBadBlockTime; }
    int PawcoinBadBlocknBits() const { return nPawcoinBadBlocknBits; }
    int Zerocoin_StartHeight() const { return nZerocoinStartHeight; }
    int Zerocoin_Block_EnforceSerialRange() const { return nBlockEnforceSerialRange; }
    int Zerocoin_Block_RecalculateAccumulators() const { return nBlockRecalculateAccumulators; }
    int Zerocoin_Block_FirstFraudulent() const { return nBlockFirstFraudulent; }
    int Zerocoin_Block_LastGoodCheckpoint() const { return nBlockLastGoodCheckpoint; }
    int Zerocoin_StartTime() const { return nZerocoinStartTime; }
    int Block_Enforce_Invalid() const { return nBlockEnforceInvalidUTXO; }
    int Zerocoin_Block_V2_Start() const { return nBlockZerocoinV2; }
    bool IsStakeModifierV2(const int nHeight) const { return nHeight >= nBlockStakeModifierlV2; }
    // fake serial attack
    int Zerocoin_Block_EndFakeSerial() const { return nFakeSerialBlockheightEnd; }
    CAmount GetSupplyBeforeFakeSerial() const { return nSupplyBeforeFakeSerial; }
    int Zerocoin_Block_Double_Accumulated() const { return nBlockDoubleAccumulated; }
    CAmount InvalidAmountFiltered() const { return nInvalidAmountFiltered; };
    int Zerocoin_Block_Public_Spend_Enabled() const { return nPublicZCSpends; }
protected:
    // Constructible only via concrete per-network subclasses, which populate
    // all of the raw parameter storage below.
    CChainParams() {}
    uint256 hashGenesisBlock;
    MessageStartChars pchMessageStart;
    //! Raw pub key bytes for the broadcast alert signing key.
    std::vector<unsigned char> vAlertPubKey;
    int nDefaultPort;
    uint256 bnProofOfWorkLimit;
    int nMaxReorganizationDepth;
    int nSubsidyHalvingInterval;
    int nEnforceBlockUpgradeMajority;
    int nRejectBlockOutdatedMajority;
    int nToCheckBlockUpgradeMajority;
    int64_t nTargetSpacing;
    int nLastPOWBlock;
    int64_t nPawcoinBadBlockTime;
    unsigned int nPawcoinBadBlocknBits;
    int nMasternodeCountDrift;
    int nMaturity;
    int nStakeMinDepth;
    int nFutureTimeDriftPoW;
    int nFutureTimeDriftPoS;
    int nModifierUpdateBlock;
    CAmount nMaxMoneyOut;
    int nMinerThreads;
    // Seed/peer discovery data for this network.
    std::vector<CDNSSeedData> vSeeds;
    std::vector<unsigned char> base58Prefixes[MAX_BASE58_TYPES];
    CBaseChainParams::Network networkID;
    std::string strNetworkID;
    CBlock genesis;
    std::vector<CAddress> vFixedSeeds;
    bool fMiningRequiresPeers;
    bool fAllowMinDifficultyBlocks;
    bool fDefaultConsistencyChecks;
    bool fRequireStandard;
    bool fMineBlocksOnDemand;
    bool fSkipProofOfWorkCheck;
    bool fTestnetToBeDeprecatedFieldRPC;
    bool fHeadersFirstSyncingActive;
    int nPoolMaxTransactions;
    int nBudgetCycleBlocks;
    // Spork signing keys (old/new) and their switchover times.
    std::string strSporkKey;
    std::string strSporkKeyOld;
    int64_t nEnforceNewSporkKey;
    int64_t nRejectOldSporkKey;
    std::string strObfuscationPoolDummyAddress;
    int64_t nStartMasternodePayments;
    // Zerocoin parameters.
    std::string zerocoinModulus;
    int nMaxZerocoinSpendsPerTransaction;
    int nMaxZerocoinPublicSpendsPerTransaction;
    CAmount nMinZerocoinMintFee;
    CAmount nInvalidAmountFiltered;
    int nMintRequiredConfirmations;
    int nRequiredAccumulation;
    int nDefaultSecurityLevel;
    int nZerocoinHeaderVersion;
    int64_t nBudget_Fee_Confirmations;
    int nZerocoinStartHeight;
    int nZerocoinStartTime;
    int nZerocoinRequiredStakeDepth;
    int64_t nProposalEstablishmentTime;
    // Height/time based activation thresholds.
    int nBlockEnforceSerialRange;
    int nBlockRecalculateAccumulators;
    int nBlockFirstFraudulent;
    int nBlockLastGoodCheckpoint;
    int nBlockEnforceInvalidUTXO;
    int nBlockZerocoinV2;
    int nBlockDoubleAccumulated;
    int nPublicZCSpends;
    int nBlockStakeModifierlV2;
    CAmount nMinStakeAmount;
    int nMinStakeHistory;
    int nHardenedStakeHeight;
    // fake serial attack
    int nFakeSerialBlockheightEnd = 0;
    CAmount nSupplyBeforeFakeSerial = 0;
};
/**
 * Modifiable parameters interface is used by test cases to adapt the parameters in order
 * to test specific features more easily. Test cases should always restore the previous
 * values after finalization.
 */
class CModifiableParams
{
public:
    //! Published setters to allow changing values in unit test cases
    virtual void setSubsidyHalvingInterval(int anSubsidyHalvingInterval) = 0;
    virtual void setEnforceBlockUpgradeMajority(int anEnforceBlockUpgradeMajority) = 0;
    virtual void setRejectBlockOutdatedMajority(int anRejectBlockOutdatedMajority) = 0;
    virtual void setToCheckBlockUpgradeMajority(int anToCheckBlockUpgradeMajority) = 0;
    virtual void setDefaultConsistencyChecks(bool aDefaultConsistencyChecks) = 0;
    virtual void setAllowMinDifficultyBlocks(bool aAllowMinDifficultyBlocks) = 0;
    virtual void setSkipProofOfWorkCheck(bool aSkipProofOfWorkCheck) = 0;
};
// Free functions for selecting and accessing the global chain parameters.
/**
 * Return the currently selected parameters. This won't change after app startup
 * outside of the unit tests.
 */
const CChainParams& Params();
/** Return parameters for the given network. */
CChainParams& Params(CBaseChainParams::Network network);
/** Get modifiable network parameters (UNITTEST only) */
CModifiableParams* ModifiableParams();
/** Sets the params returned by Params() to those for the given network. */
void SelectParams(CBaseChainParams::Network network);
/**
 * Looks for -regtest or -testnet and then calls SelectParams as appropriate.
 * Returns false if an invalid combination is given.
 */
bool SelectParamsFromCommandLine();
#endif // BITCOIN_CHAINPARAMS_H
|
from assignment import AssignmentsDB
from csv import DictWriter, writer
def main():
    """Dump every general assignment to general_assignments.csv, one row
    per assignment ID followed by its assigned values."""
    db = AssignmentsDB()
    with open('general_assignments.csv', 'w', newline='') as out_file:
        csv_writer = writer(out_file, dialect='excel')
        # No header row is written; each row is [ID, *assigned values].
        for assignment_id in db.general_assignments_dict:
            csv_writer.writerow([assignment_id] + db.general_assignments_dict[assignment_id])


if __name__ == '__main__':
    main()
|
# Simple Qt5 application embedding matplotlib canvases
#
# Based on material from
# Copyright (C) 2005 Florent Rougon
# 2006 Darren Dale
#
#
# Modified by Jeremy Daily on 21 May 2017
#
# This file is a modified example program for matplotlib. It may be used and
# modified with no restriction; raw copies as well as modified versions
# may be distributed without limitation.
import sys
import os
import random
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from numpy import arange, sin, pi
from matplotlib.backends import qt_compat
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
progname = os.path.basename(sys.argv[0])
progversion = "0.1"
class MyMplCanvas(FigureCanvas):
    """Base canvas: a QWidget (via FigureCanvasAgg) hosting one matplotlib
    Figure with a single subplot in self.axes."""

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        figure = Figure(figsize=(width, height), dpi=dpi)
        self.axes = figure.add_subplot(111)
        # Let the subclass draw its initial content before the Qt widget
        # machinery is initialized.
        self.compute_initial_figure()
        FigureCanvas.__init__(self, figure)
        self.setParent(parent)
        # Allow the canvas to expand to whatever space the layout offers.
        FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)

    def compute_initial_figure(self):
        # Hook for subclasses; the base class draws nothing.
        pass
class MyStaticMplCanvas(MyMplCanvas):
    """Static canvas: draws a fixed sine curve once at construction."""

    def compute_initial_figure(self):
        xs = arange(0.0, 3.0, 0.01)
        self.axes.plot(xs, sin(2 * pi * xs))
class MyDynamicMplCanvas(MyMplCanvas):
    """Canvas that redraws itself with fresh random data every 100 ms."""

    def __init__(self, *args, **kwargs):
        MyMplCanvas.__init__(self, *args, **kwargs)
        # Repeating Qt timer drives the periodic redraw.
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.update_figure)
        self.timer.start(100)

    def compute_initial_figure(self):
        # Nothing to show until the first timer tick.
        pass

    def update_figure(self):
        # Plot four fresh random integers in [0, 10], clearing the old curve.
        values = [random.randint(0, 10) for _ in range(4)]
        self.axes.cla()
        self.axes.plot([0, 1, 2, 3], values, 'r')
        self.draw()
class ApplicationWindow(QWidget):
    """Top-level demo window stacking a static and a dynamic canvas."""

    def __init__(self, parent=None):
        super(ApplicationWindow, self).__init__(parent)
        self.setWindowTitle("Matplotlib in QT Demo")
        self.main_widget = QWidget(self)
        layout = QVBoxLayout(self.main_widget)
        # Both canvases kept as attributes so callers can reach their timers.
        self.static_canvas = MyStaticMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        layout.addWidget(self.static_canvas)
        self.dynamic_canvas = MyDynamicMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        layout.addWidget(self.dynamic_canvas)
        self.setLayout(layout)
if __name__ == '__main__':
    # Build the Qt application, show the demo window, and block in the event
    # loop until the window is closed.
    app = QApplication(sys.argv)
    screen = ApplicationWindow()
    screen.show()
    app.exec_()
    screen.dynamic_canvas.timer.stop() #Stops the repeating plotter
    sys.exit()
|
# English stop-word list (matches NLTK's standard 179-entry set), kept in
# its original order.
stop_words = [
    'i', 'me', 'my', 'myself', 'we', 'our', 'ours', 'ourselves',
    'you', "you're", "you've", "you'll", "you'd", 'your', 'yours',
    'yourself', 'yourselves', 'he', 'him', 'his', 'himself', 'she',
    "she's", 'her', 'hers', 'herself', 'it', "it's", 'its', 'itself',
    'they', 'them', 'their', 'theirs', 'themselves', 'what', 'which',
    'who', 'whom', 'this', 'that', "that'll", 'these', 'those', 'am',
    'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has',
    'had', 'having', 'do', 'does', 'did', 'doing', 'a', 'an', 'the',
    'and', 'but', 'if', 'or', 'because', 'as', 'until', 'while', 'of',
    'at', 'by', 'for', 'with', 'about', 'against', 'between', 'into',
    'through', 'during', 'before', 'after', 'above', 'below', 'to',
    'from', 'up', 'down', 'in', 'out', 'on', 'off', 'over', 'under',
    'again', 'further', 'then', 'once', 'here', 'there', 'when',
    'where', 'why', 'how', 'all', 'any', 'both', 'each', 'few', 'more',
    'most', 'other', 'some', 'such', 'no', 'nor', 'not', 'only', 'own',
    'same', 'so', 'than', 'too', 'very', 's', 't', 'can', 'will',
    'just', 'don', "don't", 'should', "should've", 'now', 'd', 'll',
    'm', 'o', 're', 've', 'y', 'ain', 'aren', "aren't", 'couldn',
    "couldn't", 'didn', "didn't", 'doesn', "doesn't", 'hadn', "hadn't",
    'hasn', "hasn't", 'haven', "haven't", 'isn', "isn't", 'ma',
    'mightn', "mightn't", 'mustn', "mustn't", 'needn', "needn't",
    'shan', "shan't", 'shouldn', "shouldn't", 'wasn', "wasn't",
    'weren', "weren't", 'won', "won't", 'wouldn', "wouldn't",
]
|
#!/usr/bin/env python
#
#The MIT License (MIT)
##
# Copyright (c) 2015 Bit9 + Carbon Black
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# -----------------------------------------------------------------------------
# <Short Description>
#
# USAGE:
# python process_cmdline_regex.py -c https://127.0.0.1:443 -a 6b5aee99c133c003b9c11e584c9958da8f8943fa -n -r .*\\.dll -C -M
# python process_cmdline_regex.py -c https://127.0.0.1:443 -a 6b5aee99c133c003b9c11e584c9958da8f8943fa -n -r \\.dll -C
# python process_cmdline_regex.py -c https://127.0.0.1:443 -a 6b5aee99c133c003b9c11e584c9958da8f8943fa -n -r "(rundll.*\\.dll)" -G 0
#
# EXAMPLE OUTPUT:
#
# Displaying Report for Commandline regular expression matches
#
# Command Line Strings Matching REGEX: (rundll.*\.dll)
# ============================================================
#
# rundll32.exe" "c:\windows\system32\iesetup.dll
# rundll32.exe" c:\windows\syswow64\mscories.dll
# rundll32.exe c:\windows\system32\werconcpl.dll
# rundll32.exe" iedkcs32.dll
# rundll32.exe" "c:\windows\system32\iedkcs32.dll
# rundll32.exe" advpack.dll
# rundll32.exe uxtheme.dll
# rundll32.exe" c:\windows\system32\cryptext.dll
# --------------------------------------------
# 528 Command Line Matches:
# Search Match Count : Command Line Match
# --------------------------------------------
# 1 : rundll32.exe c:\windows\system32\appxdeploymentclient.dll
# 1 : rundll32.exe" "C:\Windows\System32\winethc.dll
# 2 : rundll32.exe "c:\windows\system32\netplwiz.dll
# 18 : rundll32.exe "c:\windows\uicphe.dll
# 43 : rundll32.exe C:\Windows\system32\GeneralTel.dll
# 72 : rundll32.exe" advpack.dll
# 126 : rundll32.exe" iedkcs32.dll
# 300 : rundll32.exe "c:\windows\uicphe.dll
#
#
# BEST PRACTICE RECOMMENDATION: Use output redirection '>' OR '>>' to send results to a text file to allow for future grep, sed, awk processing
# python process_cmdline_regex.py -c https://127.0.0.1:443 -a 6b5aee99c133c003b9c11e584c9958da8f8943fa -n -r \\.dll -C > /tmp/script.output
#
# Performance Note: Given that this script parses all command line data stored in Carbon Black,
# this script can take from several minutes to several hours to run depending upon the size of
# your Carbon Black ER datastore & the CbER server's hardware. It is recommended to use output
# redirection as then you can "tail" as well as monitor the output file's size to check the status
# of long running queries.
#
# last updated 2016-04-20 by Ben Tedesco bentedesco@hotmail.com
#
# Future enhancements:
#
#
# >>----------------------------------------------------------------------------------->
import sys
import re
import struct
import socket
import collections
import operator
from optparse import OptionParser
from cbapi import CbApi
class CBQuery(object):
    """Wrapper around CbApi that scans every stored process command line for a
    regular expression and reports the matches (Python 2 code)."""
    def __init__(self, url, token, ssl_verify):
        # Keep the API handle and the base URL for later queries.
        self.cb = CbApi(url, token=token, ssl_verify=ssl_verify)
        self.cb_url = url
    def report(self, rundll_query, dll_dictionary, search_match_count):
        """Print every match and its hit count, sorted by ascending count."""
        # CALLED BY: self.report(regex, regex_match_dictionary, search_match_count)
        print "--------------------------------------------"
        print "%s Command Line Matches:" % (search_match_count)
        print "%s : %s" % ("Search Match Count", "Command Line Match")
        print "--------------------------------------------"
        #ordered_dll_dictionary = collections.OrderedDict(sorted(dll_dictionary.items()))
        # Sort by occurrence count (the dictionary value), least frequent first.
        ordered_dll_dictionary = sorted(dll_dictionary.items(), key=operator.itemgetter(1))
        for value in ordered_dll_dictionary:
            print "%s : %s" % (value[1], value[0])
    def check(self, regex, ignore_case, group_reference_to_match, count_flag, matches_only_flag):
        """Iterate all process documents, apply `regex` to each cmdline, print
        each distinct match once, and hand the tallies to report().

        group_reference_to_match: when set, record only that regex group
        instead of the whole command line.  matches_only_flag: anchor the
        match at the start of the cmdline (re.match vs re.search).
        """
        # CALLED BY: cb.check(opts.regex, opts.ignore_case, opts.group_reference_to_match, opts.count_flag, opts.matches_only_flag)
        # print a legend
        print ""
        print "Displaying Report for Commandline regular expression matches"
        print ""
        print "Command Line Strings Matching REGEX: %s" % (regex)
        print "============================================================"
        print ""
        # build the query string
        q = "cmdline:*"
        #define dictionary
        regex_match_dictionary = dict()
        search_match_count = 0
        #define regexp
        # check if we need to ignore case, if so, update regexp
        if ignore_case:
            regexp = re.compile(regex, re.IGNORECASE)
        else:
            regexp = re.compile(regex)
        for result in self.cb.process_search_iter(q):
            cmdline = result.get("cmdline", "<unknown>")
            # print "CMD: %s" % (cmdline,)
            #SEARCH FOR REGEX IN STRING!!
            if matches_only_flag:
                # print "-----MATCHES ONLY"
                search_match_result = regexp.match(cmdline)
            else:
                # print "-----EVERYTHING"
                search_match_result = regexp.search(cmdline)
            if search_match_result is not None:
                # print "cmdline: %s" % (cmdline)
                # print "result: %s" % (search_match_result)
                # print "------------------------------------"
                # Iterate TOTAL Search Match Count
                search_match_count = search_match_count + 1
                # On Match, add to dictionary
                # 1st Check group_reference_to_match flag to see if we need to add a specific Group Reference or just the entire Command Line as the regex match
                if group_reference_to_match:
                    # print "cmdline: %s" % (cmdline)
                    # print"matching GROUP: %s" % (group_reference_to_match)
                    # print"search_match_result: %s" % (search_match_result)
                    regex_match_group_reference = search_match_result.group(int(group_reference_to_match))
                    if regex_match_group_reference not in regex_match_dictionary.keys():
                        print "%s" % (regex_match_group_reference)
                        regex_match_dictionary[regex_match_group_reference] = 1
                    else:
                        regex_match_dictionary[regex_match_group_reference] = regex_match_dictionary[regex_match_group_reference] + 1
                else:
                    if cmdline not in regex_match_dictionary.keys():
                        print "%s" % (cmdline)
                        regex_match_dictionary[cmdline] = 1
                    else:
                        regex_match_dictionary[cmdline] = regex_match_dictionary[cmdline] + 1
        self.report(regex, regex_match_dictionary, search_match_count)
def build_cli_parser():
    """Assemble the OptionParser describing this script's command-line flags."""
    opt_parser = OptionParser(usage="%prog [options]", description="Parse the command line using a regular expression (includes the options to count matches & leverage reference groups to define output). NOTE: Given that this script parses all command line data stored in Carbon Black, this script can take from several minutes to several hours to run depending upon the size of your Carbon Black ER datastore & the CbER server's hardware. It is reccomended to use output redirection as then you can tail as well as monitor the output file's size to check the status of long running queries.")
    # (short flag, long flag, keyword arguments) for every option, in the
    # order they should appear in --help output.
    option_specs = [
        ("-c", "--cburl", dict(action="store", default=None, dest="url",
                               help="CB server's URL. e.g., https://127.0.0.1:443")),
        ("-a", "--apitoken", dict(action="store", default=None, dest="token",
                                  help="API Token for Carbon Black server")),
        ("-n", "--no-ssl-verify", dict(action="store_false", default=True, dest="ssl_verify",
                                       help="Do not verify server SSL certificate.")),
        ("-r", "--regex", dict(action="store", default=None, dest="regex",
                               help="Regular Expression for parsing cmdline")),
        ("-i", "--ignore-case", dict(action="store", default=None, dest="ignore_case",
                                     help="Flag to force regex to ignore character case when matching")),
        ("-G", "--group-reference-to-match", dict(action="store", default=None, dest="group_reference_to_match",
                                                  help="User an integer to specify which parenthesized reference group in the regex to match")),
        ("-C", "--count", dict(action="store_true", default=False, dest="count_flag",
                               help="Count instances of matched regex hits (in some cases, enabling this function may cause this script to run for a long time)")),
        ("-M", "--matches-only", dict(action="store_true", default=False, dest="matches_only_flag",
                                      help="Match MUST begin at the 1st character of the command line string (ASSUME ^ at start of regex)")),
    ]
    for short_flag, long_flag, kwargs in option_specs:
        opt_parser.add_option(short_flag, long_flag, **kwargs)
    return opt_parser
def main(argv):
parser = build_cli_parser()
opts, args = parser.parse_args(argv)
if not opts.url or not opts.token or not opts.regex:
print "Missing required param."
sys.exit(-1)
#If group_reference_to_match is specified, verify it is an integer
if opts.group_reference_to_match is not None:
if not opts.group_reference_to_match.isdigit:
print "group-reference-to-match argument must be defined as an integer"
sys.exit(-1)
cb = CBQuery(opts.url, opts.token, ssl_verify=opts.ssl_verify)
cb.check(opts.regex, opts.ignore_case, opts.group_reference_to_match, opts.count_flag, opts.matches_only_flag)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
"""
Admin Panel settings for Users.
"""
from django.contrib import admin
# Register your models here.
|
from util import *
from collections import *
import copy
from functools import reduce
day = 14
def task1():
    """AoC 2021 day 14 part 1: apply 10 pair-insertion steps to the polymer
    template, then print (most common element count) - (least common)."""
    data = get_input_for_day(day)
    # data = get_input_for_file("test")
    input = list(data[0])
    # Map "AB" -> inserted element; defaultdict(str) yields "" for unknown pairs.
    formulas = defaultdict(str)
    for line in data[2:]:
        parts, result = line.split(" -> ")
        formulas[parts] = result
    # The polymer is stored explicitly in a deque and rebuilt each step.
    queue = deque(input)
    iterations = 10
    for i in range(iterations):
        l = len(queue) - 1
        # For each adjacent pair (a, b): pop a from the front, peek b at the
        # new front, then push a and the inserted element onto the back.  The
        # expanded polymer accumulates at the back of the deque.
        for _ in range(l):
            a = queue.popleft()
            b = queue[0]
            result = formulas[a + b]
            queue.append(a)
            queue.append(result)
        # After the loop the original last element sits at the front; rotate
        # it to the back to restore left-to-right order.
        queue.rotate(-1)
    counts = Counter(queue)
    most_common = counts.most_common()[0]
    least_common = counts.most_common()[len(counts) - 1]
    ans = most_common[1] - least_common[1]
    print(ans)
def task2():
    """AoC 2021 day 14 part 2: 40 pair-insertion steps tracked as pair counts
    (the explicit polymer would be astronomically long); print the spread
    between the most and least common element counts."""
    data = get_input_for_day(day)
    rules = defaultdict(str)
    for line in data[2:]:
        pair, inserted = line.split(" -> ")
        rules[pair] = inserted
    # Count every adjacent pair in the starting template.
    template = data[0]
    pair_counts = defaultdict(int)
    for i in range(len(template) - 1):
        pair_counts[template[i:i + 2]] += 1
    # Each step replaces every pair AB (rule AB -> C) with the pairs AC and CB.
    for _ in range(40):
        updated = pair_counts.copy()
        for pair, count in pair_counts.items():
            left, right = pair
            mid = rules[pair]
            updated[left + mid] += count
            updated[mid + right] += count
            updated[pair] -= count
            if updated[pair] == 0:
                del updated[pair]
        pair_counts = updated
    # Count only the first character of each pair to avoid double counting;
    # the template's last character never changes, so add it back once.
    letter_counts = defaultdict(int)
    for pair, count in pair_counts.items():
        letter_counts[pair[0]] += count
    letter_counts[template[-1]] += 1
    ranked = Counter(letter_counts).most_common()
    print(ranked[0][1] - ranked[-1][1])
# Part 1 kept for reference; only part 2 runs by default.
# task1()
task2()
|
const tap = require("tap");
const mongoose = require("mongoose");
const buildFastify = require("../../src/app");
const User = require("../../src/models/User");
const NameService = require("../../src/services/Name");
const fastify = buildFastify();
// Account payload used by the signup test; the sign-in tests reuse this user.
const signupBody = {
  full_name: "İbrahim Can",
  email: "email_test0@example.com",
  password: "1234567890",
};
// Valid credentials matching signupBody.
const signIn = {
  email: "email_test0@example.com",
  password: "1234567890",
};
// Correct password but an address that was never registered (gmail vs example).
const signInEmailError = {
  email: "email_test0@gmail.com",
  password: "1234567890",
};
// Registered address but a wrong (truncated) password.
const signInPassError = {
  email: "email_test0@example.com",
  password: "1234567",
};
tap.test("POST `auth/signup` Add New User", async (t) => {
  // A fresh signup should succeed and hand back a token.
  const res = await fastify.inject({
    method: "POST",
    url: "auth/signup/",
    body: signupBody,
  });
  const payload = JSON.parse(res.body);
  t.equal(res.statusCode, 200);
  t.type(payload, Object);
  t.equal(payload.statusCode, 200);
  t.equal(payload.message, "Successfully signed up");
  t.not(payload.token, undefined);
});
tap.test("POST `auth/signup` New User Email Error", async (t) => {
  // Signing up twice with the same email must be rejected with 409.
  const res = await fastify.inject({
    method: "POST",
    url: "auth/signup/",
    body: signupBody,
  });
  const payload = JSON.parse(res.body);
  t.equal(res.statusCode, 409);
  t.type(payload, Object);
  t.equal(payload.statusCode, 409);
  t.equal(payload.message, "Email already exists");
});
tap.test("POST `auth/signIn` User Sign In", async (t) => {
  // Correct credentials should yield 200 and a token.
  const res = await fastify.inject({
    method: "POST",
    url: "auth/signIn/",
    body: signIn,
  });
  const payload = JSON.parse(res.body);
  t.equal(res.statusCode, 200);
  t.type(payload, Object);
  t.equal(payload.statusCode, 200);
  t.equal(payload.message, "Successfully signed in");
  t.not(payload.token, undefined);
});
tap.test("POST `auth/signIn` User Sign In Email Error", async (t) => {
  // An unregistered email must be rejected with 400 / "User not found".
  const res = await fastify.inject({
    method: "POST",
    url: "auth/signIn/",
    body: signInEmailError,
  });
  const payload = JSON.parse(res.body);
  t.equal(res.statusCode, 400);
  t.type(payload, Object);
  t.equal(payload.statusCode, 400);
  t.equal(payload.message, "User not found");
});
// FIX: the title previously duplicated the email-error test's name, but this
// case actually exercises the wrong-password path.
tap.test("POST `auth/signIn` User Sign In Password Error", async (t) => {
  // Last test in the file: tear down the app, external services, the test
  // user, and the DB connection once this test finishes.
  t.teardown(() => fastify.close());
  t.teardown(() => NameService.disconnect());
  t.teardown(() => User.deleteOne({ email: signupBody.email }));
  t.teardown(() => mongoose.connection.close());
  const response = await fastify.inject({
    method: "POST",
    url: "auth/signIn/",
    body: signInPassError,
  });
  const body = JSON.parse(response.body);
  const { statusCode, message } = body;
  t.equal(response.statusCode, 400);
  t.type(body, Object);
  t.equal(statusCode, 400);
  t.equal(message, "Incorrect password");
});
|
#!/usr/bin/env python
# Truncate.py
# Copyright (C) 2006 CCLRC, Graeme Winter
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
#
# 26th October 2006
#
# A wrapper for the CCP4 program Truncate, which calculates F's from
# I's and gives a few useful statistics about the data set.
from __future__ import absolute_import, division
import os
import sys
from xia2.Decorators.DecoratorFactory import DecoratorFactory
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Phil import PhilIndex
from xia2.Handlers.Streams import Chatter, Debug
from xia2.lib.bits import transpose_loggraph
from xia2.Wrappers.CCP4.Ctruncate import Ctruncate
def Truncate(DriverType = None):
    '''A factory for TruncateWrapper classes.

    Depending on the configured truncation program this returns either a
    Ctruncate wrapper, a cctbx French & Wilson wrapper, or the classic
    CCP4 truncate wrapper class defined below.
    '''

    DriverInstance = DriverFactory.Driver(DriverType)
    CCP4DriverInstance = DecoratorFactory.Decorate(DriverInstance, 'ccp4')

    # Dispatch on the configured program; only the classic 'truncate'
    # case falls through to the wrapper class below.
    if PhilIndex.params.ccp4.truncate.program == 'ctruncate':
        return Ctruncate(DriverType)
    elif PhilIndex.params.ccp4.truncate.program == 'cctbx':
        from xia2.Wrappers.XIA.FrenchWilson import FrenchWilson
        return FrenchWilson(DriverType)

    class TruncateWrapper(CCP4DriverInstance.__class__):
        '''A wrapper for Truncate, using the CCP4-ified Driver.'''

        def __init__(self):
            # generic things
            CCP4DriverInstance.__class__.__init__(self)

            # The truncate binary lives in $CBIN of a CCP4 installation.
            self.set_executable(os.path.join(
                os.environ.get('CBIN', ''), 'truncate'))

            self._anomalous = False
            self._nres = 0

            # should we do wilson scaling?
            self._wilson = True

            # Results harvested from the truncate log after a run.
            self._b_factor = 0.0
            self._moments = None

            self._wilson_fit_grad = 0.0
            self._wilson_fit_grad_sd = 0.0
            self._wilson_fit_m = 0.0
            self._wilson_fit_m_sd = 0.0
            self._wilson_fit_range = None

            # numbers of reflections in and out, and number of absences
            # counted
            self._nref_in = 0
            self._nref_out = 0
            self._nabsent = 0

            self._xmlout = None

        def set_anomalous(self, anomalous):
            self._anomalous = anomalous

        def set_wilson(self, wilson):
            '''Set the use of Wilson scaling - if you set this to False
            Wilson scaling will be switched off...'''
            self._wilson = wilson

        def get_xmlout(self):
            return self._xmlout

        def truncate(self):
            '''Actually perform the truncation procedure.

            Runs the truncate binary on HKLIN -> HKLOUT, then parses the
            log for B factor, Wilson-fit statistics, moments and
            reflection counts.
            '''

            self.check_hklin()
            self.check_hklout()

            self.start()

            if self._anomalous:
                self.input('anomalous yes')
            else:
                self.input('anomalous no')

            if self._nres:
                self.input('nres %d' % self._nres)

            if not self._wilson:
                self.input('scale 1')

            self.close_wait()

            try:
                self.check_for_errors()
                self.check_ccp4_errors()
            except RuntimeError:
                # Remove a possibly partial output file before re-raising.
                try:
                    os.remove(self.get_hklout())
                except Exception:
                    pass
                raise RuntimeError('truncate failure')

            # parse the output for interesting things, including the
            # numbers of reflections in and out (isn't that a standard CCP4
            # report?) and the number of absent reflections.
            self._nref_in, self._nref_out = self.read_nref_hklin_hklout(
                self.get_all_output())

            # FIXME guess I should be reading this properly...
            self._nabsent = self._nref_in - self._nref_out

            for line in self.get_all_output():
                if 'Least squares straight line gives' in line:
                    # NOTE(review): `list` shadows the builtin; field 6 is
                    # '***' when truncate could not fit a B factor.
                    list = line.replace('=', ' ').split()
                    if not '***' in list[6]:
                        self._b_factor = float(list[6])
                    else:
                        Debug.write('no B factor available')

                if 'LSQ Line Gradient' in line:
                    self._wilson_fit_grad = float(line.split()[-1])
                    # NOTE(review): assumes the 'Resolution range' record was
                    # already parsed below; if the gradient record appears
                    # first, self._wilson_fit_range is still None and max()
                    # raises - confirm against truncate log ordering.
                    resol_width = max(self._wilson_fit_range) - \
                        min(self._wilson_fit_range)
                    # The `and False` deliberately disables this hard error,
                    # leaving only the Debug message in the elif branch.
                    if self._wilson_fit_grad > 0 and resol_width > 1.0 \
                        and False:
                        raise RuntimeError( \
                            'wilson plot gradient positive: %.2f' % \
                            self._wilson_fit_grad)
                    elif self._wilson_fit_grad > 0:
                        Debug.write(
                            'Positive gradient but not much wilson plot')
                if 'Uncertainty in Gradient' in line:
                    self._wilson_fit_grad_sd = float(line.split()[-1])
                if 'X Intercept' in line:
                    self._wilson_fit_m = float(line.split()[-1])
                if 'Uncertainty in Intercept' in line:
                    self._wilson_fit_m_sd = float(line.split()[-1])
                if 'Resolution range' in line:
                    # NOTE(review): under Python 3 `map` is a one-shot
                    # iterator; the py2 idioms in this module (print
                    # statement in __main__) suggest a list is expected.
                    self._wilson_fit_range = map(float, line.split()[-2:])

            results = self.parse_ccp4_loggraph()

            moments = transpose_loggraph(
                results['Acentric Moments of E for k = 1,3,4,6,8'])

            # keys we want in this are "Resln_Range" "1/resol^2" and
            # MomentZ2. The last of these should be around two, but is
            # likely to be a little different to this.
            self._moments = moments

        def get_b_factor(self):
            return self._b_factor

        def get_wilson_fit(self):
            # (gradient, gradient sd, intercept, intercept sd)
            return self._wilson_fit_grad, self._wilson_fit_grad_sd, \
                self._wilson_fit_m, self._wilson_fit_m_sd

        def get_wilson_fit_range(self):
            return self._wilson_fit_range

        def get_moments(self):
            return self._moments

        def get_nref_in(self):
            return self._nref_in

        def get_nref_out(self):
            return self._nref_out

        def get_nabsent(self):
            return self._nabsent

        def read_nref_hklin_hklout(self, records):
            '''Look to see how many reflections came in through HKLIN, and
            how many went out again in HKLOUT.

            Tracks the current "Logical Name" while scanning so that each
            "Number of Reflections" record is attributed to the right file.
            '''

            nref_in = 0
            nref_out = 0

            current_logical = None

            for record in records:
                if 'Logical Name' in record:
                    current_logical = record.split()[2]
                    assert(current_logical in ['HKLIN', 'HKLOUT', 'SYMINFO'])
                if 'Number of Reflections' in record:
                    if current_logical == 'HKLIN':
                        nref_in = int(record.split()[-1])
                    elif current_logical == 'HKLOUT':
                        nref_out = int(record.split()[-1])

            return nref_in, nref_out

    return TruncateWrapper()
if __name__ == '__main__':
    # Ad-hoc command-line driver: python Truncate.py HKLIN HKLOUT
    # NOTE(review): Python 2 print statement - this module predates py3.
    truncate = Truncate()
    truncate.set_hklin(sys.argv[1])
    truncate.set_hklout(sys.argv[2])
    truncate.truncate()
    print truncate.get_nref_in(), truncate.get_nref_out(), \
        truncate.get_nabsent()
|
"""
16) Faça um programa que leia um número inteiro positivo impar N
imprima todos os núemros impares de 1 até N em ordem decrescente.
"""
n = int(input('Digite um número par \n'))
if n % 2 == 1:
for i in range(n, -1, -1):
if i % 2 == 1:
print(i)
else:
print('Número inválido')
|
import React from 'react';
import ReactDOM from 'react-dom';
import { Provider } from 'react-redux';
import configureStore from './store/configureStore';
import routes from './routes';
// Rehydrate the Redux store from the state serialized by the server render.
const store = configureStore(window.__REDUX_STATE__);
// Mount the routed app under the Redux provider.
// NOTE(review): ReactDOM.render is the React <= 17 entry point - confirm
// before migrating to createRoot.
ReactDOM.render(
  <Provider store={store}>
    {routes}
  </Provider>,
  document.getElementById('root'),
);
|
# coding: utf-8
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.9.2
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import proton_api
from proton_api.models.budget_component import BudgetComponent # noqa: E501
from proton_api.rest import ApiException
class TestBudgetComponent(unittest.TestCase):
    """Unit-test stubs for the BudgetComponent model (generator scaffold)."""

    def setUp(self):
        # No fixtures are needed for these stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testBudgetComponent(self):
        """Test BudgetComponent"""
        # FIXME: construct object with mandatory attributes with example values
        # model = proton_api.models.budget_component.BudgetComponent()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
"""
Utilities useful for routing
"""
import gzip
import zlib
from typing import Callable
from fastapi.routing import APIRoute
from starlette.requests import Request
from starlette.responses import Response
class CompressedRequest(Request):
    """A Starlette Request whose body may be gzip- or zlib-compressed.

    Decompression is applied once, and the decoded payload is cached on
    the instance exactly as the base class does.
    """

    async def body(self) -> bytes:
        """Return the (decompressed, cached) request body."""
        if not hasattr(self, "_body"):
            raw = await super().body()
            encodings = self.headers.getlist("Content-Encoding")
            if "gzip" in encodings:
                raw = gzip.decompress(raw)
            elif "deflate" in encodings:
                raw = zlib.decompress(raw)
            self._body = raw
        return self._body
class CompressibleRoute(APIRoute):
    """An APIRoute that accepts gzip/zlib compressed request bodies."""

    def get_route_handler(self) -> Callable:
        """Wrap the stock handler so every request becomes a CompressedRequest."""
        base_handler = super().get_route_handler()

        async def wrapped_handler(request: Request) -> Response:
            # Re-wrap the ASGI scope/receive pair in our decompressing class.
            compressed = CompressedRequest(request.scope, request.receive)
            return await base_handler(compressed)

        return wrapped_handler
|
import { useState, useCallback, useEffect } from 'react';
import axios from 'axios';
/**
 * React hook wrapping axios with loading/error state and unmount cancellation.
 *
 * @returns {{error: ?Object, isLoading: boolean, sendRequest: Function, clearError: Function}}
 */
export const useHttpClient = () => {
  // Error state: { message, status } or null when no error is pending.
  const [error, setError] = useState(null);
  const [isLoading, setLoading] = useState(false);
  // NOTE(review): a fresh cancel-token source is created on every render;
  // only the first render's source is cancelled by the unmount effect below.
  const source = axios.CancelToken.source();

  const sendRequest = useCallback(
    async (url, method = 'GET', body = null, headers = {}) => {
      setLoading(true);
      try {
        const response = await axios({
          url,
          method,
          cancelToken: source.token,
          // BUG FIX: axios carries the request payload under `data`;
          // the previous `body` key was silently ignored.
          data: body,
          headers
        });
        // BUG FIX: `!response.statusText === 'OK'` compared a boolean with a
        // string and could never be true; use an explicit inequality.
        if (response.statusText !== 'OK') {
          throw new Error(response.message);
        }
        setLoading(false);
        return response.data;
      } catch (error) {
        if (axios.isCancel(error)) {
          console.log('axios.isCancel(error)');
          // don't update state in case component is dismounting
        } else {
          console.log(error);
          setLoading(false);
          // Guard: network failures carry no `error.response`; fall back to
          // the generic axios message and an undefined status.
          const res = error.response;
          setError({
            message: (res && res.data && res.data.error) || error.message,
            status: res && res.status
          });
        }
      }
    },
    [source.token]
  );

  // Reset any pending error (e.g. after the user dismisses a modal).
  const clearError = () => {
    setError(null);
  };

  useEffect(() => {
    // Cancel the in-flight request when the component unmounts.
    return () => {
      source.cancel();
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  return { error, isLoading, sendRequest, clearError };
};
|
"""
Routines useful for dealing with cos spectra, especially reading different files.
"""
import numpy as np
import pyfits
def readx1d(filename):
    """
    Read an x1d format spectrum from calcos.

    :param filename: name of the x1d file
    :type filename: string

    Returns ([sp1, sp2], [info1, info2]): two wavelength-sorted record
    arrays (one per detector segment) and matching info tuples
    (visit number, optical element, central wavelength, segment, exptime).

    For the output spectra::

     wa = wavelengths (Angstroms)
     fl = flux
     er = 1 sigma error in flux
     dq = data quality, > 0 means bad for some reason
     gr = gross count rate, gross counts / (exposure time in s)
     bg = background count rate
     net = (gr - bg) / eps, where eps is the flat fielding.
    """
    # NOTE(review): assumes the visit number occupies characters 4:6 of the
    # basename - confirm against the dataset naming scheme.
    vnum = int(filename.split('/')[-1][4:6])
    fh = pyfits.open(filename)
    hd = fh[0].header
    optelem = hd['OPT_ELEM']
    cwa = hd['CENTRWV']
    exptime = fh[1].header['EXPTIME']
    keys = 'WAVELENGTH FLUX ERROR DQ GROSS NET BACKGROUND'.split()
    # Record-array field names, matching `keys` one-to-one.
    names = 'wa,fl,er,dq,gr,net,bg'
    r = fh['SCI'].data
    fh.close()

    def _sorted_record(row):
        """Extract one segment (table row) sorted by wavelength."""
        vals = [r[k][row] for k in keys]
        isort = vals[0].argsort()
        # BUG FIX: the wavelength column itself was previously left in its
        # original order while every other column was permuted; apply the
        # sort permutation to all columns consistently.
        vals = [v[isort] for v in vals]
        return np.rec.fromarrays(vals, names=names)

    sp1 = _sorted_record(0)
    sp2 = _sorted_record(1)

    # FUVB covers shorter wavelengths than FUVA; label by starting wavelength.
    if sp1.wa[0] < sp2.wa[0]:
        info1 = vnum, optelem, cwa, 'FUVB', exptime
        info2 = vnum, optelem, cwa, 'FUVA', exptime
    else:
        info2 = vnum, optelem, cwa, 'FUVB', exptime
        info1 = vnum, optelem, cwa, 'FUVA', exptime
    return [sp1, sp2], [info1, info2]
|
// Copyright 2014 Globo.com Player authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
var BaseObject = require('../base/base_object')
var CoreFactory = require('./core_factory')
var Loader = require('./loader')
var assign = require('lodash.assign')
var find = require('lodash.find')
var Events = require('events')
var uniqueId = require('../base/utils').uniqueId
var PlayerInfo = require('./player_info')
/**
* @class Player
* @constructor
* @extends BaseObject
* @module components
* @example
* // Creates an instance:
* var player = new Clappr.Player({source: "http://your.video/here.mp4", parentId: "#player"});
*/
class Player extends BaseObject {
  /**
   * constructor.
   *
   * @method constructor
   * @param {Object} options Data
   * to configure player instance
   * @param {Boolean} [options.autoPlay]
   * @default false
   * if you want the video to automatically play after page load.
   * @param {Boolean} [options.mute]
   * @default false
   * if you want the video to start muted.
   * @param {String} [options.poster]
   * Define a poster by adding its address `http://url/img.png` on your embed parameters. It will appear after video embed, disappear on play and go back when user stops the video.
   */
  constructor(options) {
    super(options)
    // FIX: removed `window.p = this` - a debugging leftover that leaked every
    // Player instance into the global scope.
    var defaultOptions = {playerId: uniqueId(""), persistConfig: true, width: 640, height: 360, baseUrl: 'http://cdn.clappr.io/latest'}
    this.options = assign(defaultOptions, options)
    this.options.sources = this.normalizeSources(options)
    this.loader = new Loader(this.options.plugins || {})
    this.coreFactory = new CoreFactory(this, this.loader)
    PlayerInfo.currentSize = {width: options.width, height: options.height}
    if (this.options.parentId) {
      this.setParentId(this.options.parentId)
    }
  }

  // Resolve the CSS selector and attach to the matched element
  // (silently a no-op when nothing matches).
  setParentId(parentId) {
    var el = document.querySelector(parentId)
    if (el) {
      this.attachTo(el)
    }
  }

  /**
   * attach player to element
   *
   * @method attachTo
   * @param {Object} element Data
   * You can use this method to attach the player to a given `element`. You don't need to do this when you specify it during the player instantiation passing the parentId param.
   */
  attachTo(element) {
    this.options.parentElement = element
    this.core = this.coreFactory.create()
    this.addEventListeners()
  }

  // Bridge container events into the public PLAYER_* events.
  addEventListeners() {
    this.listenTo(this.core.mediaControl, Events.MEDIACONTROL_CONTAINERCHANGED, this.containerChanged)
    var container = this.core.mediaControl.container
    // FIX: `if (!!container)` - the double negation was redundant in a
    // boolean context.
    if (container) {
      this.listenTo(container, Events.CONTAINER_PLAY, this.onPlay)
      this.listenTo(container, Events.CONTAINER_PAUSE, this.onPause)
      this.listenTo(container, Events.CONTAINER_STOP, this.onStop)
      this.listenTo(container, Events.CONTAINER_ENDED, this.onEnded)
      this.listenTo(container, Events.CONTAINER_SEEK, this.onSeek)
      this.listenTo(container, Events.CONTAINER_ERROR, this.onError)
      this.listenTo(container, Events.CONTAINER_TIMEUPDATE, this.onTimeUpdate)
    }
  }

  // Re-bind all listeners when the active container changes.
  containerChanged() {
    this.stopListening()
    this.addEventListeners()
  }

  onPlay() {
    this.trigger(Events.PLAYER_PLAY)
  }

  onPause() {
    this.trigger(Events.PLAYER_PAUSE)
  }

  onStop() {
    this.trigger(Events.PLAYER_STOP, this.getCurrentTime())
  }

  onEnded() {
    this.trigger(Events.PLAYER_ENDED)
  }

  onSeek(percent) {
    this.trigger(Events.PLAYER_SEEK, percent)
  }

  onTimeUpdate(position, duration) {
    this.trigger(Events.PLAYER_TIMEUPDATE, position, duration)
  }

  onError(error) {
    this.trigger(Events.PLAYER_ERROR, error)
  }

  // Exact-constructor type check (does not match subclasses).
  is(value, type) {
    return value.constructor === type
  }

  // Normalize `source`/`sources` options into a non-empty array;
  // 'no.op' is a sentinel source used when nothing was provided.
  normalizeSources(options) {
    var sources = options.sources || (options.source !== undefined? [options.source.toString()] : [])
    return sources.length === 0 ? ['no.op'] : sources
  }

  resize(size) {
    this.core.resize(size);
  }

  load(sources, mimeType) {
    this.core.load(sources, mimeType)
  }

  destroy() {
    this.core.destroy()
  }

  play() {
    this.core.mediaControl.container.play();
  }

  pause() {
    this.core.mediaControl.container.pause();
  }

  stop() {
    this.core.mediaControl.container.stop();
  }

  seek(time) {
    this.core.mediaControl.container.setCurrentTime(time);
  }

  setVolume(volume) {
    this.core.mediaControl.container.setVolume(volume);
  }

  mute() {
    this.core.mediaControl.container.setVolume(0);
  }

  unmute() {
    this.core.mediaControl.container.setVolume(100);
  }

  isPlaying() {
    return this.core.mediaControl.container.isPlaying();
  }

  // Look up a plugin by name across core- and container-level plugins.
  getPlugin(name) {
    var plugins = this.core.plugins.concat(this.core.mediaControl.container.plugins);
    return find(plugins, function(plugin) {
      return plugin.name === name;
    });
  }

  getCurrentTime() {
    return this.core.mediaControl.container.getCurrentTime()
  }

  getDuration() {
    return this.core.mediaControl.container.getDuration()
  }
}
module.exports = Player
|
# A vuetify layout for the glue data viewers. For now we keep this isolated to
# a single file, but once we are happy with it we can just replace the original
# default layout.
import ipyvuetify as v
__all__ = ['vuetify_layout_factory']
def vuetify_layout_factory(viewer):
    """Build a vuetify layout (toolbar, figure/output area, and a
    collapsible options drawer) for a glue data viewer and return the
    root widget.
    """

    def on_click(widget, event, data):
        # Toggle the options drawer. `drawer` is defined further down;
        # the closure resolves it at call time, so the order is safe.
        drawer.v_model = not drawer.v_model

    sidebar_button = v.AppBarNavIcon()
    sidebar_button.on_event('click', on_click)

    # Two accordion panels, both expanded initially (v_model=[0, 1]):
    # viewer-level options and per-layer options.
    options_panel = v.ExpansionPanels(
        v_model=[0, 1], multiple=True, accordion=True, style_='padding-left: 1px; min-width: 200px',
        children=[
            v.ExpansionPanel(children=[
                v.ExpansionPanelHeader(class_='font-weight-bold', children=['Viewer Options']),
                v.ExpansionPanelContent(children=[viewer.viewer_options])]),
            v.ExpansionPanel(children=[
                v.ExpansionPanelHeader(class_='font-weight-bold', children=['Layer Options']),
                v.ExpansionPanelContent(children=[viewer.layer_options])])])

    # Right-hand overlay drawer, initially closed (v_model=False).
    drawer = v.NavigationDrawer(v_model=False, absolute=True, right=True,
                                children=[sidebar_button,
                                          options_panel], width="min-content")

    toolbar = v.Toolbar(dense=True, class_='elevation-0',
                        children=[v.ToolbarItems(children=[viewer.toolbar_selection_tools,
                                                           viewer.toolbar_selection_mode,
                                                           viewer.toolbar_active_subset]),
                                  v.Spacer(),
                                  sidebar_button])

    layout = v.Html(tag='div', children=[
        toolbar,
        v.Row(no_gutters=True, children=[
            v.Col(cols=12, children=[viewer.figure_widget]),
            v.Col(cols=12, children=[viewer.output_widget])
        ]),
        drawer
    ])

    return layout
|
// Created by Sergey Litvinov on 08.03.2021
// Copyright 2021 ETH Zurich
#include <stddef.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Path manipulation: results are written into the caller-provided output
   buffer; the int return is presumably a status code - confirm against
   the implementation. */
int SystemBaseName(const char*, char*);
int SystemDirName(const char*, char*);
char* SystemRealPath(const char*, char* resolved);
/* Host / hardware queries. */
int SystemGetHostName(char*, size_t size);
int SystemHasHyperthreads(void);
/* Filesystem predicates and operations. */
int SystemIsDir(const char*);
int SystemIsFile(const char*);
int SystemJoin(const char*, const char*, char*);
int SystemMakeDir(const char*, int parent);
int SystemSplitExt(const char*, char*, char*);
/* Memory usage of the current process (units defined by implementation). */
size_t SystemGetMem(void);
#ifdef __cplusplus
}
#endif
|
/* eslint-disable camelcase */
// Schema fragment for a node payload: both fields resolve through the
// shared `GenericNestedString` definition via $ref.
module.exports = {
  type: 'object',
  properties: {
    // field_wysiwyg also has a `format` that we don't use
    field_wysiwyg: { $ref: 'GenericNestedString' },
    field_title: { $ref: 'GenericNestedString' },
  },
};
|
/**
* Policy Mappings
* (sails.config.policies)
*
* Policies are simple functions which run **before** your actions.
*
* For more information on configuring policies, check out:
* https://sailsjs.com/docs/concepts/policies
*/
// Authentication module.
// Authentication module.
const auth = require('http-auth');

// HTTP Basic auth realm protecting the mutating Record actions below.
// NOTE(review): credentials are hard-coded ("conix"/"conix") and compared
// with `===` (not timing-safe) - move them into configuration/secrets.
const basic = auth.basic({
    realm: "CONIX"
  }, (username, password, callback) => {
    // Custom authentication
    // Use callback(error) if you want to throw async error.
    callback(username === "conix" && password === "conix");
  }
);

const auth_connect = require('http-auth-connect');

module.exports.policies = {

  /***************************************************************************
  *                                                                          *
  * Default policy for all controllers and actions, unless overridden.       *
  * (`true` allows public access)                                            *
  *                                                                          *
  ***************************************************************************/

  //'*': 'isLocal',

  // Require Basic auth for all mutating Record endpoints; reads stay open.
  RecordController: {
    update: auth_connect(basic),
    create: auth_connect(basic),
    delete: auth_connect(basic),
  }
};
|
/**
* @name Enums
* @type Object
*/
/**
* @typedef {string} Enums.ButtonType
* @enum {'back'|'danger'|'default'|'normal'|'success'}
*/
/**
* @typedef {string} Enums.ButtonStylingMode
* @enum {'text'|'outlined'|'contained'}
*/
/**
* @typedef {string} Enums.EventKeyModifier
* @enum {'alt'|'ctrl'|'meta'|'shift'}
*/
/**
* @typedef {number} Enums.FirstDayOfWeek
* @enum {0|1|2|3|4|5|6}
*/
/**
* @typedef {number} Enums.PivotGridFieldChooserLayout
* @enum {0|1|2}
*/
/**
* @typedef {string} Enums.DropDownSearchMode
* @enum {'contains'|'startswith'}
*/
/**
* @typedef {string} Enums.ValidationMessageMode
* @enum {'always'|'auto'}
*/
/**
* @typedef {string} Enums.GaugeTitlePosition
* @enum {'bottom-center'|'bottom-left'|'bottom-right'|'top-center'|'top-left'|'top-right'}
*/
/**
* @typedef {string} Enums.VizAnimationEasing
* @enum {'easeOutCubic'|'linear'}
*/
/**
* @typedef {string} Enums.Format
* @enum {'billions'|'currency'|'day'|'decimal'|'exponential'|'fixedPoint'|'largeNumber'|'longDate'|'longTime'|'millions'|'millisecond'|'month'|'monthAndDay'|'monthAndYear'|'percent'|'quarter'|'quarterAndYear'|'shortDate'|'shortTime'|'thousands'|'trillions'|'year'|'dayOfWeek'|'hour'|'longDateLongTime'|'minute'|'second'|'shortDateShortTime'}
*/
/**
* @typedef {string} Enums.VizTheme
* @enum {'android5.light'|'generic.dark'|'generic.light'|'generic.contrast'|'ios7.default'|'win10.black'|'win10.white'|'win8.black'|'win8.white'|'generic.carmine'|'generic.darkmoon'|'generic.darkviolet'|'generic.greenmist'|'generic.softblue'|'material.blue.light'|'material.lime.light'|'material.orange.light'|'material.purple.light'|'material.teal.light'}
* @member 'win8.black' @deprecated 'generic.dark'
* @member 'win8.white' @deprecated 'generic.light'
* @member 'win10.black' @deprecated 'generic.dark'
* @member 'win10.white' @deprecated 'generic.light'
* @member 'android5.light' @deprecated 'material.blue.light'
*/
/**
* @typedef {string} Enums.VizPalette
* @enum {'Bright'|'Default'|'Harmony Light'|'Ocean'|'Pastel'|'Soft'|'Soft Pastel'|'Vintage'|'Violet'|'Carmine'|'Dark Moon'|'Dark Violet'|'Green Mist'|'Soft Blue'|'Material'|'Office'}
* @member 'Default' @deprecated 'Material'
*/
/**
* @typedef {string} Enums.VizPaletteExtensionMode
* @enum {'alternate'|'blend'|'extrapolate'}
*/
/**
* @typedef {string} Enums.CircularGaugeElementOrientation
* @enum {'center'|'inside'|'outside'}
*/
/**
* @typedef {string} Enums.GaugeOverlappingBehavior
* @enum {'first'|'last'}
*/
/**
* @typedef {string} Enums.ScaleLabelOverlappingBehavior
* @enum {'hide'|'none'}
*/
/**
* @typedef {string} Enums.OverlappingBehavior
* @enum {'rotate'|'stagger'|'none'|'hide'}
*/
/**
* @typedef {string} Enums.PolarChartOverlappingBehavior
* @enum {'none'|'hide'}
*/
/**
* @typedef {string} Enums.Orientation
* @enum {'horizontal'|'vertical'}
*/
/**
* @typedef {string} Enums.VerticalAlignment
* @enum {'bottom'|'center'|'top'}
*/
/**
* @typedef {string} Enums.HorizontalAlignment
* @enum {'center'|'left'|'right'}
*/
/**
* @typedef {string} Enums.VerticalEdge
* @enum {'bottom'|'top'}
*/
/**
* @typedef {string} Enums.DashStyle
* @enum {'dash'|'dot'|'longDash'|'solid'}
*/
/**
* @typedef {string} Enums.ResizeHandle
* @enum {'bottom'|'left'|'right'|'top'|'all'}
*/
/**
* @typedef {string} Enums.BoxDirection
* @enum {'col'|'row'}
*/
/**
* @typedef {string} Enums.BoxAlign
* @enum {'center'|'end'|'space-around'|'space-between'|'start'}
*/
/**
* @typedef {string} Enums.BoxCrossAlign
* @enum {'center'|'end'|'start'|'stretch'}
*/
/**
* @typedef {string} Enums.ButtonGroupSelectionMode
* @enum {'multiple'|'single'}
*/
/**
* @typedef {string} Enums.Mode
* @enum {'auto'}
*/
/**
* @typedef {string} Enums.SparklineType
* @enum {'area'|'bar'|'line'|'spline'|'splinearea'|'steparea'|'stepline'|'winloss'}
*/
/**
* @typedef {string} Enums.VizPointSymbol
* @enum {'circle'|'cross'|'polygon'|'square'|'triangle'}
*/
/**
* @typedef {string} Enums.CalendarZoomLevel
* @enum {'century'|'decade'|'month'|'year'}
*/
/**
* @typedef {string} Enums.ChartResolveLabelOverlapping
* @enum {'hide'|'none'|'stack'}
*/
/**
* @typedef {string} Enums.ChartElementSelectionMode
* @enum {'multiple'|'single'}
*/
/**
* @typedef {string} Enums.SeriesType
* @enum {'area'|'bar'|'bubble'|'candlestick'|'fullstackedarea'|'fullstackedbar'|'fullstackedline'|'fullstackedspline'|'fullstackedsplinearea'|'line'|'rangearea'|'rangebar'|'scatter'|'spline'|'splinearea'|'stackedarea'|'stackedbar'|'stackedline'|'stackedspline'|'stackedsplinearea'|'steparea'|'stepline'|'stock'}
*/
/**
* @typedef {string} Enums.Position
* @enum {'bottom'|'left'|'right'|'top'}
*/
/**
* @typedef {string} Enums.ChartPointerType
* @enum {'all'|'mouse'|'none'|'touch'}
*/
/**
* @typedef {string} Enums.ChartZoomAndPanMode
* @enum {'both'|'none'|'pan'|'zoom'}
*/
/**
* @typedef {string} Enums.ChartLegendHoverMode
* @enum {'excludePoints'|'includePoints'|'none'}
*/
/**
* @typedef {string} Enums.RelativePosition
* @enum {'inside'|'outside'}
*/
/**
* @typedef {string} Enums.DiscreteAxisDivisionMode
* @enum {'betweenLabels'|'crossLabels'}
*/
/**
* @typedef {string} Enums.ScaleBreakLineStyle
* @enum {'straight'|'waved'}
*/
/**
* @typedef {string} Enums.ChartLabelDisplayMode
* @enum {'rotate'|'stagger'|'standard'}
*/
/**
* @typedef {string} Enums.VizTimeInterval
* @enum {'day'|'hour'|'millisecond'|'minute'|'month'|'quarter'|'second'|'week'|'year'}
*/
/**
* @typedef {string} Enums.VisualRangeUpdateMode
* @enum {'auto'|'keep'|'reset'|'shift'}
*/
/**
* @typedef {string} Enums.AxisScaleType
* @enum {'continuous'|'discrete'|'logarithmic'}
*/
/**
* @typedef {string} Enums.ChartDataType
* @enum {'datetime'|'numeric'|'string'}
*/
/**
* @typedef {string} Enums.ArgumentAxisHoverMode
* @enum {'allArgumentPoints'|'none'}
*/
/**
* @typedef {string} Enums.ChartTooltipLocation
* @enum {'center'|'edge'}
*/
/**
* @typedef {string} Enums.PieChartLegendHoverMode
* @enum {'none'|'allArgumentPoints'}
*/
/**
* @typedef {string} Enums.PieChartResolveLabelOverlapping
* @enum {'hide'|'none'|'shift'}
*/
/**
* @typedef {string} Enums.PieChartType
* @enum {'donut'|'doughnut'|'pie'}
*/
/**
* @typedef {string} Enums.PieChartSegmentsDirection
* @enum {'anticlockwise'|'clockwise'}
*/
/**
* @typedef {string} Enums.PolarChartResolveLabelOverlapping
* @enum {'hide'|'none'}
*/
/**
* @typedef {string} Enums.PolarChartSeriesType
* @enum {'area'|'bar'|'line'|'scatter'|'stackedbar'}
*/
/**
* @typedef {string} Enums.EditorApplyValueMode
* @enum {'instantly'|'useButtons'}
*/
/**
* @typedef {string} Enums.ShowSubmenuMode
* @enum {'onClick'|'onHover'}
*/
/**
* @typedef {string} Enums.MenuSelectionMode
* @enum {'none'|'single'}
*/
/**
* @typedef {string} Enums.ContextMenuSubmenuDirection
* @enum {'auto'|'left'|'right'}
*/
/**
* @typedef {string} Enums.GridColumnChooserMode
* @enum {'dragAndDrop'|'select'}
*/
/**
* @typedef {string} Enums.ColumnResizingMode
* @enum {'nextColumn'|'widget'}
*/
/**
* @typedef {string} Enums.HorizontalEdge
* @enum {'left'|'right'}
*/
/**
* @typedef {string} Enums.GridColumnDataType
* @enum {'string'|'number'|'date'|'boolean'|'object'|'datetime'}
*/
/**
* @typedef {string} Enums.SortOrder
* @enum {'asc'|'desc'}
*/
/**
* @typedef {string} Enums.FilterBuilderFieldFilterOperations
* @enum {'='|'<>'|'<'|'<='|'>'|'>='|'contains'|'endswith'|'isblank'|'isnotblank'|'notcontains'|'startswith'|'between'}
*/
/**
* @typedef {string} Enums.FilterBuilderGroupOperations
* @enum {'and'|'or'|'notAnd'|'notOr'}
*/
/**
* @typedef {string} Enums.FilterOperations
* @enum {'<'|'<='|'<>'|'='|'>'|'>='|'between'|'contains'|'endswith'|'notcontains'|'startswith'}
*/
/**
* @typedef {string} Enums.FilterType
* @enum {'exclude'|'include'}
*/
/**
* @typedef {string} Enums.HeaderFilterGroupInterval
* @enum {'day'|'hour'|'minute'|'month'|'quarter'|'second'|'year'}
*/
/**
* @typedef {string} Enums.GridEditMode
* @enum {'batch'|'cell'|'row'|'form'|'popup'}
*/
/**
* @typedef {string} Enums.GridEditRefreshMode
* @enum {'full'|'reshape'|'repaint'}
*/
/**
* @typedef {string} Enums.GridApplyFilterMode
* @enum {'auto'|'onClick'}
*/
/**
* @typedef {string} Enums.GridGroupingExpandMode
* @enum {'buttonClick'|'rowClick'}
*/
/**
* @typedef {string} Enums.GridScrollingMode
* @enum {'infinite'|'standard'|'virtual'}
*/
/**
* @typedef {string} Enums.ShowScrollbarMode
* @enum {'always'|'never'|'onHover'|'onScroll'}
*/
/**
* @typedef {string} Enums.SelectionMode
* @enum {'multiple'|'none'|'single'}
*/
/**
* @typedef {string} Enums.GridSelectionShowCheckBoxesMode
* @enum {'always'|'none'|'onClick'|'onLongTap'}
*/
/**
* @typedef {string} Enums.SelectAllMode
* @enum {'allPages'|'page'}
*/
/**
* @typedef {string} Enums.SummaryType
* @enum {'avg'|'count'|'custom'|'max'|'min'|'sum'}
*/
/**
* @typedef {string} Enums.GridSortingMode
* @enum {'multiple'|'none'|'single'}
*/
/**
* @typedef {string} Enums.StateStoringType
* @enum {'custom'|'localStorage'|'sessionStorage'}
*/
/**
* @typedef {string} Enums.DateBoxType
* @enum {'date'|'datetime'|'time'}
*/
/**
* @typedef {string} Enums.DateBoxPickerType
* @enum {'calendar'|'list'|'native'|'rollers'}
*/
/**
* @typedef {string} Enums.FileUploadMode
* @enum {'instantly'|'useButtons'|'useForm'}
*/
/**
* @typedef {string} Enums.UploadHttpMethod
* @enum {'POST'|'PUT'}
*/
/**
* @typedef {string} Enums.FormLabelLocation
* @enum {'left'|'right'|'top'}
*/
/**
* @typedef {string} Enums.FormItemEditorType
* @enum {'dxAutocomplete'|'dxCalendar'|'dxCheckBox'|'dxColorBox'|'dxDateBox'|'dxDropDownBox'|'dxLookup'|'dxNumberBox'|'dxRadioGroup'|'dxRangeSlider'|'dxSelectBox'|'dxSlider'|'dxSwitch'|'dxTagBox'|'dxTextArea'|'dxTextBox'}
*/
/**
* @typedef {string} Enums.FormItemType
* @enum {'empty'|'group'|'simple'|'tabbed'|'button'}
*/
/**
* @typedef {string} Enums.FunnelAlgorithm
* @enum {'dynamicHeight'|'dynamicSlope'}
*/
/**
* @typedef {string} Enums.HatchingDirection
* @enum {'left'|'none'|'right'}
*/
/**
* @typedef {string} Enums.FunnelLabelPosition
* @enum {'columns'|'inside'|'outside'}
*/
/**
* @typedef {string} Enums.SankeyLabelOverlappingBehavior
* @enum {'ellipsis'|'hide'|'none'}
*/
/**
* @typedef {string} Enums.SankeyColorMode
* @enum {'none'|'source'|'target'|'gradient'}
*/
/**
* @typedef {string} Enums.ListSelectionMode
* @enum {'all'|'multiple'|'none'|'single'}
*/
/**
* @typedef {string} Enums.ListMenuMode
* @enum {'context'|'slide'}
*/
/**
* @typedef {string} Enums.ListItemDeleteMode
* @enum {'context'|'slideButton'|'slideItem'|'static'|'swipe'|'toggle'}
*/
/**
* @typedef {string} Enums.ListPageLoadMode
* @enum {'nextButton'|'scrollBottom'}
*/
/**
* @typedef {string} Enums.CollectionSearchMode
* @enum {'contains'|'startswith'|'equals'}
*/
/**
* @typedef {string} Enums.GeoMapType
* @enum {'hybrid'|'roadmap'|'satellite'}
*/
/**
* @typedef {string} Enums.GeoMapProvider
* @enum {'bing'|'google'|'googleStatic'}
*/
/**
* @typedef {string} Enums.GeoMapRouteMode
* @enum {'driving'|'walking'}
*/
/**
* @typedef {string} Enums.SubmenuDirection
* @enum {'auto'|'leftOrTop'|'rightOrBottom'}
*/
/**
* @typedef {string} Enums.NavSelectionMode
* @enum {'multiple'|'single'}
*/
/**
* @typedef {string} Enums.NumberBoxMode
* @enum {'number'|'text'|'tel'}
*/
/**
* @typedef {string} Enums.PivotGridScrollingMode
* @enum {'standard'|'virtual'}
*/
/**
* @typedef {string} Enums.PivotGridDataFieldArea
* @enum {'column'|'row'}
*/
/**
* @typedef {string} Enums.PivotGridTotalsDisplayMode
* @enum {'both'|'columns'|'none'|'rows'}
*/
/**
* @typedef {string} Enums.PivotGridRowHeadersLayout
* @enum {'standard'|'tree'}
*/
/**
* @typedef {string} Enums.Toolbar
* @enum {'bottom'|'top'}
*/
/**
* @typedef {string} Enums.ToolbarItemWidget
* @enum {'dxAutocomplete'|'dxButton'|'dxCheckBox'|'dxDateBox'|'dxMenu'|'dxSelectBox'|'dxTabs'|'dxTextBox'}
*/
/**
* @typedef {string} Enums.ToolbarItemLocation
* @enum {'after'|'before'|'center'}
*/
/**
* @typedef {string} Enums.RangeSelectorAxisScaleType
* @enum {'continuous'|'discrete'|'logarithmic'|'semidiscrete'}
*/
/**
* @typedef {string} Enums.ValueChangedCallMode
* @enum {'onMoving'|'onMovingComplete'}
*/
/**
* @typedef {string} Enums.BackgroundImageLocation
* @enum {'center'|'centerBottom'|'centerTop'|'full'|'leftBottom'|'leftCenter'|'leftTop'|'rightBottom'|'rightCenter'|'rightTop'}
*/
/**
* @typedef {string} Enums.RangeSelectorChartAxisScaleType
* @enum {'continuous'|'logarithmic'}
*/
/**
* @typedef {string} Enums.SliderTooltipShowMode
* @enum {'always'|'onHover'}
*/
/**
* @typedef {string} Enums.SchedulerViewType
* @enum {'agenda'|'day'|'month'|'timelineDay'|'timelineMonth'|'timelineWeek'|'timelineWorkWeek'|'week'|'workWeek'}
*/
/**
* @typedef {string} Enums.MaxAppointmentsPerCell
* @enum {'auto'|'unlimited'}
*/
/**
* @typedef {string} Enums.SchedulerRecurrenceEditMode
* @enum {'dialog'|'occurrence'|'series'}
*/
/**
* @typedef {string} Enums.ScrollDirection
* @enum {'both'|'horizontal'|'vertical'}
*/
/**
* @typedef {string} Enums.SlideOutMenuPosition
* @enum {'inverted'|'normal'}
*/
/**
* @typedef {string} Enums.DrawerOpenedStateMode
* @enum {'overlap'|'shrink'|'push'}
*/
/**
* @typedef {string} Enums.DrawerPosition
* @enum {'left'|'right'|'top'|'bottom'}
*/
/**
* @typedef {string} Enums.DrawerRevealMode
* @enum {'slide'|'expand'}
*/
/**
* @typedef {string} Enums.TextBoxMode
* @enum {'email'|'password'|'search'|'tel'|'text'|'url'}
*/
/**
* @typedef {string} Enums.ShowMaskMode
* @enum {'always'|'onFocus'}
*/
/**
* @typedef {string} Enums.ToastType
* @enum {'custom'|'error'|'info'|'success'|'warning'}
*/
/**
* @typedef {string} Enums.ToolbarItemLocateInMenuMode
* @enum {'always'|'auto'|'never'}
*/
/**
* @typedef {string} Enums.ToolbarItemShowTextMode
* @enum {'always'|'inMenu'}
*/
/**
* @typedef {string} Enums.ToolbarRenderMode
* @enum {'bottomToolbar'|'topToolbar'}
*/
/**
* @typedef {string} Enums.TreeListDataStructure
* @enum {'plain'|'tree'}
*/
/**
* @typedef {string} Enums.TreeListScrollingMode
* @enum {'standard'|'virtual'}
*/
/**
* @typedef {string} Enums.GridRowRenderingMode
* @enum {'standard'|'virtual'}
*/
/**
* @typedef {string} Enums.GridColumnRenderingMode
* @enum {'standard'|'virtual'}
*/
/**
* @typedef {string} Enums.TreeMapLayoutAlgorithm
* @enum {'sliceanddice'|'squarified'|'strip'}
*/
/**
* @typedef {string} Enums.TreeMapLayoutDirection
* @enum {'leftBottomRightTop'|'leftTopRightBottom'|'rightBottomLeftTop'|'rightTopLeftBottom'}
*/
/**
* @typedef {string} Enums.TreeMapResolveLabelOverflow
* @enum {'ellipsis'|'hide'}
*/
/**
* @typedef {string} Enums.TreeMapColorizerType
* @enum {'discrete'|'gradient'|'none'|'range'}
*/
/**
* @typedef {string} Enums.TreeViewDataStructure
* @enum {'plain'|'tree'}
*/
/**
* @typedef {string} Enums.TreeViewCheckBoxMode
* @enum {'none'|'normal'|'selectAll'}
*/
/**
* @typedef {string} Enums.TreeViewExpandEvent
* @enum {'dblclick'|'click'}
*/
/**
* @typedef {string} Enums.VectorMapLayerType
* @enum {'area'|'line'|'marker'}
*/
/**
* @typedef {string} Enums.VectorMapMarkerType
* @enum {'bubble'|'dot'|'image'|'pie'}
*/
/**
* @typedef {string} Enums.VectorMapMarkerShape
* @enum {'circle'|'square'}
*/
/**
* @typedef {string} Enums.AnimationType
* @enum {'css'|'fade'|'fadeIn'|'fadeOut'|'pop'|'slide'|'slideIn'|'slideOut'}
*/
/**
* @typedef {string} Enums.Direction
* @enum {'bottom'|'left'|'right'|'top'}
*/
/**
* @typedef {string} Enums.FinancialChartReductionLevel
* @enum {'close'|'high'|'low'|'open'}
*/
/**
* @typedef {string} Enums.ChartSeriesHoverMode
* @enum {'allArgumentPoints'|'allSeriesPoints'|'excludePoints'|'includePoints'|'nearestPoint'|'none'|'onlyPoint'}
*/
/**
* @typedef {string} Enums.ChartSeriesSelectionMode
* @enum {'allArgumentPoints'|'allSeriesPoints'|'excludePoints'|'includePoints'|'none'|'onlyPoint'}
*/
/**
* @typedef {string} Enums.ChartPointInteractionMode
* @enum {'allArgumentPoints'|'allSeriesPoints'|'none'|'onlyPoint'}
*/
/**
* @typedef {string} Enums.PointSymbol
* @enum {'circle'|'cross'|'polygon'|'square'|'triangleDown'|'triangleUp'}
*/
/**
* @typedef {string} Enums.ValueErrorBarDisplayMode
* @enum {'auto'|'high'|'low'|'none'}
*/
/**
* @typedef {string} Enums.ValueErrorBarType
* @enum {'fixed'|'percent'|'stdDeviation'|'stdError'|'variance'}
*/
/**
* @typedef {string} Enums.ValidationRuleType
* @enum {'required'|'numeric'|'range'|'stringLength'|'custom'|'compare'|'pattern'|'email'}
*/
/**
* @typedef {string} Enums.ComparisonOperator
* @enum {'!='|'!=='|'<'|'<='|'=='|'==='|'>'|'>='}
*/
/**
* @typedef {string} Enums.FilterBuilderFieldDataType
* @enum {'string'|'number'|'date'|'boolean'|'object'|'datetime'}
*/
/**
* @typedef {string} Enums.SmallValuesGroupingMode
* @enum {'none'|'smallValueThreshold'|'topN'}
*/
/**
* @typedef {string} Enums.PieChartSeriesInteractionMode
* @enum {'none'|'onlyPoint'}
*/
/**
* @typedef {string} Enums.PieChartLabelPosition
* @enum {'columns'|'inside'|'outside'}
*/
/**
* @typedef {string} Enums.PivotGridDataType
* @enum {'date'|'number'|'string'}
*/
/**
* @typedef {string} Enums.PivotGridGroupInterval
* @enum {'day'|'dayOfWeek'|'month'|'quarter'|'year'}
*/
/**
* @typedef {string} Enums.PivotGridArea
* @enum {'column'|'data'|'filter'|'row'}
*/
/**
* @typedef {string} Enums.PivotGridSortBy
* @enum {'displayText'|'value'|'none'}
*/
/**
* @typedef {string} Enums.ApplyChangesMode
* @enum {'instantly'|'onDemand'}
*/
/**
* @typedef {string} Enums.PivotGridSummaryDisplayMode
* @enum {'absoluteVariation'|'percentOfColumnGrandTotal'|'percentOfColumnTotal'|'percentOfGrandTotal'|'percentOfRowGrandTotal'|'percentOfRowTotal'|'percentVariation'}
*/
/**
* @typedef {string} Enums.PivotGridRunningTotalMode
* @enum {'column'|'row'}
*/
/**
* @typedef {string} Enums.PositionAlignment
* @enum {'bottom'|'center'|'left'|'left bottom'|'left top'|'right'|'right bottom'|'right top'|'top'}
*/
/**
* @typedef {string} Enums.PositionResolveCollisionXY
* @enum {'fit'|'fit flip'|'fit flipfit'|'fit none'|'flip'|'flip fit'|'flip none'|'flipfit'|'flipfit fit'|'flipfit none'|'none'|'none fit'|'none flip'|'none flipfit'}
*/
/**
* @typedef {string} Enums.PositionResolveCollision
* @enum {'fit'|'flip'|'flipfit'|'none'}
*/
/**
* @typedef {string} Enums.ChartSeriesAggregationMethod
* @enum {'avg'|'count'|'max'|'min'|'ohlc'|'range'|'sum'|'custom'}
*/
/**
* @typedef {string} Enums.ChartSingleValueSeriesAggregationMethod
* @enum {'avg'|'count'|'max'|'min'|'sum'|'custom'}
*/
/**
* @typedef {string} Enums.ChartFinancialSeriesAggregationMethod
* @enum {'ohlc'|'custom'}
*/
/**
* @typedef {string} Enums.ChartRangeSeriesAggregationMethod
* @enum {'range'|'custom'}
*/
/**
* @typedef {string} Enums.ChartBubbleSeriesAggregationMethod
* @enum {'avg'|'custom'}
*/
/**
* @typedef {string} Enums.DataSourceStoreType
* @enum {'array'|'local'|'odata'}
*/
/**
* @typedef {string} Enums.PivotGridStoreType
* @enum {'array'|'local'|'odata'|'xmla'}
*/
/**
* @typedef {string} Enums.ExportFormat
* @enum {'GIF'|'JPEG'|'PDF'|'PNG'|'SVG'}
*/
/**
* @typedef {string} Enums.XlsxUnderlineType
* @enum {'double'|'doubleAccounting'|'none'|'single'|'singleAccounting'}
*/
/**
* @typedef {string} Enums.XlsxCellDataType
* @enum {'n'|'s'}
*/
/**
* @typedef {string} Enums.XlsxHorizontalAlignment
* @enum {'center'|'centerContinuous'|'distributed'|'fill'|'general'|'justify'|'left'|'right'}
*/
/**
* @typedef {string} Enums.XlsxVerticalAlignment
* @enum {'bottom'|'center'|'distributed'|'justify'|'top'}
*/
/**
* @typedef {string} Enums.XlsxPatternStyle
* @enum {'darkDown'|'darkGray'|'darkGrid'|'darkHorizontal'|'darkTrellis'|'darkUp'|'darkVertical'|'gray0625'|'gray125'|'lightDown'|'lightGray'|'lightGrid'|'lightHorizontal'|'lightTrellis'|'lightUp'|'lightVertical'|'mediumGray'|'none'|'solid'}
*/
/**
* @typedef {string} Enums.HtmlEditorValueType
* @enum {'html'|'markdown'}
*/
/**
* @typedef {string} Enums.EditorStylingMode
* @enum {'outlined'|'underlined'|'filled'}
*/
/**
* @typedef {string} Enums.GridCommandColumnType
* @enum {'adaptive'|'buttons'|'detailExpand'|'groupExpand'|'selection'}
*/
/**
* @typedef {string} Enums.TreeListCommandColumnType
* @enum {'adaptive'|'buttons'}
*/
/**
* @typedef {string} Enums.GridColumnButtonName
* @enum {'cancel'|'delete'|'edit'|'save'|'undelete'}
*/
/**
* @typedef {string} Enums.TreeListColumnButtonName
* @enum {'add'|'cancel'|'delete'|'edit'|'save'|'undelete'}
*/
|
#include "stub/baseentity.h"
#include "stub/tfplayer.h"
#include "stub/tfweaponbase.h"
#include "stub/projectiles.h"
#include "util/pooled_string.h"
#include <boost/algorithm/string.hpp>
// Base class for named, per-entity behavior modules.
// Instances are owned (and deleted) by the ExtraEntityData they are added to.
class EntityModule
{
public:
    EntityModule() {}
    EntityModule(CBaseEntity *entity) {}
};
// A runtime key/value pair attached to an entity. The value is stored both as
// a pooled string and as its float conversion for fast numeric lookups.
struct CustomVariable
{
    string_t key;
    string_t value;
    float value_float;
};
// A dynamically added, named entity output and its parsed event actions.
struct CustomOutput
{
    string_t key;
    CBaseEntityOutput output;
};
// Per-entity extension storage hung off CBaseEntity::m_extraEntityData.
// Owns its modules (deleted in the destructor) and holds the custom
// key/value variables and custom outputs added at runtime.
class ExtraEntityData
{
public:
    ExtraEntityData(CBaseEntity *entity) {}

    ~ExtraEntityData() {
        for (auto module : modules) {
            delete module.second;
        }
    }

    // Registers `module` under `name`, replacing (and deleting) any module
    // previously stored under that name.
    // NOTE(review): module names are compared by pointer identity throughout
    // this class, so callers are expected to pass stable string literals or
    // pooled strings for `name` — confirm at the call sites.
    void AddModule(const char *name, EntityModule *module) {
        // Reuse RemoveModule instead of duplicating the lookup/erase loop.
        RemoveModule(name);
        modules.push_back({name, module});
    }

    // Returns the module registered under `name`, or nullptr if absent.
    EntityModule *GetModule(const char *name) {
        for (auto &module : modules) {
            if (module.first == name) {
                return module.second;
            }
        }
        return nullptr;
    }

    // Deletes and unregisters the module stored under `name`; no-op if absent.
    void RemoveModule(const char *name) {
        for (auto it = modules.begin(); it != modules.end(); it++) {
            if (it->first == name) {
                delete it->second;
                modules.erase(it);
                break;
            }
        }
    }

    // Mutable access to the entity's custom key/value variables.
    std::vector<CustomVariable> &GetCustomVariables() {
        return custom_variables;
    }

    // Mutable access to the entity's custom outputs.
    std::vector<CustomOutput> &GetCustomOutputs() {
        return custom_outputs;
    }

private:
    std::vector<std::pair<const char *, EntityModule *>> modules;
    std::vector<CustomVariable> custom_variables;
    std::vector<CustomOutput> custom_outputs;
};
// Extra data base for entities that can carry item/player attributes.
// The fast attribute cache is currently disabled (commented out).
class ExtraEntityDataWithAttributes : public ExtraEntityData
{
public:
    ExtraEntityDataWithAttributes(CBaseEntity *entity) : ExtraEntityData(entity) {}
    // float *fast_attribute_cache;
    // ~ExtraEntityDataWithAttributes() {
    //    delete[] fast_attribute_cache;
    // }
};
// Extra data for econ entities (items); adds nothing beyond attributes yet.
class ExtraEntityDataEconEntity : public ExtraEntityDataWithAttributes
{
public:
    ExtraEntityDataEconEntity(CBaseEntity *entity) : ExtraEntityDataWithAttributes(entity) {}
};
// Extra data for combat characters; adds nothing beyond attributes yet.
class ExtraEntityDataCombatCharacter : public ExtraEntityDataWithAttributes
{
public:
    ExtraEntityDataCombatCharacter(CBaseEntity *entity) : ExtraEntityDataWithAttributes(entity) {}
};
// Extra data for combat weapons. The per-weapon fast attribute cache is
// currently disabled (commented out).
class ExtraEntityDataCombatWeapon : public ExtraEntityDataEconEntity
{
public:
    ExtraEntityDataCombatWeapon(CBaseEntity *entity) : ExtraEntityDataEconEntity(entity) {
        // fast_attribute_cache = fast_attrib_cache_data;
        // for (int i = 0; i < FastAttributes::ATTRIB_COUNT_ITEM; i++) {
        //    fast_attrib_cache_data[i] = FLT_MIN;
        // }
    }
    //float[FastAttributes::ATTRIB_COUNT_ITEM] fast_attrib_cache_data;
};
// EntityModule carrying homing/guidance parameters for a projectile.
// Defaults leave homing disabled.
// NOTE(review): field semantics below are inferred from the names; confirm
// against the projectile-steering code that consumes this module.
class HomingRockets : public EntityModule
{
public:
    HomingRockets() {}
    HomingRockets(CBaseEntity *entity) {}

    bool enable = false;                 // master switch for homing behavior
    bool ignore_disguised_spies = true;
    bool ignore_stealthed_spies = true;
    bool follow_crosshair = false;       // steer toward aim point rather than a target
    float speed = 1.0f;
    float turn_power = 0.0f;
    float min_dot_product = -0.25f;      // acquisition angle threshold (cosine)
    float aim_time = 9999.0f;
    float acceleration = 0.0f;
    float acceleration_time = 9999.0f;
    float acceleration_start = 0.0f;
    float gravity = 0.0f;
};
// Extra data for projectiles. Owns the optional homing-settings module.
class ExtraEntityDataProjectile : public ExtraEntityData
{
public:
    ExtraEntityDataProjectile(CBaseEntity *entity) : ExtraEntityData(entity) {}

    ~ExtraEntityDataProjectile() {
        // `delete` on nullptr is a no-op; the check is kept for clarity.
        if (homing != nullptr) {
            delete homing;
        }
    }

    // Homing parameters; null until homing is enabled for this projectile.
    // FIX: previously left uninitialized, so the destructor's null check and
    // delete read an indeterminate pointer when homing was never assigned.
    HomingRockets *homing = nullptr;
};
// Extra data for players. The per-player fast attribute cache is currently
// disabled (commented out).
class ExtraEntityDataPlayer : public ExtraEntityDataCombatCharacter
{
public:
    ExtraEntityDataPlayer(CBaseEntity *entity) : ExtraEntityDataCombatCharacter(entity) {
        // fast_attribute_cache = fast_attrib_cache_data;
        // for (int i = 0; i < FastAttributes::ATTRIB_COUNT_PLAYER; i++) {
        //    fast_attrib_cache_data[i] = FLT_MIN;
        // }
    }
    //float[FastAttributes::ATTRIB_COUNT_PLAYER] fast_attrib_cache_data;
};
// Extra data for bot players; adds nothing beyond player data yet.
class ExtraEntityDataBot : public ExtraEntityDataPlayer
{
public:
    ExtraEntityDataBot(CBaseEntity *entity) : ExtraEntityDataPlayer(entity) {}
};
// Extra data for func_rotating brush entities: the entity they track/face.
class ExtraEntityDataFuncRotating : public ExtraEntityData
{
public:
    ExtraEntityDataFuncRotating(CBaseEntity *entity) : ExtraEntityData(entity) {}
    CHandle<CBaseEntity> m_hRotateTarget;
};
// Extra data for trigger-based target detectors.
class ExtraEntityDataTriggerDetector : public ExtraEntityData
{
public:
    ExtraEntityDataTriggerDetector(CBaseEntity *entity) : ExtraEntityData(entity) {}

    CHandle<CBaseEntity> m_hLastTarget;     // most recently detected target
    CHandle<CBaseEntity> m_hYRotateEntity;
    CHandle<CBaseEntity> m_hXRotateEntity;
    // FIX: default-initialized to false; previously uninitialized, so the
    // first read of the has-target flag saw an indeterminate value.
    bool m_bHasTarget = false;
};
// Extra data for $weapon_spawner entities: handles to the weapons it spawned.
class ExtraEntityDataWeaponSpawner : public ExtraEntityData
{
public:
    ExtraEntityDataWeaponSpawner(CBaseEntity *entity) : ExtraEntityData(entity) {}
    std::vector<CHandle<CBaseEntity>> m_SpawnedWeapons;
};
/////////////
// Returns the entity's attribute-capable extra data, creating the most
// specific subclass (player / weapon / wearable) on first use when `create`
// is set. Returns null for entity types that cannot carry attributes.
// NOTE(review): unchecked static_cast — assumes any pre-existing data was
// created with a matching type.
inline ExtraEntityDataWithAttributes *GetExtraEntityDataWithAttributes(CBaseEntity *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        if (entity->IsPlayer()) {
            data = entity->m_extraEntityData = new ExtraEntityDataPlayer(entity);
        }
        else if (entity->IsBaseCombatWeapon()) {
            data = entity->m_extraEntityData = new ExtraEntityDataCombatWeapon(entity);
        }
        else if (entity->IsWearable()) {
            data = entity->m_extraEntityData = new ExtraEntityDataWithAttributes(entity);
        }
    }
    return static_cast<ExtraEntityDataWithAttributes *>(data);
}
// Returns the player's extra data, lazily creating it when `create` is set;
// may return null when `create` is false and no data exists yet.
inline ExtraEntityDataPlayer *GetExtraPlayerData(CTFPlayer *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataPlayer(entity);
    }
    return static_cast<ExtraEntityDataPlayer *>(data);
}
// Returns the weapon's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataCombatWeapon *GetExtraWeaponData(CBaseCombatWeapon *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataCombatWeapon(entity);
    }
    return static_cast<ExtraEntityDataCombatWeapon *>(data);
}
// Returns the bot's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataBot *GetExtraBotData(CTFPlayer *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataBot(entity);
    }
    return static_cast<ExtraEntityDataBot *>(data);
}
// Returns the projectile's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataProjectile *GetExtraProjectileData(CBaseProjectile *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataProjectile(entity);
    }
    return static_cast<ExtraEntityDataProjectile *>(data);
}
// Returns the func_rotating's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataFuncRotating *GetExtraFuncRotatingData(CFuncRotating *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataFuncRotating(entity);
    }
    return static_cast<ExtraEntityDataFuncRotating *>(data);
}
// Returns the trigger detector's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataTriggerDetector *GetExtraTriggerDetectorData(CBaseEntity *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataTriggerDetector(entity);
    }
    return static_cast<ExtraEntityDataTriggerDetector *>(data);
}
// Returns the weapon spawner's extra data, lazily creating it when `create` is set.
inline ExtraEntityDataWeaponSpawner *GetExtraWeaponSpawnerData(CBaseEntity *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new ExtraEntityDataWeaponSpawner(entity);
    }
    return static_cast<ExtraEntityDataWeaponSpawner *>(data);
}
// Generic accessor: returns the entity's extra data as DataClass, lazily
// constructing DataClass(entity) when `create` is set and none exists.
// NOTE(review): unchecked static_cast — assumes existing data is a DataClass.
template< typename DataClass, typename EntityClass>
inline DataClass *GetExtraData(EntityClass *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new DataClass(entity);
    }
    return static_cast<DataClass *>(data);
}
// Single-parameter overload of the generic accessor for plain CBaseEntity.
template<typename DataClass>
inline DataClass *GetExtraData(CBaseEntity *entity, bool create = true) {
    ExtraEntityData *data = entity->m_extraEntityData;
    if (create && entity->m_extraEntityData == nullptr) {
        data = entity->m_extraEntityData = new DataClass(entity);
    }
    return static_cast<DataClass *>(data);
}
// Allocates the most specific ExtraEntityData subclass for `entity`, stores
// it on the entity, and returns it. Falls back to the plain base class when
// no specialized type applies.
inline ExtraEntityData *CreateExtraData(CBaseEntity *entity) {
    static PooledString weapon_spawner("$weapon_spawner");
    // Players / weapons / wearables: attribute-capable data.
    ExtraEntityData *data = GetExtraEntityDataWithAttributes(entity, true);
    if (data != nullptr) {
        return data;
    }
    auto projectile = rtti_cast<CBaseProjectile *>(entity);
    if (projectile != nullptr) {
        return entity->m_extraEntityData = new ExtraEntityDataProjectile(projectile);
    }
    auto rotating = rtti_cast<CFuncRotating *>(entity);
    if (rotating != nullptr) {
        return entity->m_extraEntityData = new ExtraEntityDataFuncRotating(rotating);
    }
    auto trigger = rtti_cast<CBaseTrigger *>(entity);
    if (trigger != nullptr) {
        return entity->m_extraEntityData = new ExtraEntityDataTriggerDetector(trigger);
    }
    if (entity->GetClassname() == weapon_spawner) {
        // FIX: previously passed `trigger`, which is guaranteed null on this
        // path (the trigger branch above would have returned otherwise).
        return entity->m_extraEntityData = new ExtraEntityDataWeaponSpawner(entity);
    }
    return entity->m_extraEntityData = new ExtraEntityData(entity);
}
// Returns the entity's extra data, creating a type-appropriate instance via
// CreateExtraData when `create` is set; may return null when `create` is false.
inline ExtraEntityData *GetExtraData(CBaseEntity *entity, bool create = true) {
    if (!create || entity->m_extraEntityData != nullptr) {
        return entity->m_extraEntityData;
    }
    return CreateExtraData(entity);
}
////////
// Returns the module registered under `name` cast to ModuleType, or null if
// the entity has no extra data or no such module.
template<class ModuleType>
inline ModuleType *CBaseEntity::GetEntityModule(const char* name)
{
    auto data = this->GetExtraEntityData();
    return data != nullptr ? static_cast<ModuleType *>(data->GetModule(name)) : nullptr;
}
// Returns the module registered under `name`, constructing and registering a
// new ModuleType(this) on first use.
// NOTE(review): if a module of a different type was already stored under
// `name`, it is returned via an unchecked static_cast.
template<class ModuleType>
inline ModuleType *CBaseEntity::GetOrCreateEntityModule(const char* name)
{
    auto data = GetExtraData(this);
    auto module = data->GetModule(name);
    if (module == nullptr) {
        module = new ModuleType(this);
        data->AddModule(name, module);
    }
    return static_cast<ModuleType *>(module);
}
// Registers `module` under `name`, creating extra data if needed; ownership
// of `module` transfers to the entity's extra data.
inline void CBaseEntity::AddEntityModule(const char* name, EntityModule *module)
{
    GetExtraData(this)->AddModule(name, module);
}
// Deletes and unregisters the module stored under `name`; no-op when the
// entity has no extra data or no such module.
inline void CBaseEntity::RemoveEntityModule(const char* name)
{
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        data->RemoveModule(name);
    }
}
// Convenience accessor for an entity's custom variables, creating the
// backing extra data on first use.
inline std::vector<CustomVariable> &GetCustomVariables(CBaseEntity *entity)
{
    return GetExtraData(entity)->GetCustomVariables();
}
// Returns the string value of the custom variable named by the compile-time
// literal `lit`, or `defValue` if unset. The pooled key is interned once per
// template instantiation, so the lookup compares pooled handles.
template<FixedString lit>
inline const char *CBaseEntity::GetCustomVariable(const char *defValue)
{
    static PooledString pooled(lit);
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        auto &attrs = data->GetCustomVariables();
        for (auto &var : attrs) {
            if (var.key == pooled) {
                return STRING(var.value);
            }
        }
    }
    return defValue;
}
// Returns the cached float conversion of the custom variable named by `lit`,
// or `defValue` if unset.
template<FixedString lit>
inline float CBaseEntity::GetCustomVariableFloat(float defValue)
{
    static PooledString pooled(lit);
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        auto &attrs = data->GetCustomVariables();
        for (auto &var : attrs) {
            if (var.key == pooled) {
                return var.value_float;
            }
        }
    }
    return defValue;
}
// Runtime-keyed variant of GetCustomVariable: matches `key` against stored
// variables (pointer fast-path, then case-insensitive comparison) and
// returns the value, or `defValue` if unset.
inline const char *CBaseEntity::GetCustomVariableByText(const char *key, const char *defValue)
{
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        auto &attrs = data->GetCustomVariables();
        for (auto &var : attrs) {
            // Pointer equality short-circuits the stricmp for pooled strings.
            if (STRING(var.key) == key || stricmp(STRING(var.key), key) == 0) {
                return STRING(var.value);
            }
        }
    }
    return defValue;
}
// Sets (or adds) the custom variable `key` to `value`, matching existing
// keys case-insensitively. The float conversion of `value` is cached
// alongside the string for fast numeric reads.
inline void CBaseEntity::SetCustomVariable(const char *key, const char *value)
{
    auto &list = GetExtraData(this)->GetCustomVariables();
    bool found = false;
    for (auto &var : list) {
        // Pointer equality short-circuits the stricmp for pooled strings.
        if (STRING(var.key) == key || stricmp(key, STRING(var.key)) == 0) {
            var.value = AllocPooledString(value);
            var.value_float = strtof(value, nullptr);
            found = true;
            break;
        }
    }
    if (!found) {
        list.push_back({AllocPooledString(key), AllocPooledString(value), strtof(value, nullptr)});
    }
}
// Parses `value` as an event action and attaches it to the custom output
// `key` (stored lowercased). If an output with that key already exists, the
// new action is appended to it; otherwise a new output entry is created.
inline void CBaseEntity::AddCustomOutput(const char *key, const char *value)
{
    std::string namestr = key;
    boost::algorithm::to_lower(namestr);
    auto &list = GetExtraData(this)->GetCustomOutputs();
    bool found = false;
    for (auto &var : list) {
        if (STRING(var.key) == namestr.c_str() || stricmp(namestr.c_str(), STRING(var.key)) == 0) {
            var.output.ParseEventAction(value);
            found = true;
            break;
        }
    }
    if (!found) {
        list.emplace_back();
        list.back().key = AllocPooledString(namestr.c_str());
        list.back().output.ParseEventAction(value);
    }
}
// Fires the custom output named by the compile-time literal `lit` with the
// given activator/caller/parameter; no-op when the output does not exist.
// NOTE(review): `lit` is expected to already be lowercase, since
// AddCustomOutput stores keys lowercased — confirm at call sites.
template<FixedString lit>
inline void CBaseEntity::FireCustomOutput(CBaseEntity *activator, CBaseEntity *caller, variant_t variant)
{
    static PooledString pooled(lit);
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        auto &attrs = data->GetCustomOutputs();
        for (auto &var : attrs) {
            if (var.key == pooled) {
                var.output.FireOutput(variant, activator, caller);
                return;
            }
        }
    }
}
// Removes the custom output registered under `key` (matched after
// lowercasing, case-insensitively); no-op when the entity has no extra data
// or no matching output.
inline void CBaseEntity::RemoveCustomOutput(const char *key)
{
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        std::string namestr = key;
        boost::algorithm::to_lower(namestr);
        auto &list = data->GetCustomOutputs();
        // FIX: dropped the unused `found` local — the loop exits via return.
        for (auto it = list.begin(); it != list.end(); it++) {
            if (STRING(it->key) == namestr.c_str() || stricmp(namestr.c_str(), STRING(it->key)) == 0) {
                list.erase(it);
                return;
            }
        }
    }
}
// Clears every custom output on this entity; no-op when it has no extra data.
inline void CBaseEntity::RemoveAllCustomOutputs()
{
    auto data = this->GetExtraEntityData();
    if (data != nullptr) {
        data->GetCustomOutputs().clear();
    }
}
|
import styled from 'styled-components'
// Search form pinned to the viewport; the top-to-bottom dark gradient keeps
// it legible while letting the page content underneath show through.
export const SSearchForm = styled.form`
  position: fixed;
  background-image: linear-gradient(
    to top,
    rgba(15, 15, 15, 0.6),
    rgba(15, 15, 15, 0.5),
    rgba(15, 15, 15, 0.4),
    rgba(15, 15, 15, 0.3),
    rgba(15, 15, 15, 0.2),
    rgba(15, 15, 15, 0.08),
    rgba(15, 15, 15, 0)
  );
  z-index: 10;
`
|
#!/usr/bin/env python
'''
Tools for making nice plots
Uses matplotlib backend (currently wx or gtk) to provide tools for editing
matplotlib plots after they have been created
NOTE: I have imported all of pyplot into this module, so you may call it
as though you were calling pyplot directly. The difference is that with
some of the functions, such as plot, a window will be brought up allowing
you to toggle the visibility of the artists
'''
import matplotlib
if __name__ != '__main__':
from matplotlib.pyplot import *
def plot( *args, **kwargs):
    '''
    Works the same as matplotlib.pyplot.plot, with the addition of a panel
    being created which allows the user to toggle visibility of
    the artists in the axis.

    Returns whatever matplotlib.pyplot.plot returns (a list of Line2D).
    '''
    # Forward to pyplot, then attach the visibility browser to the current axes.
    lines = matplotlib.pyplot.plot( *args, **kwargs )
    create_plot_browser( matplotlib.pyplot.gca() )
    # FIX: return pyplot's artists so this wrapper really behaves like
    # matplotlib.pyplot.plot for callers that use the return value.
    return lines
# end plot
def scatter( *args, **kwargs):
    '''
    Works the same as matplotlib.pyplot.scatter, with the addition of a panel
    being created which allows the user to toggle visibility of
    the artists in the axis.

    Returns whatever matplotlib.pyplot.scatter returns (a PathCollection).
    '''
    # Forward to pyplot, then attach the visibility browser to the current axes.
    collection = matplotlib.pyplot.scatter( *args, **kwargs )
    create_plot_browser( matplotlib.pyplot.gca() )
    # FIX: return pyplot's artist so this wrapper really behaves like
    # matplotlib.pyplot.scatter for callers that use the return value.
    return collection
# end scatter
def create_plot_browser( ax ):
    '''Creates a window for modifying how artists are displayed in an axis'''
    # Call browser creation code appropriate to the current backend.
    # The backend-specific modules are imported lazily so only the toolkit
    # actually in use needs to be installed.
    backend = matplotlib.get_backend().lower()
    if backend == 'wxagg':
        import plotting_tools_wx
        plotting_tools_wx.create_plot_browser( ax )
    elif backend == 'gtkagg':
        import plotting_tools_gtk
        plotting_tools_gtk.create_plot_browser( ax )
    # NOTE(review): any other backend falls through silently with no browser.
# end create_plot_browser
if __name__=='__main__':
    # Manual test: plot a few artists with the requested backend and open
    # the visibility browser against the resulting axes.
    import argparse
    parser = argparse.ArgumentParser('Test plotting_tools module')
    parser.add_argument('backend', help='Backend to use with matplotlib. Currently supports wxagg and gtkagg')
    args = parser.parse_args()
    # The backend must be selected before pyplot is imported.
    matplotlib.use( args.backend )
    import matplotlib.pyplot
    fig = matplotlib.pyplot.figure()
    ax = fig.add_subplot(111)
    ax.plot( [1,2,3] )
    ax.scatter( [1,2,3], [3,2,1], color='g')
    ax.plot( [2, 2, 1], 'r' )
    fig.show()
    create_plot_browser( ax )
    matplotlib.pyplot.show()
|
/**
 * Action types and action creators for the name-list AJAX workflow.
 * All members are static; the class is used purely as a namespace.
 */
export class ajaxAction {
  static INSERT_NAME = "INSERT_NAME";
  static INSERT_NAME_SUCCESS = "INSERT_NAME_SUCCESS";
  static INSERT_NAME_FAILED = "INSERT_NAME_FAILED";

  static GET_NAMES = "GET_NAMES";
  static GET_NAMES_SUCCESS = "GET_NAMES_SUCCESS";
  static GET_NAMES_FAILED = "GET_NAMES_FAILED";

  /**
   * Builds the action requesting that `name` be inserted.
   * @param {string} name - The name to insert.
   * @returns {{type: string, payload: string}} Redux action.
   */
  static insertName(name) {
    const type = ajaxAction.INSERT_NAME;
    return { type, payload: name };
  }

  /**
   * Builds the action requesting the full list of names.
   * @returns {{type: string}} Redux action.
   */
  static getNames() {
    return { type: ajaxAction.GET_NAMES };
  }
}
|
# Standard Library Imports
import argparse
import logging
import sys
import os
# True when running under Python 3; selects the compatibility aliases below.
PY3 = sys.version_info >= (3, 0)
# The text type: str on Python 3, unicode on Python 2.
unicode_type = type(u"")
if PY3:
    real_input = input
    import urllib.parse as urlparse
else:
    # noinspection PyUnresolvedReferences
    real_input = raw_input
    # noinspection PyUnresolvedReferences
    import urlparse
# Names re-exported by this compatibility module.
__all__ = ["urlparse", "real_input", "ensure_native_str"]
class CacheProperty(object):
    """
    Converts a class method into a property and caches the result after first use.

    When the property is accessed for the first time, the result is computed
    and returned; the class property is then shadowed by an instance attribute
    holding the computed result, so the wrapped function runs at most once per
    instance.
    """

    def __init__(self, func):
        self.__name__ = func.__name__
        self.__doc__ = func.__doc__
        self._func = func

    def __get__(self, instance, owner):
        # Accessed on the class itself: return the descriptor for introspection.
        # FIX: previously tested truthiness (`if instance:`), which returned
        # the descriptor instead of the value for falsy instances (e.g. an
        # instance whose __len__ returns 0).
        if instance is None:
            return self
        attr = self._func(instance)
        setattr(instance, self.__name__, attr)
        return attr
class RealPath(argparse.Action):
    """
    Custom action to convert given path to a full canonical path.

    Eliminating any symbolic links, expanding user path and environment
    variables if encountered.
    """

    def __call__(self, _, namespace, value, option_string=None):
        # Decode the raw command-line argument first, then canonicalize it.
        value = unicode_cmdargs(value)
        path = fullpath(value)
        setattr(namespace, self.dest, path)
class RealPathList(argparse.Action):
    """
    Custom action to convert a list of paths to a full canonical list of paths.

    Eliminating any symbolic links, expanding user path and environment
    variables if encountered.
    """

    def __call__(self, _, namespace, values, option_string=None):
        # Decode each argument, then canonicalize each path.
        values = map(unicode_cmdargs, values)
        values = map(fullpath, values)
        setattr(namespace, self.dest, list(values))
class UrlList(argparse.Action):
    """
    Custom action to decode a list of URL arguments to unicode.

    NOTE(review): the previous docstring was copy-pasted from RealPathList;
    unlike that action, this one performs no path canonicalization.
    """

    def __call__(self, _, namespace, values, option_string=None):
        values = map(unicode_cmdargs, values)
        setattr(namespace, self.dest, list(values))
class CommaList(argparse.Action):
    """
    Custom action to split multiple parameters which are
    separated by a comma, and append them to an empty list.
    """

    def __call__(self, _, namespace, values, option_string=None):
        values = unicode_cmdargs(values)
        # Strip surrounding whitespace from each comma-separated item.
        items = [value.strip() for value in values.split(",")]
        setattr(namespace, self.dest, items)
class CusstomStreamHandler(logging.StreamHandler):
    """
    A handler class which writes logging records, appropriately formatted, to a stream.

    Debug & Info records are written to sys.stdout; WARNING and above are
    written to sys.stderr.
    """

    def __init__(self):
        super(CusstomStreamHandler, self).__init__(sys.stdout)

    # noinspection PyBroadException
    def emit(self, record):
        """Swap out the stdout stream with stderr if log level is WARNING or greater."""
        # FIX: use the named logging.WARNING constant instead of magic 30.
        if record.levelno >= logging.WARNING:
            # Temporarily point the handler at stderr, restoring the original
            # stream even if formatting/emission raises.
            org_stream = self.stream
            self.stream = sys.stderr
            try:
                super(CusstomStreamHandler, self).emit(record)
            finally:
                self.stream = org_stream
        else:
            super(CusstomStreamHandler, self).emit(record)
class CustomFormatter(object):
    """Log formatter that picks a per-logger format, falling back to a default."""

    def __init__(self):
        base_fmt = "%(relativeCreated)-19s %(levelname)5s: %(message)s"
        dev_fmt = "%(relativeCreated)-19s %(levelname)5s: [kodi-addon-dev] %(message)s"
        self.default_fmt = logging.Formatter(base_fmt)
        self.fmts = {"kodi.dev": logging.Formatter(dev_fmt)}

    def format(self, record):
        """Format `record` with the formatter registered for its logger name."""
        chosen = self.fmts.get(record.name, self.default_fmt)
        return chosen.format(record)
def ensure_native_str(data, encoding="utf8"):
    """
    Ensures that given string is returned as a native str type, bytes on python2 or unicode on python3.

    :param data: String to convert if needed.
    :param encoding: The encoding to use when encoding/decoding.
    :returns: The given value as a native str.
    :rtype: str
    """
    if isinstance(data, str):
        return data
    elif isinstance(data, bytes):
        # Only executes on python 3 (on python 2, bytes IS str and is caught above)
        return data.decode(encoding)
    elif isinstance(data, unicode_type):
        # Only executes on python 2
        return data.encode(encoding)
    else:
        # FIX: the conversion was previously computed but never returned,
        # so non-string inputs yielded None.
        return str(data)
def ensure_unicode(data, encoding="utf8"):
    """
    Ensures that given string is return as a unicode string.

    :param data: String to convert if needed.
    :param encoding: The encoding to use when decoding.
    :returns: The given string as unicode.
    :rtype: unicode
    """
    if isinstance(data, bytes):
        return data.decode(encoding)
    else:
        # Coerce any non-bytes value (including non-strings) to text.
        return unicode_type(data)
def unicode_cmdargs(cmdarg):
    """Convert a command line string to unicode, trying several encodings."""
    if isinstance(cmdarg, bytes):
        try:
            # Prefer the filesystem encoding, which may legitimately fail
            return cmdarg.decode(sys.getfilesystemencoding())
        except UnicodeDecodeError:
            try:
                # Attempt decoding using utf8
                return cmdarg.decode("utf8")
            except UnicodeDecodeError:
                # Fall back to latin-1, which maps every byte and cannot fail
                return cmdarg.decode("latin-1")
    else:
        # Already text; return unchanged.
        return cmdarg
def fullpath(path):  # type: (str) -> str
    """
    Converts the given path to a full canonical path, eliminating any symbolic
    links and expanding the user directory and environment variables if
    encountered.
    """
    expanded = os.path.expandvars(path)
    expanded = os.path.expanduser(expanded)
    return os.path.realpath(expanded)
|
import math
def find_locations_of_minimum_and_maximum_skew_value(text):
    """
    Scans `text`, tracking the running GC skew (count of 'G' minus count of
    'C' seen so far), and returns a 4-tuple: the minimum skew value, the
    maximum skew value, and the lists of 0-based indices at which each
    extreme was attained.
    """
    min_skew = math.inf
    max_skew = -math.inf
    min_locations = []
    max_locations = []
    skew = 0
    for index, base in enumerate(text):
        # Update the running skew for this base.
        if base == 'G':
            skew += 1
        elif base == 'C':
            skew -= 1
        # Record a new or repeated minimum.
        if skew < min_skew:
            min_skew = skew
            min_locations = [index]
        elif skew == min_skew:
            min_locations.append(index)
        # Record a new or repeated maximum.
        if skew > max_skew:
            max_skew = skew
            max_locations = [index]
        elif skew == max_skew:
            max_locations.append(index)
    return min_skew, max_skew, min_locations, max_locations
|
import json
import requests
from threading import Thread
class NetApp_OCUM_HTTP(object):
    """
    Class object for handling HTTP requests/responses for the OCUM.

    GET requests are fanned out across one thread per Accept type, and the
    per-type responses are then merged into a single list of objects keyed
    by object ID.
    """

    def __init__(self, settings):
        # `settings` supplies api_host, api_port, api_user, api_password and
        # verify_ssl (see the request construction below).
        self.settings = settings
        # URL path of the request in flight; drives nested-ID extraction.
        self.path = None

    def _GET_worker(self, url, params, accept, responses, embedded_key):
        """
        Worker for performing threaded HTTP GET requests.

        Stores the embedded objects for `accept` into the shared `responses`
        dict (dict item assignment is thread-safe under the GIL).
        """
        response = requests.get(url,
            auth = (self.settings.api_user, self.settings.api_password),
            verify = self.settings.verify_ssl,
            headers = {
                'Accept': 'application/vnd.netapp.object.inventory.{}.hal+json'.format(accept)
            },
            params = params
        )

        # Request failed
        # NOTE(review): raising inside a worker thread does not propagate to
        # GET(); the failing accept type is simply missing from the merge.
        if not int(response.status_code) == 200:
            raise Exception('Failed to GET {0}: {1}'.format(url, response.json()))
        responses[accept] = response.json()['_embedded'][embedded_key]

    def _sort_response_by_id(self, response, name_from):
        """
        Sort the responses by object ID.

        Merges the per-accept-type payloads into one dict per object ID, then
        flattens that into a list of objects. `name_from` is a 3-tuple
        (accept_type, outer_key, inner_key) identifying where the display
        name lives.
        """
        objects_by_id = {}
        for accept_type, accept_responses in response.items():
            for accept_response in accept_responses:
                current_id = None

                # Object has a top level ID
                if 'id' in accept_response:
                    current_id = accept_response['id']

                # Object ID is nested
                else:
                    # Extract ID for volume relationship
                    if self.path == 'volumes' and accept_type == 'relationship':
                        # Map to the source volume if found, otherwise destination volume
                        if accept_response['source_volume']['id']:
                            current_id = accept_response['source_volume']['id']
                        else:
                            current_id = accept_response['destination_volume']['id']

                    # Extract ID for aggregate capacity
                    if self.path == 'aggregates' and accept_type == 'capacity':
                        current_id = accept_response['aggregate']['id']

                    # Extract ID for node capacity
                    if self.path == 'nodes' and accept_type == 'capacity':
                        current_id = accept_response['node']['id']

                # FIX: idiomatic membership test (`not in` instead of `not x in`).
                if current_id not in objects_by_id:
                    objects_by_id[current_id] = {
                        'type': self.path
                    }
                if accept_type == name_from[0]:
                    objects_by_id[current_id]['name'] = accept_response[name_from[1]][name_from[2]]
                objects_by_id[current_id][accept_type] = accept_response

        # Convert to an array of objects
        object_array = []
        for object_id, object_attrs in objects_by_id.items():
            object_item = object_attrs
            object_item['id'] = object_id
            object_array.append(object_item)
        return object_array

    def GET(self, url_path, accept, embedded_key, name_from, params=None):
        """
        Make a GET request to the OCUM API method with an optional filter.

        Starts a thread for each accept type, waits for all of them, and
        returns the merged objects sorted by ID.
        """
        # FIX: avoid the shared mutable default argument (`params={}`).
        if params is None:
            params = {}
        responses = {}
        threads = []
        self.path = url_path

        api_url = 'https://{0}:{1}/api/ontap/{2}'.format(
            self.settings.api_host,
            self.settings.api_port,
            url_path
        )

        # Get objects for each type of endpoint based on Accept header
        for accept_type in accept:
            t = Thread(target=self._GET_worker, args=(api_url, params, accept_type, responses, embedded_key))
            t.start()
            threads.append(t)
        for t in threads:
            t.join()

        # Sort response objects by ID and return
        return self._sort_response_by_id(responses, name_from)
|
#pragma once
#include "base.h"
#include "checkpoint_mgr.h"
#include "ioloop.h"
#include "learner_synchronizer.h"
#include "paxos_log.h"
namespace paxos {

class Acceptor;
// FIX: this forward declaration was previously misspelled "CheckpoingMgr";
// the type actually used below is CheckpointMgr (declared in checkpoint_mgr.h).
class CheckpointMgr;
class StateMachineFac;

// Paxos learner role: tracks chosen values, synchronizes lagging replicas,
// and forwards learned instances to followers and the state machines.
class Learner : public Base {
public:
    Learner(
        const Config* config,
        const Communicate* communicate,
        const Instance* instance,
        const Acceptor* acceptor,
        const LogStorage* log_storage,
        const IoLoop* ioloop,
        const CheckpointMgr* checkpoint_mgr,
        const StateMachineFac* state_machine_fac
    );
    ~Learner();

    // Highest instance ID this learner knows to be chosen.
    const uint64_t GetLatestInstanceID();
    virtual void InitInstance();
    // True once the current instance's value has been learned.
    const bool IsLearned();
    void InitLearnerSynchronizer();
    // Records a learned value without persisting it to the paxos log.
    void LearnValueWithoutWrite(
        const uint64_t instance_id,
        const std::string & val,
        const uint32_t checksum
    );
    void TransmitToFollower();
    // Broadcast that the proposer's value was accepted for `instance_id`.
    void ProposerSendSuccess(
        const uint64_t instance_id,
        const uint64_t proposal_id
    );
    void OnProposerSendSuccess(const PaxosMsg& paxos_msg);
    void Stop();

private:
    Acceptor* acceptor_;
    LearnerSynchronizer learner_synchronizer_;
    uint64_t highest_instance_id_;
    IoLoop* ioloop_;
    std::string learned_val_;
    bool is_learned_;
    uint32_t new_checksum_;
    Config* config_;
    PaxosLog paxos_log_;
    uint32_t ask_for_learn_noop_timer_id_;
    // Highest instance ID observed from peers, and its reporting node.
    uint64_t highest_seen_instance_id_;
    uint64_t highest_seen_instance_id_from_node_id_;
    bool is_im_learning_;
    uint64_t last_ack_instance_id_;
    CheckpointMgr * checkpoint_mgr_;
    StateMachineFac * state_machine_fac_;
};

}
|
# prefer setuptools over distutils
from setuptools import setup, find_packages
# use a consistent encoding
from codecs import open
from os import path
import json
import sys
# True on Python 2; used below to exclude the async-only subpackages.
is_python_2 = sys.version_info < (3, 0)

here = path.abspath(path.dirname(__file__))
root = path.dirname(here)
readme_rst = path.join(here, 'README.rst')
package_json = path.join(here, 'package.json')

# a workaround when installing locally from git repository with pip install -e .
if not path.isfile(package_json):
    package_json = path.join(root, 'package.json')

# long description from README file
with open(readme_rst, encoding='utf-8') as f:
    long_description = f.read()

# version number and all other params from package.json
with open(package_json, encoding='utf-8') as f:
    package = json.load(f)

# Package metadata is sourced from package.json so the Python and JS
# distributions stay in sync.
setup(
    name=package['name'],
    version=package['version'],
    description=package['description'],
    long_description=long_description,
    url=package['homepage'],
    author=package['author']['name'],
    author_email=package['author']['email'],
    license=package['license'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Financial and Insurance Industry',
        'Intended Audience :: Information Technology',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Office/Business :: Financial :: Investment',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: JavaScript',
        'Programming Language :: PHP',
        'Operating System :: OS Independent',
        'Environment :: Console'
    ],
    keywords=package['keywords'],
    # The async subpackages use Python-3-only syntax, so skip them on py2.
    packages=find_packages(exclude=['ccxt.async_support*'] if is_python_2 else []),
    install_requires=[
        'setuptools>=38.5.1',
        'certifi>=2018.1.18',
        'requests>=2.18.4',
        'autobahn>=18.6.1',
        'pyee>=5.0.0'
    ],
    extras_require={
        ':python_version>="3.5.2"': [
            'aiohttp>=3.0.1',
            'aiodns==1.1.1',
            'yarl==1.1.0',
        ],
        'qa': [
            'flake8==3.5.0'
        ],
        'doc': [
            'Sphinx==1.7.0'
        ]
    }
)
|
import React, { useEffect, useState } from "react"
import '../Css/OrderInterface.css'
import './AddressForm'
import Nav from './Nav'
// Food-ordering screen: searches recipes via the Edamam API, keeps a basket
// mirrored in localStorage (key 'orders', JSON array of recipe titles), and
// "checks out" by calling a local backend on port 4000.
const OrderInterface = () => {
  // NOTE(review): API credentials are hard-coded in source; they should live
  // in environment configuration, not version control.
  const APP_ID = '6fa68a69'
  const APP_KEY = '761aa985f178a745368429f58f681626'
  const [items, setItems] = useState([])      // recipe search results (Edamam "hits")
  const [search, setSearch] = useState('')    // controlled value of the search input
  const [query, setQuery] = useState('pizza') // committed search term
  // Re-run the recipe search whenever the committed query changes.
  useEffect(() => {
    getItems()
    // eslint-disable-next-line
  }, [query])
  // Fetch recipes matching `query` and store the raw hits array.
  const getItems = async () => {
    const response = await fetch(`https://api.edamam.com/search?q=${query}&app_id=${APP_ID}&app_key=${APP_KEY}`)
    const data = await response.json()
    setItems(data.hits)
  }
  const updateSearch = e => {
    setSearch(e.target.value)
  }
  // Commit the typed text as the new query and clear the input.
  const getSearch = e => {
    e.preventDefault()
    setQuery(search)
    setSearch('')
  }
  // Append a recipe title to the basket, persisting the whole list back
  // to localStorage.
  const updateBasket = (title) => {
    var newOrders = []
    var prevOrders = localStorage.getItem('orders')
    let a = JSON.parse(prevOrders)
    if (a !== null) {
      a.forEach(element => {
        newOrders.push(element)
      });
    }
    newOrders.push(title)
    localStorage.setItem('orders', JSON.stringify(newOrders))
    setBasket(newOrders)
    // NOTE(review): `basket` here is the state from the current render —
    // the setBasket above has not applied yet, so this check lags one item.
    if (basket !== null) {
      if (basket.length > 8) {
        alert('You are a probably fat, so stop ordering more because I have developed a bad component that will go off screen if you continue and then you cant checkout')
      }
    }
  }
  // Record the order on the backend (fire-and-forget).
  // NOTE(review): both the resolution and any error are silently ignored.
  function userCreate(resturantAdress, adress) {
    var email = localStorage.getItem('email')
    fetch(`http://localhost:4000/orders/add?customer_email=${email}&address_from=${resturantAdress}&address_to=${adress}`)
      .then()
      .catch(err => {
      })
  }
  // Checkout flow: look up the customer's saved address, confirm/override it
  // via prompt, archive the order under a rotating 'order<id>' key, clear the
  // basket, and notify the backend.
  const checkout = async () => {
    if (basket !== null) {
      if (basket.length < 1) {
        alert('Your basket is empty')
      } else {
        // alert('Thanks for your order! \n We will send a car for your order and you will be notified when it has arrived')
        let email = localStorage.getItem("email")
        let customerAddress = ""
        let response = await fetch(`http://localhost:4000/customers/getAddress?customer_email=${email}`)
        let json = await response.json()
        // presumably the backend wraps a SQL result set; rows[0].address is
        // the saved address — TODO confirm response schema against the server
        let array = await json.data.rows
        if (array.length > 0) {
          customerAddress = array[0].address
        }
        let address = prompt("Please enter your adress", customerAddress)
        if (address === null || address === "") {
          console.log("User cancelled the prompt.")
        } else {
          // localStorage.clear()
          let id = localStorage.getItem('id')
          // Storing null writes the string "null"; JSON.parse maps it back to
          // null, which the render-time checks below rely on.
          localStorage.setItem('orders', null)
          localStorage.setItem('order' + id, orders + ';' + address)
          let newId = Number(id) + 1
          localStorage.setItem('id', newId)
          // let old = localStorage.getItem('address');
          // if (old === null) old = "";
          // localStorage.setItem('address', old + address + ',');
          setBasket([])
          let resturantAddress = 'resturant' + Math.floor(Math.random() * 100)
          userCreate(resturantAddress, address)
        }
      }
    } else {
      alert('Your basket is empty')
    }
  }
  // On mount, rehydrate the basket from localStorage.
  useEffect(() => {
    let getData = localStorage.getItem('orders')
    let basketItems = JSON.parse(getData)
    setBasket(basketItems)
  }, [])
  // Remove the first occurrence of `title` from the basket and re-persist.
  // NOTE(review): splices the state array in place before setBasket — works
  // here only because a fresh array is re-read from localStorage afterwards.
  const deleteItem = (title) => {
    for (let index = 0; index < basket.length; index++) {
      if (basket[index] === title) {
        basket.splice(index, 1)
        break
      }
    }
    localStorage.setItem('orders', JSON.stringify(basket))
    var retriveData = localStorage.getItem('orders')
    var orders = JSON.parse(retriveData)
    setBasket(orders)
  }
  const [basket, setBasket] = useState([])
  // Render-time snapshot of the persisted basket, used by the JSX below.
  var retriveData = localStorage.getItem('orders')
  var orders = JSON.parse(retriveData)
  // Keep the rotating order id within [0, 20].
  let checkId = localStorage.getItem('id')
  if (checkId === null) {
    localStorage.setItem('id', 0)
  } else if (Number(checkId) > 20) {
    localStorage.setItem('id', 1)
  }
  return (
    <div className="OrderInterface">
      <Nav />
      <form onSubmit={getSearch} className="search-form">
        <input className="search-bar" type="text" value={search} onChange={updateSearch} />
        <button className="search-button" type="submit">Search</button>
      </form>
      {items.map(r => (
        <div className="Food">
          <h1>{r.recipe.label}</h1>
          <img src={r.recipe.image} alt="" />
          <br></br>
          <button onClick={() => updateBasket(r.recipe.label)}>Add Item</button>
        </div>
      ))}
      <div className="Basket">
        <h1>Basket:</h1>
        {orders === null ? <p>Currenlty empty</p> : <p></p>}
        { }
        {orders === null ? '' : basket.map(r => (
          <div>
            <h3>{r}</h3>
            <button onClick={() => deleteItem(r)}>X</button>
          </div>
        ))}
        <br></br>
        <button onClick={checkout}>Checkout</button>
      </div>
    </div>
  )
}
export default OrderInterface
|
"""
Script to display Inspirational quotes on Ubuntu Notification tab.
"""
import os
import sys
import random
import subprocess
class Quotify:
    """Display inspirational quotes as desktop notifications via notify-send."""

    def generate_quote_notification(self):
        """
        Caller function to send desktop notification of quote.
        """
        quotes = self.get_file_contents("resources/quotes.txt")
        quote = self.get_quote(quotes)
        self.display_quote(quote)

    def get_file_contents(self, filepath):
        """
        Returns the quotes from file in list (one line per entry).
        """
        # Context manager ensures the file handle is closed promptly
        # (the original left it open until garbage collection).
        with open(filepath) as quotes_file:
            return quotes_file.readlines()

    def get_quote(self, quotes, seed=None):
        """
        Returns a random quote from the list of text.

        :param quotes: non-empty list of quote strings
        :param seed: optional seed for reproducible selection
        """
        random.seed(seed)
        return random.choice(quotes)

    def display_quote(self, quote):
        """
        Executes notify-send to display the quote.
        WARNING: ONLY works on linux distros with notify-send program. Tested on Ubuntu.
        """
        # Pass arguments as a list and avoid shell=True so a quote containing
        # shell metacharacters (quotes, $, ;, backticks) cannot be interpreted
        # as shell syntax.  Strip the trailing newline readlines() leaves in.
        subprocess.Popen(["notify-send", "Wisdom Dose ", quote.strip()])
def main():
    """
    Entry point for the quotes script.

    Returns None, so sys.exit() below exits with status 0.
    """
    Quotify().generate_quote_notification()


if __name__ == '__main__':
    sys.exit(main())
|
from _common import *

# Banner lines make the setup phase easy to spot in combined test logs.
print("-*-*-*-*-*-*-*-*-*-*-*Setup begins*-*-*-*-*-*-*-*-*-*-*-*-*-")
# Remove leftover per-user records from earlier runs (helper from _common).
delete_for_users()
print("-*-*-*-*-*-*-*-*-*-*-*Setup ends*-*-*-*-*-*-*-*-*-*-*-*-*-*-")
|
/*
* Copyright (c) 2003, 2007-14 Matteo Frigo
* Copyright (c) 2003, 2007-14 Massachusetts Institute of Technology
*
* The following statement of license applies *only* to this header file,
* and *not* to the other files distributed with FFTW or derived therefrom:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/***************************** NOTE TO USERS *********************************
*
* THIS IS A HEADER FILE, NOT A MANUAL
*
* If you want to know how to use FFTW, please read the manual,
* online at http://www.fftw.org/doc/ and also included with FFTW.
* For a quick start, see the manual's tutorial section.
*
* (Reading header files to learn how to use a library is a habit
* stemming from code lacking a proper manual. Arguably, it's a
* *bad* habit in most cases, because header files can contain
* interfaces that are not part of the public, stable API.)
*
****************************************************************************/
#ifndef FFTW3_H
#define FFTW3_H
#include <stdio.h>
#ifdef __cplusplus
extern "C"
{
#endif /* __cplusplus */
/* If <complex.h> is included, use the C99 complex type.  Otherwise
   define a type bit-compatible with C99 complex */
#if !defined(FFTW_NO_Complex) && defined(_Complex_I) && defined(complex) && defined(I)
#  define FFTW_DEFINE_COMPLEX(R, C) typedef R _Complex C
#else
#  define FFTW_DEFINE_COMPLEX(R, C) typedef R C[2]
#endif

/* FFTW_CONCAT pastes the per-precision prefix onto an API name; the
   FFTW_MANGLE_* macros pick the prefix for each precision
   (fftw_ = double, fftwf_ = float, fftwl_ = long double, fftwq_ = __float128). */
#define FFTW_CONCAT(prefix, name) prefix ## name
#define FFTW_MANGLE_DOUBLE(name) FFTW_CONCAT(fftw_, name)
#define FFTW_MANGLE_FLOAT(name) FFTW_CONCAT(fftwf_, name)
#define FFTW_MANGLE_LONG_DOUBLE(name) FFTW_CONCAT(fftwl_, name)
#define FFTW_MANGLE_QUAD(name) FFTW_CONCAT(fftwq_, name)

/* IMPORTANT: for Windows compilers, you should add a line
        #define FFTW_DLL
   here and in kernel/ifftw.h if you are compiling/using FFTW as a
   DLL, in order to do the proper importing/exporting, or
   alternatively compile with -DFFTW_DLL or the equivalent
   command-line flag.  This is not necessary under MinGW/Cygwin, where
   libtool does the imports/exports automatically. */
#if defined(FFTW_DLL) && (defined(_WIN32) || defined(__WIN32__))
   /* annoying Windows syntax for shared-library declarations */
#  if defined(COMPILING_FFTW) /* defined in api.h when compiling FFTW */
#    define FFTW_EXTERN extern __declspec(dllexport)
#  else /* user is calling FFTW; import symbol */
#    define FFTW_EXTERN extern __declspec(dllimport)
#  endif
#else
#  define FFTW_EXTERN extern
#endif

/* specify calling convention (Windows only) */
#if defined(_WIN32) || defined(__WIN32__)
#  define FFTW_CDECL __cdecl
#else
#  define FFTW_CDECL
#endif
/* Real-to-real transform kinds shared by all precisions.  Do not use this
   tag directly; use the per-precision typedef X(r2r_kind) generated below. */
enum fftw_r2r_kind_do_not_use_me {
     FFTW_R2HC=0, FFTW_HC2R=1, FFTW_DHT=2,
     FFTW_REDFT00=3, FFTW_REDFT01=4, FFTW_REDFT10=5, FFTW_REDFT11=6,
     FFTW_RODFT00=7, FFTW_RODFT01=8, FFTW_RODFT10=9, FFTW_RODFT11=10
};

/* Dimension descriptor for the guru interface (int strides/sizes). */
struct fftw_iodim_do_not_use_me {
     int n;                     /* dimension size */
     int is;                    /* input stride */
     int os;                    /* output stride */
};

#include <stddef.h> /* for ptrdiff_t */

/* 64-bit dimension descriptor for the guru64 interface (large arrays). */
struct fftw_iodim64_do_not_use_me {
     ptrdiff_t n;                     /* dimension size */
     ptrdiff_t is;                    /* input stride */
     ptrdiff_t os;                    /* output stride */
};

/* Callback types used by the wisdom import/export routines. */
typedef void (FFTW_CDECL *fftw_write_char_func_do_not_use_me)(char c, void *);
typedef int (FFTW_CDECL *fftw_read_char_func_do_not_use_me)(void *);
/*
huge second-order macro that defines prototypes for all API
functions. We expand this macro for each supported precision
X: name-mangling macro
R: real data type
C: complex data type
*/
#define FFTW_DEFINE_API(X, R, C) \
\
FFTW_DEFINE_COMPLEX(R, C); \
\
typedef struct X(plan_s) *X(plan); \
\
typedef struct fftw_iodim_do_not_use_me X(iodim); \
typedef struct fftw_iodim64_do_not_use_me X(iodim64); \
\
typedef enum fftw_r2r_kind_do_not_use_me X(r2r_kind); \
\
typedef fftw_write_char_func_do_not_use_me X(write_char_func); \
typedef fftw_read_char_func_do_not_use_me X(read_char_func); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute)(const X(plan) p); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft)(int rank, const int *n, \
C *in, C *out, int sign, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_1d)(int n, C *in, C *out, int sign, \
unsigned flags); \
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_2d)(int n0, int n1, \
C *in, C *out, int sign, unsigned flags); \
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_3d)(int n0, int n1, int n2, \
C *in, C *out, int sign, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_many_dft)(int rank, const int *n, \
int howmany, \
C *in, const int *inembed, \
int istride, int idist, \
C *out, const int *onembed, \
int ostride, int odist, \
int sign, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_dft)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
C *in, C *out, \
int sign, unsigned flags); \
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_split_dft)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
R *ri, R *ii, R *ro, R *io, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_dft)(int rank, \
const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
C *in, C *out, \
int sign, unsigned flags); \
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_split_dft)(int rank, \
const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
R *ri, R *ii, R *ro, R *io, \
unsigned flags); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_dft)(const X(plan) p, C *in, C *out); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_split_dft)(const X(plan) p, R *ri, R *ii, \
R *ro, R *io); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_many_dft_r2c)(int rank, const int *n, \
int howmany, \
R *in, const int *inembed, \
int istride, int idist, \
C *out, const int *onembed, \
int ostride, int odist, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_r2c)(int rank, const int *n, \
R *in, C *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_r2c_1d)(int n,R *in,C *out,unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_r2c_2d)(int n0, int n1, \
R *in, C *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_r2c_3d)(int n0, int n1, \
int n2, \
R *in, C *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_many_dft_c2r)(int rank, const int *n, \
int howmany, \
C *in, const int *inembed, \
int istride, int idist, \
R *out, const int *onembed, \
int ostride, int odist, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_c2r)(int rank, const int *n, \
C *in, R *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_c2r_1d)(int n,C *in,R *out,unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_c2r_2d)(int n0, int n1, \
C *in, R *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_dft_c2r_3d)(int n0, int n1, \
int n2, \
C *in, R *out, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_dft_r2c)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
R *in, C *out, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_dft_c2r)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
C *in, R *out, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_split_dft_r2c)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
R *in, R *ro, R *io, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_split_dft_c2r)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
R *ri, R *ii, R *out, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_dft_r2c)(int rank, \
const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
R *in, C *out, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_dft_c2r)(int rank, \
const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
C *in, R *out, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_split_dft_r2c)(int rank, const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
R *in, R *ro, R *io, \
unsigned flags); \
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_split_dft_c2r)(int rank, const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
R *ri, R *ii, R *out, \
unsigned flags); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_dft_r2c)(const X(plan) p, R *in, C *out); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_dft_c2r)(const X(plan) p, C *in, R *out); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_split_dft_r2c)(const X(plan) p, \
R *in, R *ro, R *io); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_split_dft_c2r)(const X(plan) p, \
R *ri, R *ii, R *out); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_many_r2r)(int rank, const int *n, \
int howmany, \
R *in, const int *inembed, \
int istride, int idist, \
R *out, const int *onembed, \
int ostride, int odist, \
const X(r2r_kind) *kind, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_r2r)(int rank, const int *n, R *in, R *out, \
const X(r2r_kind) *kind, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_r2r_1d)(int n, R *in, R *out, \
X(r2r_kind) kind, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_r2r_2d)(int n0, int n1, R *in, R *out, \
X(r2r_kind) kind0, X(r2r_kind) kind1, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_r2r_3d)(int n0, int n1, int n2, \
R *in, R *out, X(r2r_kind) kind0, \
X(r2r_kind) kind1, X(r2r_kind) kind2, \
unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru_r2r)(int rank, const X(iodim) *dims, \
int howmany_rank, \
const X(iodim) *howmany_dims, \
R *in, R *out, \
const X(r2r_kind) *kind, unsigned flags); \
\
FFTW_EXTERN X(plan) \
FFTW_CDECL X(plan_guru64_r2r)(int rank, const X(iodim64) *dims, \
int howmany_rank, \
const X(iodim64) *howmany_dims, \
R *in, R *out, \
const X(r2r_kind) *kind, unsigned flags); \
\
FFTW_EXTERN void \
FFTW_CDECL X(execute_r2r)(const X(plan) p, R *in, R *out); \
\
FFTW_EXTERN void \
FFTW_CDECL X(destroy_plan)(X(plan) p); \
\
FFTW_EXTERN void \
FFTW_CDECL X(forget_wisdom)(void); \
FFTW_EXTERN void \
FFTW_CDECL X(cleanup)(void); \
\
FFTW_EXTERN void \
FFTW_CDECL X(set_timelimit)(double t); \
\
FFTW_EXTERN void \
FFTW_CDECL X(plan_with_nthreads)(int nthreads); \
\
FFTW_EXTERN int \
FFTW_CDECL X(planner_nthreads)(void); \
\
FFTW_EXTERN int \
FFTW_CDECL X(init_threads)(void); \
\
FFTW_EXTERN void \
FFTW_CDECL X(cleanup_threads)(void); \
\
FFTW_EXTERN void \
FFTW_CDECL X(threads_set_callback)( \
void (*parallel_loop)(void *(*work)(char *), \
char *jobdata, size_t elsize, int njobs, void *data), void *data); \
\
FFTW_EXTERN void \
FFTW_CDECL X(make_planner_thread_safe)(void); \
\
FFTW_EXTERN int \
FFTW_CDECL X(export_wisdom_to_filename)(const char *filename); \
\
FFTW_EXTERN void \
FFTW_CDECL X(export_wisdom_to_file)(FILE *output_file); \
\
FFTW_EXTERN char * \
FFTW_CDECL X(export_wisdom_to_string)(void); \
\
FFTW_EXTERN void \
FFTW_CDECL X(export_wisdom)(X(write_char_func) write_char, \
void *data); \
FFTW_EXTERN int \
FFTW_CDECL X(import_system_wisdom)(void); \
\
FFTW_EXTERN int \
FFTW_CDECL X(import_wisdom_from_filename)(const char *filename); \
\
FFTW_EXTERN int \
FFTW_CDECL X(import_wisdom_from_file)(FILE *input_file); \
\
FFTW_EXTERN int \
FFTW_CDECL X(import_wisdom_from_string)(const char *input_string); \
\
FFTW_EXTERN int \
FFTW_CDECL X(import_wisdom)(X(read_char_func) read_char, void *data); \
\
FFTW_EXTERN void \
FFTW_CDECL X(fprint_plan)(const X(plan) p, FILE *output_file); \
\
FFTW_EXTERN void \
FFTW_CDECL X(print_plan)(const X(plan) p); \
\
FFTW_EXTERN char * \
FFTW_CDECL X(sprint_plan)(const X(plan) p); \
\
FFTW_EXTERN void * \
FFTW_CDECL X(malloc)(size_t n); \
\
FFTW_EXTERN R * \
FFTW_CDECL X(alloc_real)(size_t n); \
FFTW_EXTERN C * \
FFTW_CDECL X(alloc_complex)(size_t n); \
\
FFTW_EXTERN void \
FFTW_CDECL X(free)(void *p); \
\
FFTW_EXTERN void \
FFTW_CDECL X(flops)(const X(plan) p, \
double *add, double *mul, double *fmas); \
FFTW_EXTERN double \
FFTW_CDECL X(estimate_cost)(const X(plan) p); \
\
FFTW_EXTERN double \
FFTW_CDECL X(cost)(const X(plan) p); \
\
FFTW_EXTERN int \
FFTW_CDECL X(alignment_of)(R *p); \
\
FFTW_EXTERN const char X(version)[]; \
FFTW_EXTERN const char X(cc)[]; \
FFTW_EXTERN const char X(codelet_optim)[];
/* end of FFTW_DEFINE_API macro */
/* Expand the API once per supported precision. */
FFTW_DEFINE_API(FFTW_MANGLE_DOUBLE, double, fftw_complex)
FFTW_DEFINE_API(FFTW_MANGLE_FLOAT, float, fftwf_complex)
FFTW_DEFINE_API(FFTW_MANGLE_LONG_DOUBLE, long double, fftwl_complex)

/* __float128 (quad precision) is a gcc extension on i386, x86_64, and ia64
   for gcc >= 4.6 (compiled in FFTW with --enable-quad-precision) */
#if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) \
 && !(defined(__ICC) || defined(__INTEL_COMPILER) || defined(__CUDACC__) || defined(__PGI)) \
 && (defined(__i386__) || defined(__x86_64__) || defined(__ia64__))
#  if !defined(FFTW_NO_Complex) && defined(_Complex_I) && defined(complex) && defined(I)
/* note: __float128 is a typedef, which is not supported with the _Complex
   keyword in gcc, so instead we use this ugly __attribute__ version.
   However, we can't simply pass the __attribute__ version to
   FFTW_DEFINE_API because the __attribute__ confuses gcc in pointer
   types.  Hence redefining FFTW_DEFINE_COMPLEX.  Ugh. */
#    undef FFTW_DEFINE_COMPLEX
#    define FFTW_DEFINE_COMPLEX(R, C) typedef _Complex float __attribute__((mode(TC))) C
#  endif
FFTW_DEFINE_API(FFTW_MANGLE_QUAD, __float128, fftwq_complex)
#endif
/* Sign of the exponent in the DFT these plans compute. */
#define FFTW_FORWARD (-1)
#define FFTW_BACKWARD (+1)

/* Sentinel for fftw_set_timelimit: no limit on planning time. */
#define FFTW_NO_TIMELIMIT (-1.0)

/* documented flags (passed as the `flags` argument of plan creation;
   MEASURE < PATIENT < EXHAUSTIVE is increasing planner effort,
   ESTIMATE skips measurement entirely) */
#define FFTW_MEASURE (0U)
#define FFTW_DESTROY_INPUT (1U << 0)
#define FFTW_UNALIGNED (1U << 1)
#define FFTW_CONSERVE_MEMORY (1U << 2)
#define FFTW_EXHAUSTIVE (1U << 3) /* NO_EXHAUSTIVE is default */
#define FFTW_PRESERVE_INPUT (1U << 4) /* cancels FFTW_DESTROY_INPUT */
#define FFTW_PATIENT (1U << 5) /* IMPATIENT is default */
#define FFTW_ESTIMATE (1U << 6)
#define FFTW_WISDOM_ONLY (1U << 21)

/* undocumented beyond-guru flags (internal planner knobs; subject to
   change between releases) */
#define FFTW_ESTIMATE_PATIENT (1U << 7)
#define FFTW_BELIEVE_PCOST (1U << 8)
#define FFTW_NO_DFT_R2HC (1U << 9)
#define FFTW_NO_NONTHREADED (1U << 10)
#define FFTW_NO_BUFFERING (1U << 11)
#define FFTW_NO_INDIRECT_OP (1U << 12)
#define FFTW_ALLOW_LARGE_GENERIC (1U << 13) /* NO_LARGE_GENERIC is default */
#define FFTW_NO_RANK_SPLITS (1U << 14)
#define FFTW_NO_VRANK_SPLITS (1U << 15)
#define FFTW_NO_VRECURSE (1U << 16)
#define FFTW_NO_SIMD (1U << 17)
#define FFTW_NO_SLOW (1U << 18)
#define FFTW_NO_FIXED_RADIX_LARGE_N (1U << 19)
#define FFTW_ALLOW_PRUNING (1U << 20)
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif /* FFTW3_H */
|
/*
* Copyright 2009-2017 Alibaba Cloud All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ALIBABACLOUD_DRDS_MODEL_FLASHBACKRECYCLEBINTABLERESULT_H_
#define ALIBABACLOUD_DRDS_MODEL_FLASHBACKRECYCLEBINTABLERESULT_H_
#include <string>
#include <vector>
#include <utility>
#include <alibabacloud/core/ServiceResult.h>
#include <alibabacloud/drds/DrdsExport.h>
namespace AlibabaCloud
{
namespace Drds
{
namespace Model
{
// Result wrapper for the DRDS FlashbackRecycleBinTable API call.
// Parses the service's response payload into simple boolean fields.
class ALIBABACLOUD_DRDS_EXPORT FlashbackRecycleBinTableResult : public ServiceResult
{
public:
	FlashbackRecycleBinTableResult();
	// Construct and immediately parse a raw response payload.
	explicit FlashbackRecycleBinTableResult(const std::string &payload);
	~FlashbackRecycleBinTableResult();
	// Accessors for the fields parsed out of the response.
	bool getData()const;
	bool getSuccess()const;
protected:
	// Populate data_/success_ from the raw payload string.
	void parse(const std::string &payload);
private:
	bool data_;     // value of the response's "Data" field
	bool success_;  // value of the response's "Success" field
};
}
}
}
#endif // !ALIBABACLOUD_DRDS_MODEL_FLASHBACKRECYCLEBINTABLERESULT_H_
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
import unittest
import os
import json
class DetectionTest(unittest.TestCase):
    """Smoke tests for the `detection` subcommands of the jdcloud CLI.

    Each test shells out to ``python ../../main.py detection <verb> ...``
    via ``os.popen``, prints the captured stdout, and asserts only that the
    output parses as a JSON object — i.e. the command ran and returned a
    well-formed response.  Placeholder arguments ('xxx', '{"":""}') are
    auto-generated; the commands must be run from this file's directory.
    Note: the command strings are generated to match the CLI verbatim
    (including the ``discribe-probes`` spelling) and must not be altered.
    """

    def test_describe_agent_status(self):
        cmd = """python ../../main.py detection describe-agent-status --filters '[{"":""}]'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_describe_metric_data_am(self):
        cmd = """python ../../main.py detection describe-metric-data-am --resource-id 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_create_probe_task(self):
        cmd = """python ../../main.py detection create-probe-task --client-token 'xxx' --create-probe-task-spec '{"":""}'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_delete_probe_task(self):
        cmd = """python ../../main.py detection delete-probe-task --probe-task-ids 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_describe_probe_task(self):
        cmd = """python ../../main.py detection describe-probe-task --probe-task-id 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_update_probe_task(self):
        cmd = """python ../../main.py detection update-probe-task --probe-task-id 'xxx' --probes '[{"":""}]'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_describe_probe_history(self):
        cmd = """python ../../main.py detection describe-probe-history --probe-task-id 'xxx' --probe-id 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_discribe_probes(self):
        cmd = """python ../../main.py detection discribe-probes --probe-task-id 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_probe_task_enable(self):
        cmd = """python ../../main.py detection probe-task-enable """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_describe_probe_tasks(self):
        cmd = """python ../../main.py detection describe-probe-tasks """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_get_site_monitor(self):
        cmd = """python ../../main.py detection get-site-monitor """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_create_site_monitor(self):
        cmd = """python ../../main.py detection create-site-monitor --address 'xxx' --cycle '5' --name 'xxx' --source '[{"":""}]' --task-type 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_update_site_monitor(self):
        cmd = """python ../../main.py detection update-site-monitor --address 'xxx' --cycle '5' --name 'xxx' --source '[{"":""}]' --task-type 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_delete_site_monitor(self):
        cmd = """python ../../main.py detection delete-site-monitor """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_enable_site_monitor(self):
        cmd = """python ../../main.py detection enable-site-monitor """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_validate_site_monitor_address(self):
        cmd = """python ../../main.py detection validate-site-monitor-address """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_get_site_monitor_data_points(self):
        cmd = """python ../../main.py detection get-site-monitor-data-points """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_get_site_monitor_source(self):
        cmd = """python ../../main.py detection get-site-monitor-source """
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_test_site_monitor(self):
        cmd = """python ../../main.py detection test-site-monitor --address 'xxx' --cycle '5' --name 'xxx' --source '[{"":""}]' --task-type 'xxx'"""
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)
|
import configureStore from './configureStore';
import history from './utils/history';

// The app-wide Redux store, created once at module load with an empty
// preloaded state and the shared history instance.
const preloadedState = {};
const store = configureStore(preloadedState, history);

// Re-export dispatch so non-component code can dispatch actions directly.
const { dispatch } = store;

export { dispatch };
export default store;
|