repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
WinstonLHS/tofu
tofu/tests/tests06_mesh/test_01_checks.py
""" This module contains tests for tofu.geom in its structured version """ # Built-in import os import shutil import itertools as itt import warnings # Standard import numpy as np import matplotlib.pyplot as plt # tofu-specific from tofu import __version__ import tofu as tf import tofu.data as tfd _HERE = os.path.abspath(os.path.dirname(__file__)) _PATH_DATA = os.path.join(_HERE, 'test_data') _TOFU_USER = os.path.join(os.path.expanduser("~"), '.tofu') _CUSTOM = os.path.dirname(os.path.dirname(os.path.dirname(_HERE))) _CUSTOM = os.path.join(_CUSTOM, 'scripts', 'tofucustom.py') VerbHead = 'tofu.mesh.test_01_checks' ####################################################### # # Setup and Teardown # ####################################################### def setup_module(): print("Removing user ~/.tofu/ if any") if os.path.isdir(_TOFU_USER): shutil.rmtree(_TOFU_USER) # Recreating clean .tofu # out = subprocess.run(_CUSTOM, stdout=PIPE, stderr=PIPE) os.system('python '+_CUSTOM) def teardown_module(): print("Removing user ~/.tofu/ if any") if os.path.isdir(_TOFU_USER): shutil.rmtree(_TOFU_USER) ####################################################### # # checking routines # ####################################################### class Test01_checks(): @classmethod def setup_class(cls): pass @classmethod def setup(self): pass def teardown(self): pass @classmethod def teardown_class(cls): pass def test01_mesh2DRect_X_check(self): lx = [[1, 2], [1, 2, 3, 4]] lres = [None, 10, 0.1, [0.1, 0.2], [0.1, 0.2, 0.3, 0.1]] for comb in itt.product(lx, lres): if hasattr(lres, '__iter__') and len(lres) != len(lx): continue x, res, ind = tfd._mesh_checks._mesh2DRect_X_check( x=[1, 2, 3, 4], res=10, ) if hasattr(lres, '__iter__'): assert x_new.size == np.unique(x_new).size == res.size + 1 ####################################################### # # object mesh2D # ####################################################### class Test02_Mesh2D(): @classmethod def setup_class(cls): pass def 
setup(self): self.dobj = { 'm0': tfd.Mesh2D(), 'm1': tfd.Mesh2D(), 'm2': tfd.Mesh2D(), 'm3': tfd.Mesh2D(), } # add mesh ldomain = [ [[2, 3], [-1, 1]], [[2, 2.3, 2.6, 3], [-1, 0., 1]], [[2, 3], [-1, 0, 1]], ] lres = [ 0.1, [[0.2, 0.1, 0.1, 0.2], [0.2, 0.1, 0.2]], [0.1, [0.2, 0.1, 0.2]], ] i0 = 0 for ii, (k0, v0) in enumerate(self.dobj.items()): if k0 != 'm2': self.dobj[k0].add_mesh( domain=ldomain[i0], res=lres[i0], key=k0, ) i0 += 1 else: self.dobj[k0].add_mesh( crop_poly=tf.load_config('WEST'), res=0.1, key=k0, ) # add splines for ii, (k0, v0) in enumerate(self.dobj.items()): self.dobj[k0].add_bsplines(deg=ii) # Add triangular mesh knots = np.array([ [2, 0], [2, 1], [3, 0], [3, 1], ]) faces = np.array([[0, 1, 2], [1, 2, 3]]) self.dobjtri = { 'tri0': tf.data.Mesh2D(), 'tri1': tf.data.Mesh2D(), } self.dobjtri['tri0'].add_mesh(cents=faces, knots=knots, key='tri0') # Add realistic NICE mesh for WEST pfe = os.path.join(_PATH_DATA, 'mesh_triangular_WEST_eq.txt') out = np.loadtxt(pfe) nknots, ncents = int(out[0, 0]), int(out[0, 1]) assert out.shape == (nknots + ncents + 1, 3) knots = out[1:nknots + 1, :][:, :2] cents = out[nknots + 1:, :] self.dobjtri['tri1'].add_mesh(cents=cents, knots=knots, key='tri1') # add splines for ii, (k0, v0) in enumerate(self.dobjtri.items()): self.dobjtri[k0].add_bsplines(deg=ii) def teardown(self): pass @classmethod def teardown_class(cls): pass def test01_get_summary(self): for ii, (k0, v0) in enumerate(self.dobj.items()): self.dobj[k0].get_summary() for ii, (k0, v0) in enumerate(self.dobjtri.items()): self.dobjtri[k0].get_summary() def test02_select_ind(self): # Rect mesh lkey = ['m0', 'm1-bs1', 'm2', 'm3-bs3'] lelements = ['knots', None, 'cents', None] lind = [None, ([0, 5], [0, 6]), [0, 10, 100], ([0, 5, 6], [0, 2, 3])] lcrop = [True, False, True, False] for ii, (k0, v0) in enumerate(self.dobj.items()): indt = self.dobj[k0].select_ind( key=lkey[ii], ind=lind[ii], elements=lelements[ii], returnas=tuple, crop=lcrop[ii], ) indf = 
self.dobj[k0].select_ind( key=lkey[ii], ind=indt, elements=lelements[ii], returnas=np.ndarray, crop=lcrop[ii], ) indt2 = self.dobj[k0].select_ind( key=lkey[ii], ind=indf, elements=lelements[ii], returnas=tuple, crop=lcrop[ii], ) assert all([np.allclose(indt[ii], indt2[ii]) for ii in [0, 1]]) # triangular meshes lkeys = ['tri0', 'tri0', 'tri1'] lind = [None, [1], 1] lelements = ['knots', None, 'cents'] for ii, k0 in enumerate(lkeys): out = self.dobjtri[k0].select_ind( key=k0, ind=lind[ii], elements=lelements[ii], returnas=int, crop=lcrop[ii], ) if ii == 0: assert np.allclose(out, np.r_[0, 1, 2, 3]) elif ii >= 1: assert np.allclose(out, np.r_[1]) def test03_select_mesh(self): # rectangular meshes lkey = ['m0', 'm1', 'm2', 'm3'] lind = [None, ([0, 5], [0, 6]), [0, 10, 100], ([0, 5, 6], [0, 2, 3])] lelements = ['cents', 'knots', 'cents', None] lreturnas = ['ind', 'data', 'data', 'ind'] lreturn_neig = [None, True, False, True] lcrop = [False, True, True, False] for ii, (k0, v0) in enumerate(self.dobj.items()): indf = self.dobj[k0].select_mesh_elements( key=lkey[ii], ind=lind[ii], elements=lelements[ii], returnas=lreturnas[ii], return_neighbours=lreturn_neig[ii], crop=lcrop[ii], ) # triangular meshes lkeys = ['tri0', 'tri0', 'tri0', 'tri1'] lind = [None, [1], 1, [0, 1]] lelements = ['knots', None, 'cents', 'cents'] lreturnas = ['ind', 'data', 'ind', 'data'] for ii, k0 in enumerate(lkeys): out = self.dobjtri[k0].select_mesh_elements( key=k0, ind=lind[ii], elements=lelements[ii], returnas=lreturnas[ii], return_neighbours=True, crop=lcrop[ii], ) def test04_select_bsplines(self): # rectangular meshes lkey = ['m0-bs0', 'm1-bs1', 'm2-bs2', 'm3-bs3'] lind = [None, ([0, 5], [0, 6]), [0, 10, 100], ([0, 5, 6], [0, 2, 3])] lreturnas = [None, 'data', 'data', 'ind'] lreturn_cents = [None, True, False, True] lreturn_knots = [None, False, True, True] for ii, (k0, v0) in enumerate(self.dobj.items()): indf = self.dobj[k0].select_bsplines( key=lkey[ii], ind=lind[ii], 
returnas=lreturnas[ii], return_cents=lreturn_cents[ii], return_knots=lreturn_knots[ii], ) # triangular meshes lkeys = ['tri0', 'tri0', 'tri0', 'tri1'] lkeysbs = ['tri0-bs0', None, 'tri0-bs0', 'tri1-bs1'] lind = [None, [1], 1, [0, 1]] lelements = ['knots', None, 'cents', 'cents'] lreturnas = ['ind', 'data', 'ind', 'data'] for ii, k0 in enumerate(lkeys): indf = self.dobjtri[k0].select_bsplines( key=lkeysbs[ii], ind=lind[ii], returnas=lreturnas[ii], return_cents=lreturn_cents[ii], return_knots=lreturn_knots[ii], ) def test05_sample_mesh(self): # rectangular meshes lres = [None, 0.1, 0.01, [0.1, 0.05]] lmode = [None, 'rel', 'abs', 'abs'] lgrid = [None, True, False, False] for ii, (k0, v0) in enumerate(self.dobj.items()): out = v0.get_sample_mesh( res=lres[ii], grid=lgrid[ii], mode=lmode[ii], ) # triangular meshes lkeys = ['tri0', 'tri0', 'tri0', 'tri1'] lres = [None, 0.1, 0.01, [0.1, 0.05]] lmode = [None, 'rel', 'abs', 'abs'] lgrid = [None, True, False, False] for ii, k0 in enumerate(lkeys): out = self.dobjtri[k0].get_sample_mesh( res=lres[ii], grid=lgrid[ii], mode=lmode[ii], ) """ def test06_sample_bspline(self): lres = [None, 0.1, 0.01, [0.1, 0.05]] lmode = [None, 'rel', 'abs', 'abs'] lgrid = [None, True, False, False] for ii, (k0, v0) in enumerate(self.dobj.items()): out = v0.get_sample_bspline( res=lres[ii], grid=lgrid[ii], mode=lmode[ii], ) """ def test07_ev_details_vs_sum(self): x = np.linspace(2.2, 2.8, 5) y = np.linspace(-0.5, 0.5, 5) x = np.tile(x, (y.size, 1)) y = np.tile(y, (x.shape[1], 1)).T # rectangular meshes lkey = ['m0-bs0', 'm1-bs1', 'm2-bs2', 'm3-bs3'] for ii, (k0, v0) in enumerate(self.dobj.items()): val = v0.interp2d( key=lkey[ii], R=x, Z=y, coefs=None, indbs=None, indt=None, grid=False, details=True, reshape=True, res=None, crop=True, nan0=ii % 2 == 0, imshow=False, ) crop = v0.dobj['bsplines'][lkey[ii]]['crop'] if crop is False: shap = np.prod(v0.dobj['bsplines'][lkey[ii]]['shape']) else: shap = v0.ddata[crop]['data'].sum() assert val.shape == 
tuple(np.r_[x.shape, shap]) val_sum = v0.interp2d( key=lkey[ii], R=x, Z=y, coefs=None, indbs=None, indt=None, grid=False, details=False, reshape=True, res=None, crop=True, nan0=ii % 2 == 0, imshow=False, ) indok = ~np.isnan(val_sum[0, ...]) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # Does not work because of knots padding used in func_details # Due to scpinterp._bspl.evaluate_spline()... if False: # To be debugged assert np.allclose( val_sum[0, indok], np.nansum(val, axis=-1)[indok], equal_nan=True, ) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # triangular meshes lkey = ['tri0-bs0', 'tri1-bs1'] for ii, (k0, v0) in enumerate(self.dobjtri.items()): val = v0.interp2d( key=lkey[ii], R=x, Z=y, coefs=None, indbs=None, indt=None, grid=False, details=True, reshape=None, res=None, crop=True, nan0=ii % 2 == 0, imshow=False, ) crop = v0.dobj['bsplines'][lkey[ii]].get('crop', False) if crop is False: shap = np.prod(v0.dobj['bsplines'][lkey[ii]]['shape']) else: shap = v0.ddata[crop]['data'].sum() assert val.shape == tuple(np.r_[x.shape, shap]) val_sum = v0.interp2d( key=lkey[ii], R=x, Z=y, coefs=None, indbs=None, indt=None, grid=False, details=False, reshape=None, res=None, crop=True, nan0=ii % 2 == 0, imshow=False, ) indok = ~np.isnan(val_sum[0, ...]) assert np.allclose( val_sum[0, indok], np.nansum(val, axis=-1)[indok], equal_nan=True, ) def test08_plot_mesh(self): # rectangular meshes lik = [None, ([0, 2], [0, 3]), [2, 3], None] lic = [None, ([0, 2], [0, 3]), None, [2, 3]] for ii, (k0, v0) in enumerate(self.dobj.items()): dax = self.dobj[k0].plot_mesh( ind_knot=lik[ii], ind_cent=lic[ii], ) plt.close('all') # triangular meshes lik = [None, [0, 2], [2, 3], None] lic = [None, [0, 2], None, [2, 3]] for ii, (k0, v0) in enumerate(self.dobjtri.items()): dax = self.dobjtri[k0].plot_mesh( ind_knot=lik[ii], ind_cent=lic[ii], ) plt.close('all') # TBF for triangular def test09_plot_bsplines(self): # rectangular meshes lkey = ['m0-bs0', 'm1-bs1', 'm2-bs2', 'm3-bs3'] lind = [None, ([1, 2], [2, 1]), 
(1, 1), [1, 2, 10]] lknots = [None, True, False, True] lcents = [False, False, True, True] for ii, (k0, v0) in enumerate(self.dobj.items()): dax = self.dobj[k0].plot_bsplines( key=lkey[ii], ind=lind[ii], knots=lknots[ii], cents=lcents[ii], ) plt.close('all') # triangular meshes lkey = ['tri0-bs0', 'tri1-bs1'] # , 'm2-bs2', 'm3-bs3'] lind = [None, [1, 2], (1, 1), [1, 2, 10]] lknots = [None, True, False, True] lcents = [False, False, True, True] for ii, (k0, v0) in enumerate(self.dobjtri.items()): dax = self.dobjtri[k0].plot_bsplines( key=lkey[ii], ind=lind[ii], knots=lknots[ii], cents=lcents[ii], ) plt.close('all') def test10_plot_profile2d(self): # rectangular meshes lkey = ['m0-bs0', 'm1-bs1', 'm2-bs2', 'm3-bs3'] for ii, (k0, v0) in enumerate(self.dobj.items()): key = str(ii) kbs = lkey[ii] ref = self.dobj[k0].dobj['bsplines'][kbs]['ref'] shapebs = self.dobj[k0].dobj['bsplines'][kbs]['shape'] self.dobj[k0].add_data( key=key, data=np.random.random(shapebs), ref=ref, ) dax = self.dobj[k0].plot_profile2d( key=key, ) plt.close('all') # triangular meshes # DEACTIVATED BECAUSE TOO SLOW IN CURRENT VERSION !!! 
if False: lkey = ['tri0-bs0', 'tri1-bs1'] for ii, (k0, v0) in enumerate(self.dobjtri.items()): key = str(ii) kbs = lkey[ii] ref = self.dobjtri[k0].dobj['bsplines'][kbs]['ref'] shapebs = self.dobjtri[k0].dobj['bsplines'][kbs]['shape'] self.dobjtri[k0].add_data( key=key, data=np.random.random(shapebs), ref=ref, ) dax = self.dobjtri[k0].plot_profile2d( key=key, ) plt.close('all') # TBF for triangular def test11_add_bsplines_operator(self): lkey = ['m0-bs0', 'm1-bs1', 'm2-bs2'] lop = ['D0N1', 'D0N2', 'D1N2', 'D2N2'] lgeom = ['linear', 'toroidal'] lcrop = [False, True] dfail = {} for ii, (k0, v0) in enumerate(self.dobj.items()): if ii == 3: continue for comb in itt.product(lop, lgeom, lcrop): deg = self.dobj[k0].dobj['bsplines'][lkey[ii]]['deg'] # only test exact operators if int(comb[0][1]) > deg: # except deg =0 D1N2 if deg == 0 and comb[0] == 'D1N2': pass else: continue try: self.dobj[k0].add_bsplines_operator( key=lkey[ii], operator=comb[0], geometry=comb[1], crop=comb[2], ) except Exception as err: dfail[k0] = ( f"key {lkey[ii]}, op '{comb[0]}', geom '{comb[1]}': " + str(err) ) # Raise error if any fail if len(dfail) > 0: lstr = [f'\t- {k0}: {v0}' for k0, v0 in dfail.items()] msg = ( "The following operators failed:\n" + "\n".join(lstr) ) raise Exception(msg) # TBF for triangular def test12_compute_plot_geometry_matrix(self): # get config and cam conf = tf.load_config('WEST-V0') cam = tf.geom.utils.create_CamLOS1D( pinhole=[3., 1., 0.], orientation=[np.pi, 0., 0], focal=0.1, sensor_nb=50, sensor_size=0.15, config=conf, Diag='SXR', Exp='WEST', Name='cam1', ) # compute geometry matrices for ii, (k0, v0) in enumerate(self.dobj.items()): self.dobj[k0].add_geometry_matrix( cam=cam, res=0.01, crop=True, store=True, ) dax = self.dobj[k0].plot_geometry_matrix( cam=cam, indchan=12, indbf=100, ) plt.close('all')
041616/react-jsonschema-form-test
source/Schema/components/navigationLink/index.js
import { getKeyValues } from 'Schema/utils'; import icon from './properties/icon'; import textColor from './properties/textColor'; import textHoverColor from './properties/textHoverColor'; import backgroundColor from './properties/backgroundColor'; import backgroundHoverColor from './properties/backgroundHoverColor'; import fontFamily from './properties/fontFamily'; const NAME = 'navigationLink'; const properties = Object.assign( {}, fontFamily, textColor, textHoverColor, backgroundColor, backgroundHoverColor, icon, ); const required = Object.keys(properties); const ui = getKeyValues('ui', properties); export default { [NAME]: { type: 'object', title: NAME, description: 'Navigation element (link) such as "show more", "read more", "more" etc.', additionalProperties: false, required, properties, ui, } };
Miaocool/MYG-NewVersion
myg/Src/My/Model/MyBuyListModel.h
<reponame>Miaocool/MYG-NewVersion<filename>myg/Src/My/Model/MyBuyListModel.h<gh_stars>0 // // MyBuyListModel.h // yyxb // // Created by lili on 15/12/3. // Copyright © 2015年 杨易. All rights reserved. // #import "MBTestMainModel.h" @interface MyBuyListModel : MBTestMainModel @property (nonatomic, copy) NSString *shopid; //商品id @property (nonatomic, copy) NSString *canyurenshu; //参与人数 @property (nonatomic, copy) NSString *gonumber; //购买次数 @property (nonatomic, copy) NSString *number;//中奖人购买次数 @property (nonatomic, copy) NSString *q_end_time; //揭晓时间 @property (nonatomic, copy) NSString *q_uid;//用户id @property (nonatomic, copy) NSString *q_user_code; // 中奖号吗 @property (nonatomic, copy) NSString *qishu;//期数 @property (nonatomic, copy) NSString *shopname; //商品标题 @property (nonatomic, copy) NSString *thumb; //商品图片 @property (nonatomic, copy) NSString *type;//商品状态 1已经揭晓 2未揭晓 @property (nonatomic, copy) NSString *uid; //---- @property (nonatomic, copy) NSString *username;//中奖人名字 @property (nonatomic, copy) NSString *img;//中奖人头像 @property (nonatomic, copy) NSString *zongrenshu; // 总人数 @property (nonatomic, copy) NSString *jiexiao_time; // 揭晓时间 @property (nonatomic, copy) NSString *yunjiage; //云价格 @property (nonatomic, copy) NSString *xiangou; //限购 @property (nonatomic, copy) NSString *xg_number; //限购 @end
org-metaeffekt/metaeffekt-dcc
modules/dcc-commons/src/main/java/org/metaeffekt/dcc/commons/mapping/Capability.java
/** * Copyright 2009-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.metaeffekt.dcc.commons.mapping; import org.apache.commons.lang3.StringUtils; import org.metaeffekt.dcc.commons.domain.Id; import org.metaeffekt.dcc.commons.domain.Type.CapabilityId; /** * A {@link Capability} represents a concrete realization of a {@link CapabilityDefinition} within * a {@link ConfigurationUnit}. * * @author <NAME> */ public class Capability implements Identifiable { private Id<CapabilityId> id; private String uniqueId; private CapabilityDefinition capabilityDefinition; private ConfigurationUnit unit; @Deprecated public Capability(String id, CapabilityDefinition capabilityDefinition, ConfigurationUnit unit) { this(Id.createCapabilityId(id), capabilityDefinition, unit); } public Capability(Id<CapabilityId> id, CapabilityDefinition capabilityDefinition, ConfigurationUnit unit) { this.id = id; this.capabilityDefinition = capabilityDefinition; this.unit = unit; } public Id<CapabilityId> getId() { return id; } public void setId(Id<CapabilityId> id) { this.id = id; } public CapabilityDefinition getCapabilityDefinition() { return capabilityDefinition; } public void setCapabilityDefinition(CapabilityDefinition capabilityDefinition) { this.capabilityDefinition = capabilityDefinition; } public ConfigurationUnit getUnit() { return unit; } public void setUnit(ConfigurationUnit unit) { this.unit = unit; } @Override public String toString() { StringBuilder sb 
= new StringBuilder("Capability ["); sb.append("unique id=" + getUniqueId()); sb.append("]"); return sb.toString(); } public String getUniqueId() { if (StringUtils.isEmpty(uniqueId) && unit != null) { this.uniqueId = unit.getId() + "/" + id; } return this.uniqueId; } }
stemey/dojo-generate-form
model/PrimitiveModel.js
define([ "dojo/_base/declare", "./Model" ], function (declare, Model) { // module: // gform/model/PrimitiveModel var PrimitiveModel = declare("gform.model.PrimitiveModel", [Model], { // summary: // Provides access to sibling attributes of modelHandle. value: null, oldValue: undefined, required: false, update: function (/*Object*/plainValue, setOldValue, bubble) { // summary: // update the attribute with the given plainValue. Attribute has a single valid type. // plainValue: // the new value of the attribute if (typeof plainValue === "undefined") { plainValue = this.getDefaultValue(); } if (plainValue !== null && !this.isInstance(plainValue)) { throw new Error("convert value " + plainValue + " to correct type"); } this._execute(function () { // set to undefined so that hasCHanged returns false //this.oldValue = undefined; this.set("value", plainValue); if (setOldValue !== false) { this.set("oldValue", this.getPlainValue()); } this.validate(); this.onChange(); }, bubble); }, isEmpty: function () { return this.value === null; }, isInstance: function (value) { return true; }, visit: function (cb, idx) { cb(this, function () { }, idx); }, getPlainValue: function () { return this.value; }, initDefault: function (setOldValue) { this._execute(function () { this.resetMeta(); this.set("oldValue", undefined); this.update(this.getDefaultValue(), setOldValue); }); }, getDefaultValue: function () { var defaultValue = this.schema.defaultValue; if (defaultValue === null || typeof defaultValue === "undefined") { return null; } else { return this.schema.defaultValue; } } }); return PrimitiveModel; });
WassimAbida/HyperAPI
HyperAPI/hdp_api/routes/tags.py
<filename>HyperAPI/hdp_api/routes/tags.py<gh_stars>0 from HyperAPI.hdp_api.base.resource import Resource from HyperAPI.hdp_api.base.route import Route class Tags(Resource): name = "Tags" available_since = "1.0" removed_since = None class _addtag(Route): name = "add tag" httpMethod = Route.POST path = "/projects/{project_ID}/variables/addTag" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID } class _renameTag(Route): name = "renameTag" httpMethod = Route.POST path = "/projects/{project_ID}/tags/{tag_ID}/rename" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID, 'tag_ID': Route.VALIDATOR_OBJECTID } class _addtags(Route): name = "add tags" httpMethod = Route.POST path = "/projects/{project_ID}/variables/addTags" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID } class _addmetatag(Route): name = "add metatag" httpMethod = Route.POST path = "/projects/{project_ID}/datasets/{dataset_ID}/variables/tags" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID, 'dataset_ID': Route.VALIDATOR_OBJECTID } class _deletetag(Route): name = "delete tag" httpMethod = Route.POST path = "/projects/{project_ID}/tags/delete" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID } class _edittag(Route): name = "edit tag" httpMethod = Route.POST path = "/projects/{project_ID}/variables/tag" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID } class _editmetatag(Route): name = "edit metatag" httpMethod = Route.POST path = "/projects/{project_ID}/metatype/tag" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID } class _createmetatag(Route): name = "create metatag" httpMethod = Route.POST path = "/projects/{project_ID}/metatype/addTag" _path_keys = { 'project_ID': Route.VALIDATOR_OBJECTID }
subant05/jenp
__tests__/fn.clone.js
import {clone} from '../src/fn/index'; test("Tesing: fn.clone()", ()=>{ const originalObj = { name:"<NAME>" , age: 37 } const newObj = clone(originalObj) newObj.name = "<NAME>" newObj.age = 29 newObj.job = "Chairwoman" expect(originalObj.name).toBe("<NAME>") expect(originalObj.age).toBe(37) expect(originalObj.job).toBe(undefined) expect(newObj.name).toBe("<NAME>") expect(newObj.age).toBe(29) expect(newObj.job).toBe("Chairwoman") })
subbarayudu-j/katalon-studio-testing-framework
Include/scripts/groovy/com/kms/katalon/core/reporting/JsCallStepModel.java
package com.kms.katalon.core.reporting; import java.util.ArrayList; import java.util.List; import com.kms.katalon.core.logging.model.ILogRecord; import com.kms.katalon.core.logging.model.TestCaseLogRecord; import com.kms.katalon.core.logging.model.TestStepLogRecord; import com.kms.katalon.core.logging.model.TestStatus.TestStatusValue; public class JsCallStepModel extends JsModel { private TestStepLogRecord callerStep; private List<JsStepModel> callingSteps; private JsModel status; private List<String> listStrings; private JsTestModel testModel; private TestCaseLogRecord calledTest; public JsCallStepModel(TestStepLogRecord callerStep, TestCaseLogRecord testLog, List<String> listStrings) { this.callerStep = callerStep; this.calledTest = testLog; this.listStrings = listStrings; this.testModel = new JsTestModel(testLog, listStrings, callerStep); } private void init() { this.callingSteps = new ArrayList<JsStepModel>(); this.status = new JsModel(); props.add(new JsModelProperty("Type", "0", null)); props.add(new JsModelProperty("name", calledTest.getName(), listStrings)); props.add(new JsModelProperty("timeout", "0", null)); props.add(new JsModelProperty("doc", "0", null)); props.add(new JsModelProperty("args", "0", null)); // The Status initStatus(); // Sub steps for (ILogRecord logRecord : calledTest.getChildRecords()) { if (logRecord instanceof TestStepLogRecord) { callingSteps.add(new JsStepModel((TestStepLogRecord) logRecord, listStrings, callerStep.getName())); } } // No Log Records } @Override public StringBuilder toArrayString() { init(); // Create intermediate step to present for every loop StringBuilder sb = new StringBuilder(); // Start step sb.append(ARRAY_OPEN); // Properties for (JsModelProperty prop : props) { sb.append(prop.getPropertyValue()); sb.append(ARRAY_DLMT); } // Status sb.append(status.toArrayString()); sb.append(ARRAY_DLMT); // Called keyword/step sb.append(ARRAY_OPEN); for (int i = 0; i < testModel.getSteps().size(); i++) { 
sb.append(testModel.getSteps().get(i).toArrayString()); if (i < testModel.getSteps().size() - 1) { sb.append(ARRAY_DLMT); } } sb.append(ARRAY_CLOSE); sb.append(ARRAY_DLMT); // messages sb.append(ARRAY_EMPTY); // End step sb.append(ARRAY_CLOSE); return sb; } public JsTestModel getTestModel() { return this.testModel; } private void initStatus() { long startTime = calledTest.getStartTime(); long elapsedTime = calledTest.getEndTime() - startTime; TestStatusValue theStatus = calledTest.getStatus().getStatusValue(); String statVal = theStatus.ordinal() + ""; status.props.add(new JsModelProperty("status", statVal, null)); status.props.add(new JsModelProperty("startTime", String.valueOf(startTime), null)); status.props.add(new JsModelProperty("elapsedTime", String.valueOf(elapsedTime), null)); } }
reneelpetit/codestream-server
api_server/modules/versioner/test/unknown_ide_test.js
<gh_stars>1-10 'use strict'; const VersionerTest = require('./versioner_test'); const RandomString = require('randomstring'); class UnknownIDETest extends VersionerTest { constructor (options) { super(options); this.expectedDisposition = 'unknownIDE'; } get description () { return 'should set X-CS-Version-Disposition to "unknownIDE" when an unknown plugin IDE is sent with the request'; } // before the test runs... before (callback) { // set the X-CS-Plugin-IDE header to something unknown super.before(error => { if (error) { return callback(error); } this.apiRequestOptions.headers['x-cs-plugin-ide'] = `plugin-${RandomString.generate(12)}`; callback(); }); } // validate the version headers returned with the response to the test request validateVersionHeaders () { // we don't expect any version headers } // validate the version headers concerning the agent that are returned with the response // to the test request validateAgentHeaders () { // we don't expect any version headers concerning the agent } // validate the asset URL, which tells us where the latest extension lives // (this needs to be updated when we support multiple IDEs) validateAssetUrl () { // we don't expect the asset URL } } module.exports = UnknownIDETest;
Shashi-rk/azure-sdk-for-java
sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/models/MicrosoftGraphGroupLifecyclePolicyInner.java
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.authorization.fluent.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.HashMap; import java.util.Map; /** groupLifecyclePolicy. */ @Fluent public final class MicrosoftGraphGroupLifecyclePolicyInner extends MicrosoftGraphEntity { @JsonIgnore private final ClientLogger logger = new ClientLogger(MicrosoftGraphGroupLifecyclePolicyInner.class); /* * List of email address to send notifications for groups without owners. * Multiple email address can be defined by separating email address with a * semicolon. */ @JsonProperty(value = "alternateNotificationEmails") private String alternateNotificationEmails; /* * Number of days before a group expires and needs to be renewed. Once * renewed, the group expiration is extended by the number of days defined. */ @JsonProperty(value = "groupLifetimeInDays") private Integer groupLifetimeInDays; /* * The group type for which the expiration policy applies. Possible values * are All, Selected or None. */ @JsonProperty(value = "managedGroupTypes") private String managedGroupTypes; /* * groupLifecyclePolicy */ @JsonIgnore private Map<String, Object> additionalProperties; /** * Get the alternateNotificationEmails property: List of email address to send notifications for groups without * owners. Multiple email address can be defined by separating email address with a semicolon. * * @return the alternateNotificationEmails value. 
*/ public String alternateNotificationEmails() { return this.alternateNotificationEmails; } /** * Set the alternateNotificationEmails property: List of email address to send notifications for groups without * owners. Multiple email address can be defined by separating email address with a semicolon. * * @param alternateNotificationEmails the alternateNotificationEmails value to set. * @return the MicrosoftGraphGroupLifecyclePolicyInner object itself. */ public MicrosoftGraphGroupLifecyclePolicyInner withAlternateNotificationEmails(String alternateNotificationEmails) { this.alternateNotificationEmails = alternateNotificationEmails; return this; } /** * Get the groupLifetimeInDays property: Number of days before a group expires and needs to be renewed. Once * renewed, the group expiration is extended by the number of days defined. * * @return the groupLifetimeInDays value. */ public Integer groupLifetimeInDays() { return this.groupLifetimeInDays; } /** * Set the groupLifetimeInDays property: Number of days before a group expires and needs to be renewed. Once * renewed, the group expiration is extended by the number of days defined. * * @param groupLifetimeInDays the groupLifetimeInDays value to set. * @return the MicrosoftGraphGroupLifecyclePolicyInner object itself. */ public MicrosoftGraphGroupLifecyclePolicyInner withGroupLifetimeInDays(Integer groupLifetimeInDays) { this.groupLifetimeInDays = groupLifetimeInDays; return this; } /** * Get the managedGroupTypes property: The group type for which the expiration policy applies. Possible values are * All, Selected or None. * * @return the managedGroupTypes value. */ public String managedGroupTypes() { return this.managedGroupTypes; } /** * Set the managedGroupTypes property: The group type for which the expiration policy applies. Possible values are * All, Selected or None. * * @param managedGroupTypes the managedGroupTypes value to set. * @return the MicrosoftGraphGroupLifecyclePolicyInner object itself. 
*/ public MicrosoftGraphGroupLifecyclePolicyInner withManagedGroupTypes(String managedGroupTypes) { this.managedGroupTypes = managedGroupTypes; return this; } /** * Get the additionalProperties property: groupLifecyclePolicy. * * @return the additionalProperties value. */ @JsonAnyGetter public Map<String, Object> additionalProperties() { return this.additionalProperties; } /** * Set the additionalProperties property: groupLifecyclePolicy. * * @param additionalProperties the additionalProperties value to set. * @return the MicrosoftGraphGroupLifecyclePolicyInner object itself. */ public MicrosoftGraphGroupLifecyclePolicyInner withAdditionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; return this; } @JsonAnySetter void withAdditionalProperties(String key, Object value) { if (additionalProperties == null) { additionalProperties = new HashMap<>(); } additionalProperties.put(key, value); } /** {@inheritDoc} */ @Override public MicrosoftGraphGroupLifecyclePolicyInner withId(String id) { super.withId(id); return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ @Override public void validate() { super.validate(); } }
scrumpi3/DOME
src/mit/cadlab/dome3/gui/objectmodel/dataobject/build/FileBuildPanel.java
<gh_stars>0 // FileBuildPanel.java package mit.cadlab.dome3.gui.objectmodel.dataobject.build; import mit.cadlab.dome3.objectmodel.dataobject.interfaces.DataObject; import mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile; import mit.cadlab.dome3.objectmodel.dataobject.FileData; import mit.cadlab.dome3.objectmodel.util.TypeInfo; import mit.cadlab.dome3.swing.DComboBox; import mit.cadlab.dome3.swing.DTextField; import mit.cadlab.dome3.swing.Templates; import mit.cadlab.dome3.util.FileUtils; import mit.cadlab.dome3.gui.guiutils.msg.TwoButton2Msg; import mit.cadlab.dome3.gui.objectmodel.dataobject.DataObjectPanel; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.File; public class FileBuildPanel extends DataObjectPanel { protected PropertyChangeListener propertyListener; protected DTextField valueTextField; protected JButton constraintsButton; protected DComboBox fileTypeComboBox; protected JCheckBox showFileCheckBox; protected JButton browseButton; protected mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile dataModel; public static final TypeInfo TYPE_INFO = new TypeInfo("FileBuildPanel"); public static final String XML_TAG = "filebuildpanel"; public FileBuildPanel(mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile f) { if (f == null) throw new IllegalArgumentException("DomeFile gui - null DomeFile"); dataModel = f; propertyListener = createPropertyListener(); dataModel.addPropertyChangeListener(propertyListener); layoutPanel(); configComponents(); } protected PropertyChangeListener 
createPropertyListener() { return new FilePropertyChangeListener(); } protected void layoutPanel() { fileTypeComboBox = Templates.makeDComboBox(FileData.validFileType); fileTypeComboBox.setSelectedObject(dataModel.getFileType()); fileTypeComboBox.setEnabled(isFileTypeComboBoxEnabled()); valueTextField = Templates.makeDTextField(dataModel.getFilePath()); showFileCheckBox = Templates.makeCheckBox("show file in browser"); browseButton = Templates.makeButton("choose..."); JLabel label = Templates.makeLabel("file path:"); JLabel label2 = Templates.makeLabel("file type:"); JPanel topPanel = new JPanel(); JComponent[] comps = {label2, fileTypeComboBox, label, valueTextField, browseButton, showFileCheckBox }; // gridx, gridy, gridwidth, gridheight, weightx, weighty, anchor, fill, insets(t,l,b,r), ipadx, ipady GridBagConstraints[] gbcs = { new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0, gbc.EAST, gbc.NONE, new Insets(0, 0, 0, 0), 0, 0), new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0, gbc.EAST, gbc.NONE, new Insets(0, 5, 0, 0), 0, 0), new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, gbc.WEST, gbc.NONE, new Insets(5, 0, 0, 0), 0, 0), new GridBagConstraints(1, 1, 1, 1, 1.0, 1.0, gbc.CENTER, gbc.HORIZONTAL, new Insets(5, 0, 0, 0), 0, 0), new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0, gbc.EAST, gbc.NONE, new Insets(5, 5, 0, 0), 0, 0), new GridBagConstraints(0, 2, 2, 1, 0.0, 1.0, gbc.WEST, gbc.NONE, new Insets(5, 0, 0, 0), 0, 0) }; Templates.layoutGridBagB(topPanel, comps, gbcs); //setting the combobox size Dimension oldDimension = fileTypeComboBox.getPreferredSize(); Dimension expectDimension = browseButton.getPreferredSize(); double height = oldDimension.getHeight(); double width = expectDimension.getWidth(); fileTypeComboBox.setPreferredSize(new Dimension((int) width, (int) height)); JPanel middlePanel = new JPanel(); constraintsButton = Templates.makeButton("constraints"); JComponent[] comps2 = {topPanel, middlePanel, // filler constraintsButton, }; // gridx, gridy, gridwidth, 
gridheight, weightx, weighty, anchor, fill, insets(t,l,b,r), ipadx, ipady GridBagConstraints[] gbcs2 = { new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0, gbc.WEST, gbc.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0), // valueTextField new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0, gbc.CENTER, gbc.BOTH, new Insets(0, 0, 0, 0), 0, 0), // fillerPanel new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, gbc.EAST, gbc.NONE, new Insets(5, 0, 0, 0), 0, -2), // constraintsButton }; Templates.layoutGridBagB(this, comps2, gbcs2); } protected void configComponents() { valueTextField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { /*String newFilepath = getFilePath(); String suffix = FileUtils.getDefaultSuffixForType(dataModel.getFileType()); if(newFilepath.indexOf(".")==-1) { newFilepath = newFilepath + suffix; setFilePath(newFilepath); }*/ setModelData(); } }); browseButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { JFileChooser chooser = new JFileChooser(); chooser.setFileFilter(FileUtils.getFilterForType(getFileType())); if (chooser.showDialog(browseButton, "choose") != JFileChooser.APPROVE_OPTION) return; else { //if some file that is not that type is selected, we will assume people want other filetype String newFileName = chooser.getSelectedFile().getAbsolutePath(); // never empty String suffix = FileUtils.getDefaultSuffixForType(dataModel.getFileType()); if (!newFileName.endsWith(suffix)) { //this means user pick another type of file or forgot to add extension if (newFileName.indexOf(".") == -1) { int answer = TwoButton2Msg.showOption(FileBuildPanel.this, "No extension", "The file name has no extension " + suffix + "!", "", "add extension", "don't add extension", new Dimension(300, 100)); if (answer == TwoButton2Msg.LEFT_OPTION) { newFileName = newFileName + suffix; } else {// otherwise, keep it setFileType(FileUtils.getTypeForFile(newFileName)); } } else 
setFileType(FileUtils.getTypeForFile(newFileName)); } setFileType(FileUtils.getTypeForFile(newFileName)); setFilePath(newFileName); setModelData(); } } }); addCheckBoxListener(); fileTypeComboBox.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { dataModel.setFileType(getFileType()); } }); } protected void addCheckBoxListener() { showFileCheckBox.addActionListener(createCheckBoxListener()); } protected ActionListener createCheckBoxListener() { return new ActionListener() { public void actionPerformed(ActionEvent event) { if (showFileCheckBox.isSelected()) { FileUtils.showFile(dataModel); } } }; } private boolean isFileTypeComboBoxEnabled() { if (dataModel.getFilePath().trim().equals("")) return true; return false; } // connect to data model public void setDataObject(DataObject data) { // sangmok: added code to fix memory problem // setDataObject(null) is invoked during the executino of releaseDataObjectReferenceOfDataObjectPanel() in DataObjectCards class // when DomeMatrixData is null, codes like setDataModel_GUI() should be skipped // instead setDataModel_Null() should be invoked if (data == null) { setDataModel_Null(); return; } // sangmok: added code ends if (data instanceof mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile) setModel((mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile) data); else throw new IllegalArgumentException("DomeFile gui - non-DomeFile parameter"); } /** * sangmok : a new method to fix memory leakage problem * set data object reference (=dataMatrix) as null * also release data object reference in TableModel object */ protected void setDataModel_Null() { if (dataModel != null) { dataModel.removePropertyChangeListener(propertyListener); } dataModel = null; } public void setModel(mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile model) { if (model == null) throw new IllegalArgumentException("DomeFile gui - null DomeFile"); if (dataModel != null) { 
dataModel.removePropertyChangeListener(propertyListener); } dataModel = model; dataModel.addPropertyChangeListener(propertyListener); getModelData(); } protected void getModelData() { setFilePath(dataModel.getFilePath()); valueTextField.setCurrent(); setFileType(dataModel.getFileType()); } /** * when calling this function ,don't forget to set file type as well */ protected void setModelData() { String newFilepath = getFilePath(); if (newFilepath.equals("")) { //empty string, dataModel.setFilePath(newFilepath); return; } dataModel.setFilePath(parseStr(newFilepath)); validateFile(dataModel.getFilePath()); //String oldType=getFileType(); dataModel.setFileType(getFileType()); } protected String parseStr(String newFilepath) { //deal with the suffix String suffix = FileUtils.getDefaultSuffixForType(dataModel.getFileType()); File file = new File(newFilepath); if (newFilepath.endsWith(File.separator)) return newFilepath + FileUtils.DEFAULTPREFIX + suffix; if (file.getName().trim().equals("")) return newFilepath + File.separator + FileUtils.DEFAULTPREFIX + suffix; /*if (!file.getName().endsWith(suffix)){ if(file.getName().indexOf(".")==-1) { newFilepath = newFilepath + suffix; } //return newFilepath = newFilepath + suffix; }*/ setFileType(FileUtils.getTypeForFile(newFilepath)); return newFilepath; } protected void validateFile(String filepath) { File file; try { file = new File(filepath); if (file.exists()) { //do nothing! 
} else { // OneButton1Msg.showWarning(null, "Warning:file not exist", filepath + " is not exist!", "ok", OneButton1Msg.DEFAULT_SIZE); } } catch (Exception e) { e.printStackTrace(); //OneButton1Msg.showWarning(this, "File Warning", "error in the file path", "ok", OneButton1Msg.DEFAULT_SIZE); return; } } public String getFilePath() { return valueTextField.getText().trim(); } public void setFilePath(String value) { valueTextField.setText(value); } public String getFileType() { return (String) fileTypeComboBox.getSelectedItem(); } public void setFileType(String value) { if (FileData.isValidFileType(value)) fileTypeComboBox.setSelectedItem(value); else { System.err.println("FileBuildPanel GUI Error--wrong file type!"); } } protected class FilePropertyChangeListener implements PropertyChangeListener { public void propertyChange(PropertyChangeEvent e) { String property = e.getPropertyName(); Object newValue = e.getNewValue(); if (property.equals(mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile.FILEPATH)) { setFilePath(newValue.toString()); valueTextField.setCurrent(); fileTypeComboBox.setEnabled(isFileTypeComboBoxEnabled()); } if (property.equals(mit.cadlab.dome3.objectmodel.dataobject.interfaces.DomeFile.FILETYPE)) { setFileType(newValue.toString()); } } } }
kraney/stackpath
pkg/cdn/model_content_disposition_by_header_default_type_enum_wrapper_value.go
/* * Content Delivery Network * * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) * * API version: 1.0.0 * Generated by: OpenAPI Generator (https://openapi-generator.tech) */ package cdn import ( "encoding/json" ) // ContentDispositionByHeaderDefaultTypeEnumWrapperValue the model 'ContentDispositionByHeaderDefaultTypeEnumWrapperValue' type ContentDispositionByHeaderDefaultTypeEnumWrapperValue string // List of ContentDispositionByHeaderDefaultTypeEnumWrapperValue const ( CONTENTDISPOSITIONBYHEADERDEFAULTTYPEENUMWRAPPERVALUE_UNKNOWN ContentDispositionByHeaderDefaultTypeEnumWrapperValue = "UNKNOWN" CONTENTDISPOSITIONBYHEADERDEFAULTTYPEENUMWRAPPERVALUE_ATTACHMENT ContentDispositionByHeaderDefaultTypeEnumWrapperValue = "attachment" CONTENTDISPOSITIONBYHEADERDEFAULTTYPEENUMWRAPPERVALUE_INLINE ContentDispositionByHeaderDefaultTypeEnumWrapperValue = "inline" ) // Ptr returns reference to ContentDispositionByHeaderDefaultTypeEnumWrapperValue value func (v ContentDispositionByHeaderDefaultTypeEnumWrapperValue) Ptr() *ContentDispositionByHeaderDefaultTypeEnumWrapperValue { return &v } type NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue struct { value *ContentDispositionByHeaderDefaultTypeEnumWrapperValue isSet bool } func (v NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) Get() *ContentDispositionByHeaderDefaultTypeEnumWrapperValue { return v.value } func (v *NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) Set(val *ContentDispositionByHeaderDefaultTypeEnumWrapperValue) { v.value = val v.isSet = true } func (v NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) IsSet() bool { return v.isSet } func (v *NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) Unset() { v.value = nil v.isSet = false } func NewNullableContentDispositionByHeaderDefaultTypeEnumWrapperValue(val *ContentDispositionByHeaderDefaultTypeEnumWrapperValue) 
*NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue { return &NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue{value: val, isSet: true} } func (v NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableContentDispositionByHeaderDefaultTypeEnumWrapperValue) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
theclashingfritz/sonic-3-remastered
source/Scene_HCZ.cpp
#include "Standard.h"
#include "Scene_MainMenu.h"
#include "Application.h"
#include "Resources.h"
#include "Scene_HCZ.h"
#include "S3Object.h"
#include "LevelScene.h"
#include "S3/Objects/InvisibleSolid.h"
#include "Scene_CNZ.h"

#define Z_MID 0
#define Z_FRONT -1
#define Z_BACK 1
#define T(name) App->Textures[name]
#define S(name) App->Sprites[name]

// Unique
// Zone ID for Hydrocity; combined with the act number (low nibble) to form `act`.
const char LevelID = 0x20;

// Modularized
// Constructor: registers every resource/path pair for the zone (level data,
// music, layouts, chunks, tiles, rings, objects, palettes), then delegates
// to LevelScene::Init. Each asset family has four slots: Pre-Act, Act 1,
// and two Act-2 slots.
Scene_HCZ::Scene_HCZ(Application* app, int act, int checkpoint) {
    SDL_RWops *_RWList[1 + 2 + 20 + 12] = {
        newFile(res_HCZ_level_lvl, "Resource/Stages/HCZ/level.lvl"),
        // Musics
        newFile(res_HCZ_act_1_ogg, "Resource/Stages/HCZ/Music/Act_1.ogg"),
        newFile(res_HCZ_act_2_ogg, "Resource/Stages/HCZ/Music/Act_2.ogg"),
        // Layouts
        newFile(res_HCZ_layout_1_bin, "Resource/Stages/HCZ/Data (Layout)/1.bin"), // Pre-Act
        newFile(res_HCZ_layout_1_bin, "Resource/Stages/HCZ/Data (Layout)/1.bin"), // Act 1
        newFile(res_HCZ_layout_2_bin, "Resource/Stages/HCZ/Data (Layout)/2.bin"),
        newFile(res_HCZ_layout_2_bin, "Resource/Stages/HCZ/Data (Layout)/2.bin"),
        // Chunks (128x128)
        newFile(res_HCZ_chunks_1_bin, "Resource/Stages/HCZ/Data (Chunks)/1.bin"), // Pre-Act
        newFile(res_HCZ_chunks_1_bin, "Resource/Stages/HCZ/Data (Chunks)/1.bin"), // Act 1
        newFile(res_HCZ_chunks_2_bin, "Resource/Stages/HCZ/Data (Chunks)/2.bin"),
        newFile(res_HCZ_chunks_2_bin, "Resource/Stages/HCZ/Data (Chunks)/2.bin"),
        // Tiles (16x16)
        newFile(res_HCZ_tiles_0_bin, "Resource/Stages/HCZ/Data (Tiles)/0.bin"), // Pre-Act
        newFile(res_HCZ_tiles_1_bin, "Resource/Stages/HCZ/Data (Tiles)/1.bin"), // Act 1
        newFile(res_HCZ_tiles_2_bin, "Resource/Stages/HCZ/Data (Tiles)/2.bin"),
        newFile(res_HCZ_tiles_2_bin, "Resource/Stages/HCZ/Data (Tiles)/2.bin"),
        // Rings
        // BUGFIX: the Pre-Act slot paired res_HCZ_rings_2_bin with the "1.bin"
        // path, unlike every other asset family (Pre-Act always uses the _1
        // resource). Use the act-1 rings resource to match the path.
        newFile(res_HCZ_rings_1_bin, "Resource/Stages/HCZ/Data (Rings)/1.bin"), // Pre-Act
        newFile(res_HCZ_rings_1_bin, "Resource/Stages/HCZ/Data (Rings)/1.bin"), // Act 1
        newFile(res_HCZ_rings_2_bin, "Resource/Stages/HCZ/Data (Rings)/2.bin"),
        newFile(res_HCZ_rings_2_bin, "Resource/Stages/HCZ/Data (Rings)/2.bin"),
        // Objects
        newFile(res_HCZ_objects_1_bin, "Resource/Stages/HCZ/Data (Objects)/1.bin"), // Pre-Act
        newFile(res_HCZ_objects_1_bin, "Resource/Stages/HCZ/Data (Objects)/1.bin"), // Act 1
        newFile(res_HCZ_objects_2_bin, "Resource/Stages/HCZ/Data (Objects)/2.bin"),
        newFile(res_HCZ_objects_2_bin, "Resource/Stages/HCZ/Data (Objects)/2.bin"),
        // Palettes (Dry)
        newFile(res_HCZ_PAL1_DRY_png, "Resource/Stages/HCZ/Palettes/PAL1_DRY.png"), // Pre-Act
        newFile(res_HCZ_PAL1_DRY_png, "Resource/Stages/HCZ/Palettes/PAL1_DRY.png"), // Act 1
        newFile(res_HCZ_PAL2_DRY_png, "Resource/Stages/HCZ/Palettes/PAL2_DRY.png"),
        newFile(res_HCZ_PAL2_DRY_png, "Resource/Stages/HCZ/Palettes/PAL2_DRY.png"),
        // Palettes (Wet)
        newFile(res_HCZ_PAL1_WET_png, "Resource/Stages/HCZ/Palettes/PAL1_WET.png"), // Pre-Act
        newFile(res_HCZ_PAL1_WET_png, "Resource/Stages/HCZ/Palettes/PAL1_WET.png"), // Act 1
        newFile(res_HCZ_PAL2_WET_png, "Resource/Stages/HCZ/Palettes/PAL2_WET.png"),
        newFile(res_HCZ_PAL2_WET_png, "Resource/Stages/HCZ/Palettes/PAL2_WET.png"),
        // Palettes (Other)
        newFile(res_HCZ_PALm_DRY_png, "Resource/Stages/HCZ/Palettes/PAL_MB_DRY.png"), // Mini-boss
        newFile(res_HCZ_PALm_DRY_png, "Resource/Stages/HCZ/Palettes/PAL_MB_DRY.png"), // Boss
        newFile(res_HCZ_PALm_WET_png, "Resource/Stages/HCZ/Palettes/PAL_MB_WET.png"), // Extra 1 (Mini-boss)
        newFile(res_HCZ_PALm_WET_png, "Resource/Stages/HCZ/Palettes/PAL_MB_WET.png"), // Extra 2
    };
    App = app;
    memcpy(RWList, _RWList, sizeof(_RWList));
    Init(app, LevelID + act, checkpoint);
}

// Advances to the next act, or hands off to Carnival Night Zone after act 2.
// Player and camera are shifted by the act-transition offset (0x3600) so the
// next act lines up with the end of the previous one.
void Scene_HCZ::DoSwitchActZone() {
    if (!doSwitch) return;
    doSwitch = false;

    if ((act & 0xF) == 2) {
        // End of the zone: carry score/lives into CNZ act 1.
        Scene_CNZ* nextLevel = new Scene_CNZ(App, 1, -1);
        nextLevel->score = score;
        nextLevel->lives = lives;
        App->nextScene = nextLevel;
        return;
    }

    MyPlayer->X -= 0x3600;
    MyPlayer->Y = 0x0830;
    act = LevelID + (act & 0xF) + 1;
    InitZone(true, 0, -1, true);
    cameraX[App->CurrentViewport] -= 0x3600;
    cameraY[App->CurrentViewport] = 0x0830 - App->gameHeight / 2;
    cameraMinX = 0;
    cameraMinY = 0;
}

// Unique
// Per-act setup: tileset texture, parallax tables, animated-tile tables, and
// (act 2) the giant moving-wall object. The numeric tables mirror the
// original game's BG deformation data — presumably transcribed from the
// Genesis disassembly; do not "clean up" individual entries.
void Scene_HCZ::InitZone(bool resetTextures, int check, int specialRing, bool actTransition) {
    int a = act & 0xF;
    if (a == 1)
        Tileset = new ITexture(IResources::Load("Stages/HCZ/Sprites/TILES1.png"), false);
    else if (a == 2)
        Tileset = new ITexture(IResources::Load("Stages/HCZ/Sprites/TILES2.png"), false);

    // Parallax Sizes and Auto Scrolls
    if (a <= 1) {
        int PS[] = {
            0x0040, 0x0008, 0x0008, 0x0005, 0x0005, 0x0006, 0x00F0, 0x0006,
            0x0005, 0x0005, 0x0008, 0x0008, 0x0030, 0x80C0, 0x7FFF };
        float PAS[] = {
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
        float PM[] = {
            0.250f, 0.21875f, 0.1875f, 0.15625f, 0.125f, 0.09375f, 0.0625f,
            0.09375f, 0.125f, 0.15625f, 0.1875f, 0.21875f, 0.250f, 0.250f, 0.125f };
        // Multipliers follow the original's $FFFFA800 BG deformation RAM:
        // 1/4 at the edges, stepping down by 1/32 toward the 1/16 midpoint
        // band, then back up (see the Genesis BG deformation routine).
        // BUGFIX: PAS was copied with sizeof(ParallaxAutoScroll) (the
        // destination member), reading past the 15-entry local array when
        // the member is larger — use sizeof(src) like the act-2 branch.
        memcpy(ParallaxAutoScroll, PAS, sizeof(PAS));
        memcpy(ParallaxSizes, PS, sizeof(PS));
        memcpy(ParallaxMult, PM, sizeof(PM));
        ParallaxCount = sizeof(PS) / sizeof(*PS);
    } else {
        int PS[] = {
            0x0008, 0x0008, 0x0090, 0x0010, 0x0008, 0x0030, 0x0018, 0x0008,
            0x0008, 0x00A8, 0x0030, 0x0018, 0x0008, 0x0008, 0x00A8, 0x0030,
            0x0018, 0x0008, 0x0008, 0x00B0, 0x0010, 0x0008, 0x7FFF };
        float PAS[] = {
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
        float PM[] = {
            0.3750f, 0.3125f, 0.1250f, 0.1875f, 0.3750f, 0.5000f, 0.4375f,
            0.3750f, 0.3125f, 0.2500f, 0.5000f, 0.4375f, 0.3750f, 0.3125f,
            0.2500f, 0.5000f, 0.4375f, 0.3750f, 0.3125f, 0.1250f, 0.1875f,
            0.3750f, 0.5000f };
        memcpy(ParallaxAutoScroll, PAS, sizeof(PAS));
        memcpy(ParallaxSizes, PS, sizeof(PS));
        memcpy(ParallaxMult, PM, sizeof(PM));
        ParallaxCount = sizeof(PS) / sizeof(*PS);
    }

    // Animated Tiles: rows of
    // { tile ID, # of tiles, AniTile ID, # of frames,
    //   # frames to next anim frame (0 for background tile) }
    if (a == 1) {
        int animTilesTmp[] = {
            277,  6,   0, 9, 3,
            732, 24, 186, 1, 2,
            756, 24, 162, 1, 2,
            780, 36,  54, 3, 4,
        };
        // BUGFIX: was sizeof(AnimTiles) (destination member) — an
        // out-of-bounds read of the 20-entry local array; copy sizeof(src).
        memcpy(AnimTiles, animTilesTmp, sizeof(animTilesTmp));
        AnimTilesCount = sizeof(animTilesTmp) / sizeof(*animTilesTmp) / 5;
    } else {
        int animTilesTmp[] = {
            277,  6,   0, 9, 3,
            606, 21,  54, 4, 4,
            722,  4, 138, 8, 0,
            726,  8, 170, 8, 0,
            734, 16, 234, 8, 0,
            750, 48, 362, 8, 0,
        };
        memcpy(AnimTiles, animTilesTmp, sizeof(animTilesTmp)); // BUGFIX: sizeof(src), as above
        AnimTilesCount = sizeof(animTilesTmp) / sizeof(*animTilesTmp) / 5;
    }

    LevelScene::InitZone(resetTextures, check, specialRing, actTransition);

    // Create Object: Act 2 wall (the pursuing wall; see HCZ2_UpdateMovingWall)
    if ((act & 0xF) == 2) {
        IInvisibleSolid* obj = new IInvisibleSolid();
        obj->X = 0x400 + 2 * 128;
        obj->Y = 0x500 + 2 * 128;
        obj->W = 128 * 4;
        obj->H = 128 * 4;
        obj->Scene = this;
        obj->App = app;
        Objects[objects_count] = obj;
        giantWallID = objects_count;
        objects_count++;
        giantWallX = 0;
        obj->Create();
    }

    if (check <= 0) {
        // Fresh start (no checkpoint): the intro drops the player in water.
        MyPlayer->Action = ActionType::Peril;
    }
}

// Loads the zone's shared texture sheet and every object sprite into the
// application caches, timing the whole batch for the load log.
void Scene_HCZ::CreateUniqueTextures(Application* app, int act) {
    unsigned int nextTick = SDL_GetTicks();
    T("HCZ") = new ITexture(IResources::Load("Stages/HCZ/Sprites/Texture.png"), false);
    S("Blastoid") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Blastoid.spr"), T("HCZ"));
    S("Block") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Block.spr"), T("HCZ"));
    S("Boss Effects") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Boss Effects.spr"), T("HCZ"));
    S("Boss Extra") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Boss Extra.spr"), T("HCZ"));
    S("Breakable Bar") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Breakable Bar.spr"), T("HCZ"));
    S("Breakable Platforms") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Breakable Platforms.spr"), T("HCZ"));
    S("Breakable Wall") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Breakable Wall.spr"), T("HCZ"));
    S("Bridge") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Bridge.spr"), T("HCZ"));
    S("Bubbler") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Bubbler.spr"), T("HCZ"));
    S("Buggernaut") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Buggernaut.spr"), T("HCZ"));
    S("Button") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Button.spr"), T("HCZ"));
    S("Fan") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Fan.spr"), T("HCZ"));
    S("Floating Platform") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Floating Platform.spr"), T("HCZ"));
    S("Geyser Particles") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Geyser Particles.spr"), T("HCZ"));
    S("Geyser") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Geyser.spr"), T("HCZ"));
    S("Hand Launcher") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Hand Launcher.spr"), T("HCZ"));
    S("Jawz") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Jawz.spr"), T("HCZ"));
    S("Large Fan") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Large Fan.spr"), T("HCZ"));
    S("Mega Chopper") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Mega Chopper.spr"), T("HCZ"));
    S("Miniboss Pole") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Miniboss Pole.spr"), T("HCZ"));
    S("Miniboss") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Miniboss.spr"), T("HCZ"));
    S("Non Anim 1") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Non Anim 1.spr"), T("HCZ"));
    S("Non Anim 2") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Non Anim 2.spr"), T("HCZ"));
    S("Pointdexter") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Pointdexter.spr"), T("HCZ"));
    S("Snake Block") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Snake Block.spr"), T("HCZ"));
    S("Spinning Column") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Spinning Column.spr"), T("HCZ"));
    S("Turbo Spiker") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Turbo Spiker.spr"), T("HCZ"));
    S("Water Drop") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Water Drop.spr"), T("HCZ"));
    S("Water Rush") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Water Rush.spr"), T("HCZ"));
    S("Water Splash") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Water Splash.spr"), T("HCZ"));
    S("Waterline Top") = new ISprite(IResources::Load("Stages/HCZ/Sprites/Waterline Top.spr"), T("HCZ"));
    App->print(0, "UNIQUE Texture load finished in %.3f seconds.", (SDL_GetTicks() - nextTick) / 1000.0f);
}

// Intentionally empty: zone textures are currently left to the caches.
void Scene_HCZ::UnloadUniqueTextures(Application* app, int act) {
}

// Per-frame zone logic: background scaling, base-scene update, the act-2
// moving wall, palette cycling (waterfalls + Super Sonic), and water level.
void Scene_HCZ::Update() {
    BGx = 1.f;
    if ((act & 0xF) == 1) {
        BGstart = 0;
        BGsize = 4;
        BGy = (backHorizonChunkHeight - 1.75f / 2.f) / (frontHorizonChunkHeight - 1.75f / 2.f);
    } else {
        BGstart = 0;
        BGsize = 4;
        BGy = 1.f / 4.f;
    }

    LevelScene::Update();
    HCZ2_UpdateMovingWall();

    // Palette cycling, every 8 frames. Colors are packed Genesis-style
    // 0x0BGR nibbles, unpacked into setPixel's (r, g, b) channels.
    if (frameAnim % 8 == 0) {
        int len = 4;
        int pick;
        if ((act & 0xF) == 1) {
            int ew[] = { 0xEC8, 0xEC0, 0xEA0, 0xE80 }; // Waterfalls
            len = 4;
            pick = (frameAnim / 8) % len;
            setPixel(paletteDry->pixels, 2, 3, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 2, 3, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            pick = (frameAnim / 8 + 1) % len;
            setPixel(paletteDry->pixels, 2, 4, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 2, 4, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            pick = (frameAnim / 8 + 2) % len;
            setPixel(paletteDry->pixels, 2, 5, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 2, 5, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            pick = (frameAnim / 8 + 3) % len;
            setPixel(paletteDry->pixels, 2, 6, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 2, 6, (ew[pick] & 0xF), (ew[pick] & 0xF0) / 0x10, (ew[pick] & 0xF00) / 0x100);
        }

        // Do common palette stuffs (Super Sonic)
        if (MyPlayer->SuperForm || MyPlayer->SuperFormAnim != SuperFormAnimType::None) {
            len = 4;
            pick = 3 * ((frameAnim / 16) % len + 6);
            if (MyPlayer->SuperFormAnim == SuperFormAnimType::Transforming) {
                pick = ((MyPlayer->SuperFormAnimTimerMax - MyPlayer->SuperFormAnimTimer) / 4);
                if (pick < 2) pick = 2;
                pick = 3 * pick;
            } else if (MyPlayer->SuperFormAnim == SuperFormAnimType::Deforming) {
                pick = 3 * (MyPlayer->SuperFormAnimTimer / 4);
            } else if (MyPlayer->SuperFormAnim == SuperFormAnimType::Super) {
                pick = 0;
            }
            // NOTE(review): LevelID is 0x20 here, so this condition is never
            // true in HCZ — it looks copied from shared code for AIZ/ICZ.
            int *PalCycle_ChoseSuper = PalCycle_SuperSonicUnderwaterHCZCNZLBZ;
            if (LevelID == 0x10 || LevelID == 0x50)
                PalCycle_ChoseSuper = PalCycle_SuperSonicUnderwaterAIZICZ;
            setPixel(paletteDry->pixels, 0, 2, (PalCycle_SuperSonic[pick] & 0xF), (PalCycle_SuperSonic[pick] & 0xF0) / 0x10, (PalCycle_SuperSonic[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 0, 2, (PalCycle_ChoseSuper[pick] & 0xF), (PalCycle_ChoseSuper[pick] & 0xF0) / 0x10, (PalCycle_ChoseSuper[pick] & 0xF00) / 0x100);
            pick++;
            setPixel(paletteDry->pixels, 0, 3, (PalCycle_SuperSonic[pick] & 0xF), (PalCycle_SuperSonic[pick] & 0xF0) / 0x10, (PalCycle_SuperSonic[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 0, 3, (PalCycle_ChoseSuper[pick] & 0xF), (PalCycle_ChoseSuper[pick] & 0xF0) / 0x10, (PalCycle_ChoseSuper[pick] & 0xF00) / 0x100);
            pick++;
            setPixel(paletteDry->pixels, 0, 4, (PalCycle_SuperSonic[pick] & 0xF), (PalCycle_SuperSonic[pick] & 0xF0) / 0x10, (PalCycle_SuperSonic[pick] & 0xF00) / 0x100);
            setPixel(paletteWet->pixels, 0, 4, (PalCycle_ChoseSuper[pick] & 0xF), (PalCycle_ChoseSuper[pick] & 0xF0) / 0x10, (PalCycle_ChoseSuper[pick] & 0xF00) / 0x100);
        }

        // Re-upload both modified palettes to the GPU.
        glBindTexture(GL_TEXTURE_2D, paletteDry->tex);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, paletteDry->w, paletteDry->h, GL_RGBA, GL_UNSIGNED_BYTE, paletteDry->pixels);
        glBindTexture(GL_TEXTURE_2D, 0);
        glBindTexture(GL_TEXTURE_2D, paletteWet->tex);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, paletteWet->w, paletteWet->h, GL_RGBA, GL_UNSIGNED_BYTE, paletteWet->pixels);
        glBindTexture(GL_TEXTURE_2D, 0);
    }

    // WaterLevel updating: thresholds keyed off the player's X position.
    if ((act & 0xF) == 1) {
        if (MyPlayer->X < 0x9A0) {
            waterLevel = 0x500;
            visualWaterHeight = 0x780;
        } else if (MyPlayer->X < 0x35B0) {
            waterLevel = 0x680;
            visualWaterHeight = 0x680;
        } else {
            waterLevel = 0x6A0;
            visualWaterHeight = 0x6A0;
        }
    } else {
        if (MyPlayer->X > 0xC00) {
            waterLevel = 0x700;
            visualWaterHeight = 0x700;
        }
    }
    // NOTE: a large `#if 0` HCZ_WaterTunnels port (stream/pipe currents)
    // previously lived here; it was dead code and has been removed — see
    // version-control history to resurrect it.
}

// Maps a static tile index to its animated-tile replacement for this frame.
// `ind` is the tile index, `i` the AnimTiles row, `parRow` the parallax row
// (used to lock background tiles to their scroll plane).
int Scene_HCZ::CustomAnimTile(int ind, int i, int parRow) {
    int tileID = AnimTiles[i * 5 + 0];
    int tileCount = AnimTiles[i * 5 + 1];
    int aniID = AnimTiles[i * 5 + 2];
    int frmCount = AnimTiles[i * 5 + 3];
    int frmSpd = AnimTiles[i * 5 + 4];

    if ((act & 0xF) == 1) {
        if (ind >= 277 && ind < 277 + 6) {
            // Ping-pong sequence over 9 frames with non-uniform timing.
            int frm = frameAnim % 48;
            if (frm < 5) return 2048 + aniID + ind - 277 + 0;
            else if (frm < 9) return 2048 + aniID + ind - 277 + 6;
            else if (frm < 12) return 2048 + aniID + ind - 277 + 12;
            else if (frm < 14) return 2048 + aniID + ind - 277 + 18;
            else if (frm < 15) return 2048 + aniID + ind - 277 + 24;
            else if (frm < 17) return 2048 + aniID + ind - 277 + 30;
            else if (frm < 20) return 2048 + aniID + ind - 277 + 36;
            else if (frm < 24) return 2048 + aniID + ind - 277 + 42;
            else if (frm < 29) return 2048 + aniID + ind - 277 + 48;
            else if (frm < 33) return 2048 + aniID + ind - 277 + 42;
            else if (frm < 36) return 2048 + aniID + ind - 277 + 36;
            else if (frm < 38) return 2048 + aniID + ind - 277 + 30;
            else if (frm < 39) return 2048 + aniID + ind - 277 + 24;
            else if (frm < 41) return 2048 + aniID + ind - 277 + 18;
            else if (frm < 44) return 2048 + aniID + ind - 277 + 12;
            else if (frm < 48) return 2048 + aniID + ind - 277 + 6;
        } else if (ind >= 732 && ind < 732 + 24) {
            return 2048 + 186 + ind - 732;
        } else if (ind >= 756 && ind < 756 + 24) {
            return 2048 + 162 + ind - 756;
        } else if (ind >= 780 && ind < 780 + 36) {
            int frm = frameAnim % 12;
            return 2048 + 54 + ind - 780 + (frm / 4) * 36;
        }
    } else {
        if (ind >= 277 && ind < 277 + 6) {
            i = 0;
            aniID = AnimTiles[i * 5 + 2];
            int frm = frameAnim % 48;
            if (frm < 5) return 2048 + aniID + ind - 277 + 0;
            else if (frm < 9) return 2048 + aniID + ind - 277 + 6;
            else if (frm < 12) return 2048 + aniID + ind - 277 + 12;
            else if (frm < 14) return 2048 + aniID + ind - 277 + 18;
            else if (frm < 15) return 2048 + aniID + ind - 277 + 24;
            else if (frm < 17) return 2048 + aniID + ind - 277 + 30;
            else if (frm < 20) return 2048 + aniID + ind - 277 + 36;
            else if (frm < 24) return 2048 + aniID + ind - 277 + 42;
            else if (frm < 29) return 2048 + aniID + ind - 277 + 48;
            else if (frm < 33) return 2048 + aniID + ind - 277 + 42;
            else if (frm < 36) return 2048 + aniID + ind - 277 + 36;
            else if (frm < 38) return 2048 + aniID + ind - 277 + 30;
            else if (frm < 39) return 2048 + aniID + ind - 277 + 24;
            else if (frm < 41) return 2048 + aniID + ind - 277 + 18;
            else if (frm < 44) return 2048 + aniID + ind - 277 + 12;
            else if (frm < 48) return 2048 + aniID + ind - 277 + 6;
        } else if (ind >= 606 && ind < 606 + 21) {
            i = 1;
            int frm = frameAnim % 16;
            return 2048 + 54 + ind - 606 + (frm / 4) * 21;
        } else if (ind >= 722 && ind < 722 + 4) i = 2;
        else if (ind >= 726 && ind < 726 + 8) i = 3;
        else if (ind >= 734 && ind < 734 + 16) i = 4;
        else if (ind >= 750 && ind < 750 + 48) i = 5;

        // Background tile columns (rows 2..5): the "frame" is derived from
        // the camera position so the tiles stay glued to their parallax
        // plane. NOTE(review): tileID/tileCount/frmCount/frmSpd above were
        // read with the caller's `i`, not the reassigned one — assumed to
        // match in practice; confirm against the caller.
        if (i >= 2 && i <= 5) {
            int frm = (int)cameraX[App->CurrentViewport];
            if (frmSpd != 0) frm = (frameAnim / frmSpd);
            int bgTileColumnCount = 4;
            int bgTileColumnHeight = 1;
            if (i == 2) {
                // Multiply camera X by the parallax row multiplier to freeze the anim tile X.
                float cX = cameraX[App->CurrentViewport] * (ParallaxMult[parRow] / 4.f);
                frm = bgTileColumnCount * 8 - (int)cX % (bgTileColumnCount * 8);
            } else if (i == 3) {
                bgTileColumnCount = 4;
                bgTileColumnHeight = 2;
                float cX = cameraX[App->CurrentViewport] * (ParallaxMult[parRow] - ParallaxMult[parRow + 1]);
                frm = bgTileColumnCount * 8 - (int)cX % (bgTileColumnCount * 8);
            } else if (i == 4) {
                bgTileColumnCount = 4;
                bgTileColumnHeight = 4;
                float cX = cameraX[App->CurrentViewport] * (ParallaxMult[parRow] / 2.f);
                frm = bgTileColumnCount * 8 - (int)cX % (bgTileColumnCount * 8);
            } else if (i == 5) {
                bgTileColumnCount = 8;
                bgTileColumnHeight = 6;
                float cX = cameraX[App->CurrentViewport] * (ParallaxMult[parRow] / 2.f);
                frm = bgTileColumnCount * 8 - (int)cX % (bgTileColumnCount * 8);
                frm = 7 - (frm % 8) + (frm / 8) * 8;
            }
            int offset = ((tileCount * frm) / (tileCount * frmCount)) % bgTileColumnCount;
            return 2048 + aniID + (ind - tileID + offset * bgTileColumnHeight) % (tileCount) + (tileCount * frm) % (tileCount * frmCount);
        }
    }
    return ind;
}

// UNIQUE
// Act-1 only: draws the slanted waterline strip between the background
// horizon and the foreground, sliced into `pieces` horizontal bands.
void Scene_HCZ::HCZ1_RenderWaterline() {
    if ((act & 0xF) != 1) {
        return;
    }

    int pieces = 12;
    float scaleY = (frontHorizonChunkHeight - 1.75f / 2.f) / (backHorizonChunkHeight - 1.75f / 2.f);
    int pos_BG = (int)(4 * 128.f - cameraY[App->CurrentViewport] / scaleY);
    int pos_FG = (int)(13 * 128.f - cameraY[App->CurrentViewport]);
    for (int y = 0; y < pieces; y++) {
        // Horizontal offset: parallax factor eases from 1/4 to 1 across the
        // bands, plus a small sine wobble; wraps at screen width.
        int myX = 0;
        if (pos_FG - pos_BG > 0)
            myX = (int)((0 * 8.f - cameraX[App->CurrentViewport] / (4 - 3.f * y / pieces)));
        else
            myX = (int)((0 * 8.f - cameraX[App->CurrentViewport] / (4 - 3.f * (pieces - y) / pieces)));
        myX += (int)(std::sin((frameAnim % 120) / 120.f * Math_PI / 180.f) * 3);
        myX %= App->gameWidth;
        // Vertical band position, interpolated between BG and FG horizons
        // (clamped to a 96px tall strip); direction flips with the sign.
        int myY = 0;
        if (pos_FG - pos_BG > 0)
            myY = slope(pos_BG, pos_BG + std::min(std::abs(pos_FG - pos_BG), 96), y / (float)pieces);
        else
            myY = slope(pos_BG - std::min(std::abs(pos_BG - pos_FG), 96), pos_BG, y / (float)pieces);
        int myH = (int)std::ceil(std::min(std::abs(pos_FG - pos_BG), 96) / (float)pieces);
        if (myY + myH >= 0 && myY < App->gameHeight) {
            if (pos_FG - pos_BG > 0) {
                // Top half of the texture; drawn twice for horizontal wrap.
                App->drawSpriteOPT(myX, myY, App->gameWidth, myH, App->drawZ, 0.f, y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
                if (myX > 0)
                    App->drawSpriteOPT(myX - App->gameWidth, myY, App->gameWidth, myH, App->drawZ, 0.f, y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
                else
                    App->drawSpriteOPT(myX + App->gameWidth, myY, App->gameWidth, myH, App->drawZ, 0.f, y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
            } else if (pos_FG - pos_BG < 0) {
                // Bottom half of the texture (mirrored case).
                App->drawSpriteOPT(myX, myY, App->gameWidth, myH + 1, App->drawZ, 0.f, 0.5f + y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
                if (myX > 0)
                    App->drawSpriteOPT(myX - App->gameWidth, myY, App->gameWidth, myH + 1, App->drawZ, 0.f, 0.5f + y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
                else
                    App->drawSpriteOPT(myX + App->gameWidth, myY, App->gameWidth, myH + 1, App->drawZ, 0.f, 0.5f + y * 0.5f / pieces, 1.f, 0.5f / pieces, 1);
            }
        }
    }
    App->renderSpriteOPT(tex_WaterlineTop->tex, 1);
}

// Act-2 pursuing wall: creeps right at 0xE0/0x100 px per frame (faster when
// it falls behind the camera), shakes the camera and rumbles while active,
// and snaps to its end position once everyone is past 0xC00.
void Scene_HCZ::HCZ2_UpdateMovingWall() {
    if (!App->realPaused && giantWallID >= 0) {
        if (giantWallX == 0) {
            // Parked at the start position until the player crosses the trigger.
            objects[giantWallID]->x = 0x400 + 2 * 128;
            objects[giantWallID]->y = 0x600 + 2 * 128;
            if (player->X > 0x680 && player->Y > 0x600)
                giantWallX++;
        }
        if (giantWallX > 0 && giantWallX < 0x600) {
            giantWallX += (float)0xE0 / 0x100;
            if ((int)(0x500 + giantWallX) + 0x100 < cameraX[App->CurrentViewport]) {
                // Catch-up boost when the wall is off-screen behind the camera.
                giantWallX += (float)(0x140 - 0xE0) / 0x100;
            }
            objects[giantWallID]->x = (int)(0x500 + giantWallX);
            if (frameAnim % 16 == 0) {
                aud[0x6F]->Play(4); // rumble SFX
            }
        }
        if (giantWallX > 0 && objects[giantWallID]->x < 0xB00) {
            objects[giantWallID]->data2 = 0xFF;
            objects[giantWallID]->prior = true;
            cameraY[App->CurrentViewport] += (float)((frameAnim / 4) % 2); // camera shake
        } else {
            objects[giantWallID]->data2 = 0;
            objects[giantWallID]->prior = false;
        }
        // NOTE(review): loop body ignores `i` — looks like it should check
        // each player's viewport; confirm intent before changing.
        bool allPast = true;
        for (int i = 0; i < player_count; i++) {
            allPast &= cameraX[App->CurrentViewport] > 0xC00;
        }
        if (allPast) {
            giantWallX = 0x600;
            objects[giantWallID]->x = (int)(0x500 + giantWallX);
        }
    }
}

// Draws the 4x4-chunk face of the act-2 moving wall from the level's chunk
// table, anchored to the wall object's current X.
void Scene_HCZ::HCZ2_DrawMovingWall() {
    for (int x = 0; x < 4; x++) {
        for (int y = 2; y < 6; y++) {
            int us = level[(level[y * 4 + 10] & 0xFF) * 0x100 + (level[y * 4 + 11] & 0xFF) - 0x8000 + (x + 4)] & 0xFF;
            DrawChunk(0, 0, us, objects[giantWallID]->x - 2 * 128 + x * 128, 0x500 + y * 128, 16, 16, -0.4f);
        }
    }
    App->renderSpriteOPT(tex_ChunksFront->tex, 46);
}

// OVERRIDES
// Tightens the camera bounds in the act-1 end section; acts 2+ use the plain
// level-size bounds.
void Scene_HCZ::HandleCamera() {
    LevelScene::HandleCamera();
    if (MyPlayer->Action == ActionType::Dead) return;
    if ((act & 0xF) == 1) {
        if (MyPlayer->X > 0x35B0) {
            cameraMinX = cameraX[App->CurrentViewport]; // one-way lock: no scrolling back
            cameraMaxX = (float)(levelW * 128 - 24 - App->gameWidth + (App->gameWidth - App->renderWidth) / 2);
            if (cameraY[App->CurrentViewport] > 0x400) {
                cameraMinY = cameraY[App->CurrentViewport];
                cameraMaxY = (float)(0x0718 - App->gameHeight);
            } else {
                cameraMinY = 0x300;
                cameraMaxY = (float)(0x0718 - App->gameHeight);
            }
        }
    } else {
        cameraMaxX = (float)(levelW * 128 - App->renderWidth);
        cameraMaxY = (float)(levelH * 128 - App->gameHeight);
    }
}

// Currently just defers to the base class; the HCZ-specific uniform tweak
// below is deliberately disabled by the early return.
void Scene_HCZ::DoPaletteWaterStuff() {
    LevelScene::DoPaletteWaterStuff();
    return;
    if ((act & 0xF) == 1) {
        // Fix pallette at Beginning of HCZ1 water
        glUniform4f(App->locData, 1, 0, 0, 0);
    } else {
        glUniform4f(App->locData, 0, 0, 0, 0);
    }
}

// Entirely disabled by the early return (background/waterline drawing is
// handled elsewhere for now); body kept for when it is re-enabled.
void Scene_HCZ::DoBackgroundStuff() {
    return;
    LevelScene::DoBackgroundStuff();
    // Background and Parallax Rendering
    if ((act & 0xF) == 2) HCZ2_DrawMovingWall();
    App->drawZ = 0.6f;
    // HCZ Waterline Rendering
    HCZ1_RenderWaterline();
    App->drawZ = 0.4f;
}

// Water-surface wave overlay; currently disabled by the early return after
// the base-class call.
void Scene_HCZ::DrawAboveEverythingNonHUD() {
    LevelScene::DrawAboveEverythingNonHUD();
    return;
    // BUGFIX (in disabled code): was `(int)std::sin(...) * 10`, which
    // truncates sin() to 0 before scaling — cast after multiplying.
    int waveOff = (int)(std::sin(frameAnim / 360.0f * Math_PI) * 10);
    cameraX[App->CurrentViewport] += waveOff;
    int waveFr = (frameAnim / 10) % 3;
    if ((act & 0xF) == 1) {
        for (unsigned int o = 0; o < 3; o++) {
            App->drawSpriteOPT(256 * o + ((int)(cameraX[App->CurrentViewport]) / 256) * 256 + (0) * 32 - cameraX[App->CurrentViewport], waterLevel - 16 - cameraY[App->CurrentViewport], 256, 24, App->drawZ, waveFr / 3.0f, 0, 1 / 3.0f, 1, 2);
            App->drawSpriteOPT(256 * o + ((int)(cameraX[App->CurrentViewport]) / 256) * 256 + (1) * 32 - cameraX[App->CurrentViewport], waterLevel - 16 - cameraY[App->CurrentViewport], 256, 24, App->drawZ, waveFr / 3.0f, 0, 1 / 3.0f, 1, 2);
        }
    } else {
        for (unsigned int o = 0; o < 3; o++) {
            App->drawSpriteOPT(256 * o + ((int)(cameraX[App->CurrentViewport]) / 256) * 256 + (0) * 32 - cameraX[App->CurrentViewport], visualWaterHeight - 16 - cameraY[App->CurrentViewport], 256, 24, App->drawZ, waveFr / 3.0f, 0, 1 / 3.0f, 1, 2);
            App->drawSpriteOPT(256 * o + ((int)(cameraX[App->CurrentViewport]) / 256) * 256 + (1) * 32 - cameraX[App->CurrentViewport], visualWaterHeight - 16 - cameraY[App->CurrentViewport], 256, 24, App->drawZ, waveFr / 3.0f, 0, 1 / 3.0f, 1, 2);
        }
    }
    App->renderSpriteOPT(tex_Waves->tex, 2);
    cameraX[App->CurrentViewport] -= waveOff;
}

// No zone-specific rendering beyond the base scene.
void Scene_HCZ::Render() {
    LevelScene::Render();
}

// Tears down base-scene state, then the zone's per-act textures and chunks.
void Scene_HCZ::Free() {
    LevelScene::Free();
    UnloadUniqueTextures(app, 1);
    UnloadUniqueTextures(app, 2);
    unloadAnimatedChunks(app);
}
lycantropos/lz
tests/left_tests/folder_tests/test_definition.py
from hypothesis import given

from lz import (left,
                right)
from lz.replication import duplicate
from tests.hints import LeftFolderCall
from tests.utils import are_objects_similar
from . import strategies


@given(strategies.empty_folder_calls)
def test_base_case(empty_folder_call: LeftFolderCall) -> None:
    """Folding an empty iterable yields the initial value itself."""
    function, initial, empty_iterable = empty_folder_call

    fold = left.folder(function, initial)

    assert fold(empty_iterable) is initial


@given(strategies.non_empty_folder_calls)
def test_step(non_empty_folder_call: LeftFolderCall) -> None:
    """Appending an element folds as ``function(fold(rest), element)``."""
    function, initial, non_empty_iterable = non_empty_folder_call
    iterator = iter(non_empty_iterable)
    element = next(iterator)
    # Duplicate the remaining iterator: one copy is folded directly, the
    # other has ``element`` re-attached at the end before folding.
    original, target = duplicate(iterator)

    fold = left.folder(function, initial)
    attach = right.attacher(element)

    result = fold(attach(target))

    assert are_objects_similar(result, function(fold(original), element))
maciejg-git/vue-bootstrap-icons
dist-fontawesome/fontawesome/brands/octopus-deploy-brand.js
import { h } from 'vue'

// Font Awesome "Octopus Deploy" brand icon, wrapped as a Vue render-function
// component for vue-bootstrap-icons. The path data is generated — do not edit.
export default {
  name: "OctopusDeploy",
  vendor: "Fa",
  type: "Brand",
  tags: ["octopus", "deploy"],
  render() {
    // Build the attribute object per render; the SVG body is injected via
    // innerHTML because the path data is a single opaque string.
    const attributes = {
      xmlns: "http://www.w3.org/2000/svg",
      viewBox: "0 0 512 512",
      class: "v-icon",
      fill: "currentColor",
      "data-name": "fa-octopus-deploy",
      innerHTML: "<path d='M455.6,349.2c-45.891-39.09-36.67-77.877-16.095-128.11C475.16,134.04,415.967,34.14,329.93,8.3,237.04-19.6,134.252,24.341,99.677,117.147a180.862,180.862,0,0,0-10.988,73.544c1.733,29.543,14.717,52.97,24.09,80.3,17.2,50.161-28.1,92.743-66.662,117.582-46.806,30.2-36.319,39.857-8.428,41.858,23.378,1.68,44.478-4.548,65.265-15.045,9.2-4.647,40.687-18.931,45.13-28.588C135.9,413.388,111.122,459.5,126.621,488.9c19.1,36.229,67.112-31.77,76.709-45.812,8.591-12.572,42.963-81.279,63.627-46.926,18.865,31.361,8.6,76.391,35.738,104.622,32.854,34.2,51.155-18.312,51.412-44.221.163-16.411-6.1-95.852,29.9-59.944C405.428,418,436.912,467.8,472.568,463.642c38.736-4.516-22.123-67.967-28.262-78.695,5.393,4.279,53.665,34.128,53.818,9.52C498.234,375.678,468.039,359.8,455.6,349.2Z'/>",
    }
    return h("svg", attributes)
  },
}
takumakei/go-urfave-cli
zapflag/named.go
<reponame>takumakei/go-urfave-cli package zapflag import ( "strings" "github.com/urfave/cli/v2" "go.uber.org/zap" ) // Named adds a new path segment of command/subcommand name to the logger's name. func Named(logger *zap.Logger, c *cli.Context) *zap.Logger { app := strings.ReplaceAll(c.App.Name, " ", ".") if len(c.Command.Name) > 0 { return logger.Named(app + "." + c.Command.Name) } return logger.Named(app) }
vinzenz/fcppt
include/fcppt/optional/cat.hpp
<reponame>vinzenz/fcppt // Copyright <NAME> 2009 - 2016. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef FCPPT_OPTIONAL_CAT_HPP_INCLUDED #define FCPPT_OPTIONAL_CAT_HPP_INCLUDED #include <fcppt/move_if_rvalue.hpp> #include <fcppt/algorithm/range_element_type.hpp> #include <fcppt/optional/maybe_void.hpp> namespace fcppt { namespace optional { /** \brief Removes empty optionals from a range \ingroup fcpptoptional For every element \em e in \a _source, if \em e is set to <code>x</code>, then <code>x</code> is inserted into the target container. \tparam Source Must be a container of optionals \tparam TargetContainer Must be a container whose value type matches that of the optionals from \a Source */ template< typename TargetContainer, typename Source > TargetContainer cat( Source &&_source ) { TargetContainer result; for( fcppt::algorithm::range_element_type< Source > element : _source ) fcppt::optional::maybe_void( element, [ &result ]( auto &&_element ) { result.insert( result.end(), fcppt::move_if_rvalue< Source >( _element ) ); } ); return result; } } } #endif
pH14/fdb-zk
src/main/java/com/ph14/fdb/zk/layer/changefeed/ChangefeedWatchEvent.java
package com.ph14.fdb.zk.layer.changefeed;

import java.util.Objects;

import org.apache.zookeeper.Watcher.Event.EventType;

import com.apple.foundationdb.tuple.Versionstamp;
import com.google.common.base.MoreObjects;

/**
 * Immutable value object describing one entry in the watch changefeed:
 * which event type fired, on which ZooKeeper path, ordered by the
 * FoundationDB versionstamp of the originating transaction.
 */
public class ChangefeedWatchEvent {

  private final Versionstamp versionstamp;
  private final EventType eventType;
  private final String zkPath;

  public ChangefeedWatchEvent(Versionstamp versionstamp, EventType eventType, String zkPath) {
    this.versionstamp = versionstamp;
    this.eventType = eventType;
    this.zkPath = zkPath;
  }

  /** Versionstamp of the transaction that produced this event (total order). */
  public Versionstamp getVersionstamp() {
    return versionstamp;
  }

  /** The ZooKeeper watch event type that fired. */
  public EventType getEventType() {
    return eventType;
  }

  /** Absolute ZooKeeper path the event applies to. */
  public String getZkPath() {
    return zkPath;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof ChangefeedWatchEvent)) {
      return false;
    }
    ChangefeedWatchEvent other = (ChangefeedWatchEvent) obj;
    return Objects.equals(versionstamp, other.versionstamp)
        && Objects.equals(eventType, other.eventType)
        && Objects.equals(zkPath, other.zkPath);
  }

  @Override
  public int hashCode() {
    return Objects.hash(getVersionstamp(), getEventType(), getZkPath());
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("versionstamp", versionstamp)
        .add("eventType", eventType)
        .add("zkPath", zkPath)
        .toString();
  }
}
stephenh/dtonator
features/src/main/java/com/bizo/dtonator/domain/EmployeeWithTypedAccounts.java
package com.bizo.dtonator.domain; import java.util.ArrayList; import java.util.List; public class EmployeeWithTypedAccounts { private Long id; private String name; private List<Account> accounts = new ArrayList<Account>(); public EmployeeWithTypedAccounts() { } public EmployeeWithTypedAccounts(final Long id, final String name) { setId(id); setName(name); } public Long getId() { return id; } public void setId(final Long id) { this.id = id; } public String getName() { return name; } public void setName(final String name) { this.name = name; } public List<Account> getAccounts() { return accounts; } public void setAccounts(final List<Account> accounts) { this.accounts = accounts; } }
caponetto/kogito-editors-java
drools-wb-screens/drools-wb-scenario-simulation-editor/drools-wb-scenario-simulation-editor-client/src/test/java/org/drools/workbench/screens/scenariosimulation/client/popup/ScenarioConfirmationPopupViewTest.java
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.workbench.screens.scenariosimulation.client.popup; import com.ait.lienzo.test.LienzoMockitoTestRunner; import org.junit.Before; import org.junit.runner.RunWith; import static org.mockito.Mockito.spy; @RunWith(LienzoMockitoTestRunner.class) public class ScenarioConfirmationPopupViewTest extends AbstractScenarioConfirmationPopupViewTest { @Before public void setup() { super.commonSetup(); popupView = spy(new ScenarioConfirmationPopupView() { { this.mainTitle = mainTitleMock; this.mainQuestion = mainQuestionMock; this.text1 = text1Mock; this.textQuestion = textQuestionMock; this.cancelButton = cancelButtonMock; this.okButton = okDeleteButtonMock; this.modal = modalMock; this.translationService = translationServiceMock; } }); } }
MidnightTinge/Yuugure
src/main/java/com/mtinge/yuugure/data/http/RenderableComment.java
package com.mtinge.yuugure.data.http; import com.squareup.moshi.Json; import lombok.AllArgsConstructor; import java.sql.Timestamp; @AllArgsConstructor public class RenderableComment { public final int id; public final Timestamp timestamp; public final SafeAccount account; @Json(name = "content_raw") public final String contentRaw; @Json(name = "content_rendered") public final String contentRendered; }
gh-determined-ai/determined
master/internal/api_tasks.go
package internal

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"github.com/google/uuid"
	"github.com/hashicorp/go-multierror"

	"github.com/determined-ai/determined/master/internal/api"
	"github.com/determined-ai/determined/master/internal/grpcutil"
	"github.com/determined-ai/determined/master/internal/task"
	"github.com/determined-ai/determined/master/pkg/model"
	"github.com/determined-ai/determined/proto/pkg/apiv1"
)

const (
	// Buffer of the channel carrying log batches from the fetcher goroutine
	// to the gRPC sender.
	taskLogsChanBuffer = 5
	// Upper bound on how many log entries a single backend fetch returns.
	taskLogsBatchSize = 1000
)

var (
	taskReadyCheckLogs = "/run/determined/check_ready_logs.py"

	// Wait between fetches when a follow request finds no new logs.
	taskLogsBatchMissWaitTime = time.Second
	// Polling interval when streaming the set of known log fields.
	taskLogsFieldsBatchWaitTime = 5 * time.Second

	// Common errors.
	taskNotFound = status.Error(codes.NotFound, "task not found")
)

// AllocationReady forwards a "ready" signal to the allocation's actor.
func (a *apiServer) AllocationReady(
	ctx context.Context, req *apiv1.AllocationReadyRequest,
) (*apiv1.AllocationReadyResponse, error) {
	handler, err := a.allocationHandlerByID(model.AllocationID(req.AllocationId))
	if err != nil {
		return nil, err
	}
	if err := a.ask(handler.Address(), task.AllocationReady{}, nil); err != nil {
		return nil, err
	}
	return &apiv1.AllocationReadyResponse{}, nil
}

// AllocationAllGather registers a watcher for an all-gather among the
// allocation's peers and blocks until the gathered data arrives or the
// request context is cancelled.
func (a *apiServer) AllocationAllGather(
	ctx context.Context, req *apiv1.AllocationAllGatherRequest,
) (*apiv1.AllocationAllGatherResponse, error) {
	if req.AllocationId == "" {
		return nil, status.Error(codes.InvalidArgument, "allocation ID missing")
	}
	handler, err := a.allocationHandlerByID(model.AllocationID(req.AllocationId))
	if err != nil {
		return nil, err
	}
	wID, err := uuid.Parse(req.RequestUuid)
	if err != nil {
		return nil, status.Error(codes.InvalidArgument, err.Error())
	}
	var w task.AllGatherWatcher
	if err = a.ask(handler.Address(), task.WatchAllGather{
		WatcherID: wID,
		NumPeers:  int(req.NumPeers),
		Data:      req.Data,
	}, &w); err != nil {
		return nil, err
	}
	// Always deregister the watcher, whichever way we leave this function.
	defer a.m.system.TellAt(handler.Address(), task.UnwatchAllGather{WatcherID: wID})

	select {
	case rsp := <-w.C:
		if rsp.Err != nil {
			return nil, rsp.Err
		}
		return &apiv1.AllocationAllGatherResponse{Data: rsp.Data}, nil
	case <-ctx.Done():
		// NOTE(review): returns (nil, nil) on cancellation, so the caller sees
		// neither data nor an error — consider returning ctx.Err(); confirm
		// what the gRPC layer does with a nil response.
		return nil, nil
	}
}

// PostAllocationProxyAddress records the proxy address for an allocation.
func (a *apiServer) PostAllocationProxyAddress(
	ctx context.Context, req *apiv1.PostAllocationProxyAddressRequest,
) (*apiv1.PostAllocationProxyAddressResponse, error) {
	if req.AllocationId == "" {
		return nil, status.Error(codes.InvalidArgument, "allocation ID missing")
	}
	handler, err := a.allocationHandlerByID(model.AllocationID(req.AllocationId))
	if err != nil {
		return nil, err
	}
	if err := a.ask(handler.Address(), task.SetAllocationProxyAddress{
		ProxyAddress: req.ProxyAddress,
	}, nil); err != nil {
		return nil, err
	}
	return &apiv1.PostAllocationProxyAddressResponse{}, nil
}

// TaskLogs streams the logs of a task over the gRPC response stream,
// optionally following new logs as they arrive.
func (a *apiServer) TaskLogs(
	req *apiv1.TaskLogsRequest, resp apiv1.Determined_TaskLogsServer,
) error {
	if err := grpcutil.ValidateRequest(
		grpcutil.ValidateLimit(req.Limit),
		grpcutil.ValidateFollow(req.Limit, req.Follow),
	); err != nil {
		return err
	}

	taskID := model.TaskID(req.TaskId)
	switch exists, err := a.m.db.CheckTaskExists(taskID); {
	case err != nil:
		return err
	case !exists:
		return taskNotFound
	}

	ctx, cancel := context.WithCancel(resp.Context())
	defer cancel()

	res := make(chan api.BatchResult, taskLogsChanBuffer)
	// Producer goroutine; cancellation of ctx stops it.
	go a.taskLogs(ctx, req, res)

	return processBatches(res, func(b api.Batch) error {
		return b.ForEach(func(i interface{}) error {
			pl, pErr := i.(*model.TaskLog).Proto()
			if pErr != nil {
				return pErr
			}
			return resp.Send(pl)
		})
	})
}

// taskLogs fetches log batches from the backend and pushes them into res,
// honoring limit/follow semantics via the batch stream processor.
func (a *apiServer) taskLogs(
	ctx context.Context, req *apiv1.TaskLogsRequest, res chan api.BatchResult,
) {
	taskID := model.TaskID(req.TaskId)
	filters, err := constructTaskLogsFilters(req)
	if err != nil {
		res <- api.ErrBatchResult(
			status.Error(codes.InvalidArgument, fmt.Sprintf("unsupported filter: %s", err)),
		)
		return
	}

	// Opaque backend cursor carried across fetches while following.
	var followState interface{}
	fetch := func(r api.BatchRequest) (api.Batch, error) {
		switch {
		case r.Follow, r.Limit > taskLogsBatchSize:
			r.Limit = taskLogsBatchSize
		case r.Limit <= 0:
			return nil, nil
		}

		b, state, fErr := a.m.taskLogBackend.TaskLogs(
			taskID, r.Limit, filters, req.OrderBy, followState)
		if fErr != nil {
			return nil, fErr
		}
		followState = state

		return model.TaskLogBatch(b), nil
	}

	total, err := a.m.taskLogBackend.TaskLogsCount(taskID, filters)
	if err != nil {
		res <- api.ErrBatchResult(fmt.Errorf("getting log count from backend: %w", err))
		return
	}
	effectiveLimit := api.EffectiveLimit(int(req.Limit), 0, total)

	api.NewBatchStreamProcessor(
		api.BatchRequest{Limit: effectiveLimit, Follow: req.Follow},
		fetch,
		a.isTaskTerminalFunc(taskID, a.m.taskLogBackend.MaxTerminationDelay()),
		false,
		nil,
		&taskLogsBatchMissWaitTime,
	).Run(ctx, res)
}

// constructTaskLogsFilters translates the request's filter fields into the
// generic api.Filter list understood by the log backend.
func constructTaskLogsFilters(req *apiv1.TaskLogsRequest) ([]api.Filter, error) {
	var filters []api.Filter

	// Helper: add an IN-filter only when values were actually provided.
	addInFilter := func(field string, values interface{}, count int) {
		if values != nil && count > 0 {
			filters = append(filters, api.Filter{
				Field:     field,
				Operation: api.FilterOperationIn,
				Values:    values,
			})
		}
	}

	addInFilter("allocation_id", req.AllocationIds, len(req.AllocationIds))
	addInFilter("agent_id", req.AgentIds, len(req.AgentIds))
	addInFilter("container_id", req.ContainerIds, len(req.ContainerIds))
	addInFilter("rank_id", req.RankIds, len(req.RankIds))
	addInFilter("stdtype", req.Stdtypes, len(req.Stdtypes))
	addInFilter("source", req.Sources, len(req.Sources))
	// Log levels need converting from proto enums to backend strings first.
	addInFilter("level", func() interface{} {
		var levels []string
		for _, l := range req.Levels {
			levels = append(levels, model.TaskLogLevelFromProto(l))
		}
		return levels
	}(), len(req.Levels))

	if req.TimestampBefore != nil {
		if err := req.TimestampBefore.CheckValid(); err != nil {
			return nil, err
		}
		filters = append(filters, api.Filter{
			Field:     "timestamp",
			Operation: api.FilterOperationLessThanEqual,
			Values:    req.TimestampBefore.AsTime(),
		})
	}

	if req.TimestampAfter != nil {
		if err := req.TimestampAfter.CheckValid(); err != nil {
			return nil, err
		}
		filters = append(filters, api.Filter{
			Field:     "timestamp",
			Operation: api.FilterOperationGreaterThan,
			Values:    req.TimestampAfter.AsTime(),
		})
	}
	return filters, nil
}

// TaskLogsFields streams the set of distinct field values present in a
// task's logs, optionally following as new values appear.
func (a *apiServer) TaskLogsFields(
	req *apiv1.TaskLogsFieldsRequest, resp apiv1.Determined_TaskLogsFieldsServer,
) error {
	taskID := model.TaskID(req.TaskId)

	fetch := func(lr api.BatchRequest) (api.Batch, error) {
		fields, err := a.m.taskLogBackend.TaskLogsFields(taskID)
		return api.ToBatchOfOne(fields), err
	}

	ctx, cancel := context.WithCancel(resp.Context())
	defer cancel()

	res := make(chan api.BatchResult)
	go api.NewBatchStreamProcessor(
		api.BatchRequest{Follow: req.Follow},
		fetch,
		a.isTaskTerminalFunc(taskID, a.m.taskLogBackend.MaxTerminationDelay()),
		true,
		&taskLogsFieldsBatchWaitTime,
		&taskLogsFieldsBatchWaitTime,
	).Run(ctx, res)

	return processBatches(res, func(b api.Batch) error {
		return b.ForEach(func(r interface{}) error {
			return resp.Send(r.(*apiv1.TaskLogsFieldsResponse))
		})
	})
}

// isTaskTerminalFunc returns an api.TerminationCheckFn that waits for a task to finish and
// optionally, additionally, waits some buffer duration to give trials a bit to finish sending
// stuff after termination.
func (a *apiServer) isTaskTerminalFunc(
	taskID model.TaskID, buffer time.Duration,
) api.TerminationCheckFn {
	return func() (bool, error) {
		// NOTE(review): the local variable `task` shadows the imported `task`
		// package inside this switch — legal, but easy to misread.
		switch task, err := a.m.db.TaskByID(taskID); {
		case err != nil:
			return true, err
		case task.EndTime != nil && task.EndTime.UTC().Add(buffer).Before(time.Now().UTC()):
			return true, nil
		default:
			return false, nil
		}
	}
}

// processBatches drains res, applying h to each successful batch and
// accumulating upstream errors; h's own error aborts immediately.
func processBatches(res chan api.BatchResult, h func(api.Batch) error) error {
	var err *multierror.Error
	for r := range res {
		if r.Err() != nil {
			// Noting the failure but not exiting here will cause us to wait for the downstream
			// processor to fail from its error or continue.
			err = multierror.Append(err, r.Err())
			continue
		}

		hErr := h(r.Batch())
		if hErr != nil {
			// Since this is our failure, we fail and return. This should cause upstream
			// processes and cause downstream senders to cancel.
			return hErr
		}
	}
	// ErrorOrNil is safe on a nil *multierror.Error.
	return err.ErrorOrNil()
}

// zipBatches consumes res1 and res2 in lockstep, applying z to each pair of
// successful batches; either channel closing ends the zip.
func zipBatches(res1, res2 chan api.BatchResult, z func(api.Batch, api.Batch) error) error {
	var err *multierror.Error
	for {
		b1, ok := <-res1
		switch {
		case !ok:
			return err.ErrorOrNil()
		case b1.Err() != nil:
			// Noting the failure but not exiting here will cause us to wait for the downstream
			// processor to fail from its error or continue.
			err = multierror.Append(err, b1.Err())
			continue
		}

		b2, ok := <-res2
		switch {
		case !ok:
			return err.ErrorOrNil()
		case b2.Err() != nil:
			// Noting the failure but not exiting here will cause us to wait for the downstream
			// processor to fail from its error or continue.
			err = multierror.Append(err, b2.Err())
			continue
		}

		if zErr := z(b1.Batch(), b2.Batch()); zErr != nil {
			// Since this is our failure, we fail and return. This should cause upstream
			// processes and cause downstream senders to cancel.
			return zErr
		}
	}
}
OLC-Bioinformatics/olc_genomics_portal
olc_webportalv2/api/urls.py
from olc_webportalv2.api import views from django.conf.urls import url, include from django.urls import path from rest_framework.urlpatterns import format_suffix_patterns from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='API') urlpatterns = [ # REST API Stuff path('schema/', schema_view), # TODO: Enforce run name regex path('upload/<str:run_name>/<str:filename>', views.UploadView.as_view()), path('run_cowbat/<str:run_name>', views.StartCowbatView.as_view()), ] urlpatterns = format_suffix_patterns(urlpatterns)
AProgrammerFemale/Yet-another-c-framework
Configuration.hpp
//Copyright <NAME>, All Rights Reserved #pragma once #define AliceOSUnknown "This Operating System is not supported by Alice" #if defined(_WIN32) #define AliceWindows #elif defined(__ANDROID__) #define AliceAndroid #elif defined(__linux__) #define AliceLinux #elif defined(__APPLE__) #if defined(TARGET_OS_IPHONE) #define AliceiOS #elif defined(TARGET_OS_MAC) #define AliceMac #endif #endif
bohap/e-store
src/main/java/com/finki/emt/bookstore/repository/BookRepository.java
package com.finki.emt.bookstore.repository; import com.finki.emt.bookstore.domain.Book; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import org.springframework.data.jpa.repository.Query; import java.util.List; import java.util.Optional; public interface BookRepository extends JpaRepository<Book, Long>, JpaSpecificationExecutor<Book> { Optional<Book> findBySlug(String slug); Optional<Book> findFirstByOrderByIdDesc(); List<Book> findDistinctByCategoriesNameIn(Pageable pageable, String... categories); Optional<Book> findBySlugAndFavoritesId(String slug, long userId); @Query("select b from Book b left join b.orders bo group by b.id order by count(b.id) DESC") List<Book> groupByOrdersCount(Pageable pageable); }
mesg-foundation/application
x/instance/internal/types/genesis.go
<gh_stars>10-100 package types import ( "fmt" "github.com/mesg-foundation/engine/ext/xvalidator" "github.com/mesg-foundation/engine/instance" ) // GenesisState - all instance state that must be provided at genesis type GenesisState struct { Instances []*instance.Instance `json:"instances" yaml:"instances" validate:"dive"` } // NewGenesisState creates a new GenesisState object func NewGenesisState(instances []*instance.Instance) GenesisState { return GenesisState{ Instances: instances, } } // DefaultGenesisState is the default GenesisState func DefaultGenesisState() GenesisState { return GenesisState{ Instances: []*instance.Instance{}, } } // ValidateGenesis validates the instance genesis parameters func ValidateGenesis(data GenesisState) error { if err := xvalidator.Struct(data); err != nil { return fmt.Errorf("failed to validate %s genesis state: %w", ModuleName, err) } return nil }
Khan/render-gateway
dist/shared/get-runtime-mode.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.getRuntimeMode = void 0; /** * Determine the node runtime mode. * * The mode is calculated from NODE_ENV. If NODE_ENV is not set or set to * something other than expected values, the defaultMode is returned. * * @returns {Runtime} The runtime mode of production, development, or test. */ const getRuntimeMode = defaultMode => { switch (process.env.NODE_ENV) { case "test": return "test"; case "production": case "prod": return "production"; case "development": case "dev": return "development"; default: return defaultMode; } }; exports.getRuntimeMode = getRuntimeMode; //# sourceMappingURL=get-runtime-mode.js.map
fengjijiao/java-spring-framework-learning
src/us/fjj/spring/learning/annotationdemo/UserDaoImpl.java
package us.fjj.spring.learning.annotationdemo; import org.springframework.stereotype.Repository; @Repository("userDao") public class UserDaoImpl implements UserDao { @Override public void outContent() { System.out.println("spring!!!"); } }
scala-steward/fmq
core/src/main/scala/io/fmq/frame/FrameEncoder.scala
package io.fmq.frame import java.nio.charset.StandardCharsets trait FrameEncoder[A] { def encode(value: A): Array[Byte] } object FrameEncoder { def apply[A](implicit instance: FrameEncoder[A]): FrameEncoder[A] = instance implicit val byteArrayEncoder: FrameEncoder[Array[Byte]] = identity implicit val utf8stringEncoder: FrameEncoder[String] = s => s.getBytes(StandardCharsets.UTF_8) }
bianapis/sd-corporate-treasury-v2.0
src/main/java/org/bian/dto/BQBankRatesRetrieveOutputModelBankRatesInstanceRecord.java
package org.bian.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import javax.validation.Valid;

/**
 * BQBankRatesRetrieveOutputModelBankRatesInstanceRecord
 *
 * Swagger-generated DTO describing a bank-rates instance record in the
 * BQ Bank Rates retrieve response; plain mutable bean with one field per
 * BIAN attribute.
 */
public class BQBankRatesRetrieveOutputModelBankRatesInstanceRecord {
  private String bankRateType = null;

  private String bankRateValue = null;

  private String bankRateValueDate = null;

  private String bankRateValueHistory = null;

  private String bankRateRecordReference = null;

  private Object bankRateRecord = null;

  private String bankRateRecordDate = null;

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text  general-info: The type of bank interest or currency rate that is to be applied in production (e.g. bank's consumer mortgage rate) 
   * @return bankRateType
   **/
  public String getBankRateType() {
    return bankRateType;
  }

  public void setBankRateType(String bankRateType) {
    this.bankRateType = bankRateType;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text  general-info: The current value (value range if appropriate) to apply 
   * @return bankRateValue
   **/
  public String getBankRateValue() {
    return bankRateValue;
  }

  public void setBankRateValue(String bankRateValue) {
    this.bankRateValue = bankRateValue;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::DateTime  general-info: The key date and time values associated with the rate 
   * @return bankRateValueDate
   **/
  public String getBankRateValueDate() {
    return bankRateValueDate;
  }

  public void setBankRateValueDate(String bankRateValueDate) {
    this.bankRateValueDate = bankRateValueDate;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text  general-info: Historical values (maintained for back testing, corrections etc.) 
   * @return bankRateValueHistory
   **/
  public String getBankRateValueHistory() {
    return bankRateValueHistory;
  }

  public void setBankRateValueHistory(String bankRateValueHistory) {
    this.bankRateValueHistory = bankRateValueHistory;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::ISO20022andUNCEFACT::Identifier  general-info: Reference to the distributed record containing the prevailing bank rates 
   * @return bankRateRecordReference
   **/
  public String getBankRateRecordReference() {
    return bankRateRecordReference;
  }

  public void setBankRateRecordReference(String bankRateRecordReference) {
    this.bankRateRecordReference = bankRateRecordReference;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Binary  general-info: The record of the prevailing bank rates 
   * @return bankRateRecord
   **/
  public Object getBankRateRecord() {
    return bankRateRecord;
  }

  public void setBankRateRecord(Object bankRateRecord) {
    this.bankRateRecord = bankRateRecord;
  }

  /**
   * `status: Not Mapped`  core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::DateTime  general-info: Date and time stamp for rate value record 
   * @return bankRateRecordDate
   **/
  public String getBankRateRecordDate() {
    return bankRateRecordDate;
  }

  public void setBankRateRecordDate(String bankRateRecordDate) {
    this.bankRateRecordDate = bankRateRecordDate;
  }

}
MayerRoman/Lagerta
core/src/test/java/org/apache/ignite/activestore/rules/FullClusterTestResourcesFactory.java
/*
 * Copyright (c) 2017. EPAM Systems
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.activestore.rules;

import java.nio.file.Paths;

import org.apache.ignite.activestore.cluster.XmlOneProcessClusterManager;
import org.junit.rules.RuleChain;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.Timeout;

/**
 * Central factory for the JUnit rules needed by full-cluster replication
 * tests: two embedded Kafka clusters, a super cluster and main/DR Ignite
 * clusters, composed into a single {@link RuleChain}.
 *
 * @author Aleksandr_Meterko
 * @since 12/14/2016
 */
public class FullClusterTestResourcesFactory {

    // Per-test timeout: 2 minutes.
    private static final int PER_TEST_TIMEOUT = 2 * 60_000;

    private static final String REPLICATION_DIR = "replication/";
    private static final String MAIN_CLUSTER_CONFIG_XML = "ignite-main-cluster-config.xml";
    private static final String DR_CLUSTER_CONFIG_XML = "ignite-dr-cluster-config.xml";

    private static final SuperClusterResource SUPER_CLUSTER_RESOURCE = new SuperClusterResource("superCuster");
    private static final TestResources MAIN_CLUSTER_RESOURCE = new TestResources("mainCluster", 2);
    private static final TestResources DR_CLUSTER_RESOURCE = new TestResources("drCluster", 2);
    // Temporary folder rooted at the current working directory.
    private static final TemporaryFolder FOLDER = new TemporaryFolder(Paths.get("").toAbsolutePath().toFile());

    // Lazily built by getAllResourcesRule; NOTE(review): creation is not
    // synchronized — assumed to be called from a single test thread.
    private static RuleChain allResourcesRule;

    static {
        // Wire each cluster resource to the XML-driven manager that starts
        // Ignite nodes in one process from the matching config file.
        XmlOneProcessClusterManager mainManager =
            new XmlOneProcessClusterManager(getConfigClassPath(MAIN_CLUSTER_CONFIG_XML));
        FullClusterTestResourcesFactory.getMainClusterResource().setClusterManager(mainManager);
        XmlOneProcessClusterManager drManager =
            new XmlOneProcessClusterManager(getConfigClassPath(DR_CLUSTER_CONFIG_XML));
        FullClusterTestResourcesFactory.getDrClusterResource().setClusterManager(drManager);
    }

    public static TestResources getMainClusterResource() {
        return MAIN_CLUSTER_RESOURCE;
    }

    public static TestResources getDrClusterResource() {
        return DR_CLUSTER_RESOURCE;
    }

    /** Rules applied around every test method: timeout plus per-test cleanup. */
    public static RuleChain getPerTestMethodRules() {
        return RuleChain
            .outerRule(new Timeout(PER_TEST_TIMEOUT))
            .around(MAIN_CLUSTER_RESOURCE.perTestMethodCleanupRule())
            .around(DR_CLUSTER_RESOURCE.perTestMethodCleanupRule())
            ;
    }

    /**
     * Class-level rule chain: temp folder, overall timeout, two embedded
     * Kafka clusters, then the super/main/DR cluster resources, outermost
     * first. Built once and cached.
     */
    public static RuleChain getAllResourcesRule(int timeout) {
        if (allResourcesRule == null) {
            allResourcesRule = RuleChain
                .outerRule(FOLDER)
                .around(new Timeout(timeout))
                .around(new EmbeddedKafkaRule(FOLDER, "mainKafka", 3, 2181, 9092))
                .around(new EmbeddedKafkaRule(FOLDER, "drKafka", 3, 2182, 9096))
                .around(SUPER_CLUSTER_RESOURCE)
                .around(MAIN_CLUSTER_RESOURCE)
                .around(DR_CLUSTER_RESOURCE);
        }
        return allResourcesRule;
    }

    // Classpath-relative location of a cluster config file.
    private static String getConfigClassPath(String configFile) {
        return REPLICATION_DIR + configFile;
    }
}
hcxxiaomo/ChCarAppNew_as
app/src/main/java/com/xiaomo/chcarappnew/adapt/CarIllegalHistoryResultInfoAdapter.java
<reponame>hcxxiaomo/ChCarAppNew_as package com.xiaomo.chcarappnew.adapt; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.TextView; import com.xiaomo.chcarappnew.R; import com.xiaomo.chcarappnew.util.BaseViewHolder; import com.xiaomo.db.model.CarIllegalInfo; import java.util.List; public class CarIllegalHistoryResultInfoAdapter extends BaseAdapter { private List<CarIllegalInfo> mListCarHisInfo = null; private Context mContext; public CarIllegalHistoryResultInfoAdapter( List<CarIllegalInfo> mListCarHisInfo, Context mContext) { super(); this.mListCarHisInfo = mListCarHisInfo; this.mContext = mContext; } @Override public int getCount() { return mListCarHisInfo.size(); } @Override public Object getItem(int position) { return mListCarHisInfo.get(position); } @Override public long getItemId(int position) { return position; } @Override public View getView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = LayoutInflater.from(mContext).inflate(R.layout.carnumber_illegal_list_item, parent,false); } CarIllegalInfo ci = mListCarHisInfo.get(position); TextView raid_carnumber = BaseViewHolder.get(convertView, R.id.raid_carnumber_illegal); TextView raid_illegalstring = BaseViewHolder.get(convertView, R.id.raid_illegalstring_illegal); TextView raid_isreport = BaseViewHolder.get(convertView, R.id.raid_isreport_illegal); TextView raid_time = BaseViewHolder.get(convertView, R.id.raid_time_illegal); raid_carnumber.setText(ci.carNumber); raid_illegalstring.setText(ci.illegalId.concat("-").concat(ci.illegalInfo)); if (ci.isReported == 1){ raid_isreport.setText("已上报"); }else { raid_isreport.setText("未上报"); } raid_time.setText(ci.createTime); return convertView; } }
bobosoft/intrepyd
intrepyd/tests/test_scr.py
"""
Copyright (C) 2017 <NAME> <<EMAIL>>

This file is distributed under the terms of the 3-clause BSD License.
A copy of the license can be found in the root directory or at
https://opensource.org/licenses/BSD-3-Clause.

Author: <NAME> <<EMAIL>>
Date: 27/03/2017
"""
import intrepyd
from intrepyd.engine import EngineResult
import intrepyd.scr
from . import traffic_light
from . import A7E_requirements
import os
import unittest


class TestScr(unittest.TestCase):
    """Reachability tests for SCR-derived Simulink circuits."""

    def test_scr_01(self):
        """BMC on the traffic-light circuit: within depth 4, exactly one of
        the four mutually-exclusive light states is reachable."""
        ctx = intrepyd.Context()
        myTrafficLight = traffic_light.SimulinkCircuit(ctx, 'MyTrafficLight')
        myTrafficLight.mk_circuit()
        lightOut = myTrafficLight.outputs['traffic_light/out']
        # One reachability target per possible light state.
        targetGreen = ctx.mk_eq(lightOut, myTrafficLight.nets['traffic_light/Green'], 'Green')
        targetYellow = ctx.mk_eq(lightOut, myTrafficLight.nets['traffic_light/Yellow'], 'Yellow')
        targetRed = ctx.mk_eq(lightOut, myTrafficLight.nets['traffic_light/Red'], 'Red')
        targetOff = ctx.mk_eq(lightOut, myTrafficLight.nets['traffic_light/Off'], 'Off')
        bmc = ctx.mk_bmc()
        bmc.add_target(targetGreen)
        bmc.add_target(targetYellow)
        bmc.add_target(targetRed)
        bmc.add_target(targetOff)
        bmc.set_allow_targets_at_any_depth()
        bmc.set_current_depth(4)
        res = bmc.reach_targets()
        self.assertEqual(res, EngineResult.REACHABLE)
        # The states are mutually exclusive, so a single BMC run reaches
        # exactly one of them.
        reachedTargets = list(bmc.get_last_reached_targets())
        self.assertEqual(1, len(reachedTargets))

    def test_scr_02(self):
        """Backward reachability on the A7E requirements model: the weapon
        delivery modes BOC/SBOC are unreachable while navigation is not in
        airborne-fly update."""
        ctx = intrepyd.Context()
        myA7E = A7E_requirements.SimulinkCircuit(ctx, 'A7E_requirements')
        myA7E.mk_circuit()
        navigationUpdateMode = myA7E.nets['A7E_requirements/NU/Mode']
        weaponDeliveryMode = myA7E.nets['A7E_requirements/WD/Mode']
        aflyUpdate = myA7E.nets['A7E_requirements/NU/AflyUpd']
        boc = myA7E.nets['A7E_requirements/WD/BOC']
        sboc = myA7E.nets['A7E_requirements/WD/SBOC']
        navAfly = ctx.mk_neq(navigationUpdateMode, aflyUpdate)
        wpnBoc = ctx.mk_eq(weaponDeliveryMode, boc)
        wpnSboc = ctx.mk_eq(weaponDeliveryMode, sboc)
        target1 = ctx.mk_and(navAfly, wpnBoc)
        target2 = ctx.mk_and(navAfly, wpnSboc)
        target = ctx.mk_or(target1, target2)
        # (Removed two unused locals that re-read WD/Mode and NU/Mode here.)
        br = ctx.mk_backward_reach()
        br.add_target(target)
        result = br.reach_targets()
        self.assertEqual(EngineResult.UNREACHABLE, result)


if __name__ == '__main__':
    unittest.main()
T3chy/otm-sim
src/main/java/output/animation/AbstractLaneGroupInfo.java
package output.animation; import common.AbstractLaneGroup; public abstract class AbstractLaneGroupInfo implements InterfaceLaneGroupInfo { public Long lg_id; public AbstractLaneGroupInfo(AbstractLaneGroup lg) { this.lg_id = lg.id; } }
traitor6789/osm-tile-server
osm2pgsql/src/output-null.cpp
/** * SPDX-License-Identifier: GPL-2.0-or-later * * This file is part of osm2pgsql (https://osm2pgsql.org/). * * Copyright (C) 2006-2022 by the osm2pgsql developer community. * For a full list of authors see the git log. */ #include "output-null.hpp" std::shared_ptr<output_t> output_null_t::clone(std::shared_ptr<middle_query_t> const &mid, std::shared_ptr<db_copy_thread_t> const &) const { return std::make_shared<output_null_t>(mid, m_thread_pool, *get_options()); } output_null_t::output_null_t(std::shared_ptr<middle_query_t> const &mid, std::shared_ptr<thread_pool_t> thread_pool, options_t const &options) : output_t(mid, std::move(thread_pool), options) {} output_null_t::~output_null_t() = default;
annumishra123/stage
client/modules/Stories/StoriesActions.js
import clientConfig from '../../config';
import axios from 'axios';

// Thunk: load all active shop stories and dispatch FETCH_STORIES.
export function fetchStories() {
    return function (dispatch) {
        // Loopback-style filter: only stories whose status flag is true.
        let loopbackFilter = {
            where: {
                status: true
            }
        };
        let url = clientConfig.targetURL + '/catalogv2/catalogv2/ShopStories?filter=' + JSON.stringify(loopbackFilter);
        return axios({
            url: url,
            timeout: 20000,
            method: 'get',
            responseType: 'json'
        }).then((response) => {
            dispatch({
                type: 'FETCH_STORIES',
                payload: response.data
            });
        }).catch((error) => {
            console.log(error);
        });
    }
}

// Thunk: load the unfiltered shop catalog.
// NOTE(review): `param` is accepted but never used in the URL — compare
// fetchFilterData below, which appends it; confirm this is intended.
export function fetchShopCatalog(param) {
    return function (dispatch) {
        let url = `${clientConfig.targetURL}/catalogv2/catalogv2/SaleProducts/filter`;
        return axios({
            url: url,
            timeout: 20000,
            method: 'get',
            responseType: 'json'
        }).then((response) => {
            dispatch({
                type: 'FETCH_SHOP_CATALOG',
                payload: response.data
            });
        }).catch((error) => {
            console.log(error);
        });
    }
}

// Thunk: load the catalog with `param` appended verbatim to the filter URL
// (expected to be a query string or path suffix).
export function fetchFilterData(param) {
    return function (dispatch) {
        let url = `${clientConfig.targetURL}/catalogv2/catalogv2/SaleProducts/filter${param}`;
        return axios({
            url: url,
            timeout: 20000,
            method: 'get',
            responseType: 'json'
        }).then((response) => {
            dispatch({
                type: 'FETCH_SHOP_CATALOG',
                payload: response.data
            });
        }).catch((error) => {
            console.log(error);
        });
    }
}

// Thunk: create a dynamic collection (store) from the given SKUs, then
// chain into createStories with the same params.
export function createNewStore(param) {
    return function (dispatch) {
        let url = `${clientConfig.targetURL}/catalogv2/catalogv2/ShopDynamicCollections`,
            storeRawData = {
                skus: param.skuList,
                title: param.title
            }
        return axios({
            url: url,
            timeout: 20000,
            method: 'post',
            responseType: 'json',
            data: storeRawData
        }).then((response) => {
            console.log(response.data);
            dispatch(createStories(param));
        }).catch((error) => {
            console.log(error);
        });
    }
}

// Thunk: upload the story image to cloudinary, then create the story record
// pointing at the uploaded image, then refresh the story list.
export function createStories(param) {
    return function (dispatch) {
        let cloudinaryUrl = `${clientConfig.targetURL}/catalogv2/catalogv2/SaleProducts/upload/cloudinary`,
            formData = new FormData();
        formData.append('image', param.image);
        return axios({
            url: cloudinaryUrl,
            timeout: 20000,
            method: 'post',
            responseType: 'json',
            data: formData,
            headers: {
                'Content-Type': 'multipart/form-data'
            }
        }).then((response) => {
            console.log(response);
            // Flatten the upload response into a single URL string; empty
            // string when the upload returned no entries.
            let imageUrl = response.data.length != 0 && response.data.map(i => Object.values(i)).toString() || "";
            let url = `${clientConfig.targetURL}/catalogv2/catalogv2/ShopStories`,
                storiesRawData = {
                    title: param.title,
                    type: param.type,
                    link: param.link,
                    image: imageUrl,
                    status: param.status
                }
            return axios({
                url: url,
                timeout: 20000,
                method: 'post',
                responseType: 'json',
                data: storiesRawData,
            }).then((response) => {
                console.log(response.data);
                dispatch(fetchStories());
            }).catch((error) => {
                console.log(error);
                alert('Story creation failed');
            });
        }).catch((error) => {
            console.log(error);
            alert('Image Upload Failed');
        });
    }
}

// Thunk: load every dynamic collection and dispatch FETCH_STORES.
export function getAllStores() {
    return function (dispatch) {
        let url = `${clientConfig.targetURL}/catalogv2/catalogv2/ShopDynamicCollections`;
        return axios({
            url: url,
            timeout: 20000,
            method: 'get',
            responseType: 'json'
        }).then(function (response) {
            dispatch({
                type: 'FETCH_STORES',
                payload: response.data
            })
        }).catch(function (error) {
            console.log(error);
        });
    }
}

// Thunk: load the seller list from the account backend; sends the stored
// JWT token when one exists in localStorage.
export function getAllSellers() {
    return function (dispatch) {
        let url = `/api/myaccount/profile/backend/get/sellers`;
        return axios({
            url: url,
            timeout: 20000,
            method: 'get',
            responseType: 'json',
            headers: {
                "Authorization": localStorage.getItem('token') ? 'JWT ' + localStorage.getItem('token') : null
            }
        }).then(function (response) {
            dispatch({
                type: 'FETCH_SELLERS',
                payload: response.data
            })
        }).catch(function (error) {
            console.log(error);
        });
    }
}

// Thunk: mark a story inactive, then refresh the story list.
// NOTE(review): the "/replace" endpoint is sent only { status: false } —
// if it replaces the whole record this drops the other fields; confirm the
// backend's replace semantics.
export function deactivateStory(id) {
    return function (dispatch) {
        let url = `${clientConfig.targetURL}/catalogv2/catalogv2/ShopStories/${id}/replace`,
            storiesRawData = {
                status: false
            }
        return axios({
            url: url,
            timeout: 20000,
            method: 'post',
            responseType: 'json',
            data: storiesRawData,
        }).then((response) => {
            console.log(response.data);
            dispatch(fetchStories());
        }).catch((error) => {
            console.log(error);
            alert('Story fails to disable');
        });
    }
}
kernt/infrakit
vendor/github.com/spiegela/gorackhd/client/tags/get_tag_responses.go
<gh_stars>1000+ package tags // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" strfmt "github.com/go-openapi/strfmt" "github.com/spiegela/gorackhd/models" ) // GetTagReader is a Reader for the GetTag structure. type GetTagReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the received o. func (o *GetTagReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewGetTagOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil case 404: result := NewGetTagNotFound() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return nil, result default: result := NewGetTagDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } if response.Code()/100 == 2 { return result, nil } return nil, result } } // NewGetTagOK creates a GetTagOK with default headers values func NewGetTagOK() *GetTagOK { return &GetTagOK{} } /*GetTagOK handles this case with default header values. Successfully retrieved information about the specified tag */ type GetTagOK struct { Payload GetTagOKBody } func (o *GetTagOK) Error() string { return fmt.Sprintf("[GET /tags/{tagName}][%d] getTagOK %+v", 200, o.Payload) } func (o *GetTagOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { // response payload if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { return err } return nil } // NewGetTagNotFound creates a GetTagNotFound with default headers values func NewGetTagNotFound() *GetTagNotFound { return &GetTagNotFound{} } /*GetTagNotFound handles this case with default header values. 
The tag name identifier was not found */ type GetTagNotFound struct { Payload *models.Error } func (o *GetTagNotFound) Error() string { return fmt.Sprintf("[GET /tags/{tagName}][%d] getTagNotFound %+v", 404, o.Payload) } func (o *GetTagNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.Error) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } // NewGetTagDefault creates a GetTagDefault with default headers values func NewGetTagDefault(code int) *GetTagDefault { return &GetTagDefault{ _statusCode: code, } } /*GetTagDefault handles this case with default header values. Unexpected error */ type GetTagDefault struct { _statusCode int Payload *models.Error } // Code gets the status code for the get tag default response func (o *GetTagDefault) Code() int { return o._statusCode } func (o *GetTagDefault) Error() string { return fmt.Sprintf("[GET /tags/{tagName}][%d] getTag default %+v", o._statusCode, o.Payload) } func (o *GetTagDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.Error) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } /*GetTagOKBody get tag o k body swagger:model GetTagOKBody */ type GetTagOKBody interface{}
yhswjtuILMARE/ILMARE
ILMARE/YHArticleWrapper.h
//
//  YHArticleWrapper.h
//  ILMARE
//
//  Created by yh_swjtu on 17/7/13.
//  Copyright © 2017 yh_swjtu. All rights reserved.
//
//  NOTE: a stray dataset marker ("<gh_stars>0") preceded this header in the
//  checked-in copy; it is not valid Objective-C and has been removed.

#import <Foundation/Foundation.h>

@class YHArticleInfo;

/// Pairs an article model object with a cached table-row height so the
/// height is computed once rather than on every layout pass.
@interface YHArticleWrapper : NSObject

/// The wrapped article model (forward-declared above).
@property (nonatomic, strong) YHArticleInfo *articleInfo;

/// Cached row height for this article.
/// NOTE(review): CGFloat is declared by CoreGraphics; this header presumably
/// relies on a prefix header or a UIKit import elsewhere — confirm.
@property (nonatomic, assign) CGFloat rowHeight;

@end
hillbw/exi-test
java/esiaisparser/src/testGenerator/src/main/java/nl/esi/metis/testgenerator/PositionAccuracySetter.java
package nl.esi.metis.testgenerator;

/**
 * Strategy interface for writing a position-accuracy value into a test bit
 * vector under construction.
 */
public interface PositionAccuracySetter {

	/**
	 * Writes the position-accuracy field into {@code tbv} over the bit range
	 * delimited by {@code from} and {@code to}.
	 * NOTE(review): whether {@code to} is inclusive is not visible here —
	 * confirm against {@code TestBitVectorConstructor}.
	 *
	 * @param tbv  the bit-vector constructor to write into
	 * @param from starting bit index of the field
	 * @param to   ending bit index of the field
	 */
	void setPositionAccuracy (TestBitVectorConstructor tbv , int from, int to);
}
SusanaParker/calcentral
src/redux/actions/eftEnrollmentActions.js
import axios from 'axios';

import {
  FETCH_EFT_ENROLLMENT_START,
  FETCH_EFT_ENROLLMENT_SUCCESS,
  FETCH_EFT_ENROLLMENT_FAILURE,
} from '../action-types';

// Action creator: marks the EFT-enrollment fetch as in flight.
export const fetchEftEnrollmentStart = () => ({
  type: FETCH_EFT_ENROLLMENT_START,
});

// Action creator: stores the fetched EFT-enrollment payload.
export const fetchEftEnrollmentSuccess = links => ({
  type: FETCH_EFT_ENROLLMENT_SUCCESS,
  value: links,
});

// Action creator: records a fetch failure ({status, statusText}).
export const fetchEftEnrollmentFailure = error => ({
  type: FETCH_EFT_ENROLLMENT_FAILURE,
  value: error,
});

/**
 * Thunk: fetches the current user's EFT enrollment exactly once.
 * If the slice is already loaded or loading, resolves immediately with the
 * cached state instead of issuing another request.
 */
export const fetchEftEnrollment = () => {
  return (dispatch, getState) => {
    const { myEftEnrollment } = getState();

    if (myEftEnrollment.loaded || myEftEnrollment.isLoading) {
      // Cache hit: hand callers a promise so the call site shape is uniform.
      return new Promise((resolve, _reject) => resolve(myEftEnrollment));
    } else {
      dispatch(fetchEftEnrollmentStart());

      return axios
        .get(`/api/my/eft_enrollment`)
        .then(response => {
          dispatch(fetchEftEnrollmentSuccess(response.data));
        })
        .catch(error => {
          // NOTE(review): only HTTP errors (error.response present) are
          // dispatched; network-level failures are silently swallowed and
          // leave the slice stuck in the loading state — confirm intended.
          if (error.response) {
            const failure = {
              status: error.response.status,
              statusText: error.response.statusText,
            };
            dispatch(fetchEftEnrollmentFailure(failure));
          }
        });
    }
  };
};
45258E9F/IntPTI
src/org/sosy_lab/cpachecker/cpa/smg/objects/generic/SMGEdgeHasValueTemplateWithConcreteValue.java
/*
 * IntPTI: integer error fixing by proper-type inference
 * Copyright (c) 2017.
 *
 * Open-source component:
 *
 * CPAchecker
 * Copyright (C) 2007-2014  <NAME>
 *
 * Guava: Google Core Libraries for Java
 * Copyright (C) 2010-2006  Google
 *
 *
 */
package org.sosy_lab.cpachecker.cpa.smg.objects.generic;

import org.sosy_lab.cpachecker.cfa.types.c.CType;

/**
 * A has-value edge template that additionally carries a concrete value:
 * describes a field (object template + offset + C type) together with the
 * value stored there.
 *
 * <p>Interface members are implicitly {@code public}; the redundant
 * modifiers present in the original have been removed.
 */
public interface SMGEdgeHasValueTemplateWithConcreteValue {

  /** Returns the object template this edge belongs to. */
  SMGObjectTemplate getObjectTemplate();

  /** Returns the concrete value stored at the described field. */
  int getValue();

  /**
   * Returns the field offset within the object template.
   * NOTE(review): the unit (bits vs. bytes) is not visible here — confirm
   * at call sites.
   */
  int getOffset();

  /** Returns the C type of the described field. */
  CType getType();
}
zhoupan/cuba
modules/gui/src/com/haulmont/cuba/gui/sys/FragmentHelper.java
/*
 * Copyright (c) 2008-2019 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.haulmont.cuba.gui.sys;

import com.haulmont.cuba.core.global.BeanLocator;
import com.haulmont.cuba.core.global.DevelopmentException;
import com.haulmont.cuba.core.global.Scripting;
import com.haulmont.cuba.gui.components.AbstractFrame;
import com.haulmont.cuba.gui.components.AbstractWindow;
import com.haulmont.cuba.gui.components.Fragment;
import com.haulmont.cuba.gui.components.Frame;
import com.haulmont.cuba.gui.components.compatibility.LegacyFragmentAdapter;
import com.haulmont.cuba.gui.config.WindowAttributesProvider;
import com.haulmont.cuba.gui.config.WindowInfo;
import com.haulmont.cuba.gui.logging.ScreenLifeCycle;
import com.haulmont.cuba.gui.screen.FrameOwner;
import com.haulmont.cuba.gui.screen.MapScreenOptions;
import com.haulmont.cuba.gui.screen.ScreenFragment;
import com.haulmont.cuba.gui.screen.ScreenOptions;
import com.haulmont.cuba.gui.xml.layout.ComponentLoader;
import com.haulmont.cuba.gui.xml.layout.ScreenXmlLoader;
import com.haulmont.cuba.gui.xml.layout.loaders.ComponentLoaderContext;
import org.apache.commons.lang3.StringUtils;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.perf4j.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import javax.inject.Inject;
import java.lang.reflect.InvocationTargetException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.haulmont.cuba.gui.ComponentsHelper.getFullFrameId;
import static com.haulmont.cuba.gui.logging.UIPerformanceLogger.createStopWatch;
import static com.haulmont.cuba.gui.screen.UiControllerUtils.fireEvent;
import static org.apache.commons.lang3.reflect.ConstructorUtils.invokeConstructor;

/**
 * Provides shared functionality for fragment initialization from XML and programmatic creation.
 */
@Component(FragmentHelper.NAME)
@ParametersAreNonnullByDefault
public class FragmentHelper {

    private static final Logger log = LoggerFactory.getLogger(FragmentHelper.class);

    @Inject
    protected ScreenXmlLoader screenXmlLoader;
    @Inject
    protected Scripting scripting;

    // Spring bean name, referenced by the @Component annotation above.
    public static final String NAME = "cuba_FragmentHelper";

    /**
     * Instantiates the controller declared by {@code windowInfo} via its
     * no-arg constructor and binds it to {@code fragment}.
     *
     * <p>Legacy {@link AbstractWindow} controllers are wrapped in a
     * {@link LegacyFragmentAdapter} (with a warning); otherwise the class
     * must implement {@link ScreenFragment}.
     *
     * @throws DevelopmentException  if a legacy controller cannot be instantiated
     * @throws IllegalStateException if the class is neither legacy nor ScreenFragment
     */
    @SuppressWarnings("unchecked")
    public ScreenFragment createController(WindowInfo windowInfo, Fragment fragment) {
        Class screenClass = windowInfo.getControllerClass();

        if (AbstractWindow.class.isAssignableFrom(screenClass)) {
            AbstractWindow legacyScreen;
            try {
                // Reflective no-arg construction of the legacy controller.
                legacyScreen = (AbstractWindow) invokeConstructor(screenClass);
            } catch (NoSuchMethodException | IllegalAccessException
                    | InvocationTargetException | InstantiationException e) {
                throw new DevelopmentException("Unable to create " + screenClass);
            }
            LegacyFragmentAdapter adapter = new LegacyFragmentAdapter(legacyScreen);

            // Both the legacy screen and its adapter reference the same fragment.
            legacyScreen.setFrame(fragment);
            adapter.setWrappedFrame(fragment);

            log.warn(
                    "Fragment class '{}' should not be inherited from AbstractWindow. " +
                            "It may cause problems with controller life cycle. " +
                            "Fragment controllers should inherit ScreenFragment.",
                    screenClass.getSimpleName());

            return adapter;
        }

        // new screens cannot be opened in fragments
        if (!ScreenFragment.class.isAssignableFrom(screenClass)) {
            throw new IllegalStateException(
                    String.format("Fragment controllers should inherit ScreenFragment."
                            + " UI controller is not ScreenFragment - %s %s",
                            windowInfo.toString(), screenClass.getSimpleName()));
        }
        ScreenFragment controller;
        try {
            controller = (ScreenFragment) invokeConstructor(screenClass);
        } catch (NoSuchMethodException | InstantiationException
                | IllegalAccessException | InvocationTargetException e) {
            throw new RuntimeException("Unable to create instance of screen class " + screenClass);
        }

        return controller;
    }

    /**
     * Derives a messages-pack name from an XML descriptor path:
     * strips the file name, turns {@code /} into {@code .}, and drops a
     * leading dot.
     */
    public String getMessagePack(String descriptorPath) {
        if (descriptorPath.contains("/")) {
            descriptorPath = StringUtils.substring(descriptorPath, 0, descriptorPath.lastIndexOf("/"));
        }

        String messagesPack = descriptorPath.replace("/", ".");
        int start = messagesPack.startsWith(".") ? 1 : 0;
        messagesPack = messagesPack.substring(start);
        return messagesPack;
    }

    /**
     * Builds a synthetic {@link WindowInfo} of type FRAGMENT for a fragment
     * declared inline by template path rather than registered in screens.xml.
     * The controller class is taken from the descriptor's {@code class}
     * attribute, falling back to {@link AbstractFrame} when absent.
     */
    @SuppressWarnings("unchecked")
    public WindowInfo createFakeWindowInfo(String src, String fragmentId) {
        Element screenElement = DocumentHelper.createElement("screen");
        screenElement.addAttribute("template", src);
        screenElement.addAttribute("id", fragmentId);

        Element windowElement = screenXmlLoader.load(src, fragmentId, Collections.emptyMap());
        Class<? extends ScreenFragment> fragmentClass;

        String className = windowElement.attributeValue("class");
        if (StringUtils.isNotEmpty(className)) {
            fragmentClass = (Class<? extends ScreenFragment>) scripting.loadClassNN(className);
        } else {
            // No explicit controller class: fall back to the legacy base frame.
            fragmentClass = AbstractFrame.class;
        }

        return new WindowInfo(fragmentId, new WindowAttributesProvider() {
            @Override
            public WindowInfo.Type getType(WindowInfo wi) {
                return WindowInfo.Type.FRAGMENT;
            }

            @Override
            public String getTemplate(WindowInfo wi) {
                return src;
            }

            @Nonnull
            @Override
            public Class<? extends FrameOwner> getControllerClass(WindowInfo wi) {
                return fragmentClass;
            }

            @Override
            public WindowInfo resolve(WindowInfo windowInfo) {
                return windowInfo;
            }
        }, screenElement);
    }

    /**
     * Deferred loader task that runs dependency injection on a fragment's
     * controller, timing the phase with a perf4j stopwatch.
     */
    public static class FragmentLoaderInjectTask implements ComponentLoader.InjectTask {
        protected Fragment fragment;
        protected ScreenOptions options;
        protected BeanLocator beanLocator;

        public FragmentLoaderInjectTask(Fragment fragment, ScreenOptions options, BeanLocator beanLocator) {
            this.fragment = fragment;
            this.options = options;
            this.beanLocator = beanLocator;
        }

        @Override
        public void execute(ComponentLoader.ComponentContext windowContext, Frame window) {
            String loggingId = getFullFrameId(this.fragment);
            StopWatch injectStopWatch = createStopWatch(ScreenLifeCycle.INJECTION, loggingId);

            FrameOwner controller = fragment.getFrameOwner();

            // Run every registered dependency injector against the controller.
            beanLocator.getAll(ControllerDependencyInjector.class).values()
                    .forEach(uiControllerDependencyInjector ->
                            uiControllerDependencyInjector.inject(new ControllerDependencyInjector.InjectionContext(controller, options))
                    );

            injectStopWatch.stop();
        }
    }

    /**
     * Deferred loader task that fires the fragment's Init / AfterInit /
     * Attach lifecycle events, handles legacy-screen compatibility, and
     * injects declared UI controller properties.
     */
    public static class FragmentLoaderInitTask implements ComponentLoader.InitTask {
        protected Fragment fragment;
        protected ScreenOptions options;
        protected ComponentLoaderContext fragmentLoaderContext;
        protected BeanLocator beanLocator;

        public FragmentLoaderInitTask(Fragment fragment, ScreenOptions options,
                                      @Nullable ComponentLoaderContext fragmentLoaderContext, BeanLocator beanLocator) {
            this.fragment = fragment;
            this.options = options;
            this.fragmentLoaderContext = fragmentLoaderContext;
            this.beanLocator = beanLocator;
        }

        @Override
        public void execute(ComponentLoader.ComponentContext windowContext, Frame window) {
            String loggingId = getFullFrameId(this.fragment);
            StopWatch stopWatch = createStopWatch(ScreenLifeCycle.INIT, loggingId);

            ScreenFragment frameOwner = fragment.getFrameOwner();

            fireEvent(frameOwner, ScreenFragment.InitEvent.class,
                    new ScreenFragment.InitEvent(frameOwner, options));

            // compatibility with old screens in frames
            if (frameOwner instanceof LegacyFragmentAdapter) {
                Map<String, Object> params = new HashMap<>(0);
                if (options instanceof MapScreenOptions) {
                    params = ((MapScreenOptions) options).getParams();
                }
                ((LegacyFragmentAdapter) frameOwner).init(params);
            }

            stopWatch.stop();

            fireEvent(frameOwner, ScreenFragment.AfterInitEvent.class,
                    new ScreenFragment.AfterInitEvent(frameOwner, options));

            // Inject properties declared on the fragment element, if any.
            if (fragmentLoaderContext != null) {
                List<UiControllerProperty> properties = fragmentLoaderContext.getProperties();
                if (!properties.isEmpty()) {
                    UiControllerPropertyInjector propertyInjector = beanLocator.getPrototype(UiControllerPropertyInjector.NAME,
                            frameOwner, properties);
                    propertyInjector.inject();
                }
            }

            FragmentContextImpl fragmentContext = (FragmentContextImpl) fragment.getContext();
            fragmentContext.setInitialized(true);

            // fire attached
            if (!fragmentContext.isManualInitRequired()) {
                fireEvent(frameOwner, ScreenFragment.AttachEvent.class,
                        new ScreenFragment.AttachEvent(frameOwner));
            }
        }
    }
}
ycdfwzy/NaiveDB
src/main/java/org/naivedb/Statement/StatementInsert.java
package org.naivedb.Statement;

import org.naivedb.Database.Database;
import org.naivedb.Table.Table;
import org.naivedb.utils.NDException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;

/**
 * Executable representation of a SQL INSERT statement.
 *
 * <p>Improvement over the original: commented-out debug code (a hard-coded
 * value probe and a disabled {@code targetTable.close()}) and an unused
 * commented-out field were removed; behavior is unchanged.
 */
public class StatementInsert {
    private String targetTableName;
    // One inner list per row of values to insert.
    private LinkedList<LinkedList> valueList;
    // Explicit column-name list; null means values follow the table's column order.
    private LinkedList<String> attrList;

    public StatementInsert(String targetTableName, LinkedList<LinkedList> valueList) {
        this.valueList = valueList;
        this.targetTableName = targetTableName;
        this.attrList = null;
    }

    public StatementInsert(String targetTableName, LinkedList<String> attrList, LinkedList<LinkedList> valueList) {
        this.targetTableName = targetTableName;
        this.attrList = attrList;
        this.valueList = valueList;
    }

    /**
     * Executes the insert operation.
     *
     * @param db current database; must not be null
     * @return an {@code ExecResult} with a single "Update_Count" column
     *         holding the number of inserted rows
     * @throws NDException if no database is in use or a column name is unknown
     * @throws IOException on storage errors
     */
    public ExecResult exec(Database db) throws IOException, NDException {
        if (db == null)
            throw new NDException("not using any database");
        Table targetTable = db.getTable(targetTableName);

        if (this.attrList != null) {
            // An explicit column list was given: expand each provided row into a
            // full-width row following the table's own column order, leaving
            // unspecified columns as null.
            LinkedList<LinkedList> valueList = this.valueList;
            this.valueList = new LinkedList<>();
            ArrayList<String> colNames = targetTable.getColNames();
            for (int i = 0; i < valueList.size(); ++i) {
                LinkedList newCol = new LinkedList();
                for (int j = 0; j < colNames.size(); ++j)
                    newCol.add(null);
                this.valueList.add(newCol);
            }
            for (int j = 0; j < this.attrList.size(); ++j) {
                String attr = this.attrList.get(j);
                int idx = colNames.indexOf(attr);
                if (idx >= 0) {
                    for (int i = 0; i < valueList.size(); ++i) {
                        LinkedList values = this.valueList.get(i);
                        values.set(idx, valueList.get(i).get(j));
                    }
                } else {
                    throw new NDException("Unknown column name '" + attr + "'!");
                }
            }
        }

        LinkedList<String> tableHeader = new LinkedList<>();
        tableHeader.add("Update_Count");
        ExecResult execResult = new ExecResult(tableHeader);

        int succeed = 0;
        for (LinkedList value : this.valueList) {
            targetTable.insert(value);
            succeed += 1;
        }

        LinkedList val = new LinkedList();
        val.add(succeed);
        execResult.insert(val);
        return execResult;
    }
}
lffranca/gonga
domain/route/route_gwt_repository.go
<reponame>lffranca/gonga package route import ( "context" "github.com/lffranca/gonga/domain" ) type RGatewayRepository interface { List(ctx context.Context, gateway *domain.Gateway, size *int, offset *string, tags []*string, matchAllTags *bool) ([]*domain.Route, *domain.Option, error) }
shubhamg931/datahub
datahub-web/@datahub/shared/app/components/forms/action-drawer.js
// Re-export the shared action-drawer form component so the app namespace
// resolves it from the addon.
// NOTE: a stray dataset marker ("<gh_stars>1-10") preceded this statement in
// the checked-in copy; it is not valid JavaScript and has been removed.
export { default } from '@datahub/shared/components/forms/action-drawer';
GrowMoi/haedus
app/controllers/api/payments_controller.rb
module Api
  # Payment webhook-style endpoints that provision tutor accounts and
  # add students to an existing tutor, then notify the tutor by email.
  class PaymentsController < BaseController
    api :POST, "/payments/tutor_basic_account", "Create tutor account by payment method"
    param :total, Integer
    param :source, String
    param :payment_id, String
    param :code_item, String
    param :quantity, Float
    param :name, String
    param :email, String
    def tutor_basic_account
      tutor_isValid = !params[:name].blank? && !params[:email].blank?
      payment_isValid = validate_params(params)
      product_key = Rails.application.secrets.basic_account_tutor_key
      product = Product.where(code: params[:code_item], key: product_key).first
      isValidCode = !product.nil?

      if (tutor_isValid && payment_isValid && isValidCode)
        # Restored call to the private generate_password helper below: the
        # checked-in copy contained a redacted "<PASSWORD>" placeholder here,
        # and the helper was otherwise unused. The generated password is
        # emailed to the tutor via TutorMailer.
        user = User.new(name: params[:name],
                        email: params[:email],
                        username: generate_username,
                        password: generate_password,
                        role: "tutor_familiar")
        if user.save
          # account and 1 student free
          product_key = Rails.application.secrets.add_client_to_tutor_key
          product_add_student = Product.find_by_key(product_key)
          payment_products = [
            {
              total: params[:total],
              source: params[:source],
              payment_id: params[:payment_id],
              code_item: params[:code_item],
              user_id: user.id,
              product_id: product.id,
              quantity: 1
            },
            {
              total: 0,
              source: params[:source],
              payment_id: params[:payment_id],
              code_item: product_add_student.code,
              user_id: user.id,
              product_id: product_add_student.id,
              quantity: 1
            }
          ]
          payment_products.map { |payment| Payment.new(payment).save }
          TutorMailer.payment_account(user.name, user.password, user.email).deliver_now
          render text: "Valid payment", status: :accepted
        else
          render text: user.errors.full_messages, status: :unprocessable_entity, errors: user.errors.full_messages
        end
      else
        render text: "invalid payment", status: :unprocessable_entity
      end
    end

    api :POST, "/payments/add_students", "Allow to add students at the tutor list by payment method"
    param :total, Integer
    param :source, String
    param :payment_id, String
    param :code_item, String
    param :quantity, Float
    param :name, String
    param :email, String
    def add_students
      tutor_isValid = !params[:email].blank?
      payment_isValid = validate_params(params) && !params[:quantity].blank?
      product_key = Rails.application.secrets.add_client_to_tutor_key
      product = Product.where(code: params[:code_item], key: product_key).first
      isValidCode = !product.nil?

      if (tutor_isValid && payment_isValid && isValidCode)
        user = User.find_by_email(params[:email])
        if user
          payment = Payment.new(payment_params)
          payment.user = user
          payment.product = product
          payment.save
          TutorMailer.payment_add_student(user.name, user.email, params[:quantity]).deliver_now
          render text: "Valid payment", status: :accepted
        else
          render nothing: true, status: 404
        end
      else
        render text: "invalid payment", status: :unprocessable_entity
      end
    end

    private

    # Strong-parameter whitelist for the Payment record.
    def payment_params
      params.require(:payment).permit(
        :total,
        :source,
        :payment_id,
        :code_item,
        :user_id,
        :quantity
      )
    end

    # Derives a username from the e-mail plus a random suffix.
    def generate_username
      username = "moi-" + params[:email].parameterize + rand(1000).to_s
    end

    # Random 10-character initial password (Devise token).
    def generate_password
      password = Devise.friendly_token.first(10)
    end

    # Both payment identifiers must be present for any payment endpoint.
    def validate_params(params)
      return !params[:payment_id].blank? && !params[:code_item].blank?
    end
  end
end
PaloAltoNetworks/cortex-xpanse-python-sdk
xpanse/api/issues/v1/issues.py
<reponame>PaloAltoNetworks/cortex-xpanse-python-sdk from typing import Any, Dict, List, Tuple from xpanse.const import V1_PREFIX from xpanse.endpoint import ExEndpoint from xpanse.error import UnexpectedValueError from xpanse.iterator import ExResultIterator VALID_UPDATE_TYPES = { "Assignee", "Comment", "Priority", "ProgressStatus", } class IssuesEndpoint(ExEndpoint): """ Part of the Issues V1 API. See: https://api.expander.expanse.co/api/v1/docs/ """ def list(self, **kwargs: Any) -> ExResultIterator: """ This endpoint will return a paginated list of issues. Args: limit (int, optional): Returns at most this many results in a single api call. Default is 100, max is 10000. pageToken (str, optional): Page token for pagination. contentSearch (str, optional): Returns only results whose contents match the given query. providerId (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. providerName (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. businessUnitId (str, optional): Comma-separated string; Returns only results whose Business Unit's ID falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. businessUnitName (str, optional): Comma-separated string; Returns only results whose Business Unit's name falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. assigneeUsername (str, optional): Comma-separated string; Returns only results whose assignee's username matches one of the given usernames. Use "Unassigned" to fetch issues that are not assigned to any user. issueTypeId (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. 
issueTypeName (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. inetSearch (str, optional): Search for results in a given IP/CIDR block using a single IP (d.d.d.d), a dashed IP range (d.d.d.d-d.d.d.d), a CIDR block (d.d.d.d/m), a partial CIDR (d.d.), or a wildcard (d.d.*.d). Returns results whose identifier includes an IP matching the query. domainSearch (str, optional): Search for a a given domain value via substring match. Returns results whose identifier includes a domain matching the query. portNumber (str, optional): Comma-separated string; Returns only results whose identifier includes one of the given port numbers. progressStatus (str, optional): Comma-separated string; Returns only results whose progress status matches one of the given values. Allowed values are `New`, `Investigating`, `InProgress`, `AcceptableRisk`, `Resolved`. activityStatus (str, optional): Comma-separated string; Returns only results whose activity status matches one of the given values. Allowed values are `Active` and `Inactive`. priority (str, optional): Comma-separated string; Returns only results whose priority matches one of the given values. Allowed values are `Critical`, `High`, `Medium`, and `Low`. tagName (str, optional): Comma-separated string; Returns only results that are associated with the provided tags. The tag name should be used here rather than the tag id. tagId (str, optional): Comma-separated string; Returns only results that are associated with the provided tags. The tag id should be used here rather than the tag name. createdAfter (str, optional): Returns only results created after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). createdBefore (str, optional): Returns only results created before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedAfter (str, optional): Returns only results modified after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). 
modifiedBefore (str, optional): Returns only results modified before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). countryCode (str, optional): Comma-separated string of ISO-3166 two character country codes; Returns any assets with an IP located in a country in the provided set. cloudManagementStatus (str, optional): Comma-separated string; Returns only results whose cloud management status matches one of the given values. Allowed values are `NotApplicable`, `ManagedCloud`, `UnmanagedCloud`. sort (str, optional): Sort by specified properties. Allowed values are `created`, `-created`, `modified`, `-modified`, `assigneeUsername`, `-assigneeUsername`, `priority`, `-priority`, `progressStatus`, `-progressStatus`, `activityStatus`, `-activityStatus`, `headline`, and `-headline`. Returns: :obj:`ExResultIterator`: An iterator containing all of the issues results. Results can be iterated or called by page using `<iterator>.next()`. Examples: >>> # Return all issues dumped to a list: >>> bus = client.issues.issues.list().dump() """ return ExResultIterator(self._api, f"{V1_PREFIX}/issues/issues", kwargs) def count(self, **kwargs: Any) -> Dict[str, Any]: """ Get a count of issues. Returns the total count of issues matching the provided filters, up to 10K. Args: contentSearch (str, optional): Returns only results whose contents match the given query. providerId (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. providerName (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. businessUnitId (str, optional): Comma-separated string; Returns only results whose Business Unit's ID falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. 
businessUnitName (str, optional): Comma-separated string; Returns only results whose Business Unit's name falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. assigneeUsername (str, optional): Comma-separated string; Returns only results whose assignee's username matches one of the given usernames. Use "Unassigned" to fetch issues that are not assigned to any user. issueTypeId (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. issueTypeName (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. inetSearch (str, optional): Search for results in a given IP/CIDR block using a single IP (d.d.d.d), a dashed IP range (d.d.d.d-d.d.d.d), a CIDR block (d.d.d.d/m), a partial CIDR (d.d.), or a wildcard (d.d.*.d). Returns results whose identifier includes an IP matching the query. domainSearch (str, optional): Search for a a given domain value via substring match. Returns results whose identifier includes a domain matching the query. portNumber (str, optional): Comma-separated string; Returns only results whose identifier includes one of the given port numbers. progressStatus (str, optional): Comma-separated string; Returns only results whose progress status matches one of the given values. Allowed values are `New`, `Investigating`, `InProgress`, `AcceptableRisk`, `Resolved`. activityStatus (str, optional): Comma-separated string; Returns only results whose activity status matches one of the given values. Allowed values are `Active` and `Inactive`. priority (str, optional): Comma-separated string; Returns only results whose priority matches one of the given values. Allowed values are `Critical`, `High`, `Medium`, and `Low`. tag (str, optional): Comma-separated string; Returns only results that are associated with the provided tags. 
The tag name should be used here rather than the tag id. createdAfter (str, optional): Returns only results created after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). createdBefore (str, optional): Returns only results created before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedAfter (str, optional): Returns only results modified after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedBefore (str, optional): Returns only results modified before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). cloudManagementStatus (str, optional): Comma-separated string; Returns only results whose cloud management status matches one of the given values. Allowed values are `NotApplicable`, `ManagedCloud`, `UnmanagedCloud`. Returns: :obj:`dict`: A dictionary containing count and overflow details. Examples: >>> # Return total issue count for assets with the `validated` tag. >>> bus = client.issues.issues.v1.count(tag="validated") """ return self._api.get(f"{V1_PREFIX}/issues/issues/count", params=kwargs).json() def counts(self, include="issueTypeId", **kwargs): """ Get bulk counts of issues. Returns the counts of issues matching the provided filters for each value of the fields specified by the include parameter, up to 100 per value. At this time, the only supported value for the include parameter is issueTypeId. Filters for the field specified by the include parameter are ignored when computing counts for that field. Args: include (str): Comma-separated string; Include counts for all possible values of the provided fields. Allowed values are `issueTypeId`. contentSearch (str, optional): Returns only results whose contents match the given query. providerId (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. providerName (str, optional): Comma-separated string; Returns only results that were found on the given providers. 
If not set, results will include anything regardless of provider status. businessUnitId (str, optional): Comma-separated string; Returns only results whose Business Unit's ID falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. businessUnitName (str, optional): Comma-separated string; Returns only results whose Business Unit's name falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. assigneeUsername (str, optional): Comma-separated string; Returns only results whose assignee's username matches one of the given usernames. Use "Unassigned" to fetch issues that are not assigned to any user. issueTypeId (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. issueTypeName (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. inetSearch (str, optional): Search for results in a given IP/CIDR block using a single IP (d.d.d.d), a dashed IP range (d.d.d.d-d.d.d.d), a CIDR block (d.d.d.d/m), a partial CIDR (d.d.), or a wildcard (d.d.*.d). Returns results whose identifier includes an IP matching the query. domainSearch (str, optional): Search for a a given domain value via substring match. Returns results whose identifier includes a domain matching the query. portNumber (str, optional): Comma-separated string; Returns only results whose identifier includes one of the given port numbers. progressStatus (str, optional): Comma-separated string; Returns only results whose progress status matches one of the given values. Allowed values are `New`, `Investigating`, `InProgress`, `AcceptableRisk`, `Resolved`. activityStatus (str, optional): Comma-separated string; Returns only results whose activity status matches one of the given values. Allowed values are `Active` and `Inactive`. 
priority (str, optional): Comma-separated string; Returns only results whose priority matches one of the given values. Allowed values are `Critical`, `High`, `Medium`, and `Low`. tag (str, optional): Comma-separated string; Returns only results that are associated with the provided tags. The tag name should be used here rather than the tag id. createdAfter (str, optional): Returns only results created after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). createdBefore (str, optional): Returns only results created before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedAfter (str, optional): Returns only results modified after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedBefore (str, optional): Returns only results modified before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). cloudManagementStatus (str, optional): Comma-separated string; Returns only results whose cloud management status matches one of the given values. Allowed values are `NotApplicable`, `ManagedCloud`, `UnmanagedCloud`. Returns: :obj:`dict`: A dictionary containing field count and overflow details. Examples: >>> # Return total issue counts >>> bus = client.issues.issues.v1.counts() """ kwargs["include"] = include return self._api.get(f"{V1_PREFIX}/issues/issues/counts", params=kwargs).json() def get(self, id, **kwargs): """ Returns the details for a given Issue. Arguments should be passed as keyword args using the names below. Args: id (str): ID of the requested issue. Returns: :obj:`dict`: A dictionary containing all of the details about the issue. Examples: >>> # Return Issue. >>> issue = client.issues.issues.get(<id>) """ return self._api.get(f"{V1_PREFIX}/issues/issues/{id}", params=kwargs).json() def get_updates(self, id: str, **kwargs: Any) -> ExResultIterator: """ Returns the issue updates for a specified issue. Arguments should be passed as keyword args using the names below. Args: id (str): ID of the requested issue. 
limit (int, optional): Returns at most this many results in a single api call. Default is 100, max is 10000. pageToken (str, optional): Page token for pagination. Returns: :obj:`dict`: A dictionary containing all of the details about the issue's updates. Examples: >>> # Return updates for issue and dump to list. >>> issue_updates = client.issues.issues.get_updates(<id>).dump() """ return ExResultIterator( self._api, f"{V1_PREFIX}/issues/issues/{id}/updates", kwargs ) def get_update(self, id: str, update_id: str, **kwargs: Any) -> Dict[str, Any]: """ Returns the update details for a specified issue and update. Arguments should be passed as keyword args using the names below. Args: id (str): ID of the requested issue. update_id (str): ID of the requested issue update. Returns: :obj:`dict`: A dictionary containing all of the details about the issue update. Examples: >>> # Return issue update details >>> issue_update = client.issues.issues.get_update(id=<id>, update_id=<update_id>) """ return self._api.get( f"{V1_PREFIX}/issues/issues/{id}/updates/{update_id}", params=kwargs ).json() def update( self, id: str, value: str, updateType: str, **kwargs: Any ) -> Dict[str, Any]: """ Make an update to an issue. Requires a value and an updateType. Valid updateType values include: `Assignee`, `Comment`, `Priority`, or `ProgressStatus`. Valid values will vary based on the updateType, with some being limited and others being open text fields. Assignee - `value` must be an existing username. Comment - `value` can be any plaintext. Priority - `value` can be `Low`, `Medium`, `High`, or `Critical` ProgressStatus - `value` can be `New`, `Investigating`, `InProgress`, `AcceptableRisk`, or `Resolved` Args: id (str): ID of the requested issue. value (str): The value to be updated. updateType (str): The update type to be performed. Allowed types are `Assignee`, `Comment`, `Priority`, or `ProgressStatus`. 
Returns: :obj:`dict`: A dictionary containing all of the details about the issue's updates. Examples: >>> # Return updates for issue and dump to list. >>> updates = client.issues.issues.update(id=<id>, value=<username>, updateType="Assignee") """ if id is None or value is None or updateType is None: raise UnexpectedValueError("A required update value was missing") if updateType not in VALID_UPDATE_TYPES: raise UnexpectedValueError( f"updateType '{updateType}' was not found in list of allowed types: {VALID_UPDATE_TYPES}" ) payload = {"value": value, "updateType": updateType} return self._api.post( f"{V1_PREFIX}/issues/issues/{id}/updates", json=payload ).json() def bulk_update( self, updates: List[Tuple[str, str, str]], **kwargs: Any ) -> Dict[str, Any]: """ Makes updates to multiple issues. Requires an id, value, and updateType for each issue. Valid updateType values include: `Assignee`, `Comment`, `Priority`, or `ProgressStatus`. Valid values will vary based on the updateType, with some being limited and others being open text fields. Assignee - `value` must be an existing username. Comment - `value` can be any plaintext. Priority - `value` can be `Low`, `Medium`, `High`, or `Critical` ProgressStatus - `value` can be `New`, `Investigating`, `InProgress`, `AcceptableRisk`, or `Resolved` Args: updates (list): A list of tuples containing (`id`, `value`, `updateType`), where id is the issue ID, value is the assigned value and updateType is one of the following values `Assignee`, `Comment`, `Priority`, `ProgressStatus`, or `ActivityStatus`. Returns: :obj:`dict`: A dictionary containing details about the bulk update's execution. Examples: >>> # Return updates for issue and dump to list. 
>>> response = client.issues.issues.bulk_update([('7320d57a-a39f-3bae-beae-9e56b1cf95cc', 'InProgress', 'ProgressStatus')]) """ payload = [] for items in updates: id_, value, updateType = items if id_ is None or value is None or updateType is None: raise UnexpectedValueError(f"A required update value was missing") if updateType not in VALID_UPDATE_TYPES: raise UnexpectedValueError( f"updateType '{updateType}' was not found in list of allowed types: {VALID_UPDATE_TYPES}" ) payload.append( { "issueId": id_, "updateRequest": {"value": value, "updateType": updateType}, "operationType": "Update", } ) return self._api.post( f"{V1_PREFIX}/issues/issues/bulk", json={"operations": payload} ).json() def csv(self, file: str, **kwargs: Any) -> bool: """ This endpoint will export a filtered list of issues to csv. Args: file (str): The name of the returned CSV file. limit (int, optional): Returns at most this many results in a single api call. Default is 100, max is 10000. pageToken (str, optional): Page token for pagination. contentSearch (str, optional): Returns only results whose contents match the given query. providerId (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. providerName (str, optional): Comma-separated string; Returns only results that were found on the given providers. If not set, results will include anything regardless of provider status. businessUnitId (str, optional): Comma-separated string; Returns only results whose Business Unit's ID falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. businessUnitName (str, optional): Comma-separated string; Returns only results whose Business Unit's name falls in the provided list. NOTE: If omitted, API will return results for all Business Units the user has permissions to view. 
assigneeUsername (str, optional): Comma-separated string; Returns only results whose assignee's username matches one of the given usernames. Use "Unassigned" to fetch issues that are not assigned to any user. issueTypeId (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. issueTypeName (str, optional): Comma-separated string; Returns only results whose issue type name matches one of the given types. inetSearch (str, optional): Search for results in a given IP/CIDR block using a single IP (d.d.d.d), a dashed IP range (d.d.d.d-d.d.d.d), a CIDR block (d.d.d.d/m), a partial CIDR (d.d.), or a wildcard (d.d.*.d). Returns results whose identifier includes an IP matching the query. domainSearch (str, optional): Search for a a given domain value via substring match. Returns results whose identifier includes a domain matching the query. portNumber (str, optional): Comma-separated string; Returns only results whose identifier includes one of the given port numbers. progressStatus (str, optional): Comma-separated string; Returns only results whose progress status matches one of the given values. Allowed values are `New`, `Investigating`, `InProgress`, `AcceptableRisk`, `Resolved`. activityStatus (str, optional): Comma-separated string; Returns only results whose activity status matches one of the given values. Allowed values are `Active` and `Inactive`. priority (str, optional): Comma-separated string; Returns only results whose priority matches one of the given values. Allowed values are `Critical`, `High`, `Medium`, and `Low`. tag (str, optional): Comma-separated string; Returns only results that are associated with the provided tags. The tag name should be used here rather than the tag id. createdAfter (str, optional): Returns only results created after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). createdBefore (str, optional): Returns only results created before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). 
modifiedAfter (str, optional): Returns only results modified after the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). modifiedBefore (str, optional): Returns only results modified before the provided timestamp (YYYY-MM-DDTHH:MM:SSZ). cloudManagementStatus (str, optional): Comma-separated string; Returns only results whose cloud management status matches one of the given values. Allowed values are `NotApplicable`, `ManagedCloud`, `UnmanagedCloud`. sort (str, optional): Sort by specified properties. Allowed values are `created`, `-created`, `modified`, `-modified`, `assigneeUsername`, `-assigneeUsername`, `priority`, `-priority`, `progressStatus`, `-progressStatus`, `activityStatus`, `-activityStatus`, `headline`, and `-headline`. Returns: :obj:`boolean`: `True` if the download was successful, otherwise `False`. Examples: >>> # Download a csv named `insecure_tls.csv` for all 'InsecureTLS' issues. >>> client.issues.issues.csv(file="insecure_tls.csv", issueTypeId="InsecureTLS") """ return self._api.csv( path=f"{V1_PREFIX}/issues/issues/csv", file_=file, **kwargs )
DarkFighterLuke/WhatsappWeb4j
src/main/java/it/auties/whatsapp4j/manager/WhatsappKeysManager.java
package it.auties.whatsapp4j.manager;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import it.auties.whatsapp4j.binary.BinaryArray;
import it.auties.whatsapp4j.utils.CypherUtils;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.SneakyThrows;
import lombok.experimental.Accessors;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.security.KeyPair;
import java.util.Base64;
import java.util.Objects;
import java.util.prefs.Preferences;

/**
 * This class is a data class used to hold the clientId, serverToken, clientToken, publicKey, privateKey, encryptionKey and macKey.
 * It can be serialized using Jackson and deserialized using the fromPreferences named constructor.
 */
@AllArgsConstructor
@NoArgsConstructor
@Data
@Accessors(fluent = true, chain = true)
public class WhatsappKeysManager {
    /**
     * The preferences entry under which the serialized keys are persisted on this machine.
     * Centralised so that load ({@link #fromPreferences()}), save ({@link #initializeKeys})
     * and delete ({@link #deleteKeysFromMemory()}) all operate on the same entry.
     */
    private static final String PREFERENCES_KEY = "whatsapp";

    private static final ObjectWriter JACKSON_WRITER = new ObjectMapper().writer();
    private static final ObjectReader JACKSON_READER = new ObjectMapper().reader();

    // Random client id: Base64 encoding of 16 random bytes (see fromPreferences)
    @JsonProperty
    private @NotNull String clientId;

    // Session tokens; null until a login succeeds and initializeKeys is called
    @JsonProperty
    private @Nullable String serverToken, clientToken;

    // Key pair generated by CypherUtils, used during the handshake
    @JsonProperty
    private @NotNull KeyPair keyPair;

    // Symmetric keys set by initializeKeys; null for a fresh session
    @JsonProperty
    private @Nullable BinaryArray encKey, macKey;

    /**
     * Constructs a new WhatsappKeysManager from the saved preferences on this machine.
     * If nothing was previously saved, returns a fresh instance with a random clientId
     * and key pair and no session tokens or symmetric keys.
     *
     * @return a new WhatsappKeysManager with the above characteristics
     */
    @SneakyThrows
    public static WhatsappKeysManager fromPreferences() {
        final var preferences = Preferences.userRoot().get(PREFERENCES_KEY, null);
        if (preferences != null) {
            return JACKSON_READER.readValue(preferences, WhatsappKeysManager.class);
        }

        return new WhatsappKeysManager(Base64.getEncoder().encodeToString(BinaryArray.random(16).data()), null, null, CypherUtils.calculateRandomKeyPair(), null, null);
    }

    /**
     * Checks if the serverToken and clientToken are not null
     *
     * @return true if both the serverToken and clientToken are not null
     */
    public boolean mayRestore() {
        return Objects.nonNull(serverToken) && Objects.nonNull(clientToken);
    }

    /**
     * Initializes the serverToken, clientToken, encryptionKey and macKey with non null values
     * and persists the serialized result to this machine's preferences.
     */
    @SneakyThrows
    public void initializeKeys(@NotNull String serverToken, @NotNull String clientToken, @NotNull BinaryArray encKey, @NotNull BinaryArray macKey) {
        encKey(encKey).macKey(macKey).serverToken(serverToken).clientToken(clientToken);
        Preferences.userRoot().put(PREFERENCES_KEY, JACKSON_WRITER.writeValueAsString(this));
    }

    /**
     * Clears the saved keys from this machine's memory.
     * This method doesn't clear this object's values.
     */
    @SneakyThrows
    public void deleteKeysFromMemory() {
        // Remove only our own entry. The previous implementation called
        // Preferences.userRoot().clear(), which removes EVERY preference stored in
        // the user root node - including entries belonging to unrelated applications.
        Preferences.userRoot().remove(PREFERENCES_KEY);
    }
}
chandan9369/MY-C-Code
string6.c
<filename>string6.c<gh_stars>1-10 /* this is a program to find frequency of digits used in the given string */ #include <stdio.h> #include <string.h> #include <math.h> #include <stdlib.h> int main() { /* Enter your code here. Read input from STDIN. Print output to STDOUT */ int c0 = 0, c1 = 0, c2 = 0, c3 = 0, c4 = 0, c5 = 0, c6 = 0, c7 = 0, c8 = 0, c9 = 0; char str[1000]; scanf("%[^\n]", str); for (int i = 0; str[i] != '\0'; i++) { if (str[i] == '0') { c0++; } else if (str[i] == '1') { c1++; } else if (str[i] == '2') { c2++; } else if (str[i] == '3') { c3++; } else if (str[i] == '4') { c4++; } else if (str[i] == '5') { c5++; } else if (str[i] == '6') { c6++; } else if (str[i] == '7') { c7++; } else if (str[i] == '8') { c8++; } else if (str[i] == '9') { c9++; } } printf("%d %d %d %d %d %d %d %d %d %d", c0, c1, c2, c3, c4, c5, c6, c7, c8, c9); return 0; }
platy11/ev3sim
ev3sim/robots/demo.py
"""
Some demo code for the ev3dev simulator. This code will:

* print sensor values
* randomly move the motors every few seconds
* correct itself if it goes over the white line.
"""

from ev3dev2.motor import LargeMotor
from ev3dev2.sensor.lego import ColorSensor, UltrasonicSensor
from ev3dev2.sensor import Sensor
from ev3sim.code_helpers import is_sim

# is_sim distinguishes running under the simulator from running on a real brick.
if is_sim:
    print("Hello from the simulator!!!")
else:
    print("Hello from the brick!!!")

import random
import time
from collections import deque

# Some behavioural constants
STEP_LENGTH = (1, 3)  # Move in a new direction every 1-3 seconds
MOTOR_SPEEDS = (-100, 100)  # Motor values are anything between -100 and 100
PRINT_TIME = 5  # Print sensor values every 5 seconds


def random_between(a: float, b: float) -> float:
    # Returns a random float between a and b:
    return a + random.random() * (b-a)


# Initialise all sensors.
# NOTE(review): port/driver assignments below match a specific bot configuration -
# confirm against the simulator preset before reusing.
lm1 = LargeMotor(address='outB')
lm2 = LargeMotor(address='outC')
cs = ColorSensor(address='in2')
us = UltrasonicSensor(address='in3')
ir = Sensor(address='in1', driver_name='ht-nxt-ir-seek-v2')
compass = Sensor(address='in4', driver_name='ht-nxt-compass')
# Calibrate the compass once at startup (begin + immediately end calibration).
compass.command = 'BEGIN-CAL'
compass.command = 'END-CAL'

# This code moves in random directions, and stores the movements in a circular queue.
movement_queue = deque([], maxlen=5)  # last 5 movements, oldest evicted first
last_step_time = time.time()   # when the current random step began
last_print_time = time.time()  # when sensor values were last printed
current_step_wait = 0          # duration of the current step, in seconds
solving_white = False          # True while reversing away from the white line

while True:
    if time.time() - last_step_time > current_step_wait:
        # Set some new motor speeds, and a wait time.
        last_step_time = time.time()
        m1Speed, m2Speed = random_between(*MOTOR_SPEEDS), random_between(*MOTOR_SPEEDS)
        current_step_wait = random_between(*STEP_LENGTH)
        # block=False keeps the loop running while the motors move.
        lm1.on_for_seconds(m1Speed, current_step_wait, block=False)
        lm2.on_for_seconds(m2Speed, current_step_wait, block=False)
        # Remember this movement so it can be undone if we cross the white line.
        movement_queue.append({
            'motor1Speed': m1Speed,
            'motor2Speed': m2Speed,
            'wait_time': current_step_wait,
        })
        solving_white = False
    if time.time() - last_print_time > PRINT_TIME:
        # Print sensor values.
        last_print_time = time.time()
        print("Sensor Values")
        print("=============")
        print("Colour Sensor")
        print(f"RGB: {cs.rgb}")
        print("Ultrasonic")
        print(f"Distance: {us.distance_centimeters}cm")
        print("Infrared")
        print(f"Values: {[ir.value(x) for x in range(7)]}")
        print("Compass")
        print(f"Bearing: {compass.value()}")
        print("=============")
    # If we hit the white line, then reverse this ongoing action
    # This white detection is bad, you should replace with something better (and more stable).
    if sum(cs.rgb) > 600 and not solving_white:
        # Reverse motor speeds, for the amount so far elapsed.
        elapsed = time.time() - last_step_time
        if len(movement_queue) > 0:
            movement = movement_queue.pop()  # most recent movement is the one to undo
            # Set the last_step_time to now, and make sure we wait `elapsed` seconds.
            last_step_time = time.time()
            current_step_wait = elapsed
            lm1.on_for_seconds(-movement['motor1Speed'], elapsed, block=False)
            lm2.on_for_seconds(-movement['motor2Speed'], elapsed, block=False)
        # Set this so we don't infinitely back up.
        solving_white = True
LifebookerInc/api_resource
spec/lib/typecasters/float_typecaster_spec.rb
require 'spec_helper'

# Specs for the float typecaster: .from_api coerces arbitrary values coming
# from the API into Floats, while .to_api passes the value through unchanged.
describe ApiResource::Typecast::FloatTypecaster do

  let(:klass) { ApiResource::Typecast::FloatTypecaster }

  context ".from_api" do
    it "should typecast integers, floats, strings, dates, and times reasonably" do
      klass.from_api(1).should eql(1.0)
      klass.from_api(1.0).should eql(1.0)
      klass.from_api("1.0").should eql(1.0)
      klass.from_api("1").should eql(1.0)
      klass.from_api("0.123").should eql(0.123)
      # Booleans map to 0.0/1.0, dates to the year, times to the epoch float.
      klass.from_api(false).should eql(0.0)
      klass.from_api(true).should eql(1.0)
      klass.from_api(Date.today).should eql(Date.today.year.to_f)
      tme = Time.now
      klass.from_api(tme).should eql(tme.to_f)
    end

    it "should be able to typecast any value you can think of" do
      # Anything non-numeric (nil, blank strings, arbitrary objects) becomes 0.0.
      klass.from_api(nil).should eql(0.0)
      klass.from_api("").should eql(0.0)
      klass.from_api(BasicObject).should eql(0.0)
      klass.from_api("abc").should eql(0.0)
    end
  end

  context ".to_api" do
    it "should return itself" do
      # Identity check: the very same object must come back, not a coerced copy.
      val = 0.6
      klass.to_api(val).object_id.should eql(val.object_id)
    end
  end

end
fangrx/my-blog
admin/src/main/java/com/nonelonely/admin/system/validator/NoteValid.java
package com.nonelonely.admin.system.validator;

import lombok.Data;

import java.io.Serializable;

import javax.validation.constraints.NotEmpty;

/**
 * Bean-validation form object for note submissions; Lombok's {@code @Data}
 * generates the getters, setters, equals/hashCode and toString.
 *
 * @author nonelonely
 * @date 2020/01/01
 */
@Data
public class NoteValid implements Serializable {

    // Note title; validation rejects blank values with the message below
    // (message text is user-facing and intentionally left in Chinese).
    @NotEmpty(message = "标题不能为空")
    private String title;
}
adamtootle/servizi
app/redux/reducers/update.js
const keys = require('../actions/keys'); const initialState = { updateAvailable: false, updateDownloaded: false, updateProgress: -1, }; module.exports = function updateReducer(state = initialState, action) { switch (action.type) { case keys.UPDATE_AVAILABLE: return Object.assign({}, state, { updateAvailable: true, updateInfo: action.payload }); case keys.NO_UPDATE_AVAILABLE: return Object.assign({}, state, { updateAvailable: false }); case keys.UPDATE_PROGRESS: return Object.assign({}, state, { updateProgress: action.payload }); case keys.UPDATE_DOWNLOADED: return Object.assign({}, state, { updateProgress: -1, updateDownloaded: true }); default: return state; } };
Otamaa/Antares
src/Ext/Techno/Hooks.Alpha.cpp
#include "Body.h"
#include "../TechnoType/Body.h"

#include <AlphaShapeClass.h>
#include <AnimClass.h>
#include <BuildingClass.h>
#include <TacticalClass.h>

#include <Notifications.h>

// Maps each game object to the alpha (light) shape currently attached to it.
AresMap<ObjectClass*, AlphaShapeClass*> TechnoExt::AlphaExt;

// conventions for hashmaps like this:
// the value's CTOR is the only thing allowed to .insert() or [] stuff
// the value's (SD)DTOR is the only thing allowed to .erase() stuff

// AlphaShapeClass constructed: register it for its source object, freeing any
// alpha previously attached to the same object first.
DEFINE_HOOK(420960, AlphaShapeClass_CTOR, 5)
{
	GET_STACK(ObjectClass*, pSource, 0x4);
	GET(AlphaShapeClass*, pAlpha, ECX);

	if(auto pOldAlpha = TechnoExt::AlphaExt.get_or_default(pSource)) {
		GameDelete(pOldAlpha); // pSource is erased from map
	}

	TechnoExt::AlphaExt[pSource] = pAlpha;
	return 0;
}

// If the alpha is attached to an animation, track it so the alpha can be
// notified when that animation pointer becomes invalid.
DEFINE_HOOK(420A71, AlphaShapeClass_CTOR_Anims, 5)
{
	GET(AlphaShapeClass*, pThis, ESI);

	if(pThis->AttachedTo->WhatAmI() == AnimClass::AbsID) {
		PointerExpiredNotification::NotifyInvalidAnim.Add(pThis);
	}

	return 0;
}

// AlphaShapeClass destructed: unregister it from the object map.
DEFINE_HOOK(421730, AlphaShapeClass_SDDTOR, 8)
{
	GET(AlphaShapeClass*, pAlpha, ECX);
	TechnoExt::AlphaExt.erase(pAlpha->AttachedTo);
	return 0;
}

// Counterpart to the CTOR_Anims hook: stop tracking animation invalidation.
DEFINE_HOOK(421798, AlphaShapeClass_SDDTOR_Anims, 6)
{
	GET(AlphaShapeClass*, pThis, ESI);
	PointerExpiredNotification::NotifyInvalidAnim.Remove(pThis);
	return 0;
}

// Object destructed: free (and thereby unregister) its alpha shape, if any.
DEFINE_HOOK(5F3D65, ObjectClass_DTOR, 6)
{
	GET(ObjectClass*, pThis, ESI);
	if(auto pAlpha = TechnoExt::AlphaExt.get_or_default(pThis)) {
		GameDelete(pAlpha); // pThis is erased from map
	}
	return 0;
}

// Per-update maintenance of an object's alpha image: marks the screen areas it
// covered/covers dirty, removes the alpha while the object is inactive, and
// recreates the alpha shape every other frame while active.
void UpdateAlphaShape(ObjectClass* pSource)
{
	ObjectTypeClass* pSourceType = pSource->GetType();
	if(!pSourceType) {
		return;
	}

	const SHPStruct* pImage = pSourceType->AlphaImage;
	if(!pImage) {
		return;
	}

	CoordStruct XYZ;
	RectangleStruct *ScreenArea = &TacticalClass::Instance->VisibleArea;
	// Offset that centers the alpha image on a client-coordinate point.
	Point2D off = {ScreenArea->X - (pImage->Width / 2), ScreenArea->Y - (pImage->Height / 2)};
	Point2D xy;

	// for animations attached to the owner object, consider
	// the owner object as source, so the display is refreshed
	// whenever the owner object moves.
	auto pOwner = pSource;
	if(auto pAnim = abstract_cast<AnimClass*>(pSource)) {
		if(pAnim->OwnerObject) {
			pOwner = pAnim->OwnerObject;
		}
	}

	if(auto pFoot = abstract_cast<FootClass*>(pOwner)) {
		if(pFoot->LastMapCoords != pFoot->CurrentMapCoords) {
			// we moved - need to redraw the area we were in
			// alas, we don't have the precise XYZ we were in, only the cell we were last seen in
			// so we need to add the cell's dimensions to the dirty area just in case
			XYZ = CellClass::Cell2Coord(pFoot->LastMapCoords);
			Point2D xyTL, xyBR;
			TacticalClass::Instance->CoordsToClient(&XYZ, &xyTL);
			// because the coord systems are different - xyz is x/, y\, xy is x-, y|
			XYZ.X += 256;
			XYZ.Y += 256;
			TacticalClass::Instance->CoordsToClient(&XYZ, &xyBR);
			Point2D cellDimensions = xyBR - xyTL;
			xy = xyTL;
			xy.X += cellDimensions.X / 2;
			xy.Y += cellDimensions.Y / 2;
			xy += off;
			// Dirty rectangle grown by one cell on each side to cover the imprecision.
			RectangleStruct Dirty = {xy.X - ScreenArea->X - cellDimensions.X, xy.Y - ScreenArea->Y - cellDimensions.Y, pImage->Width + cellDimensions.X * 2, pImage->Height + cellDimensions.Y * 2};
			TacticalClass::Instance->RegisterDirtyArea(Dirty, true);
		}
	}

	// An object is "inactive" when in limbo, deactivated, or (for buildings not
	// under construction) unpowered - inactive objects lose their alpha shape.
	bool Inactive = pSource->InLimbo;
	if(auto pTechno = abstract_cast<TechnoClass*>(pSource)) {
		Inactive |= pTechno->Deactivated;
	}
	if(auto pBld = abstract_cast<BuildingClass*>(pSource)) {
		if(pBld->GetCurrentMission() != Mission::Construction) {
			Inactive |= !pBld->IsPowerOnline();
		}
	}

	if(Inactive) {
		if(auto pAlpha = TechnoExt::AlphaExt.get_or_default(pSource)) {
			GameDelete(pAlpha); // pSource is erased from map
		}
		return;
	}

	if(Unsorted::CurrentFrame % 2) {
		// lag reduction - don't draw a new alpha every frame
		XYZ = pSource->GetCoords();
		TacticalClass::Instance->CoordsToClient(&XYZ, &xy);
		xy += off;

		// IKnowWhatImDoing guards the creation; the CTOR hook above registers
		// the new alpha and deletes the previous one.
		++Unsorted::IKnowWhatImDoing;
		GameCreate<AlphaShapeClass>(pSource, xy.X, xy.Y);
		--Unsorted::IKnowWhatImDoing;

		RectangleStruct Dirty = {xy.X - ScreenArea->X, xy.Y - ScreenArea->Y, pImage->Width, pImage->Height};
		TacticalClass::Instance->RegisterDirtyArea(Dirty, true);
	}
}

// Generic object update: refresh its alpha shape.
DEFINE_HOOK(5F3E70, ObjectClass_Update_AlphaLight, 5)
{
	GET(ObjectClass*, pThis, ECX);
	UpdateAlphaShape(pThis);
	return 0;
}

DEFINE_HOOK(423B0B, AnimClass_Update_AlphaLight, 6)
{
	GET(AnimClass*, pThis, ESI);
	// flaming guys do the update via base class
	if(!pThis->Type->IsFlamingGuy) {
		UpdateAlphaShape(pThis);
	}
	return 0;
}

// Decide whether an alpha is drawn during the screen update: skip gone objects
// and respect the per-techno visual-FX setting.
DEFINE_HOOK(420F75, AlphaLightClass_UpdateScreen_ShouldDraw, 5)
{
	GET(AlphaShapeClass*, pAlpha, ECX);

	bool shouldDraw = !pAlpha->IsObjectGone;
	if(shouldDraw) {
		if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
			auto pData = TechnoExt::ExtMap.Find(pTechno);
			shouldDraw = pData->DrawVisualFX();
		}
	}

	// Jump targets in the original function: draw path vs. skip path.
	return shouldDraw ? 0x420F80 : 0x42132A;
}

// Substitute the alpha frame index for technos (e.g. animated alpha images).
DEFINE_HOOK(4210AC, AlphaLightClass_UpdateScreen_Header, 5)
{
	GET(AlphaShapeClass*, pAlpha, EDX);
	GET(SHPStruct *, pImage, ECX);

	if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
		auto pData = TechnoExt::ExtMap.Find(pTechno);
		unsigned int idx = pData->AlphaFrame(pImage);
		R->Stack(0x0, idx);
	}

	return 0;
}

// Same frame substitution for the body of the screen update loop.
DEFINE_HOOK(4211AC, AlphaLightClass_UpdateScreen_Body, 8)
{
	GET_STACK(int, AlphaLightIndex, STACK_OFFS(0xDC, 0xB4));
	GET_STACK(SHPStruct*, pImage, STACK_OFFS(0xDC, 0x6C));

	auto pAlpha = AlphaShapeClass::Array->Items[AlphaLightIndex];
	if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
		auto pData = TechnoExt::ExtMap.Find(pTechno);
		unsigned int idx = pData->AlphaFrame(pImage);
		R->Stack(0x0, idx);
	}

	return 0;
}

// Same should-draw logic for the rectangle-based tactical update path.
DEFINE_HOOK(421371, TacticalClass_UpdateAlphasInRectangle_ShouldDraw, 5)
{
	GET(int, AlphaLightIndex, EBX);

	auto pAlpha = AlphaShapeClass::Array->Items[AlphaLightIndex];
	bool shouldDraw = !pAlpha->IsObjectGone;
	if(shouldDraw) {
		if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
			auto pData = TechnoExt::ExtMap.Find(pTechno);
			shouldDraw = pData->DrawVisualFX();
		}
	}

	return shouldDraw ? 0 : 0x421694;
}

// Compute the frame bounds using the techno's alpha frame instead of frame 0.
DEFINE_HOOK(42146E, TacticalClass_UpdateAlphasInRectangle_Header, 5)
{
	GET(int, AlphaLightIndex, EBX);
	GET(RectangleStruct*, buffer, EDX);
	GET(SHPStruct*, pImage, EDI);

	auto pAlpha = AlphaShapeClass::Array->Items[AlphaLightIndex];
	unsigned int idx = 0;
	if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
		auto pData = TechnoExt::ExtMap.Find(pTechno);
		idx = pData->AlphaFrame(pImage);
	}

	R->EAX(pImage->GetFrameBounds(*buffer, idx));
	return 0x421478;
}

// Same frame substitution in the rectangle update's body.
DEFINE_HOOK(42152C, TacticalClass_UpdateAlphasInRectangle_Body, 8)
{
	GET_STACK(int, AlphaLightIndex, STACK_OFFS(0xA4, 0x78));
	GET(SHPStruct*, pImage, ECX);

	auto pAlpha = AlphaShapeClass::Array->Items[AlphaLightIndex];
	if(auto pTechno = abstract_cast<TechnoClass*>(pAlpha->AttachedTo)) {
		auto pData = TechnoExt::ExtMap.Find(pTechno);
		unsigned int idx = pData->AlphaFrame(pImage);
		R->Stack(0x0, idx);
	}

	return 0;
}

// Teleport (chrono) locomotion: after resolving the object's coordinates, mark
// the area covered by its alpha image dirty so the light is redrawn correctly.
DEFINE_HOOK(71944E, TeleportLocomotionClass_ILocomotion_Process, 6)
{
	GET(FootClass*, pObject, ECX);
	GET(CoordStruct*, XYZ, EDX);
	*XYZ = pObject->GetCoords();
	R->EAX<CoordStruct*>(XYZ);

	if(auto pType = pObject->GetTechnoType()) {
		if(auto pImage = pType->AlphaImage) {
			Point2D xy;
			TacticalClass::Instance->CoordsToClient(XYZ, &xy);

			RectangleStruct* ScreenArea = &TacticalClass::Instance->VisibleArea;
			Point2D off = {ScreenArea->X - (pImage->Width / 2), ScreenArea->Y - (pImage->Height / 2)};
			xy += off;

			RectangleStruct Dirty = { xy.X - ScreenArea->X, xy.Y - ScreenArea->Y, pImage->Width, pImage->Height};
			TacticalClass::Instance->RegisterDirtyArea(Dirty, true);
		}
	}

	return 0x719454;
}
ezibyte/EziSocial-PhotoExample
Cocos2dx-3x/PhotoExample/cocos2d/tests/cpp-tests/Classes/UITest/CocoStudioGUITest/UIImageViewTest/UIImageViewTest.cpp
<reponame>ezibyte/EziSocial-PhotoExample #include "UIImageViewTest.h" // UIImageViewTest bool UIImageViewTest::init() { if (UIScene::init()) { Size widgetSize = _widget->getContentSize(); Text* alert = Text::create("ImageView", "fonts/Marker Felt.ttf", 30); alert->setColor(Color3B(159, 168, 176)); alert->setPosition(Vec2(widgetSize.width / 2.0f, widgetSize.height / 2.0f - alert->getContentSize().height * 1.75f)); _uiLayer->addChild(alert); // Create the imageview ImageView* imageView = ImageView::create("cocosui/ccicon.png"); imageView->setPosition(Vec2(widgetSize.width / 2.0f, widgetSize.height / 2.0f)); _uiLayer->addChild(imageView); return true; } return false; } // UIImageViewTest_Scale9 bool UIImageViewTest_Scale9::init() { if (UIScene::init()) { Size widgetSize = _widget->getContentSize(); Text* alert = Text::create("ImageView scale9 render", "fonts/Marker Felt.ttf", 26); alert->setColor(Color3B(159, 168, 176)); alert->setPosition(Vec2(widgetSize.width / 2.0f, widgetSize.height / 2.0f - alert->getContentSize().height * 2.125f)); _uiLayer->addChild(alert); // Create the imageview ImageView* imageView = ImageView::create("cocosui/buttonHighlighted.png"); imageView->setScale9Enabled(true); imageView->setContentSize(Size(300, 115)); imageView->setPosition(Vec2(widgetSize.width / 2.0f, widgetSize.height / 2.0f)); _uiLayer->addChild(imageView); return true; } return false; }
gisspace/tds
tds/src/test/java/thredds/server/ncss/controller/grid/TemporalSpaceSubsettingTest.java
/*
 * Copyright (c) 1998-2021 University Corporation for Atmospheric Research/Unidata
 * See LICENSE for license information.
 */

package thredds.server.ncss.controller.grid;

import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import thredds.test.util.TdsTestDir;
import thredds.junit4.SpringJUnit4ParameterizedClassRunner;
import thredds.junit4.SpringJUnit4ParameterizedClassRunner.Parameters;
import thredds.mock.params.GridPathParams;
import thredds.mock.web.MockTdsContextLoader;
import thredds.server.ncss.format.SupportedFormat;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFile;
import ucar.nc2.NetcdfFiles;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.dataset.NetcdfDatasets;
import ucar.unidata.util.test.category.NeedsCdmUnitTest;

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.Collection;

import static org.junit.Assert.assertEquals;

/**
 * Parameterized test of NCSS grid temporal subsetting: each parameter row
 * issues one request with a particular combination of time parameters and
 * checks the length of the time dimension in the returned file.
 *
 * @author mhermida
 */
@RunWith(SpringJUnit4ParameterizedClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(locations = {"/WEB-INF/applicationContext.xml"}, loader = MockTdsContextLoader.class)
@Category(NeedsCdmUnitTest.class)
public class TemporalSpaceSubsettingTest {
  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  @Autowired
  private WebApplicationContext wac;

  private MockMvc mockMvc;
  private MockHttpServletRequestBuilder requestBuilder;
  private String pathInfo;
  private int lengthTimeDim; // Expected time dimension length

  // Each row: {format, expected time-dim length, dataset path,
  //            temporal, time, time_window, time_start, time_end, time_duration}
  @Parameters
  public static Collection<Object[]> getTestParameters() {
    return Arrays.asList(new Object[][] {
        // No time subset provided
        {SupportedFormat.NETCDF3, 1, GridPathParams.getPathInfo().get(4), null, null, null, null, null, null},
        // Requesting all
        {SupportedFormat.NETCDF3, 6, GridPathParams.getPathInfo().get(3), "all", null, null, null, null, null},
        // Requesting all
        {SupportedFormat.NETCDF3, 6, GridPathParams.getPathInfo().get(3), "", "all", null, null, null, null},
        // Single time on singleDataset
        {SupportedFormat.NETCDF3, 1, GridPathParams.getPathInfo().get(0), "", "2012-04-19T12:00:00.000Z", null, null, null, null},
        // Single time in range with time_window
        {SupportedFormat.NETCDF3, 1, GridPathParams.getPathInfo().get(0), "", "2012-04-19T15:30:00.000Z", "PT3H", null, null, null},
        // Time series on Best time series
        {SupportedFormat.NETCDF3, 6, GridPathParams.getPathInfo().get(3), "", null, null, "2012-04-18T12:00:00.000Z", "2012-04-19T18:00:00.000Z", null},
        // Time series on Best time series
        {SupportedFormat.NETCDF3, 5, GridPathParams.getPathInfo().get(3), "", null, null, "2012-04-18T12:00:00.000Z", null, "PT24H"},
        // No time subset provided
        {SupportedFormat.NETCDF4, 1, GridPathParams.getPathInfo().get(4), null, null, null, null, null, null},
        // Requesting all
        {SupportedFormat.NETCDF4, 6, GridPathParams.getPathInfo().get(3), "all", null, null, null, null, null},
        // Requesting all
        {SupportedFormat.NETCDF4, 6, GridPathParams.getPathInfo().get(3), "", "all", null, null, null, null},
        // Single time on singleDataset
        {SupportedFormat.NETCDF4, 1, GridPathParams.getPathInfo().get(0), "", "2012-04-19T12:00:00.000Z", null, null, null, null},
        // Single time in range with time_window
        {SupportedFormat.NETCDF4, 1, GridPathParams.getPathInfo().get(0), "", "2012-04-19T15:30:00.000Z", "PT3H", null, null, null},
        // Time series on Best time series
        {SupportedFormat.NETCDF4, 6, GridPathParams.getPathInfo().get(3), "", null, null, "2012-04-18T12:00:00.000Z", "2012-04-19T18:00:00.000Z", null},
        // Time series on Best time series
        {SupportedFormat.NETCDF4, 5, GridPathParams.getPathInfo().get(3), "", null, null, "2012-04-18T12:00:00.000Z", null, "PT24H"}
    });
  }

  // Builds the mock request for one parameter row; null params are simply omitted.
  public TemporalSpaceSubsettingTest(SupportedFormat format, int expectedLengthTimeDim, String pathInfoForTest,
      String temporal, String time, String time_window, String time_start, String time_end, String time_duration) {
    lengthTimeDim = expectedLengthTimeDim;
    pathInfo = pathInfoForTest;
    String servletPath = pathInfo;
    requestBuilder = MockMvcRequestBuilders.get(servletPath).servletPath(servletPath)
        .param("accept", format.getAliases().get(0)).param("temporal", temporal).param("time", time)
        .param("time_window", time_window).param("time_duration", time_duration).param("time_start", time_start)
        .param("time_end", time_end).param("var", "Temperature");
  }

  @Before
  public void setUp() throws IOException {
    mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
  }

  @Test
  public void shouldGetTimeRange() throws Exception {
    MvcResult mvc = this.mockMvc.perform(requestBuilder).andReturn();
    SpatialSubsettingTest.showRequest(mvc.getRequest());
    if (mvc.getResponse().getStatus() != 200) {
      System.out.printf("FAIL %s%n", mvc.getResponse().getContentAsString());
      assert false;
    }

    // byte[] result = mvc.getResponse().getContentAsByteArray();
    // ByteArrayInputStream is = new ByteArrayInputStream(result);
    // IO.writeToFile(is, "C:/temp/shouldGetTimeRange.nc");

    // Open the binary response in memory and verify the time dimension length.
    NetcdfFile nf;
    NetcdfDataset ds;
    nf = NetcdfFiles.openInMemory("test_data.ncs", mvc.getResponse().getContentAsByteArray());
    ds = NetcdfDatasets.enhance(nf, NetcdfDataset.getDefaultEnhanceMode(), null);
    Dimension time = ds.findDimension("time");

    assertEquals(lengthTimeDim, time.getLength());
  }
}
klebed/esdc-ce
gui/dc/storage/views.py
from django.contrib.auth.decorators import login_required from django.views.decorators.http import require_POST from django.shortcuts import render from django.http import HttpResponse from gui.decorators import staff_required, ajax_required, admin_required, profile_required from gui.utils import collect_view_data, redirect, get_query_string from gui.dc.storage.forms import StorageForm from vms.models import NodeStorage, DcNode SR = ('node', 'storage', 'storage__owner') OB = ('node__hostname', 'zpool') @login_required @admin_required @profile_required def dc_storage_list(request): """ Storage management. """ context = collect_view_data(request, 'dc_storage_list') context['can_edit'] = can_edit = request.user.is_staff # DC owners have read-only rights context['all'] = _all = can_edit and request.GET.get('all', False) context['qs'] = get_query_string(request, all=_all).urlencode() nss = NodeStorage.objects.select_related(*SR).order_by(*OB) dc_nodes = dict([(dn.node.hostname, dn) for dn in DcNode.objects.select_related('node').filter(dc=request.dc)]) if _all: context['storages'] = storages = nss.prefetch_related('dc') # Uses set() because of optimized membership ("in") checking context['dc_storages'] = set(nss.exclude(dc=request.dc).values_list('pk', flat=True)) else: context['storages'] = storages = nss.filter(dc=request.dc) # Bug #chili-525 for ns in storages: ns.set_dc_node(dc_nodes.get(ns.node.hostname, None)) ns.set_dc(request.dc) if can_edit: context['form'] = form = StorageForm(request, storages) context['node_zpool'] = form.node_zpool else: context['node_zpool'] = {} return render(request, 'gui/dc/storage_list.html', context) @login_required @staff_required @ajax_required @require_POST def dc_storage_form(request): """ Ajax page for creating or updating storages. 
""" nss = NodeStorage.objects.select_related(*SR).filter(dc=request.dc).order_by(*OB) form = StorageForm(request, nss, request.POST) if form.is_valid(): status = form.save(args=(form.zpool_node,)) if status == 204: return HttpResponse(None, status=status) elif status in (200, 201): return redirect('dc_storage_list', query_string=request.GET) return render(request, 'gui/dc/storage_form.html', {'form': form})
MikaSoftware/mikathing
src/Phidget22/LogLevel.py
<reponame>MikaSoftware/mikathing<gh_stars>1-10 import sys import ctypes class LogLevel: # Critical PHIDGET_LOG_CRITICAL = 1 # Error PHIDGET_LOG_ERROR = 2 # Warning PHIDGET_LOG_WARNING = 3 # Info PHIDGET_LOG_INFO = 4 # Debug PHIDGET_LOG_DEBUG = 5 # Verbose PHIDGET_LOG_VERBOSE = 6 @classmethod def getName(self, val): if val == self.PHIDGET_LOG_CRITICAL: return "PHIDGET_LOG_CRITICAL" if val == self.PHIDGET_LOG_ERROR: return "PHIDGET_LOG_ERROR" if val == self.PHIDGET_LOG_WARNING: return "PHIDGET_LOG_WARNING" if val == self.PHIDGET_LOG_INFO: return "PHIDGET_LOG_INFO" if val == self.PHIDGET_LOG_DEBUG: return "PHIDGET_LOG_DEBUG" if val == self.PHIDGET_LOG_VERBOSE: return "PHIDGET_LOG_VERBOSE" return "<invalid enumeration value>"
mwoehlke-kitware/kwiver
vital/plugin_loader/plugin_loader_filter.h
<reponame>mwoehlke-kitware/kwiver<filename>vital/plugin_loader/plugin_loader_filter.h // This file is part of KWIVER, and is distributed under the // OSI-approved BSD 3-Clause License. See top-level LICENSE file or // https://github.com/Kitware/kwiver/blob/master/LICENSE for details. #ifndef KWIVER_VITAL_PLUGIN_LOADER_FILTER_H #define KWIVER_VITAL_PLUGIN_LOADER_FILTER_H #include <kwiversys/DynamicLoader.hxx> #include <vital/vital_types.h> #include <vital/vital_config.h> #include <memory> namespace kwiver { namespace vital { // base class of factory hierarchy class plugin_factory; class plugin_loader; using plugin_factory_handle_t = std::shared_ptr< plugin_factory >; // ----------------------------------------------------------------- /** Interface to plugin loader filters. * * */ class plugin_loader_filter { public: using DL = kwiversys::DynamicLoader; // -- CONSTRUCTORS -- plugin_loader_filter() = default; virtual ~plugin_loader_filter() = default; /** * @brief Test if plugin should be loaded. * * This method is a hook that can be implemented by a derived class * to verify that the specified plugin should be loaded. This * provides an application level approach to filter specific plugins * from a directory. The default implementation is to load all * discovered plugins. * * This method is called after the plugin is opened and the * designated initialization method has been located but not yet * called. Returning \b false from this method will result in the * library being closed without further processing. * * The library handle can be used to inspect the contents of the * plugin as needed. * * @param path File path to the plugin being loaded. * @param lib_handle Handle to library. * * @return \b true if the plugin should be loaded, \b false if plugin should not be loaded */ virtual bool load_plugin( VITAL_UNUSED path_t const& path, VITAL_UNUSED DL::LibraryHandle lib_handle ) const { return true; } /** * @brief Test if factory should be registered. 
* * This method is a hook that can be implemented by a derived class * to verify that the specified factory should be registered. This * provides an application level approach to filtering specific * class factories from a plugin. * * This method is called as the plugin is registering class * factories and can inspect attributes to determine if this factory * should be registered. Returning \b false will prevent this * factory from being registered with the plugin manager. * * A slight misapplication of this hook method could be to add * specific attributes to a set of factories before they are * registered. * * @param fact Pointer to the factory object. * * @return \b true if the plugin should be registered, \b false otherwise. */ virtual bool add_factory( VITAL_UNUSED plugin_factory_handle_t fact ) const { return true; } // reference to the owning loader. plugin_loader* m_loader; }; // end class plugin_loader_filter using plugin_filter_handle_t = std::shared_ptr< plugin_loader_filter >; } } // end namespace #endif //KWIVER_VITAL_PLUGIN_LOADER_FILTER_H
bobheadxi/seer
config/doc.go
<filename>config/doc.go // Package config defines server configuration and various loaders for them package config
geofffranks/cloud_controller_ng
lib/cloud_controller/blobstore/fog/fog_client.rb
<reponame>geofffranks/cloud_controller_ng require 'fileutils' require 'find' require 'mime-types' require 'cloud_controller/blobstore/fog/providers' require 'cloud_controller/blobstore/base_client' require 'cloud_controller/blobstore/fog/fog_blob' require 'cloud_controller/blobstore/fog/cdn' require 'cloud_controller/blobstore/errors' module CloudController module Blobstore class FogClient < BaseClient attr_reader :root_dir DEFAULT_BATCH_SIZE = 1000 def initialize(connection_config:, directory_key:, cdn: nil, root_dir: nil, min_size: nil, max_size: nil, storage_options: nil) @root_dir = root_dir @connection_config = connection_config @directory_key = directory_key @cdn = cdn @min_size = min_size || 0 @max_size = max_size @storage_options = storage_options end def local? @connection_config[:provider].downcase == 'local' end def exists?(key) !file(key).nil? end def download_from_blobstore(source_key, destination_path, mode: nil) FileUtils.mkdir_p(File.dirname(destination_path)) File.open(destination_path, 'wb') do |file| (@cdn || files).get(partitioned_key(source_key)) do |*chunk| file.write(chunk[0]) end file.chmod(mode) if mode end end def cp_to_blobstore(source_path, destination_key) start = Time.now.utc logger.info('blobstore.cp-start', destination_key: destination_key, source_path: source_path, bucket: @directory_key) size = -1 log_entry = 'blobstore.cp-skip' File.open(source_path) do |file| size = file.size next unless within_limits?(size) mime_type = MIME::Types.of(source_path).first.try(:content_type) options = { key: partitioned_key(destination_key), body: file, content_type: mime_type || 'application/zip', public: local? 
}.merge(formatted_storage_options) files.create(options) log_entry = 'blobstore.cp-finish' end duration = Time.now.utc - start logger.info(log_entry, destination_key: destination_key, duration_seconds: duration, size: size, ) end def cp_file_between_keys(source_key, destination_key) source_file = file(source_key) raise FileNotFound if source_file.nil? source_file.copy(@directory_key, partitioned_key(destination_key), formatted_storage_options) end def delete_all(page_size=DEFAULT_BATCH_SIZE) logger.info("Attempting to delete all files in #{@directory_key}/#{@root_dir} blobstore") delete_files(files_for(@root_dir), page_size) end def delete_all_in_path(path) logger.info("Attempting to delete all files in blobstore #{@directory_key} under path #{@directory_key}/#{partitioned_key(path)}") delete_files(files_for(partitioned_key(path)), DEFAULT_BATCH_SIZE) end def delete(key) blob_file = file(key) delete_file(blob_file) if blob_file end def delete_blob(blob) delete_file(blob.file) if blob.file end def blob(key) f = file(key) FogBlob.new(f, @cdn) if f end def files_for(prefix, _ignored_directory_prefixes=[]) if connection.is_a? Fog::Local::Storage::Real directory = connection.directories.get(File.join(dir.key, prefix || '')) directory ? directory.files : [] else connection.directories.get(dir.key, prefix: prefix).files end end def ensure_bucket_exists return if local? 
options = { max_keys: 1 } options['limit'] = 1 if connection.service == Fog::OpenStack::Storage connection.directories.get(@directory_key, options) || connection.directories.create(key: @directory_key, public: false) end private def files dir.files end def formatted_storage_options return {} unless @storage_options && @storage_options[:encryption] opts = @storage_options.dup encrypt_opt = opts.delete(:encryption) opts['x-amz-server-side-encryption'] = encrypt_opt opts end def delete_file(file) file.destroy end def delete_files(files_to_delete, page_size) if connection.respond_to?(:delete_multiple_objects) # AWS needs the file key to work; other providers with multiple delete # are currently not supported. When support is added this code may # need an update. each_slice(files_to_delete, page_size) do |file_group| connection.delete_multiple_objects(@directory_key, file_group.map(&:key)) end else files_to_delete.each { |f| delete_file(f) } end end def each_slice(files, batch_size) batch = [] files.each do |f| batch << f if batch.length == batch_size yield(batch) batch = [] end end if !batch.empty? yield(batch) end end def file(key) files.head(partitioned_key(key)) end def dir @dir ||= connection.directories.new(key: @directory_key) end def connection options = @connection_config blobstore_timeout = options.delete(:blobstore_timeout) options = options.merge(endpoint: '') if local? connection_options = options[:connection_options] || {} connection_options = connection_options.merge(read_timeout: blobstore_timeout, write_timeout: blobstore_timeout) options = options.merge(connection_options: connection_options) @connection ||= Fog::Storage.new(options) end def logger @logger ||= Steno.logger('cc.blobstore') end end end end
duvalale/Gladys
server/services/zwave/lib/commands/zwave.cancelControllerCommand.js
const logger = require('../../../../utils/logger'); /** * @description Cancel * @example * zwave.cancelControllerCommand(); */ function cancelControllerCommand() { logger.debug(`Zwave : Cancelling controller command`); this.zwave.cancelControllerCommand(); } module.exports = { cancelControllerCommand, };
Nurzamal/rest_api_docker
vbox/src/VBox/Frontends/VirtualBox/src/hostnetwork/UIHostNetworkUtils.cpp
/* $Id: UIHostNetworkUtils.cpp 66860 2017-05-10 11:51:10Z vboxsync $ */ /** @file * VBox Qt GUI - UIHostNetworkUtils namespace implementation. */ /* * Copyright (C) 2017 Oracle Corporation * * This file is part of VirtualBox Open Source Edition (OSE), as * available from http://www.virtualbox.org. This file is free software; * you can redistribute it and/or modify it under the terms of the GNU * General Public License (GPL) as published by the Free Software * Foundation, in version 2 as it comes in the "COPYING" file of the * VirtualBox OSE distribution. VirtualBox OSE is distributed in the * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind. */ #ifdef VBOX_WITH_PRECOMPILED_HEADERS # include <precomp.h> #else /* !VBOX_WITH_PRECOMPILED_HEADERS */ /* GUI includes: */ # include "UIHostNetworkUtils.h" #endif /* !VBOX_WITH_PRECOMPILED_HEADERS */ quint32 UIHostNetworkUtils::ipv4FromQStringToQuint32(const QString &strAddress) { quint32 uAddress = 0; foreach (const QString &strPart, strAddress.split('.')) { uAddress = uAddress << 8; bool fOk = false; uint uPart = strPart.toUInt(&fOk); if (fOk) uAddress += uPart; } return uAddress; } QString UIHostNetworkUtils::ipv4FromQuint32ToQString(quint32 uAddress) { QStringList address; while (uAddress) { uint uPart = uAddress & 0xFF; address.prepend(QString::number(uPart)); uAddress = uAddress >> 8; } return address.join('.'); } quint32 UIHostNetworkUtils::incrementNetworkAddress(quint32 uAddress) { return advanceNetworkAddress(uAddress, true /* forward */); } quint32 UIHostNetworkUtils::decrementNetworkAddress(quint32 uAddress) { return advanceNetworkAddress(uAddress, false /* forward */); } quint32 UIHostNetworkUtils::advanceNetworkAddress(quint32 uAddress, bool fForward) { /* Success by default: */ bool fSuccess = true; do { /* Just advance address: */ if (fForward) ++uAddress; else --uAddress; /* And treat it as success initially: */ fSuccess = true; /* Iterate the resulting bytes: */ uint uByteIndex = 0; quint32 
uIterator = uAddress; while (fSuccess && uIterator) { /* Get current byte: */ const quint32 uCurrentByte = uIterator & 0xFF; /* Advance iterator early: */ uIterator = uIterator >> 8; // We know that .0. and .255. are legal these days // but still prefer to exclude them from // being proposed to an end user. /* If current byte equal to 255 * or first byte equal to 0, * let's try again: */ if ( uCurrentByte == 0xFF || (uCurrentByte == 0x00 && uByteIndex == 0)) fSuccess = false; /* Advance byte index: */ ++uByteIndex; } } while (!fSuccess); return uAddress; } QStringList UIHostNetworkUtils::makeDhcpServerProposal(const QString &strInterfaceAddress, const QString &strInterfaceMask) { /* Convert interface address/mask into digital form and calculate inverted interface mask: */ const quint32 uAddress = ipv4FromQStringToQuint32(strInterfaceAddress); const quint32 uMaskDirect = ipv4FromQStringToQuint32(strInterfaceMask); const quint32 uMaskInvert = ~uMaskDirect; //printf("Direct mask: %s (%u)\n", ipv4FromQuint32ToQString(uMaskDirect).toUtf8().constData(), uMaskDirect); //printf("Inverted mask: %s (%u)\n", ipv4FromQuint32ToQString(uMaskInvert).toUtf8().constData(), uMaskInvert); /* Split the interface address into left and right parts: */ const quint32 uPartL = uAddress & uMaskDirect; const quint32 uPartR = uAddress & uMaskInvert; //printf("Left part: %s (%u)\n", ipv4FromQuint32ToQString(uPartL).toUtf8().constData(), uPartL); //printf("Right part: %s (%u)\n", ipv4FromQuint32ToQString(uPartR).toUtf8().constData(), uPartR); /* Prepare DHCP server proposal:" */ quint32 uServerProposedAddress = 0; quint32 uServerProposedAddressL = 0; quint32 uServerProposedAddressU = 0; if (uPartR < uMaskInvert / 2) { /* Make DHCP server proposal from right scope: */ //printf("Make DHCP server proposal from right scope:\n"); uServerProposedAddress = uPartL + incrementNetworkAddress(uPartR); uServerProposedAddressL = uPartL + incrementNetworkAddress(incrementNetworkAddress(uPartR)); 
uServerProposedAddressU = uPartL + (uMaskInvert & 0xFEFEFEFE) /* decrementNetworkAddress(uMaskInvert) */; } else { /* Make DHCP server proposal from left scope: */ //printf("Make DHCP server proposal from left scope:\n"); uServerProposedAddress = uPartL + 1 /* incrementNetworkAddress(0) */; uServerProposedAddressL = uPartL + 2 /* incrementNetworkAddress(incrementNetworkAddress(0)) */; uServerProposedAddressU = uPartL + decrementNetworkAddress(uPartR); } //printf("DHCP server address: %s (%u)\n", ipv4FromQuint32ToQString(uServerProposedAddress).toUtf8().constData(), uServerProposedAddress); //printf("DHCP server lower address: %s (%u)\n", ipv4FromQuint32ToQString(uServerProposedAddressL).toUtf8().constData(), uServerProposedAddressL); //printf("DHCP server upper address: %s (%u)\n", ipv4FromQuint32ToQString(uServerProposedAddressU).toUtf8().constData(), uServerProposedAddressU); /* Pack and return result: */ return QStringList() << ipv4FromQuint32ToQString(uServerProposedAddress) << ipv4FromQuint32ToQString(uMaskDirect) << ipv4FromQuint32ToQString(uServerProposedAddressL) << ipv4FromQuint32ToQString(uServerProposedAddressU); }
mbitokhov/AdventOfCode2017
src/day9.cpp
<gh_stars>0 #include "includes/adventofcode.hpp" #include <iostream> #include <string> #include <functional> size_t inline day9_find_unescaped(const std::string& input, const size_t start=0, char value='>', char escape_char='!') { // Unfortantely the below code produces an error // It's a very simple answer, but it doesn't work all the time :( // size_t index{0}; // do { // index = input.find(value, index); // } while (input[index-1] != escape_char && index != std::string::npos); // return index; for(size_t index=start; index < input.size(); index++) { if(input[index] == escape_char) { index++; continue; } if(input[index] == value) { return index; } } return std::string::npos; } long day9p1(std::string input) { //remove all garbage data first size_t found = input.find('<'); size_t end; while(found != std::string::npos) { end = day9_find_unescaped(input, found+1); input.erase(found, end-found+1); found = input.find('<',found); } // std::cout << input << std::endl; long sum{0}; long inc{0}; for(auto c: input) { if(c == '{') { inc++; sum += inc; } else if (c == '}') { inc--; } } return sum; // I'm so disappointed. This is a perfect way to show off my knowledge of // recursive lambdas. But there's an obvious better solution to it. // // A parser of things // // is a function from strings // // to list of pairs // // of things and strings! // std::function<long(const std::string&)> parser; // parser = [](const std::string& input) { // // }; } long day9p2(const std::string& input) { //remove all garbage data first size_t found = input.find('<'); long sum=0; while(found != std::string::npos) { size_t index; for(index=found+1; index < input.size() && input[index] != '>'; index++) { if(input[index] == '!') { index++; continue; } sum++; } found = input.find('<',index); } return sum; }
liufuyang/scio
scalafix/input-0_8/src/main/scala/fix/FixJoinNames.scala
/* rule = ConsistenceJoinNames */ package fix import com.spotify.scio.values.SCollection object FixJoinNames { def changeJoinNames( lhs: SCollection[(Int, String)], rhs: SCollection[(Int, String)] ): SCollection[(Int, (String, Option[String]))] = { lhs.hashLeftJoin(rhs) lhs.sparseOuterJoin(rhs, 3) lhs.skewedLeftJoin(rhs) } def changeNamesAndArgs( lhs: SCollection[(Int, String)], rightHS: SCollection[(Int, String)] ): SCollection[(Int, (String, Option[String]))] = { lhs.hashLeftJoin(that = rightHS) lhs.sparseOuterJoin(that = rightHS, 3) lhs.sparseOuterJoin(that = rightHS, thatNumKeys = 3) lhs.skewedLeftJoin(that = rightHS) } def changeArgs(lhs: SCollection[(Int, String)], rightHS: SCollection[(Int, String)]): Unit = { lhs.join(that = rightHS) lhs.fullOuterJoin(that = rightHS) lhs.leftOuterJoin(that = rightHS) lhs.rightOuterJoin(that = rightHS) lhs.sparseOuterJoin(that = rightHS, thatNumKeys = 4L, fpProb = 0.01) lhs.sparseLeftOuterJoin(that = rightHS, thatNumKeys = 4) lhs.sparseRightOuterJoin(that = rightHS, thatNumKeys = 2) lhs.cogroup(that = rightHS) lhs.cogroup(that1 = rightHS, that2 = rightHS) lhs.cogroup(that1 = rightHS, that2 = rightHS, that3 = rightHS) lhs.groupWith(that = rightHS) lhs.groupWith(that1 = rightHS, that2 = rightHS) lhs.groupWith(that1 = rightHS, that2 = rightHS, that3 = rightHS) lhs.sparseLookup(that = rightHS, thisNumKeys = 1) lhs.sparseLookup(that1 = rightHS, that2 = rightHS, 3) lhs.skewedJoin(that = rightHS) lhs.skewedLeftJoin(that = rightHS) lhs.skewedFullOuterJoin(that = rightHS) lhs.hashJoin(that = rightHS) lhs.hashFullOuterJoin(that = rightHS) lhs.hashLeftJoin(that = rightHS) lhs.hashIntersectByKey(that = rightHS.map(a => a._1)) } def example(lhs: SCollection[(Int, String)], right: SCollection[String]): Unit = { def hashLeftJoin( lhs: SCollection[(Int, String)] ): SCollection[(Int, (String, Option[String]))] = lhs.hashLeftJoin(that = right.map(a => (a.length, a))) def sparseOuterJoin(a: String): Int = a.length def skewedLeftJoin(that: 
String): Int = that.length hashLeftJoin(lhs) sparseOuterJoin("test") skewedLeftJoin("test") } }
MIAhmed/Kujui-POC
src/app/modules/explorer/accounts/accountsExplorer.config.js
function AccountsExplorerConfig($stateProvider) { 'ngInject'; $stateProvider .state('app.accountsExplorer', { url: '/explorer/accounts', controller: 'AccountsExplorerCtrl', controllerAs: '$ctrl', templateUrl: 'modules/explorer/accounts/accountsExplorer.html', title: 'Explorer - Accounts', params: { address: "", privateKey: "" } }); }; export default AccountsExplorerConfig;
Ostoic/distant
include/distant/synch.hpp
#pragma once #include <distant/synch/mutex.hpp> #include <distant/synch/wait.hpp>
bourneagain/pythonBytes
hangman.py
import random import os import sys os.system("clear") try: readme=open("DATA/list.txt","r") except IOError: print "*** MISSING PROGRAM FILES ******* " print "\n" print " Please include files necessary for this game." print " If you had copied this from someone else, ( author :shyam :) ) include file named \"list.txt\" and place it inside \"DATA\" folder in pwd" print "\n" print "*************************************** " sys.exit() clueFlag=0 if len(sys.argv) > 1: clueFlag=1 deathCount=5 fileLines=[] dash=[] guessWordList=[] for lines in readme: fileLines.append(lines) randNameIndex=random.randint(0,len(fileLines)) guessWord=fileLines[randNameIndex].strip() for letters in guessWord: dash.append("_") print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<< <<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<< \"THE GAME BEGINS\" >>>>>>>>>>>>>>>>>>" print "<<<<<<<<<<<<<<<< <<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<< <<<<<<<<<<" print "<<<<<<<<<<<<<<<< This is a game of \"Hangman\", >>>>>>>>>>" print "<<<<<<<<<<<<<<<< Mission to guess the given word. >>>>>>>>>>" print "<<<<<<<<<<<<<<<< Each wrong guess moves you one step closer to being hanged. >>>>>>>>>>" print "<<<<<<<<<<<<<<< <<<<<<<<<<" print "<<<<<<<<<<<<<<< !!! You have only 5 LIVES to guess, GOOD LUCK !!! 
<<<<<<<<<<" print "<<<<<<<<<<<<<<< <<<<<<<<<<" print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "\n" print "\n" print "******* GUESS THE COUNTRY/STATE/CITY NAMES of INDIA *******" print "\n" print " "+' '.join(dash) print "\n" for l in guessWord: guessWordList.append(l); if clueFlag: print guessWord while(deathCount > 0 ): if dash.count("_") == 0: os.system("clear") print "\n" print "\n" print "\n" print "\n" print "\n" print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<<<<<<<< <<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<<<<<<<< CONGRATULATIONS, WINNER >>>>>>>>>>>>>>>>>>>>>>>>>>>>>" print "<<<<<<<<<<<<<<<<<<<<<< <<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" print "\n" print " YOU RIGHTLY GUESSED : \" " + guessWord + " \"" print "\n" print "\n" print "\n" print "\n" print " HAVE A GOOD DAY !, Bye " print "\n" break letter=raw_input("Enter your choice [ a-z ] : ") # print "YOU ENTERED " + letter index=0 if letter in guessWord: iter=-1; for ll in guessWordList: iter=int(iter)+1 if ll == letter: dash[iter]=letter os.system("clear") print "\n" print " ******************* GOOD GUESSS ****** " print " YOU ARE ALIVE STILL........." print "\n" print " "+' '.join(dash) print "\n" continue else: deathCount=deathCount-1 print "\n" print "\n" print "\n" if deathCount != 0: os.system("clear") print "*************************************************************************" print "ONE STEP CLOSER TO DEATH ......... you have " + str(deathCount) + " MORE LIVES , GUESS AGAIN !!!" print "\n" print " "+' '.join(dash) print "\n" continue else: os.system("clear") print " ************************** " print " YOU LOSER !!!!!! " print " ***************************" print " IT WAS SIMPLE TO GUESS :D " + guessWord print " =========================" print "\n" break readme.close();
reflectometry/osrefl
osrefl/viewers/plot_2d.py
<reponame>reflectometry/osrefl # -*- coding: utf-8 -*- from pylab import imshow,cm,colorbar,hot,show,xlabel,ylabel,connect, plot, figure, draw, axis, gcf,legend from numpy import ones, sum, arange, transpose, log import matplotlib.colors as colors from matplotlib.widgets import RectangleSelector from colormap import change_colormap from matplotlib.axis import XAxis, YAxis from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as Canvas from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg from matplotlib.font_manager import fontManager, FontProperties import wx from matplotlib.image import FigureImage from matplotlib.figure import Figure from matplotlib.pyplot import figure, get_fignums from zoom_colorbar import zoom_colorbar from osrefl.loaders.reduction.cmapmenu import CMapMenu from matplotlib.cm import get_cmap import matplotlib.cbook as cbook import matplotlib #from binned_data_class3 import plottable_2d_data #from wxrebin3 import rebinned_data #import __main__ class MyNavigationToolbar(NavigationToolbar2WxAgg): """ Extend the default wx toolbar with your own event handlers """ class Cursors: HAND, POINTER, SELECT_REGION, MOVE = range(4) SET_SLICEMODE = wx.NewId() def __init__(self, canvas, cankill, data_frame_instance): NavigationToolbar2WxAgg.__init__(self, canvas) self.cursors = self.Cursors() self.data_frame_instance = data_frame_instance # for simplicity I'm going to reuse a bitmap from wx, you'll # probably want to add your own. slice_xpm_data = [ "16 16 2 1", " c None", ". c #91384E", " ", " ", " . ", " . . ", " . ", "... . . .. .. ", ".. . . . ... ", " . . . . . ", "... . . .. .. ", " ", " ", " ", " ", " ", " ", " "] slice2_xpm_data = [ "32 16 72 1", " c None", ". c None", "+ c #91384E", "@ c #DFC6CC", "# c #A3596B", "$ c #933B51", "% c #9E5063", "& c #D3B0B9", "* c #FCFBFB", "= c #C1909C", "- c #99475B", "; c #974459", "> c #CDA5AF", ", c #FDFCFC", "' c #C495A1", ") c #9A485C", "! 
c #943F54", "~ c #B67B8A", "{ c #F9F4F5", "] c #DBBEC5", "^ c #FBF9FA", "/ c #F2E7EA", "( c #BB8592", "_ c #C597A2", ": c #AF6E7E", "< c #F2E9EB", "[ c #F8F2F3", "} c #C18F9B", "| c #C99DA8", "1 c #AE6D7D", "2 c #BA8290", "3 c #BE8996", "4 c #9D4E62", "5 c #D0AAB4", "6 c #9D4D61", "7 c #EEE1E4", "8 c #EEE0E3", "9 c #F5EDEF", "0 c #9A495D", "a c #E8D5DA", "b c #B27483", "c c #99465B", "d c #9F5265", "e c #DDC2C9", "f c #923A50", "g c #FDFBFC", "h c #923B50", "i c #91394F", "j c #FEFEFE", "k c #DFC5CB", "l c #9B4A5E", "m c #F3E9EC", "n c #B67C8B", "o c #ECDEE1", "p c #FCFAFA", "q c #DEC4CA", "r c #C697A3", "s c #CA9FAA", "t c #B07181", "u c #EFE3E6", "v c #EBDBDF", "w c #D9BAC1", "x c #A15669", "y c #E0C7CD", "z c #C08E9B", "A c #98465A", "B c #984559", "C c #CEA6B0", "D c #FEFDFD", "E c #CAA0AB", "F c #A35A6C", "G c #D9BBC2", "................................", "................................", "................................", ".........+......................", ".........+..+...................", ".........+......................", "..@#$%&..+..+..*=-;>..,')!~{....", "..-]^/(..+..+.._:<[}..|1<{23....", "..45<....+..+..67.....%8..90....", "..abcde..+..+..fg.....hffi++....", "....jkl..+..+..67.....4m........", "..nopq-..+..+..r:<[}..stugv~....", "..wxh#y..+..+..*zABC..DE%$FG....", "................................", "................................", "................................"] slice_bitmap = wx.BitmapFromXPMData(slice2_xpm_data) self._slicemode = False self.AddCheckTool(self.SET_SLICEMODE, slice_bitmap, shortHelp='Click me', longHelp='Activate slice mode') self.Bind(wx.EVT_TOOL, self.set_slicemode, id=self.SET_SLICEMODE) def set_slicemode(self, *args): 'activate slice to rectangle mode' if self._slicemode: self._slicemode = False self.data_frame_instance.sliceplots_off() else: self._slicemode = True self.data_frame_instance.sliceplots_on() class plot_2d_data(wx.Frame): """Generic 2d plotting routine - inputs are: - data (2d array of values), - x and y 
extent of the data, - title of graph, and - pixel mask to be used during summation - must have same dimensions as data (only data entries corresponding to nonzero values in pixel_mask will be summed) - plot_title, x_label and y_label are added to the 2d-plot as you might expect""" def __init__(self, data, extent, caller = None, scale = 'log', window_title = 'log plot', pixel_mask = None, plot_title = "data plot", x_label = "x", y_label = "y", parent=None): wx.Frame.__init__(self, parent=None, title=window_title, pos = wx.DefaultPosition, size=wx.Size(800,600)) print parent self.extent = extent self.data = data self.caller = caller self.window_title = window_title x_range = extent[0:2] #x_range.sort() self.x_min, self.x_max = x_range y_range = extent[2:4] #y_range.sort() self.y_min, self.y_max = y_range self.plot_title = plot_title self.x_label = x_label self.y_label = y_label self.slice_xy_range = (x_range, y_range) self.ID_QUIT = wx.NewId() self.ID_LOGLIN = wx.NewId() self.ID_UPCOLLIM = wx.NewId() self.ID_LOWCOLLIM = wx.NewId() menubar = wx.MenuBar() filemenu = wx.Menu() quit = wx.MenuItem(filemenu, 1, '&Quit\tCtrl+Q') #quit.SetBitmap(wx.Bitmap('icons/exit.png')) filemenu.AppendItem(quit) plotmenu = wx.Menu() self.menu_log_lin_toggle = plotmenu.Append(self.ID_LOGLIN, 'Plot 2d data with log color scale', 'plot 2d on log scale', kind=wx.ITEM_CHECK) self.Bind(wx.EVT_MENU, self.toggle_2d_plot_scale, id=self.ID_LOGLIN) menu_upper_colormap_limit = plotmenu.Append(self.ID_UPCOLLIM, 'Set upper limit of color map', 'Set upper limit of color map') self.Bind(wx.EVT_MENU, self.set_new_upper_color_limit, id=self.ID_UPCOLLIM) menu_lower_colormap_limit = plotmenu.Append(self.ID_LOWCOLLIM, 'Set lower limit of color map', 'Set lower limit of color map') self.Bind(wx.EVT_MENU, self.set_new_lower_color_limit, id=self.ID_LOWCOLLIM) #live_on_off = wx.MenuItem(live_update, 1, '&Live Update\tCtrl+L') #quit.SetBitmap(wx.Bitmap('icons/exit.png')) #live_update.AppendItem(self.live_toggle) 
#self.menu_log_lin_toggle.Check(True) menubar.Append(filemenu, '&File') menubar.Append(plotmenu, '&Plot') self.SetMenuBar(menubar) self.Centre() if pixel_mask == None: pixel_mask = ones(data.shape) if pixel_mask.shape != data.shape: print "Warning: pixel mask shape incompatible with data" pixel_mask = ones(data.shape) self.pixel_mask = pixel_mask self.show_data = transpose(data.copy()) #self.minimum_intensity = self.data[pixel_mask.nonzero()].min() # correct for floating-point weirdness: self.minimum_intensity = self.data[self.data > 1e-17].min() #if scale == 'log': #self.show_data = log ( self.data.copy().T + self.minimum_intensity/2.0 ) #self._scale = 'log' #self.menu_log_lin_toggle.Check(True) #elif (scale =='lin' or scale == 'linear'): #self._scale = 'lin' #self.menu_log_lin_toggle.Check(True) #self.bin_data = caller.bin_data #self.params = caller.params #fig = figure() self.fig = Figure(dpi=80, figsize=(5,5)) #self.fig = figure() fig = self.fig self.canvas = Canvas(self, -1, self.fig) self.show_sliceplots = False # by default, sliceplots on self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.canvas, 1, wx.TOP | wx.LEFT | wx.EXPAND) #self.toolbar = Toolbar(self.canvas) self.toolbar = MyNavigationToolbar(self.canvas, True, self) self.toolbar.Realize() if wx.Platform == '__WXMAC__': # Mac platform (OSX 10.3, MacPython) does not seem to cope with # having a toolbar in a sizer. This work-around gets the buttons # back, but at the expense of having the toolbar at the top self.SetToolBar(self.toolbar) else: # On Windows platform, default window size is incorrect, so set # toolbar width to figure width. tw, th = self.toolbar.GetSizeTuple() fw, fh = self.canvas.GetSizeTuple() # By adding toolbar in sizer, we are able to put it at the bottom # of the frame - so appearance is closer to GTK version. # As noted above, doesn't work for Mac. 
self.toolbar.SetSize(wx.Size(fw, th)) self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) self.statusbar = self.CreateStatusBar() self.statusbar.SetFieldsCount(2) self.statusbar.SetStatusWidths([-1, -2]) self.statusbar.SetStatusText("Current Position:", 0) self.canvas.mpl_connect('motion_notify_event', self.onmousemove) #self.canvas.mpl_connect('button_press_event', self.right_click_handler) #self.axes = fig.add_subplot(111) #self.axes = self.fig.gca() #ax = self.axes self.mapper = FigureImage(self.fig) #im = self.axes.pcolor(x,y,V,shading='flat') #self.mapper.add_observer(im) #self.show_data = transpose(log(self.show_data + self.minimum_intensity / 2.0)) #self.canvas.mpl_connect('pick_event', self.log_lin_select) ax = fig.add_subplot(221, label='2d_plot') fig.sx = fig.add_subplot(222, label='sx', picker=True) fig.sx.xaxis.set_picker(True) fig.sx.yaxis.set_picker(True) fig.sx.yaxis.set_ticks_position('right') fig.sx.set_zorder(1) fig.sz = fig.add_subplot(223, label='sz', picker=True) fig.sz.xaxis.set_picker(True) fig.sz.yaxis.set_picker(True) fig.sz.set_zorder(1) self.RS = RectangleSelector(ax, self.onselect, drawtype='box', useblit=True) fig.slice_overlay = None ax.set_position([0.125,0.1,0.7,0.8]) fig.cb = fig.add_axes([0.85,0.1,0.05,0.8]) fig.cb.set_zorder(2) fig.ax = ax fig.ax.set_zorder(2) self.axes = ax ax.set_title(plot_title) #connect('key_press_event', self.toggle_selector) if scale == 'log': self.show_data = log ( self.data.copy().T + self.minimum_intensity/2.0 ) self.__scale = 'log' self.fig.cb.set_xlabel('$\log_{10}I$') self.menu_log_lin_toggle.Check(True) elif (scale =='lin' or scale == 'linear'): self.__scale = 'lin' self.fig.cb.set_xlabel('$I$') self.menu_log_lin_toggle.Check(False) im = self.axes.imshow(self.show_data, interpolation='nearest', aspect='auto', origin='lower',cmap=cm.jet, extent=extent) #im = ax.imshow(data, interpolation='nearest', aspect='auto', origin='lower',cmap=cm.jet, extent=extent) fig.im = im ax.set_xlabel(x_label, 
size='large') ax.set_ylabel(y_label, size='large') self.toolbar.update() #zoom_colorbar(im) #fig.colorbar(im, cax=fig.cb) zoom_colorbar(im=im, cax=fig.cb) #figure(fig.number) #fig.canvas.draw() #return self.SetSizer(self.sizer) self.Fit() self.canvas.Bind(wx.EVT_RIGHT_DOWN, self.OnContext) self.Bind(wx.EVT_CLOSE, self.onExit) self.sliceplots_off() self.SetSize(wx.Size(800,600)) self.canvas.draw() return def onExit(self, event): self.Destroy() def exit(self, event): wx.GetApp().Exit() def set_new_upper_color_limit(self, evt = None): current_uplim = self.fig.im.get_clim()[1] current_lowlim = self.fig.im.get_clim()[0] dlg = wx.TextEntryDialog(None, "Change upper limit of color map (currently %f)" % current_uplim, defaultValue = "%f" % current_uplim) if dlg.ShowModal() == wx.ID_OK: new_val = dlg.GetValue() xlab = self.fig.cb.get_xlabel() ylab = self.fig.cb.get_ylabel() self.fig.im.set_clim((current_lowlim, float(new_val))) self.fig.cb.set_xlabel(xlab) self.fig.cb.set_ylabel(ylab) self.fig.canvas.draw() dlg.Destroy() def set_new_lower_color_limit(self, evt = None): current_uplim = self.fig.im.get_clim()[1] current_lowlim = self.fig.im.get_clim()[0] dlg = wx.TextEntryDialog(None, "Change lower limit of color map (currently %f)" % current_lowlim, defaultValue = "%f" % current_lowlim) if dlg.ShowModal() == wx.ID_OK: new_val = dlg.GetValue() xlab = self.fig.cb.get_xlabel() ylab = self.fig.cb.get_ylabel() self.fig.im.set_clim((float(new_val), current_uplim)) self.fig.cb.set_xlabel(xlab) self.fig.cb.set_ylabel(ylab) self.fig.canvas.draw() dlg.Destroy() def OnContext(self, evt): print self.show_sliceplots mpl_x = evt.X mpl_y = self.fig.canvas.GetSize()[1] - evt.Y mpl_mouseevent = matplotlib.backend_bases.MouseEvent('button_press_event', self.canvas, mpl_x, mpl_y, button = 3) if (mpl_mouseevent.inaxes == self.fig.ax): self.area_context(mpl_mouseevent, evt) elif ((mpl_mouseevent.inaxes == self.fig.sx or mpl_mouseevent.inaxes == self.fig.sz) and (self.show_sliceplots == True)): 
self.lineplot_context(mpl_mouseevent, evt) def area_context(self, mpl_mouseevent, evt): area_popup = wx.Menu() item1 = area_popup.Append(wx.ID_ANY,'&Grid on/off', 'Toggle grid lines') wx.EVT_MENU(self, item1.GetId(), self.OnGridToggle) cmapmenu = CMapMenu(self, callback = self.OnColormap, mapper=self.mapper, canvas=self.canvas) item2 = area_popup.Append(wx.ID_ANY,'&Toggle log/lin', 'Toggle log/linear scale') wx.EVT_MENU(self, item2.GetId(), lambda evt: self.toggle_log_lin(mpl_mouseevent)) item3 = area_popup.AppendMenu(wx.ID_ANY, "Colourmaps", cmapmenu) self.PopupMenu(area_popup, evt.GetPositionTuple()) def figure_list_dialog(self): figure_list = get_fignums() figure_list_names = [] for fig in figure_list: figure_list_names.append('Figure ' + str(fig)) figure_list_names.insert(0, 'New Figure') figure_list.insert(0, None) #selection_num = wx.GetSingleChoiceIndex('Choose other plot', '', other_plot_names) dlg = wx.SingleChoiceDialog(None, 'Choose figure number', '', figure_list_names) dlg.SetSize(wx.Size(640,480)) if dlg.ShowModal() == wx.ID_OK: selection_num=dlg.GetSelection() dlg.Destroy() print selection_num return figure_list[selection_num] def lineplot_context(self, mpl_mouseevent, evt): popup = wx.Menu() item1 = popup.Append(wx.ID_ANY,'&Toggle log/lin', 'Toggle log/linear scale of slices') wx.EVT_MENU(self, item1.GetId(), lambda evt: self.toggle_log_lin(mpl_mouseevent)) if mpl_mouseevent.inaxes == self.fig.sx: item2 = popup.Append(wx.ID_ANY, "Save x slice", "save this slice") wx.EVT_MENU(self, item2.GetId(), self.save_x_slice) item3 = popup.Append(wx.ID_ANY, '&Popout plot', 'Open this data in a figure window') wx.EVT_MENU(self, item3.GetId(), lambda evt: self.popout_x_slice()) elif mpl_mouseevent.inaxes == self.fig.sz: item2 = popup.Append(wx.ID_ANY, "Save y slice", "save this slice") wx.EVT_MENU(self, item2.GetId(), self.save_y_slice) item3 = popup.Append(wx.ID_ANY, '&Popout plot', 'Open this data in a new plot window') wx.EVT_MENU(self, item3.GetId(), lambda 
evt: self.popout_y_slice()) self.PopupMenu(popup, evt.GetPositionTuple()) def popout_y_slice(self, event=None, figure_num = None, label = None): if figure_num == None: figure_num = self.figure_list_dialog() fig = figure(figure_num) # if this is None, matplotlib automatically increments figure number to highest + 1 ax = self.fig.sz slice_desc = '\nsliceplot([%f,%f],[%f,%f])' % (self.slice_xy_range[0][0],self.slice_xy_range[0][1],self.slice_xy_range[1][0],self.slice_xy_range[1][1]) if figure_num == None: default_title = self.plot_title + slice_desc dlg = wx.TextEntryDialog(None, 'Enter title for plot', defaultValue = default_title) if dlg.ShowModal() == wx.ID_OK: title = dlg.GetValue() else: title = default_title dlg.Destroy() new_ax = fig.add_subplot(111) new_ax.set_title(title, size='large') new_ax.set_xlabel(self.x_label, size='x-large') new_ax.set_ylabel('$I_{summed}$', size='x-large') else: new_ax = fig.axes[0] if label == None: default_label = self.window_title + ': ' + self.plot_title + slice_desc dlg = wx.TextEntryDialog(None, 'Enter data label (for plot legend)', defaultValue = default_label) if dlg.ShowModal() == wx.ID_OK: label = dlg.GetValue() else: label = default_label dlg.Destroy() xy = ax.lines[0].get_data() x = xy[0] y = xy[1] new_ax.plot(x,y, label = label) font = FontProperties(size='small') lg = legend(prop=font) drag_lg = DraggableLegend(lg) drag_lg.connect() fig.canvas.draw() fig.show() def popout_x_slice(self, event=None, figure_num = None, label = None): if figure_num == None: figure_num = self.figure_list_dialog() fig = figure(figure_num) ax = self.fig.sx slice_desc = '\nsliceplot([%f,%f],[%f,%f])' % (self.slice_xy_range[0][0],self.slice_xy_range[0][1],self.slice_xy_range[1][0],self.slice_xy_range[1][1]) if figure_num == None: default_title = self.plot_title + slice_desc dlg = wx.TextEntryDialog(None, 'Enter title for plot', defaultValue = default_title) if dlg.ShowModal() == wx.ID_OK: title = dlg.GetValue() else: title = default_title 
dlg.Destroy() new_ax = fig.add_subplot(111) new_ax.set_title(title, size='large') new_ax.set_xlabel(self.y_label, size='x-large') new_ax.set_ylabel('$I_{summed}$', size='x-large') else: new_ax = fig.axes[0] if label == None: default_label = self.window_title + ': ' + self.plot_title + slice_desc dlg = wx.TextEntryDialog(None, 'Enter data label (for plot legend)', defaultValue = default_label) if dlg.ShowModal() == wx.ID_OK: label = dlg.GetValue() else: label = default_label dlg.Destroy() xy = ax.lines[0].get_data() x = xy[1] y = xy[0] new_ax.plot(x,y, label = label) font = FontProperties(size='small') lg = legend(prop=font) drag_lg = DraggableLegend(lg) drag_lg.connect() fig.canvas.draw() fig.show() def save_x_slice(self, event=None, outFileName=None): if outFileName == None: dlg = wx.FileDialog(None, "Save 2d data as:", '', "", "", wx.FD_SAVE) if dlg.ShowModal() == wx.ID_OK: fn = dlg.GetFilename() fd = dlg.GetDirectory() dlg.Destroy() outFileName = fd + '/' + fn outFile = open(outFileName, 'w') outFile.write('#'+self.title+'\n') outFile.write('#xmin: ' + str(self.slice_xy_range[0][0]) + '\n') outFile.write('#xmax: ' + str(self.slice_xy_range[0][1]) + '\n') outFile.write('#ymin: ' + str(self.slice_xy_range[1][0]) + '\n') outFile.write('#ymax: ' + str(self.slice_xy_range[1][1]) + '\n') outFile.write("#y\tslice_x_data\n") if not (self.slice_x_data == None): for i in range(self.slice_x_data.shape[0]): x = self.y[i] y = self.slice_x_data[i] outFile.write(str(x) + "\t" + str(y) + "\n") outFile.close() print('saved x slice in %s' % (outFileName)) return def save_y_slice(self, event=None, outFileName=None): if outFileName == None: dlg = wx.FileDialog(None, "Save 2d data as:", '', "", "", wx.FD_SAVE) if dlg.ShowModal() == wx.ID_OK: fn = dlg.GetFilename() fd = dlg.GetDirectory() dlg.Destroy() outFileName = fd + '/' + fn outFile = open(outFileName, 'w') outFile.write('#'+self.title+'\n') outFile.write('#xmin: ' + str(self.slice_xrange[0]) + '\n') outFile.write('#xmax: ' + 
str(self.slice_xrange[1]) + '\n') outFile.write('#ymin: ' + str(self.slice_yrange[0]) + '\n') outFile.write('#ymax: ' + str(self.slice_yrange[1]) + '\n') outFile.write("#x\tslice_y_data\n") if not (self.slice_y_data == None): for i in range(self.slice_y_data.shape[0]): x = self.x[i] y = self.slice_y_data[i] outFile.write(str(x) + "\t" + str(y) + "\n") outFile.close() print('saved y slice in %s' % (outFileName)) return def OnGridToggle(self, event): self.fig.ax.grid() self.fig.canvas.draw_idle() def OnColormap(self, name): print "Selected colormap",name self.fig.im.set_cmap(get_cmap(name)) self.fig.canvas.draw() def toggle_2d_plot_scale(self, event=None): if self.__scale == 'log': self.show_data = self.data.T self.fig.im.set_array(self.show_data) self.fig.im.autoscale() self.fig.cb.set_xlabel('$I$') self.__scale = 'lin' self.menu_log_lin_toggle.Check(False) self.statusbar.SetStatusText("%s scale" % self.__scale, 0) self.fig.canvas.draw_idle() elif self.__scale == 'lin': self.show_data = log ( self.data.copy().T + self.minimum_intensity/2.0 ) self.fig.im.set_array(self.show_data) self.fig.im.autoscale() self.fig.cb.set_xlabel('$\log_{10}I$') self.__scale = 'log' self.menu_log_lin_toggle.Check(True) self.statusbar.SetStatusText("%s scale" % self.__scale, 0) self.fig.canvas.draw_idle() def toggle_log_lin(self,event): ax = event.inaxes label = ax.get_label() if label == '2d_plot': self.toggle_2d_plot_scale() if label == 'sz': scale = ax.get_yscale() if scale == 'log': ax.set_yscale('linear') ax.figure.canvas.draw_idle() elif scale == 'linear': ax.set_yscale('log') ax.figure.canvas.draw_idle() elif label == 'sx': scale = ax.get_xscale() if scale == 'log': ax.set_xscale('linear') ax.figure.canvas.draw_idle() elif scale == 'linear': ax.set_xscale('log') ax.figure.canvas.draw_idle() def onmousemove(self,event): # the cursor position is given in the wx status bar #self.fig.gca() if event.inaxes: x, y = event.xdata, event.ydata self.statusbar.SetStatusText("%s scale x = %.3g, 
y = %.3g" % (self.__scale,x,y), 1) #self.statusbar.SetStatusText("y = %.3g" %y, 2) def onselect(self, eclick, erelease): x_range = [eclick.xdata, erelease.xdata] y_range = [eclick.ydata, erelease.ydata] ax = eclick.inaxes self.sliceplot((x_range, y_range), ax) print 'sliceplot(([%f,%f],[%f,%f]))' % (x_range[0],x_range[1],y_range[0],y_range[1]) def sliceplots_off(self): self.fig.ax.set_position([0.125,0.1,0.7,0.8]) self.fig.cb.set_position([0.85,0.1,0.05,0.8]) #self.fig.cb.set_visible(True) self.fig.sx.set_visible(False) self.fig.sz.set_visible(False) if self.fig.slice_overlay: self.fig.slice_overlay[0].set_visible(False) self.RS.set_active(False) self.show_sliceplots = False self.fig.canvas.draw() def sliceplots_on(self): self.fig.ax.set_position([0.125,0.53636364, 0.35227273,0.36363636]) self.fig.cb.set_position([0.49,0.53636364, 0.02, 0.36363636]) self.fig.sx.set_position([0.58,0.53636364, 0.35227273,0.36363636]) self.fig.sx.set_visible(True) self.fig.sz.set_visible(True) #self.fig.cb.set_visible(False) if self.fig.slice_overlay: self.fig.slice_overlay[0].set_visible(True) self.RS.set_active(True) self.show_sliceplots = True self.fig.canvas.draw() def toggle_sliceplots(self): """switch between views with and without slice plots""" if self.show_sliceplots == True: self.sliceplots_off() else: # self.show_sliceplots == False self.sliceplots_on() def show_slice_overlay(self, x_range, y_range, x, slice_y_data, y, slice_x_data): """sum along x and z within the box defined by qX- and qZrange. sum along qx is plotted to the right of the data, sum along qz is plotted below the data. 
Transparent white rectangle is overlaid on data to show summing region""" from matplotlib.ticker import FormatStrFormatter, ScalarFormatter if self.fig == None: print('No figure for this dataset is available') return fig = self.fig ax = fig.ax extent = fig.im.get_extent() if fig.slice_overlay == None: fig.slice_overlay = ax.fill([x_range[0],x_range[1],x_range[1],x_range[0]],[y_range[0],y_range[0],y_range[1],y_range[1]],fc='white', alpha=0.3) fig.ax.set_ylim(extent[2],extent[3]) else: fig.slice_overlay[0].xy = [(x_range[0],y_range[0]), (x_range[1],y_range[0]), (x_range[1],y_range[1]), (x_range[0],y_range[1])] fig.sz.clear() default_fmt = ScalarFormatter(useMathText=True) default_fmt.set_powerlimits((-2,4)) fig.sz.xaxis.set_major_formatter(default_fmt) fig.sz.yaxis.set_major_formatter(default_fmt) fig.sz.xaxis.set_major_formatter(FormatStrFormatter('%.2g')) fig.sz.set_xlim(x[0], x[-1]) fig.sz.plot(x, slice_y_data) fig.sx.clear() fig.sx.yaxis.set_major_formatter(default_fmt) fig.sx.xaxis.set_major_formatter(default_fmt) fig.sx.yaxis.set_ticks_position('right') fig.sx.yaxis.set_major_formatter(FormatStrFormatter('%.2g')) fig.sx.set_ylim(y[0], y[-1]) fig.sx.plot(slice_x_data, y) fig.im.set_extent(extent) fig.canvas.draw() def copy_intensity_range_from(self, other_plot): if isinstance(other_plot, type(self)): xlab = self.fig.cb.get_xlabel() ylab = self.fig.cb.get_ylabel() self.fig.im.set_clim(other_plot.fig.im.get_clim()) self.fig.cb.set_xlabel(xlab) self.fig.cb.set_ylabel(ylab) self.fig.canvas.draw() def sliceplot(self, xy_range, ax = None): """sum along x and z within the box defined by qX- and qZrange. sum along qx is plotted to the right of the data, sum along qz is plotted below the data. 
Transparent white rectangle is overlaid on data to show summing region""" self.sliceplots_on() x_range, y_range = xy_range x, slice_y_data, y, slice_x_data = self.do_xy_slice(x_range, y_range) self.x = x self.slice_y_data = slice_y_data self.y = y self.slice_x_data = slice_x_data self.slice_xy_range = xy_range self.show_slice_overlay(x_range, y_range, x, slice_y_data, y, slice_x_data) def do_xy_slice(self, x_range, y_range): """ slice up the data, once along x and once along z. returns 4 arrays: a y-axis for the x data, an x-axis for the y data.""" #params = self.params print 'doing xy slice' data = self.data pixels = self.pixel_mask # zero out any pixels in the sum that have zero in the pixel count: data[pixels == 0] = 0 normalization_matrix = ones(data.shape) normalization_matrix[pixels == 0] = 0 x_min = min(x_range) x_max = max(x_range) y_min = min(y_range) y_max = max(y_range) x_size,y_size = data.shape global_x_range = (self.x_max - self.x_min) global_y_range = (self.y_max - self.y_min) x_pixel_min = round( (x_min - self.x_min) / global_x_range * x_size ) x_pixel_max = round( (x_max - self.x_min) / global_x_range * x_size ) y_pixel_min = round( (y_min - self.y_min) / global_y_range * y_size ) y_pixel_max = round( (y_max - self.y_min) / global_y_range * y_size ) #correct any sign switches: if (x_pixel_min > x_pixel_max): new_min = x_pixel_max x_pixel_max = x_pixel_min x_pixel_min = new_min if (y_pixel_min > y_pixel_max): new_min = y_pixel_max y_pixel_max = y_pixel_min y_pixel_min = new_min new_x_min = x_pixel_min / x_size * global_x_range + self.x_min new_x_max = x_pixel_max / x_size * global_x_range + self.x_min new_y_min = y_pixel_min / y_size * global_y_range + self.y_min new_y_max = y_pixel_max / y_size * global_y_range + self.y_min x_pixel_min = int(x_pixel_min) x_pixel_max = int(x_pixel_max) y_pixel_min = int(y_pixel_min) y_pixel_max = int(y_pixel_max) y_norm_factor = sum(normalization_matrix[x_pixel_min:x_pixel_max,y_pixel_min:y_pixel_max], axis=1) 
x_norm_factor = sum(normalization_matrix[x_pixel_min:x_pixel_max,y_pixel_min:y_pixel_max], axis=0) # make sure the normalization has a minimum value of 1 everywhere, # to avoid divide by zero errors: y_norm_factor[y_norm_factor == 0] = 1 x_norm_factor[x_norm_factor == 0] = 1 slice_y_data = sum(data[x_pixel_min:x_pixel_max,y_pixel_min:y_pixel_max], axis=1) / y_norm_factor slice_x_data = sum(data[x_pixel_min:x_pixel_max,y_pixel_min:y_pixel_max], axis=0) / x_norm_factor #slice_y_data = slice_y_data #slice_x_data = slice_x_data x_vals = arange(slice_y_data.shape[0], dtype = 'float') / slice_y_data.shape[0] * (new_x_max - new_x_min) + new_x_min y_vals = arange(slice_x_data.shape[0], dtype = 'float') / slice_x_data.shape[0] * (new_y_max - new_y_min) + new_y_min return x_vals, slice_y_data, y_vals, slice_x_data class DraggableLegend: lock = None # only one can be animated at a time def __init__(self, leg): self.leg = leg self.frame = leg.get_frame() self.figure = self.leg.figure self.axes = self.leg.parent self.press = None self.background = None def connect(self): 'connect to all the events we need' self.cidpress = self.figure.canvas.mpl_connect( 'button_press_event', self.on_press) self.cidrelease = self.figure.canvas.mpl_connect( 'button_release_event', self.on_release) self.cidmotion = self.figure.canvas.mpl_connect( 'motion_notify_event', self.on_motion) def on_press(self, event): 'on button press we will see if the mouse is over us and store some data' if event.inaxes != self.axes: return if DraggableLegend.lock is not None: return contains, attrd = self.frame.contains(event) if not contains: return #bbox = self.leg.get_bbox_to_anchor().transformed(self.axes.transData.inverted()) bbox = self.leg.get_bbox_to_anchor() #print 'event contains', self.leg.get_window_extent() self.press = bbox, event.x, event.y DraggableLegend.lock = self self.event = event # draw everything but the selected legend and store the pixel buffer canvas = self.leg.figure.canvas axes = self.axes 
self.leg.set_animated(True) canvas.draw() self.background = canvas.copy_from_bbox(self.axes.bbox) # now redraw just the rectangle axes.draw_artist(self.leg) # and blit just the redrawn area canvas.blit(axes.bbox) def on_motion(self, event): 'on motion we will move the legend if the mouse is over us' if DraggableLegend.lock is not self: return if event.inaxes != self.axes: return bbox, xpress, ypress = self.press dx = event.x - xpress dy = event.y - ypress new_bbox = (bbox.translated(dx, dy)).transformed(self.axes.transAxes.inverted()) self.leg.set_bbox_to_anchor(new_bbox, transform = self.axes.transAxes) #new_bbox = bbox.translated(dx, dy) #self.leg.set_bbox_to_anchor(new_bbox, transform = self.transform) canvas = self.figure.canvas axes = self.axes # restore the background region canvas.restore_region(self.background) # redraw just the current rectangle axes.draw_artist(self.leg) # blit just the redrawn area canvas.blit(axes.bbox) def on_release(self, event): 'on release we reset the press data' if DraggableLegend.lock is not self: return self.press = None DraggableLegend.lock = None # turn off the rect animation property and reset the background self.leg.set_animated(False) self.background = None # redraw the full figure self.figure.canvas.draw() def disconnect(self): 'disconnect all the stored connection ids' self.figure.canvas.mpl_disconnect(self.cidpress) self.figure.canvas.mpl_disconnect(self.cidrelease) self.figure.canvas.mpl_disconnect(self.cidmotion)
Seren541/SimpleOS
src/kernel/fs/tar.c
/* Copyright 2021 <NAME> Licensed under MIT ( https://github.com/xing1357/SimpleOS/blob/main/LICENSE ) */ #include "tar.h" uint32 getsize(const char *in) { uint32 size = 0; uint32 j; uint32 count = 1; for (j = 11; j > 0; j--, count *= 8) size += ((in[j - 1] - '0') * count); return size; } struct tar_header *headers[32]; uint32 parse(uint32 address) { uint32 i; for (i = 0; ; i++) { struct tar_header *header = (struct tar_header *)address; if (header->name[0] == '\0') break; uint32 size = getsize(header->size); headers[i] = header; address += ((size / 512) + 1) * 512; if (size % 512) address += 512; } return i; }
isabella232/fbc-mobile-app-core
packages/@fbcmobile-ui/Utils/__tests__/nullthrows_test.js
/** * Copyright (c) Facebook, Inc. and its affiliates. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. * * @flow strict-local * @format */ 'use strict'; import nullthrows from '@fbcmobile/ui/Utils/nullthrows'; describe('nullthrows tests', () => { test('test nullthrows with error message', () => { expect.assertions(1); try { const _oops = nullthrows(null, 'oops'); } catch (error) { expect(error).toHaveProperty('message', '[NullValueError] oops'); } }); test('test nullthrows', () => { expect.assertions(1); try { const _noOops = nullthrows(null); } catch (error) { expect(error).toHaveProperty('message', '[NullValueError]'); } }); });
rogelio-o/torrentflix
frontend/src/components/Layout/CustomNavbar.js
import "./CustomNavbar.css"; import React from "react"; import { Link } from "react-router-dom"; import { Collapse, Nav, Navbar, NavbarBrand, NavbarToggler, NavItem, NavLink } from "reactstrap"; import Logo from "./logo.png"; class CustomNavbar extends React.Component { constructor(props) { super(props); this.state = { isOpen: false, navbarClass: "", }; } componentDidMount() { this._scrollHandler = this._handleScroll.bind(this); window.addEventListener("scroll", this._scrollHandler); } componentWillUnmount() { if (this._scrollHandler) { window.removeEventListener("scroll", this._scrollHandler); } } _handleScroll() { const scrollY = window.scrollY; if (scrollY > 100) { this.setState({ navbarClass: "main-navbar-toggled" }); } else { this.setState({ navbarClass: "" }); } } toggle() { this.setState({ isOpen: !this.state.isOpen }); } render() { const { navbarClass, isOpen } = this.state; return ( <Navbar color="dark" fixed="top" expand="md" dark className={"main-navbar " + navbarClass} > <NavbarBrand href="/"> <img src={Logo} alt="Torrentflix" /> </NavbarBrand> <NavbarToggler onClick={this.toggle.bind(this)} /> <Collapse isOpen={isOpen} navbar> <Nav className="mr-auto" navbar> <NavItem> <NavLink tag={Link} to="/series"> Series </NavLink> </NavItem> <NavItem> <NavLink tag={Link} to="/movies"> Movies </NavLink> </NavItem> <NavItem> <NavLink tag={Link} to="/torrents"> Torrents </NavLink> </NavItem> <NavItem> <NavLink tag={Link} to="/renderizations"> Renderizations </NavLink> </NavItem> </Nav> </Collapse> </Navbar> ); } } export default CustomNavbar;
ekera/qunundrum
src/linear_distribution_mpi.cpp
/*! * \file linear_distribution_mpi.cpp * \ingroup linear_distribution * * \brief The definition of functions for sending, receiving and broadcasting * linear probability distributions. */ #include "linear_distribution.h" #include "errors.h" #include "linear_distribution_slice.h" #include <mpi.h> #include <stdint.h> #include <stdlib.h> void linear_distribution_init_bcast_recv( Linear_Distribution * const distribution, const int root) { /* Broadcast the parameters. */ parameters_init(&(distribution->parameters)); parameters_bcast_recv(&(distribution->parameters), root); /* Broadcast the precision. */ if (MPI_SUCCESS != MPI_Bcast( &(distribution->precision), 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_init_bcast_recv(): " "Failed to broadcast the precision."); } /* Broadcast the flags. */ if (MPI_SUCCESS != MPI_Bcast( &(distribution->flags), 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_init_bcast_recv(): " "Failed to broadcast the flags."); } /* Broadcast the capacity. */ uint32_t capacity; if (MPI_SUCCESS != MPI_Bcast( &capacity, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_init_bcast_recv(): " "Failed to broadcast the capacity."); } /* Store the capacity and zeroize the count. */ distribution->capacity = capacity; distribution->count = 0; /* Allocate space for slices according to the capacity. */ distribution->slices = (Linear_Distribution_Slice **)malloc( sizeof(Linear_Distribution_Slice *) * capacity); if (NULL == distribution->slices) { critical("linear_distribution_init_bcast_recv(): " "Failed to allocate memory."); } for (uint32_t i = 0; i < capacity; i++) { distribution->slices[i] = NULL; } /* Broadcast the slice count. 
*/ uint32_t count; if (MPI_SUCCESS != MPI_Bcast( &count, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_init_bcast_recv(): " "Failed to broadcast the slice count."); } /* Broadcast the slices. */ distribution->total_probability = 0; distribution->total_error = 0; for (uint32_t i = 0; i < count; i++) { /* Broadcast slice. */ Linear_Distribution_Slice * slice = linear_distribution_slice_alloc(); linear_distribution_slice_init_bcast_recv(slice, root); /* Insert the slice into the distribution. */ linear_distribution_insert_slice(distribution, slice); } /* Note: The total_probability and total_error will now have been set. */ } void linear_distribution_bcast_send( const Linear_Distribution * const distribution, const int root) { /* Broadcast the parameters. */ parameters_bcast_send(&(distribution->parameters), root); /* Broadcast the precision. */ uint32_t precision = distribution->precision; if (MPI_SUCCESS != MPI_Bcast( &precision, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_bcast_send(): " "Failed to broadcast the precision."); } /* Broadcast the flags. */ uint32_t flags = distribution->flags; if (MPI_SUCCESS != MPI_Bcast( &flags, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_bcast_send(): " "Failed to broadcast the flags."); } /* Broadcast the capacity. */ uint32_t capacity = distribution->capacity; if (MPI_SUCCESS != MPI_Bcast( &capacity, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_bcast_send(): " "Failed to broadcast the capacity."); } /* Broadcast the slice count. */ uint32_t count = distribution->count; if (MPI_SUCCESS != MPI_Bcast( &count, 1, /* count */ MPI_UNSIGNED, root, MPI_COMM_WORLD)) { critical("linear_distribution_bcast_send(): " "Failed to broadcast the slice count."); } /* Broadcast the slices. */ for (uint32_t i = 0; i < distribution->count; i++) { /* Broadcast slice. 
*/ linear_distribution_slice_bcast_send(distribution->slices[i], root); } }
IllinoisSimulatorLab/szg
src/drivers/arConstantHeadFilter.cpp
<gh_stars>1-10 //******************************************************** // Syzygy is licensed under the BSD license v2 // see the file SZG_CREDITS for details //******************************************************** #include "arPrecompiled.h" #include "arConstantHeadFilter.h" #include "arVRConstants.h" DriverFactory(arConstantHeadFilter, "arIOFilter") bool arConstantHeadFilter::_processEvent( arInputEvent& inputEvent ) { if (inputEvent.getType() != AR_EVENT_MATRIX || inputEvent.getIndex() != AR_VR_HEAD_MATRIX_ID) return true; return inputEvent.setMatrix( arMatrix4(1,0,0,0, 0,1,0,5, 0,0,1,0, 0,0,0,1) * ar_extractRotationMatrix(inputEvent.getMatrix())); }
pop1234o/BestPracticeApp
Video/src/main/java/com/liyafeng/video/Operating_system.java
package com.liyafeng.video;

/**
 * Placeholder for operating-system related notes and demos.
 * The actual native examples live in the jni module.
 */
public class Operating_system {

    /**
     * Intentionally empty entry point — see the jni module for the real code.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
    }
}
chegini91/mVis
mvis-src/src/main/java/at/tugraz/cgv/multiviewva/model/indexing/Index.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package at.tugraz.cgv.multiviewva.model.indexing;

import com.google.common.base.Converter;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.function.Consumer;

/**
 * Basic Interface for a Content Based Indexing module.
 *
 * Implementations compute a feature vector for each added object and support
 * nearest-neighbor queries under a configurable {@link SimilarityMeasure}.
 *
 * @author <NAME>
 * @param <OT> Type of objects that are to be indexed
 * @param <FET> Type of the Feature Extractor to be used
 */
public interface Index<OT,FET extends FeatureExtractor<OT>> {

    /**
     * Initializes the index. Must be invoked (successfully) before any other
     * operation.
     *
     * @param featureExtractor extractor used to compute feature vectors for objects
     * @param objectToNameMapper should provide methods to map an object to an unique string (and vice versa) ... the most simple implementation could be realized as a facade for a BiMap&lt;String,OT&gt; from all objects to their names
     * @param metric similarity measure used to compare feature vectors
     * @param indexerProps implementation-specific configuration properties
     * @throws RuntimeException if the index cannot be initialized
     */
    public void init(FET featureExtractor, Converter<OT,String> objectToNameMapper, SimilarityMeasure metric, ImmutableMap<String,String> indexerProps) throws RuntimeException;

    /**
     * @return true if a call to init was successful
     */
    public boolean isInitialized();

    /**
     * Asynchronously adds objects to an index.
     *
     * Note that the two callbacks (Consumers) may very well be invoked in
     * another thread. If you intend to toggle UI events from within the
     * callback, check the docs of your UI toolkit for how to do that from
     * another thread.
     *
     * Depending on the implementation, the index may throw an
     * IllegalStateException if you try to invoke another operation on the
     * index while the add operation is still running in the background.
     * It is safe, however, to start another operation once the
     * onSuccessCallback has been invoked. The computed feature vectors can be
     * accessed from within the onSuccessCallback via
     * IndexedObjectContainer::getDescriptor().
     *
     * @param objects the objects that should be added to the index
     * @param featureExtractorProps properties that steer the feature extraction, check what properties are supported by the feature extractor that is used by the indexer
     * @param onSuccessCallback invoked by the index when all objects have been added. A list of indexed object containers will be passed that wraps the input objects
     * @param onErrorCallback invoked whenever something goes wrong
     * @throws RuntimeException
     */
    public void add(List<OT> objects, ImmutableMap<String,String> featureExtractorProps, Consumer<List<IndexedObjectContainer<OT>>> onSuccessCallback, Consumer<Throwable> onErrorCallback) throws RuntimeException;

    /**
     * @return the feature extractor used by this index
     */
    public FeatureExtractor<OT> getFeatureExtractor();

    /**
     * @return the similarity measure currently used by this index
     */
    public SimilarityMeasure getSimilarityMeasure();

    /**
     * @return the configuration properties this index was initialized with
     */
    public ImmutableMap<String,String> getIndexerProperties();

    /**
     * Can be used to change the Similarity Measure; depending on the internal
     * implementation, the index may need to rebuild itself after that.
     *
     * @param sm the new similarity measure
     * @param onSuccessCallback invoked (possibly on another thread) once the change — including any rebuild — has completed
     * @param onErrorCallback invoked whenever something goes wrong
     */
    public void setSimilarityMeasure(SimilarityMeasure sm,Consumer<Index> onSuccessCallback,Consumer<Throwable> onErrorCallback) throws RuntimeException;

    /**
     * @return the number of objects that have been added to the index
     */
    int getNumberOfIndexedObjects();

    /**
     * Starts a search for the nearest neighbors of the query on the index.
     * The number of results can be limited by k and r simultaneously.
     * If k is set to Integer.MAX_VALUE and r is set to Double.MAX_VALUE, all
     * objects in the database will be returned, ordered ascending by distance
     * to the query.
     *
     * The query object itself will not be added to the index; however, it will
     * be returned as first result if it has already been added before.
     *
     * @param query a container that already carries the query's feature vector
     * @param k maximum number of results
     * @param r maximum distance of the results
     * @return a handle to the running search
     * @throws RuntimeException
     */
    public IndexSearch<OT> findNeighbors(IndexedObjectContainer<OT> query, int k, double r) throws RuntimeException;

    /**
     * Starts a search for the nearest neighbors of the query on the index.
     * The number of results can be limited by k and r simultaneously. If k is
     * set to Integer.MAX_VALUE and r is set to Double.MAX_VALUE, all objects
     * in the database will be returned, ordered ascending by distance to the
     * query.
     *
     * As the queryObject that is passed in does not provide features, features
     * will be extracted on the fly. The features can be accessed via
     * IndexSearch::getQuery().getDescriptor(). The query object itself will
     * not be added to the index; however, it will be returned as first result
     * if it has already been added before.
     *
     * @param queryObject the raw query object (features are extracted on the fly)
     * @param featureExtractorProps properties that steer the on-the-fly feature extraction
     * @param k maximum number of results
     * @param r maximum distance of the results
     * @return a handle to the running search
     * @throws RuntimeException
     */
    public IndexSearch<OT> findNeighbors(OT queryObject, ImmutableMap<String,String> featureExtractorProps, int k, double r) throws RuntimeException;

    /**
     * Checks whether an object has been added to the index.
     * Note: the behavior may rely on the provided objectToNameMapper (passed to Index#init()).
     *
     * @param object the object to look up
     * @return true if the passed object was previously added to this index
     * @throws RuntimeException if this method is used before Index::init() was invoked or if the objectToNameMapper#convert throws an exception
     */
    public boolean contains(OT object) throws RuntimeException;

    /**
     * Checks whether an object was previously added to the index that bore the
     * same name as uniqueObjectName.
     * Note: the behavior may rely on the provided objectToNameMapper (passed to Index#init()).
     *
     * @param uniqueObjectName the unique name to look up
     * @return true if the passed name matches the name of an object that was previously added to this index
     * @throws RuntimeException if this method is used before Index::init() was invoked
     */
    public boolean containsName(String uniqueObjectName) throws RuntimeException;
}
luk4z7/pagarme-go
examples/search/search.go
// Copyright 2016 The <NAME> Author. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package main import ( "encoding/json" "github.com/luk4z7/pagarme-go/auth" "github.com/luk4z7/pagarme-go/lib/search" "net/url" "os" ) var searchRecord search.Search func main() { get, err, errorsApi := searchRecord.Get(url.Values{}, auth.Headers{ "type": "transaction", "query": `{ "query": { "filtered": { "query": {"match_all": {}}, "filter": { "and": [ { "range": { "date_created": { "lte": "2016-01-31" } } }, { "or": [ {"term": {"status": "waiting_payment"}}, {"term": {"status": "paid"}} ] } ] } } }, "sort": [ { "date_created": {"order": "desc"} } ], "size": 5, "from": 0 }`, }) if err != nil { response, _ := json.MarshalIndent(errorsApi, "", " ") os.Stdout.Write(response) } else { responseGet, _ := json.MarshalIndent(get, "", " ") os.Stdout.Write(responseGet) } }
Tall-Programacion-FIME/backend
app/app/tests/crud/test_book.py
import random

from app.crud.crud_book import create_book
from app.schemas.book import BookCreate
from app.tests.utils.user import create_random_user
from faker import Faker
from sqlalchemy.orm import Session


def test_create_book(db: Session, fake: Faker) -> None:
    """Creating a book persists every supplied field for the owning user."""
    expected = {
        "name": " ".join(fake.words(3)),
        "author": fake.name(),
        "cover_url": fake.image_url(),
        "price": random.randint(0, 500),
    }
    owner = create_random_user(db=db, fake=fake)

    created = create_book(db=db, book=BookCreate(**expected), user_id=owner.id)

    for field, value in expected.items():
        assert getattr(created, field) == value
dr-js/dr-server
source/server/feature/StatReport/setup.js
import { responderSendJSON } from '@dr-js/core/module/node/server/Responder/Send'

import { createGetStatReport } from 'source/module/Stat/StatReport'

// TODO: move to ActionJSON?
// Registers a GET route that serves a process stat report (auth protected)
// and exposes a helper to POST the same report to a remote collector.
const setup = async ({
  name = 'feature:stat-report',
  loggerExot,
  routePrefix = '',
  featureAuth: {
    authPack: { authFetch },
    createResponderCheckAuth
  },
  statReportProcessTag,
  URL_STAT_REPORT = `${routePrefix}/stat-report`
}) => {
  const getStatReport = createGetStatReport(statReportProcessTag)

  // Auth-guarded responder returning the current stat report as JSON.
  const responderStatReport = createResponderCheckAuth({
    responderNext: (store) => responderSendJSON(store, { object: getStatReport() })
  })

  const routeList = [
    [ URL_STAT_REPORT, 'GET', responderStatReport ]
  ]

  // TODO: move out
  const reportStat = async (url) => {
    const { status } = await authFetch(url, { method: 'POST', body: JSON.stringify(getStatReport()) })
    __DEV__ && console.log('reported status:', status)
  }

  return { reportStat, URL_STAT_REPORT, routeList, name }
}

export { setup }
icza/sc2gears
src/hu/belicza/andras/sc2gears/ui/components/PlayerPopupMenu.java
/* * Project Sc2gears * * Copyright (c) 2010 <NAME> <<EMAIL>> * * This software is the property of <NAME>. * Copying, modifying, distributing, refactoring without the authors permission * is prohibited and protected by Law. */ package hu.belicza.andras.sc2gears.ui.components; import hu.belicza.andras.sc2gears.language.Language; import hu.belicza.andras.sc2gears.sc2replay.model.Details.PlayerId; import hu.belicza.andras.sc2gears.settings.Settings; import hu.belicza.andras.sc2gears.ui.dialogs.PlayerProfileDialog; import hu.belicza.andras.sc2gears.ui.icons.Icons; import hu.belicza.andras.sc2gears.util.GeneralUtils; import hu.belicza.andras.sc2gears.util.ObjectRegistry; import hu.belicza.andras.sc2gearspluginapi.api.listener.PlayerPopupMenuItemListener; import hu.belicza.andras.sc2gearspluginapi.api.listener.PlayerPopupMenuItemListener.PlayerInfo; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.IPlayerId; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.ReplayConsts.BnetLanguage; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.ReplayConsts.Gateway; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.ReplayConsts.PlayerType; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.swing.Icon; import javax.swing.JMenu; import javax.swing.JMenuItem; import javax.swing.JPopupMenu; /** * Player popup menu. * * @author <NAME> */ @SuppressWarnings("serial") public class PlayerPopupMenu extends JPopupMenu implements ActionListener { /** * Custom player menu item specification. * @author <NAME> */ private static class CustomPlayerMenuItemSpec extends CustomMenuItemSpec { /** Listener to be called when action is performed. */ public final PlayerPopupMenuItemListener listener; /** * Creates a new CustomPlayerMenuItemSpec. 
* @param text text of the custom menu item spec * @param icon optional icon of the custom menu item spec * @param listener listener to be called when action is performed */ public CustomPlayerMenuItemSpec( final String text, final Icon icon, final PlayerPopupMenuItemListener listener ) { super( text, icon ); this.listener = listener; } } /** * A {@link PlayerInfo} implementation. * @author <NAME> */ private static class PlayerInfoImpl implements PlayerInfo { private IPlayerId playerId; private final PlayerType playerType; /** * Creates a new PlayerInfoImpl. * @param playerId reference to the player id * @param playerType type of player */ public PlayerInfoImpl( final IPlayerId playerId, final PlayerType playerType ) { this.playerId = playerId; this.playerType = playerType; } @Override public IPlayerId getPlayerId() { return playerId; } @Override public PlayerType getPlayerType() { return playerType; } } /** Custom player menu item specifications registry. */ private static final ObjectRegistry< CustomPlayerMenuItemSpec > customMenuItemRegistry = new ObjectRegistry< CustomPlayerMenuItemSpec >(); /** * Adds a new player popup menu item. * @param text text of the new menu item * @param icon optional icon of the new menu item * @param listener listener to be called when the menu item is activated * @return a handler that can be used to remove the registered menu item */ public static Integer addPlayerPopupMenuItem( final String text, final Icon icon, final PlayerPopupMenuItemListener listener ) { final CustomPlayerMenuItemSpec customPlayerMenuItemSpec = new CustomPlayerMenuItemSpec( text, icon, listener ); customMenuItemRegistry.add( customPlayerMenuItemSpec ); return customPlayerMenuItemSpec.handler; } /** * Removes a player popup menu item specified by its handler. 
* @param handler handler of the popup menu item to be removed */ public static void removePlayerPopupMenuItem( final Integer handler ) { synchronized ( customMenuItemRegistry ) { for ( final CustomPlayerMenuItemSpec customPlayerMenuItemSpec : customMenuItemRegistry ) if ( customPlayerMenuItemSpec.handler.equals( handler ) ) { customMenuItemRegistry.remove( customPlayerMenuItemSpec ); break; } } } /** Identifier of the player. */ private final PlayerId playerId; /** Type of the player. */ private final PlayerType playerType; /** Show profile info in pop-up item. */ private final JMenuItem showProfileInfoInPopupMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.showProfileInfoInPopup" ), Icons.PROFILE ); /** View Sc2ranks.com profile menu item. */ private final JMenuItem viewSc2ranksProfileMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.viewSc2ranksProfile" ), Icons.SC2RANKS ); /** View character profile menu item. */ private final JMenuItem viewCharacterProfileMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.viewCharacterProfile" ), Icons.PROFILE ); /** Add to the favored player list menu item. */ private final JMenuItem addToFavoredPlayerListMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.addToFavoredPlayerList" ), Icons.USER_PLUS ); /** Remove from the favored player list menu item. */ private final JMenuItem removeFromFavoredPlayerListMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.removeFromFavoredPlayerList" ), Icons.USER_MINUS ); /** Copy the full name to the clipboard menu item. */ private final JMenuItem copyFullNameMenuItem; /** * Creates a new PlayerPopupMenu. 
* * @param playerId identifier of the player * @param playerType player type */ public PlayerPopupMenu( final PlayerId playerId, final PlayerType playerType ) { this.playerId = playerId; this.playerType = playerType; // GUI: final boolean noProfile = playerId.gateway == Gateway.UNKNOWN || playerId.gateway == Gateway.PUBLIC_TEST || playerType != PlayerType.HUMAN || playerId.battleNetId == 0; if ( noProfile ) showProfileInfoInPopupMenuItem.setEnabled( false ); showProfileInfoInPopupMenuItem.addActionListener( this ); add( showProfileInfoInPopupMenuItem ); addSeparator(); if ( noProfile ) viewSc2ranksProfileMenuItem.setEnabled( false ); viewSc2ranksProfileMenuItem.addActionListener( this ); add( viewSc2ranksProfileMenuItem ); if ( noProfile ) viewCharacterProfileMenuItem.setEnabled( false ); viewCharacterProfileMenuItem.addActionListener( this ); add( viewCharacterProfileMenuItem ); final JMenu viewCharacterProfileInLanguageMenu = new JMenu( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.viewCharacterProfileInLanguage" ) ); viewCharacterProfileInLanguageMenu.setIcon( Icons.PROFILE ); if ( noProfile ) viewCharacterProfileInLanguageMenu.setEnabled( false ); else { final List< BnetLanguage > availableBnetLanguageList = new ArrayList< BnetLanguage >( playerId.gateway.availableLanguageSet ); Collections.sort( availableBnetLanguageList ); for ( final BnetLanguage bnetLanguage : availableBnetLanguageList ) { final JMenuItem viewCharacterProfileInLanguageMenuItem = new JMenuItem( bnetLanguage.stringValue, Icons.getLanguageIcon( Language.getDefaultText( bnetLanguage.textKey ) ) ); viewCharacterProfileInLanguageMenuItem.addActionListener( new ActionListener() { @Override public void actionPerformed( final ActionEvent event ) { GeneralUtils.showURLInBrowser( playerId.getBattleNetProfileUrl( bnetLanguage ) ); } } ); viewCharacterProfileInLanguageMenu.add( viewCharacterProfileInLanguageMenuItem ); } } add( viewCharacterProfileInLanguageMenu ); addSeparator(); final 
List< String > favoredPlayerList = GeneralUtils.getFavoredPlayerList(); final boolean playerIsOnTheFavoredList = favoredPlayerList.contains( playerId.name ); addToFavoredPlayerListMenuItem.setEnabled( !playerIsOnTheFavoredList ); addToFavoredPlayerListMenuItem.addActionListener( this ); add( addToFavoredPlayerListMenuItem ); final JMenu insertToTheFavoredListBeforePlayerMenu = new JMenu( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.insertToFavoredPlayerList" ) ); insertToTheFavoredListBeforePlayerMenu.setIcon( Icons.USER_PLUS ); insertToTheFavoredListBeforePlayerMenu.setEnabled( !playerIsOnTheFavoredList ); if ( !playerIsOnTheFavoredList ) { for ( final String favoredPlayerName : favoredPlayerList ) { final JMenuItem playerMenuItem = new JMenuItem( favoredPlayerName ); playerMenuItem.addActionListener( new ActionListener() { @Override public void actionPerformed( final ActionEvent event ) { final List< String > favoredPlayerList_ = GeneralUtils.getFavoredPlayerList(); final int insertIndex = favoredPlayerList_.indexOf( favoredPlayerName ); if ( insertIndex >= 0 ) { favoredPlayerList.add( insertIndex, playerId.name ); Settings.set( Settings.KEY_SETTINGS_MISC_FAVORED_PLAYER_LIST, getFavoredPlayerListString( favoredPlayerList ) ); } } } ); insertToTheFavoredListBeforePlayerMenu.add( playerMenuItem ); } } add( insertToTheFavoredListBeforePlayerMenu ); removeFromFavoredPlayerListMenuItem.setEnabled( playerIsOnTheFavoredList ); removeFromFavoredPlayerListMenuItem.addActionListener( this ); add( removeFromFavoredPlayerListMenuItem ); addSeparator(); copyFullNameMenuItem = new JMenuItem( Language.getText( "module.repAnalyzer.tab.charts.playerMenu.copyFullName", playerId.getFullName() ), Icons.CLIPBOARD_SIGN ); copyFullNameMenuItem.addActionListener( this ); add( copyFullNameMenuItem ); synchronized ( customMenuItemRegistry ) { if ( !customMenuItemRegistry.isEmpty() ) { addSeparator(); for ( final CustomPlayerMenuItemSpec customPlayerMenuItemSpec : 
customMenuItemRegistry ) { final JMenuItem customMenuItem = new JMenuItem( customPlayerMenuItemSpec.text, customPlayerMenuItemSpec.icon ); customMenuItem.addActionListener( new ActionListener() { @Override public void actionPerformed( final ActionEvent event ) { customPlayerMenuItemSpec.listener.actionPerformed( new PlayerInfoImpl( playerId, playerType ), customPlayerMenuItemSpec.handler ); } } ); add( customMenuItem ); } } } } @Override public void actionPerformed( final ActionEvent event ) { if ( event.getSource() == showProfileInfoInPopupMenuItem ) { new PlayerProfileDialog( playerId, playerType ); } else if ( event.getSource() == viewCharacterProfileMenuItem ) { GeneralUtils.showURLInBrowser( playerId.getBattleNetProfileUrl( BnetLanguage.values()[ Settings.getInt( Settings.KEY_SETTINGS_MISC_PREFERRED_BNET_LANGUAGE ) ] ) ); } else if ( event.getSource() == viewSc2ranksProfileMenuItem ) { GeneralUtils.showURLInBrowser( playerId.getSc2ranksProfileUrl() ); } else if ( event.getSource() == addToFavoredPlayerListMenuItem ) { final List< String > favoredPlayerList = GeneralUtils.getFavoredPlayerList(); favoredPlayerList.add( playerId.name ); Settings.set( Settings.KEY_SETTINGS_MISC_FAVORED_PLAYER_LIST, getFavoredPlayerListString( favoredPlayerList ) ); } else if ( event.getSource() == removeFromFavoredPlayerListMenuItem ) { final List< String > favoredPlayerList = GeneralUtils.getFavoredPlayerList(); favoredPlayerList.remove( playerId.name ); Settings.set( Settings.KEY_SETTINGS_MISC_FAVORED_PLAYER_LIST, getFavoredPlayerListString( favoredPlayerList ) ); } else if ( event.getSource() == copyFullNameMenuItem ) { GeneralUtils.copyToClipboard( playerId.getFullName() ); } } /** * Converts and returns the favored player list as a string which will be a comma separated list of the favored players. * @param favoredPlayerList favored player list to be converted * @return the favored player list as a string which will be a comma separated list of the favored players. 
*/ private static String getFavoredPlayerListString( List< String > favoredPlayerList ) { final StringBuilder builder = new StringBuilder(); for ( final String playerName : favoredPlayerList ) { if ( builder.length() > 0 ) builder.append( ", " ); builder.append( playerName ); } return builder.toString(); } }
anttisirkiafuturice/angular2todo
node_modules/angular2/es6/prod/src/web-workers/worker/broker.js
// NOTE: generated build artifact (transpiled TypeScript) vendored under
// node_modules/angular2 — comments added for readability only; do not
// hand-edit the logic, it will be overwritten on the next package update.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc);
    switch (arguments.length) {
        case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target);
        case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0);
        case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc);
    }
};
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
import { print, isPresent, DateWrapper, stringify } from "../../facade/lang";
import { PromiseWrapper } from "angular2/src/facade/async";
import { ListWrapper, StringMapWrapper } from "../../facade/collection";
import { Serializer } from "angular2/src/web-workers/shared/serializer";
import { Injectable } from "angular2/di";
// Routes method calls from a web worker to the UI thread over a message bus
// and resolves the returned promises when the matching response arrives.
export let MessageBroker = class {
    constructor(_messageBus, _serializer) {
        this._messageBus = _messageBus;
        this._serializer = _serializer;
        // Map of outstanding message id -> promise resolve function.
        this._pending = new Map();
        this._messageBus.source.addListener((data) => this._handleMessage(data['data']));
    }
    // Builds an id unique among currently pending messages from the given
    // name plus a timestamp and a collision counter.
    // NOTE(review): the uniqueness probe uses bracket access
    // (this._pending[id]) although _pending is a Map — presumably this
    // should be this._pending.has(id); confirm upstream before relying on it.
    _generateMessageId(name) {
        var time = stringify(DateWrapper.toMillis(DateWrapper.now()));
        var iteration = 0;
        var id = name + time + stringify(iteration);
        while (isPresent(this._pending[id])) {
            id = `${name}${time}${iteration}`;
            iteration++;
        }
        return id;
    }
    // Serializes the call described by `args`, posts it to the UI thread and,
    // when `returnType` is given, returns a promise for the deserialized reply.
    runOnUiThread(args, returnType) {
        var fnArgs = [];
        if (isPresent(args.args)) {
            ListWrapper.forEach(args.args, (argument) => {
                if (argument.type != null) {
                    fnArgs.push(this._serializer.serialize(argument.value, argument.type));
                }
                else {
                    fnArgs.push(argument.value);
                }
            });
        }
        var promise;
        var id = null;
        if (returnType != null) {
            // A reply is expected: register a completer keyed by a fresh id.
            var completer = PromiseWrapper.completer();
            id = this._generateMessageId(args.type + args.method);
            this._pending.set(id, completer.resolve);
            PromiseWrapper.catchError(completer.promise, (err, stack) => {
                print(err);
                completer.reject(err, stack);
            });
            promise = PromiseWrapper.then(completer.promise, (data) => {
                if (this._serializer == null) {
                    return data.value;
                }
                else {
                    return this._serializer.deserialize(data.value, returnType);
                }
            });
        }
        else {
            // Fire-and-forget call: no id, no promise.
            promise = null;
        }
        // TODO(jteplitz602): Create a class for these messages so we don't keep using StringMap
        var message = { 'type': args.type, 'method': args.method, 'args': fnArgs };
        if (id != null) {
            message['id'] = id;
        }
        this._messageBus.sink.send(message);
        return promise;
    }
    // Resolves the pending promise (if any) matching the reply's id.
    _handleMessage(message) {
        var data = new MessageData(message);
        // TODO(jteplitz602): replace these strings with messaging constants
        var id = data.value.id;
        if (this._pending.has(id)) {
            this._pending.get(id)(data.value);
            this._pending.delete(id);
        }
    }
};
MessageBroker = __decorate([
    Injectable(),
    __metadata('design:paramtypes', [Object, Serializer])
], MessageBroker);
// Wrapper around a raw message map: extracts 'type' and the optional 'value'.
class MessageData {
    constructor(data) {
        this.type = StringMapWrapper.get(data, "type");
        if (StringMapWrapper.contains(data, "value")) {
            this.value = new MessageResult(StringMapWrapper.get(data, "value"));
        }
        else {
            this.value = null;
        }
    }
}
// The 'value' part of a reply: the id of the original call plus its result.
class MessageResult {
    constructor(result) {
        this.id = StringMapWrapper.get(result, "id");
        this.value = StringMapWrapper.get(result, "value");
    }
}
// A single argument of a brokered call; `type` (when set) selects serialization.
export class FnArg {
    constructor(value, type) {
        this.value = value;
        this.type = type;
    }
}
// Describes a call to run on the UI thread: target type, method name, arguments.
export class UiArguments {
    constructor(type, method, args) {
        this.type = type;
        this.method = method;
        this.args = args;
    }
}
//# sourceMappingURL=broker.js.map
DEFCON-MUD/defcon-28-mud-source
gurba/lib/domains/gurba/objects/hat.c
#include "../domain.h"

inherit "/std/armor";

/*
 * A huge sombrero: simple head-slot armor with no special behavior.
 * setup() configures the inherited /std/armor object.
 */
void setup(void) {
    /* Names the player can refer to the object by, and its adjectives. */
    set_id("sombrero", "hat");
    set_adj("large", "huge");
    /* Short / long descriptions shown in room contents and on "look". */
    set_short("A huge sombrero");
    set_long("Your average sombrero.");
    set_gettable(1);
    /* Worn on the head. */
    set_slot("head");
    /* Messages shown when wearing/removing ($N actor, $o object, $p possessive). */
    set_wear_message("$N $vput $o on $p head.");
    set_remove_message("$N $vtake off $o.");
    /* Economy / physical attributes. */
    set_value(20);
    set_weight(3);
    set_size(2);
}
mathewdgardner/sklearn-porter
examples/estimator/classifier/DecisionTreeClassifier/java/accuracy.py
# -*- coding: utf-8 -*-
"""Check the predictions of a ported DecisionTreeClassifier on Iris data."""

from sklearn.tree import tree
from sklearn.datasets import load_iris

from sklearn_porter import Porter

dataset = load_iris()
samples, labels = dataset.data, dataset.target

estimator = tree.DecisionTreeClassifier()
estimator.fit(samples, labels)

porter = Porter(estimator)

# A single hand-written sample.
print(porter.predict_test([1.0, 2.0, 3.0, 4.0]))  # 1.0

# The first training sample.
print(porter.predict_test(samples[0]))  # 1.0

# The whole training set.
print(porter.predict_test(samples))  # 1.0
drdla/cncjs
src/web/components_new/ActionLink.js
// @flow
/* eslint-disable no-undef */

/*
 * Renders an icon that triggers an action on click, such as edit or delete.
 * Shows the name of the action in a custom tooltip on hover.
 *
 * Usage:
 * <ActionLink action="edit" onClick={() => { alert('clicked!'); }} isDisabled={false} />
 */

import * as React from 'react';
import styled from 'styled-components';

import Icon from './Icon';

import mixin from '../styles/mixins/';

type Action = 'add' | 'cancel' | 'delete' | 'download' | 'duplicate' | 'edit' | 'run' | string;

// Display text per action.
// NOTE(review): 'duplicate' is listed in the Action type but has no entry
// here — such actions fall back to the raw action string; confirm intended.
const TEXTS = {
  add: 'add',
  cancel: 'cancel',
  delete: 'delete',
  download: 'download',
  edit: 'edit',
  run: 'run',
};

export type Props = {
  action: Action,
  label: string,
  isDisabled: boolean,
  onClick: Function,
  renderWithLabel: boolean,
};

// Maps an action to its icon name and display text. The default is to use
// the action string itself as the icon name.
// NOTE(review): the 'dunno' branch (icon 'none') looks like a placeholder
// left over from development — no caller in this file passes it.
function getOptions(action: string): {icon: string, text: string} {
  const defaultOptions = {
    icon: action,
    text: TEXTS[action] || action,
  };

  const options = {
    dunno: {
      icon: 'none',
      text: TEXTS[action],
    },
  };

  return options[action] || defaultOptions;
}

// Thin wrapper so the icon can be swapped in one place.
const ActionLinkIcon = ({type}: {type: string}) => <Icon name={type} />;

const StyledActionLink = styled.span`
  ${mixin.link};

  display: inline-block;
  font-weight: ${({theme}) => theme.font.weight.bold};
  padding: ${({theme}) => theme.size.tiny} ${({theme}) => theme.size.small};
  user-select: none;
  vertical-align: middle;
`;

const LinkText = styled.span`
  padding-left: ${({theme}) => theme.size.small};
`;

export default class ActionLink extends React.Component<Props> {
  // NOTE(review): className/style/title/tooltipPosition are defaulted here
  // but not declared in Props and not used below — presumably consumed by a
  // wrapper; confirm before removing.
  static defaultProps = {
    className: '',
    isDisabled: false,
    label: '',
    renderWithLabel: false,
    style: {},
    title: '',
    tooltipPosition: 'above',
  };

  // Swallows the click when disabled; otherwise stops propagation and
  // invokes the configured handler.
  onClick = (e: SyntheticEvent<HTMLSpanElement>) => {
    const {isDisabled, onClick} = this.props;
    const f = onClick || function() {};

    if (isDisabled) {
      // "return false;" did not work for reasons that are beyond me, so let's just do it the verbose way
      e.preventDefault();
      e.stopPropagation();
      return;
    }

    // prevent the click to bubble up
    e.stopPropagation();
    f();
  };

  render() {
    const {action, label, renderWithLabel} = this.props;
    const options = getOptions(action);

    return (
      <StyledActionLink onClick={this.onClick}>
        {options.icon ? <ActionLinkIcon type={options.icon} /> : null}
        {!options.icon || renderWithLabel ? <LinkText>{label || options.text}</LinkText> : null}
      </StyledActionLink>
    );
  }
}
SocraticGrid/Services-Backup
VACDS-Order-Service-Model/src/main/java/org/socraticgrid/hl7/services/orders/model/requirements/CollectionRequirement.java
package org.socraticgrid.hl7.services.orders.model.requirements; import java.io.Serializable; /** * @author <NAME> * @version 1.0 * @created 16-Jan-2014 9:12:40 AM */ public class CollectionRequirement extends Requirement implements Serializable { /** * */ private static final long serialVersionUID = 1L; public CollectionRequirement() { super(); type= RequirementType.Collection; } @Override public void setType(RequirementType newVal){ // } }
calee0219/kali_Linux_Document
C/C_HW/HW10/HW10_0416037_1#B.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* graph[p][c] != 0 means vertex p must precede vertex c (vertices are 1-based). */
int graph[101][101];
/* save[k] holds the vertex placed at position k of the ordering being built. */
int save[100];
/* check[v] != 0 while v is already placed in the current partial ordering. */
int check[101];

void list(int num, int lenth);

int main(void)
{
    int n, m;

    scanf("%d%d", &n, &m);
    while (n) {
        /*
         * BUGFIX: the original memset cleared only 101*sizeof(int) bytes,
         * i.e. just the first row of the 101x101 matrix, so edges leaked
         * between test cases. Clear the whole arrays.
         */
        memset(graph, 0, sizeof(graph));
        memset(check, 0, sizeof(check));

        for (int i = 0; i < m; ++i) {
            int p, c;
            scanf("%d%d", &p, &c);
            graph[p][c] = 1;
        }

        list(n, 0);
        printf("\n");

        scanf("%d%d", &n, &m);
    }
    return 0;
}

/*
 * Prints every topological ordering of vertices 1..num, one per line.
 * The first `lenth` positions of save[] are already fixed.
 *
 * BUGFIXES vs. the original:
 *  - removed leftover debug output (printf("hi") and the per-choice
 *    "%d %d\t" trace) that corrupted the listing;
 *  - the recursion now backtracks properly: removed edges are restored
 *    and check[i] is reset, so ALL orderings are enumerated instead of
 *    only the first few;
 *  - the edge-removal loop now covers columns 1..num (the original
 *    iterated 0..num-1 and missed column num).
 */
void list(int num, int lenth)
{
    if (lenth == num) {
        for (int i = 0; i < num; ++i)
            printf("%d ", save[i]);
        printf("\n");
        return;
    }

    for (int i = 1; i <= num; ++i) {
        if (check[i])
            continue;

        /* i is eligible only if no remaining vertex must precede it. */
        int blocked = 0;
        for (int j = 1; j <= num; ++j) {
            if (graph[j][i]) {
                blocked = 1;
                break;
            }
        }
        if (blocked)
            continue;

        check[i] = 1;
        save[lenth] = i;

        /* Temporarily drop i's outgoing edges, remembering them for backtracking. */
        int removed[101];
        for (int j = 1; j <= num; ++j) {
            removed[j] = graph[i][j];
            graph[i][j] = 0;
        }

        list(num, lenth + 1);

        /* Backtrack: restore the edges and free the vertex. */
        for (int j = 1; j <= num; ++j)
            graph[i][j] = removed[j];
        check[i] = 0;
    }
}
AlessandraFrancez/tcc-node
src/commons/utils/scheduler.utils.js
'use strict';

/**
 * Utilities run by the background scheduler: harvesting untranslated words
 * from voted tweets, refreshing global stats, and summarizing tone analysis.
 */
class SchedulerUtils {
  constructor() {
    this.Tweets = require('../../models/tweets.model');
    this.Dictionary = require('../../models/dictionaries.model');
    this.logger = require('../logger/logger');
    this.moment = require('moment-timezone');
    this.Ips = require('../../models/ips.model');
  }

  /**
   * Scans unchecked tweets that received at least one selection vote and adds
   * every voted word that is not yet in the dictionary as 'untranslated'.
   * Each scanned tweet is marked checked so it is not processed again.
   */
  async getUntranslatedWords() {
    const tweets = await this.Tweets.find({
      checked: false,
      'voting.selected.0': { $exists: true, $not: { $size: 0 } }
    });
    let counter = 0;
    for (let i = 0; i < tweets.length; i++) {
      const tweet = tweets[i];
      const { selected } = tweet.voting;
      // BUGFIX: removed a leftover debug console.log(selected) here.

      // Deduplicate the words across all selection votes for this tweet.
      const list = [];
      selected.forEach(vote => {
        vote.forEach(word => {
          if (list.indexOf(word) === -1) {
            list.push(word);
          }
        });
      });

      for (let i = 0; i < list.length; i++) {
        const word = list[i];
        const knownWord = await this.Dictionary.countDocuments({ word }).lean();
        if (!knownWord) {
          await this.Dictionary.create({ word, type: 'untranslated' });
          counter += 1;
        }
      }

      tweet.checked = true;
      await tweet.save();
    }
    this.logger.info(`[GetUntranslatedJob] ${counter} words added out of ${tweets.length} analyzed.`);
  }

  /**
   * Recomputes the dashboard counters and publishes them on global.STATS.
   * NOTE(review): the 'HH:mm:SS' format uses moment's fractional-second
   * token SS — 'HH:mm:ss' was probably intended; confirm before changing
   * the displayed timestamp.
   */
  async getStats() {
    const stats = {
      fetched: await this.Tweets.countDocuments({ 'voting.fetched': { $gt: 0 } }),
      total: await this.Tweets.countDocuments({}),
      replacedWords: await this.Dictionary.countDocuments({
        $or: [
          { replacement: { $exists: true } },
          { $and: [ { ignore: { $exists: true } }, { ignore: true } ] }
        ]
      }),
      wordsTotal: await this.Dictionary.countDocuments({}),
      volunteers: await this.Ips.countDocuments({}),
      lastUpdate: this.moment.tz('America/Sao_Paulo').format('HH:mm:SS DD/MM/YYYY')
    };
    global.STATS = stats;
    this.logger.info('Global stats updated', global.STATS);
  }

  /**
   * Kicks off a tone summary for each supported tone.
   * NOTE(review): forEach with an async callback fires all getTone calls
   * without awaiting them — results are logged as they complete.
   */
  async getTones() {
    const choices = ['Anger', 'Fear', 'Joy', 'Sadness', 'Analytical', 'Confident', 'Tentative'];
    choices.forEach(async choice => this.getTone(choice));
  }

  /**
   * Counts how many analyzed tweets carry the given tone and logs the total.
   * @param {string} choice one of the tone names used by the analyzer
   */
  async getTone(choice) {
    const res = [];
    const tones = await this.Tweets.find({
      'voting.fetched': { $gt: 0 },
      'tones.0': { $exists: true }
    }, { 'tones': 1, '_id': 0 }).lean();
    for (let i = 0; i < tones.length; i++) {
      const analyzedTweet = tones[i].tones;
      analyzedTweet.forEach(item => {
        if (item.tone_name === choice) {
          res.push(item);
        }
      });
    }
    this.logger.info({ tone: choice, quantity: res.length });
  }
}

module.exports = new SchedulerUtils();
nikos912000/rhapsody
samples/src/main/java/com/expediagroup/rhapsody/samples/parallelism/KafkaArbitraryParallelism.java
<reponame>nikos912000/rhapsody /** * Copyright 2019 Expedia, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.expediagroup.rhapsody.samples.parallelism; import java.time.Duration; import java.time.Instant; import java.util.Collections; import java.util.Map; import java.util.UUID; import java.util.concurrent.CountDownLatch; import java.util.function.Function; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import com.expediagroup.rhapsody.api.Acknowledgeable; import com.expediagroup.rhapsody.kafka.acknowledgement.OrderManagingReceiverAcknowledgementStrategy; import com.expediagroup.rhapsody.kafka.factory.KafkaConfigFactory; import com.expediagroup.rhapsody.kafka.factory.KafkaFluxFactory; import com.expediagroup.rhapsody.kafka.factory.KafkaValueFluxFactory; import com.expediagroup.rhapsody.kafka.factory.KafkaValueSenderFactory; import com.expediagroup.rhapsody.kafka.test.TestKafkaFactory; import reactor.core.publisher.Flux; import reactor.core.scheduler.Scheduler; import reactor.core.scheduler.Schedulers; /** * This sample shows how to process Kafka records with arbitrarily high parallelism. 
You can try * changing the number of number of samples and/or number of processing groups to see significant * changes in processing speed. Note that in-order offset acknowledgement is handled by * {@link OrderManagingReceiverAcknowledgementStrategy} such that offset commits are not executed * past any record whose offset we have not yet fully processed (acknowledged) */ public class KafkaArbitraryParallelism { private static final Map<String, ?> TEST_KAFKA_CONFIG = new TestKafkaFactory().createKafka(); private static final String TOPIC = "TOPIC"; private static final int NUM_SAMPLES = 10000; private static final int NUM_GROUPS = 16; private static final long MAX_SLEEP_MILLIS = 10; private static final Scheduler SCHEDULER = Schedulers.newElastic(KafkaArbitraryParallelism.class.getSimpleName()); public static void main(String[] args) throws Exception { //Step 1) Create Kafka Producer Config for Producer that backs Sender's Subscriber //implementation KafkaConfigFactory kafkaSubscriberConfig = new KafkaConfigFactory(); kafkaSubscriberConfig.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, TestKafkaFactory.extractConnect(TEST_KAFKA_CONFIG)); kafkaSubscriberConfig.put(CommonClientConfigs.CLIENT_ID_CONFIG, KafkaArbitraryParallelism.class.getSimpleName()); kafkaSubscriberConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); kafkaSubscriberConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); kafkaSubscriberConfig.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); kafkaSubscriberConfig.put(ProducerConfig.ACKS_CONFIG, "all"); //Step 2) Create Kafka Consumer Config for Consumer that backs Receiver's Publisher //implementation. 
Note that we block our main Thread on partition assignment such that //subsequently produced Records are processed KafkaConfigFactory kafkaPublisherConfig = new KafkaConfigFactory(); kafkaPublisherConfig.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, TestKafkaFactory.extractConnect(TEST_KAFKA_CONFIG)); kafkaPublisherConfig.put(CommonClientConfigs.CLIENT_ID_CONFIG, KafkaArbitraryParallelism.class.getSimpleName()); kafkaPublisherConfig.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaArbitraryParallelism.class.getSimpleName()); kafkaPublisherConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); kafkaPublisherConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); kafkaPublisherConfig.put(KafkaFluxFactory.BLOCK_REQUEST_ON_PARTITION_ASSIGNMENT_CONFIG, true); //Step 3) Apply stream processing to the Kafka topic we'll produce records to. The //"processing" in this case introduces a superficial blocking sleep which might mimic an //IO-bound process. 
CountDownLatch latch = new CountDownLatch(NUM_SAMPLES); new KafkaValueFluxFactory<String>(kafkaPublisherConfig) .receiveValue(Collections.singletonList(TOPIC), new OrderManagingReceiverAcknowledgementStrategy()) .groupBy(acknowledgeable -> Math.abs(UUID.fromString(acknowledgeable.get()).hashCode() % NUM_GROUPS)) .subscribe(groupFlux -> groupFlux .publishOn(SCHEDULER) .map(Acknowledgeable.mapping(String::toUpperCase)) .doOnNext(next -> { try { Double sleepMillis = Math.random() * MAX_SLEEP_MILLIS + 1; System.out.println(String.format("next=%s thread=%s sleepMillis=%d", next, Thread.currentThread().getName(), sleepMillis.longValue())); Thread.sleep(sleepMillis.longValue()); } catch (Exception e) { System.err.println("Failed to sleep"); } }) .subscribe(Acknowledgeable.consuming(string -> latch.countDown(), Acknowledgeable::acknowledge))); //Step 4) Produce random UUIDs to the topic we're processing above Flux.range(0, NUM_SAMPLES) .subscribeOn(Schedulers.elastic()) .map(i -> UUID.randomUUID()) .map(UUID::toString) .transform(uuids -> new KafkaValueSenderFactory<String>(kafkaSubscriberConfig) .sendValues(uuids, value -> TOPIC, Function.identity())) .subscribe(); //Step 5) Await processing completion of the UUIDs we produced Instant begin = Instant.now(); latch.await(); System.out.println("Processing duration=" + Duration.between(begin, Instant.now())); System.exit(0); } }
gochaorg/cofe.xyz-next
sql/data-table/src/main/java/xyz/cofe/data/table/store/XmlStreamStorage.java
/* * The MIT License * * Copyright 2017 user. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package xyz.cofe.data.table.store; import xyz.cofe.collection.graph.Path; import xyz.cofe.data.table.DataColumn; import xyz.cofe.data.table.DataRow; import xyz.cofe.data.table.DataTable; import xyz.cofe.simpletypes.SimpleTypes; import xyz.cofe.typeconv.ExtendedCastGraph; import xyz.cofe.typeconv.TypeCastGraph; import xyz.cofe.xml.FormatXMLWriter; import xyz.cofe.xml.stream.path.*; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import java.io.*; import java.net.URL; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; /** * Сохранение таблицы в XML * @author <NAME> (<EMAIL>) */ public class XmlStreamStorage { //<editor-fold defaultstate="collapsed" desc="log Функции"> private static final Logger logger = Logger.getLogger(XmlStreamStorage.class.getName()); private static final Level logLevel = logger.getLevel(); private static final boolean isLogSevere = logLevel==null ? true : logLevel.intValue() <= Level.SEVERE.intValue(); private static final boolean isLogWarning = logLevel==null ? true : logLevel.intValue() <= Level.WARNING.intValue(); private static final boolean isLogInfo = logLevel==null ? true : logLevel.intValue() <= Level.INFO.intValue(); private static final boolean isLogFine = logLevel==null ? true : logLevel.intValue() <= Level.FINE.intValue(); private static final boolean isLogFiner = logLevel==null ? true : logLevel.intValue() <= Level.FINER.intValue(); private static final boolean isLogFinest = logLevel==null ? true : logLevel.intValue() <= Level.FINEST.intValue(); private static void logFine(String message,Object ... args){ logger.log(Level.FINE, message, args); } private static void logFiner(String message,Object ... args){ logger.log(Level.FINER, message, args); } private static void logFinest(String message,Object ... 
args){ logger.log(Level.FINEST, message, args); } private static void logInfo(String message,Object ... args){ logger.log(Level.INFO, message, args); } private static void logWarning(String message,Object ... args){ logger.log(Level.WARNING, message, args); } private static void logSevere(String message,Object ... args){ logger.log(Level.SEVERE, message, args); } private static void logException(Throwable ex){ logger.log(Level.SEVERE, null, ex); } private static void logEntering(String method,Object ... params){ logger.entering(XmlStreamStorage.class.getName(), method, params); } private static void logExiting(String method){ logger.exiting(XmlStreamStorage.class.getName(), method); } private static void logExiting(String method, Object result){ logger.exiting(XmlStreamStorage.class.getName(), method, result); } //</editor-fold> private static final String TABLE_TAG = "datatable"; private static final String COLUMN_TAG = "column"; private static final String COLUMN_NAME_ATTR = "name"; private static final String ROW_TAG = "row"; //<editor-fold defaultstate="collapsed" desc="castGraph : TypeCastGraph"> protected TypeCastGraph castGraph; public TypeCastGraph getCastGraph() { synchronized(this){ if( castGraph!=null )return castGraph; castGraph = new ExtendedCastGraph(); return castGraph; } } public void setCastGraph(TypeCastGraph castGraph) { synchronized(this){ this.castGraph = castGraph; } } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="asString(val):String"> public String asString( Object val ){ if( val==null )return null; TypeCastGraph tc = getCastGraph(); return tc.cast(val, String.class); } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="asValueOf(str,cls):Object"> public Object asValueOf( String str, Class cls ){ if( cls==null )throw new IllegalArgumentException("cls == null"); if( str==null ){ if( !SimpleTypes.isSimple(cls) ) return null; if( SimpleTypes.boolObject().equals(cls) )return Boolean.FALSE; if( 
SimpleTypes.byteObject().equals(cls) )return Byte.valueOf((byte)0); if( SimpleTypes.charObject().equals(cls) )return Character.valueOf((char)(int)0); if( SimpleTypes.doubleObject().equals(cls) )return Double.valueOf(0d); if( SimpleTypes.floatObject().equals(cls) )return Float.valueOf(0f); if( SimpleTypes.intObject().equals(cls) )return Integer.valueOf(0); if( SimpleTypes.longObject().equals(cls) )return Long.valueOf(0L); if( SimpleTypes.shortObject().equals(cls) )return Short.valueOf((short)0); return null; } TypeCastGraph tc = getCastGraph(); return tc.cast(str, cls); } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="support(De)Serialize(cls):boolean"> public boolean supportDeserialize( Class cls ){ if( cls==null )throw new IllegalArgumentException("cls == null"); synchronized(this){ TypeCastGraph tc = getCastGraph(); Path path = tc.findPath(String.class, cls); return path != null; } } public boolean supportSerialize( Class cls ){ if( cls==null )throw new IllegalArgumentException("cls == null"); synchronized(this){ TypeCastGraph tc = getCastGraph(); Path path = tc.findPath(cls, String.class); return path != null; } } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="write()"> public synchronized void write( XMLStreamWriter xout, DataColumn memColumn ) throws XMLStreamException { if( xout==null )throw new IllegalArgumentException("xout==null"); if( memColumn==null )throw new IllegalArgumentException("memColumn==null"); xout.writeStartElement(COLUMN_TAG); xout.writeAttribute(COLUMN_NAME_ATTR, memColumn.getName()); xout.writeAttribute("dataType", memColumn.getDataType().getName()); xout.writeAttribute("allowNull", memColumn.isAllowNull() ? "true" : "false"); xout.writeAttribute("allowSubTypes", memColumn.isAllowSubTypes() ? 
"true" : "false"); xout.writeEndElement(); } public synchronized void write( XMLStreamWriter xout, DataRow memRow ) throws XMLStreamException { if( xout==null )throw new IllegalArgumentException("xout==null"); if( memRow==null )throw new IllegalArgumentException("memRow==null"); xout.writeStartElement(ROW_TAG); xout.writeAttribute("changed", memRow.isChanged() ? "true" : "false"); xout.writeAttribute("deleted", memRow.isDeleted() ? "true" : "false"); xout.writeAttribute("inserted", memRow.isInserted() ? "true" : "false"); xout.writeAttribute("updated", memRow.isUpdated() ? "true" : "false"); Object[] vals = memRow.getData(); Object[] orig = memRow.getOrigin(); int changeCnt = memRow.getChangeCount(); xout.writeAttribute("changeCounter", Integer.toString(changeCnt)); List<DataRow.ChangedValue> chvals = memRow.getChangedValues(); if( chvals!=null && chvals.size()>0 ){ xout.writeStartElement("changes"); for( DataRow.ChangedValue chval : chvals ){ xout.writeStartElement("changed"); xout.writeAttribute(COLUMN_TAG, Integer.toString(chval.getColumn())); Object from = chval.getFrom(); Object to = chval.getTo(); if( from!=null ){ xout.writeStartElement("from"); xout.writeAttribute("type",from.getClass().getName()); xout.writeCharacters(asString(from)); xout.writeEndElement(); } if( to!=null ){ xout.writeStartElement("to"); xout.writeAttribute("type",to.getClass().getName()); xout.writeCharacters(asString(to)); xout.writeEndElement(); } xout.writeEndElement(); } xout.writeEndElement(); } if( orig!=null ){ xout.writeStartElement("origin"); for( int i=0; i<orig.length; i++ ){ Object v = orig[i]; xout.writeStartElement("value"); if( v==null ){ xout.writeAttribute("isnull", "true"); }else{ xout.writeAttribute("isnull", "false"); xout.writeAttribute("type", v.getClass().getName()); xout.writeCharacters(asString(v)); } xout.writeEndElement(); } xout.writeEndElement(); } if( vals!=null ){ xout.writeStartElement("current"); for( int i=0; i<vals.length; i++ ){ Object v = vals[i]; 
xout.writeStartElement("value"); if( v==null ){ xout.writeAttribute("isnull", "true"); }else{ xout.writeAttribute("isnull", "false"); xout.writeAttribute("type", v.getClass().getName()); xout.writeCharacters(asString(v)); } xout.writeEndElement(); } xout.writeEndElement(); } xout.writeEndElement(); } public static final String COLUMNS_TAG = "columns"; public static final String ROWS_TAG = "rows"; public synchronized void write( XMLStreamWriter xout, DataTable dataTable ) throws XMLStreamException { if( xout==null )throw new IllegalArgumentException("xout==null"); if( dataTable==null )throw new IllegalArgumentException("dataTable==null"); synchronized(dataTable){ xout.writeStartElement(TABLE_TAG); xout.writeStartElement(COLUMNS_TAG); for( DataColumn mc : dataTable.getColumns()){ write(xout, mc); } xout.writeEndElement(); xout.writeStartElement(ROWS_TAG); for( int ri=0; ri<dataTable.getRowsCount(); ri++ ){ DataRow mrow = dataTable.getRow(ri); if( mrow==null )throw new IllegalStateException("dataTable.getRow("+ri+")==null"); if( mrow.isInserted() )continue; if( mrow.isDeleted() )continue; write(xout, mrow); } xout.writeEndElement(); for( DataRow dr : dataTable.getRowsIterableAll() ){ if( dr.isInserted() ){ xout.writeStartElement("inserted"); write(xout, dr); xout.writeEndElement(); }else if( dr.isDeleted() ){ xout.writeStartElement("deleted"); write(xout, dr); xout.writeEndElement(); } } xout.writeEndElement(); } } public synchronized void write( Writer xout, DataTable mtable ){ FormatXMLWriter fxml; try { fxml = new FormatXMLWriter(xout); fxml.setWriteOutline(true); XmlStreamStorage mtStorage = new XmlStreamStorage(); mtStorage.write(fxml, mtable); fxml.flush(); } catch (XMLStreamException ex) { //Logger.getLogger(MemTableTest.class.getName()).log(Level.SEVERE, null, ex); throw new IOError(ex); } } public synchronized void write( OutputStream xout, Charset cs, DataTable mtable ){ if( cs==null )cs = Charset.forName("utf-8"); FormatXMLWriter fxml; try { fxml = new 
FormatXMLWriter(xout,cs.name()); fxml.setWriteOutline(true); XmlStreamStorage mtStorage = new XmlStreamStorage(); mtStorage.write(fxml, mtable); fxml.flush(); } catch (XMLStreamException ex) { //Logger.getLogger(MemTableTest.class.getName()).log(Level.SEVERE, null, ex); throw new IOError(ex); } } public synchronized void write( OutputStream xout, DataTable mtable ){ write(xout, Charset.forName("utf-8"), mtable); } public synchronized void write( File xout, Charset cs, DataTable mtable ){ if( cs==null )cs = Charset.forName("utf-8"); FormatXMLWriter fxml; try { fxml = new FormatXMLWriter(xout,cs); fxml.setWriteOutline(true); XmlStreamStorage mtStorage = new XmlStreamStorage(); mtStorage.write(fxml, mtable); fxml.flush(); } catch (XMLStreamException ex) { //Logger.getLogger(MemTableTest.class.getName()).log(Level.SEVERE, null, ex); throw new IOError(ex); } } public synchronized void write( File xout, DataTable mtable ){ FormatXMLWriter fxml; try { fxml = new FormatXMLWriter(xout, Charset.forName("utf-8")); fxml.setWriteOutline(true); XmlStreamStorage mtStorage = new XmlStreamStorage(); mtStorage.write(fxml, mtable); fxml.flush(); } catch (XMLStreamException ex) { //Logger.getLogger(MemTableTest.class.getName()).log(Level.SEVERE, null, ex); throw new IOError(ex); } } //</editor-fold> public class XmlTableVisitor extends XVisitorAdapter { public final TableBuilder tbuilder; public XmlTableVisitor(TableBuilder tbuilder){ this.tbuilder = tbuilder; } @PathMatch(enter = TABLE_TAG) public void begin( XEventPath path ){ tbuilder.begin(); } @PathMatch(exit = TABLE_TAG) public void end( XEventPath path ){ tbuilder.end(); } public ClassLoader classLoader(){ ClassLoader cl = tbuilder.getClassLoader(); if( cl==null )cl = Thread.currentThread().getContextClassLoader(); if( cl==null )cl = XmlStreamStorage.class.getClassLoader(); return cl; } //<editor-fold defaultstate="collapsed" desc="columns"> @PathMatch(enter = COLUMNS_TAG) public void beginColumn(XEventPath path){ 
tbuilder.beginColumns(); } @PathMatch(enter = COLUMN_TAG) public void column(XEventPath path){ String name = path.readAttributeAsString(COLUMN_NAME_ATTR, "?noAttrib_name"); String type = path.readAttributeAsString("dataType", "?noAttrib_dataType"); ClassLoader cl = classLoader(); Class dataType = null; try { dataType = Class.forName(type, true, cl); } catch (ClassNotFoundException ex) { Logger.getLogger(XmlStreamStorage.class.getName()).log(Level.SEVERE, null, ex); dataType = String.class; } DataColumn mc = new DataColumn(name, dataType); switch( path.readAttributeAsString("allowNull", "?") ){ case "true": mc = mc.allowNull(true); break; case "false": mc = mc.allowNull(false); break; } switch( path.readAttributeAsString("allowSubTypes", "?") ){ case "true": mc = mc.allowSubTypes(true); break; case "false": mc = mc.allowSubTypes(false); break; } tbuilder.addColumn(mc); } @PathMatch(exit = COLUMNS_TAG) public void endColumn(XEventPath path){ tbuilder.endColumns(); } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="rows"> public final ArrayList origin = new ArrayList(); public final ArrayList current = new ArrayList(); public boolean rowChanged = false; public boolean rowDeleted = false; public boolean rowInserted = false; public boolean rowUpdated = false; public int rowChangeCounter = 0; @PathMatch(enter = ROW_TAG) public void beginRow( XEventPath path ){ origin.clear(); current.clear(); rowChanged = false; rowDeleted = false; rowInserted = false; rowUpdated = false; rowChangeCounter = 0; rowChanged = path.readAttributeAsBoolean("changed", false); rowDeleted = path.readAttributeAsBoolean("deleted", false); rowInserted = path.readAttributeAsBoolean("inserted", false); rowUpdated = path.readAttributeAsBoolean("updated", false); rowChangeCounter = path.readAttributeAsInteger("changeCounter", 0); } @PathMatch(enter = "row/origin/value") public void rowOriginNullValue( XEventPath path ){ if( path.readAttributeAsBoolean("isnull", false) ){ origin.add(null); 
} } @PathMatch(content = "row/origin/value") public void rowOriginValue( XEventPath path, String content ){ if( path.readAttributeAsBoolean("isnull", false) ){ return; } String type = path.readAttributeAsString("type", "?noAttrib_type"); try { Class cls = Class.forName(type, true, classLoader()); Object v = asValueOf(content, cls); origin.add(v); } catch (ClassNotFoundException ex) { Logger.getLogger(XmlStreamStorage.class.getName()).log(Level.SEVERE, null, ex); origin.add(content); } } @PathMatch(enter = "row/current/value") public void rowCurrentNullValue( XEventPath path ){ if( path.readAttributeAsBoolean("isnull", false) ){ current.add(null); } } @PathMatch(content = "row/current/value") public void rowCurrentValue( XEventPath path, String content ){ if( path.readAttributeAsBoolean("isnull", false) ){ return; } String type = path.readAttributeAsString("type", "?noAttrib_type"); try { Class cls = Class.forName(type, true, classLoader()); Object v = asValueOf(content, cls); current.add(v); } catch (ClassNotFoundException ex) { Logger.getLogger(XmlStreamStorage.class.getName()).log(Level.SEVERE, null, ex); current.add(content); } } @PathMatch(exit = ROW_TAG) public void endRow( XEventPath path ){ DataTable mt = tbuilder.getDataTable(); if( mt==null ){ Logger.getLogger(XmlStreamStorage.class.getName()).log(Level.SEVERE, "table builder return null table"); return; } DataRow mrow = new DataRow(mt, current.toArray(), origin.toArray(), rowChangeCounter); if( rowInserted ){ tbuilder.insertedRow(mrow); }else if( rowDeleted ){ tbuilder.deletedRow(mrow); }else if( rowChanged ){ tbuilder.changedRow(mrow); }else{ tbuilder.unchangedRow(mrow); } } //</editor-fold> } public XVisitor createXVisitor( final TableBuilder tbuilder ){ if( tbuilder==null )throw new IllegalArgumentException("tbuilder == null"); return new XmlTableVisitor(tbuilder); } public synchronized void read( TableBuilder tbuilder, URL url ) { if( tbuilder==null )throw new 
IllegalArgumentException("tbuilder==null"); if( url==null )throw new IllegalArgumentException("url==null"); try { new XmlReader(url, createXVisitor(tbuilder)); } catch (IOException | XMLStreamException ex) { throw new IOError(ex); } } public synchronized void read( TableBuilder tbuilder, Reader xml ) { if( tbuilder==null )throw new IllegalArgumentException("tbuilder==null"); if( xml==null )throw new IllegalArgumentException("xml == null"); try { new XmlReader(xml, createXVisitor(tbuilder)); } catch (XMLStreamException ex) { throw new IOError(ex); } } public synchronized void read( TableBuilder tbuilder, InputStream xml, Charset cs ) { if( tbuilder==null )throw new IllegalArgumentException("tbuilder==null"); if( xml==null )throw new IllegalArgumentException("xml == null"); try { if( cs!=null ){ new XmlReader(xml, cs, createXVisitor(tbuilder)); }else{ new XmlReader(xml, createXVisitor(tbuilder)); } } catch (XMLStreamException ex) { throw new IOError(ex); } } public synchronized void read( TableBuilder tbuilder, InputStream xml ) { if( tbuilder==null )throw new IllegalArgumentException("tbuilder==null"); if( xml==null )throw new IllegalArgumentException("xml == null"); try { new XmlReader(xml, createXVisitor(tbuilder)); } catch (XMLStreamException ex) { throw new IOError(ex); } } public synchronized void read( TableBuilder tbuilder, File xml, Charset cs ) { if( tbuilder==null )throw new IllegalArgumentException("tbuilder==null"); if( xml==null )throw new IllegalArgumentException("xml == null"); try { if( cs!=null ){ new XmlReader(xml, cs, createXVisitor(tbuilder)); }else{ new XmlReader(xml, createXVisitor(tbuilder)); } } catch (IOException | XMLStreamException ex) { throw new IOError(ex); } } public synchronized void read( TableBuilder tbuilder, File xml ) { if( tbuilder==null )throw new IllegalArgumentException("tbuilder==null"); if( xml==null )throw new IllegalArgumentException("xml == null"); try { new XmlReader(xml, createXVisitor(tbuilder)); } catch (IOException 
| XMLStreamException ex) { throw new IOError(ex); } } }
dazu1990/CEDH
src/containers/index.js
<gh_stars>1-10 import About from './about'; import CommanderList from './commander-list'; import UploadList from './upload-rec'; import Welcome from './welcome'; export { About, CommanderList, UploadList, Welcome };
cpesar/my-portfolio
portfolio/node_modules/@sanity/base/lib/datastores/grants/debug/exampleRoles.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.getDebugRolesByNames = getDebugRolesByNames; exports.exampleRoles = void 0; var exampleRoles = { // basic administrator: { name: 'administrator', title: 'Administrator' }, viewer: { name: 'Viewer', title: 'Viewer' }, editor: { name: 'editor', title: 'Editor' }, developer: { name: 'developer', title: 'Developer' }, contributor: { name: 'contributor', title: 'Contributor' }, // custom restricted: { name: 'restricted', title: 'Restricted' }, // legacy read: { name: 'read', title: 'Read' }, write: { name: 'write', title: 'Write' } }; exports.exampleRoles = exampleRoles; function getDebugRolesByNames(roleNames) { return roleNames.filter(roleName => roleName in exampleRoles).map(roleName => exampleRoles[roleName]); }
priyamshah112/Project-Descripton-Blog
tech_project/lib/python2.7/site-packages/cms/tests/frontend/unit/cms.pagetree.stickyheader.test.js
<reponame>priyamshah112/Project-Descripton-Blog /* global document, window */ 'use strict'; var CMS = require('../../../static/cms/js/modules/cms.base').default; var PageTreeStickyHeader = require('../../../static/cms/js/modules/cms.pagetree.stickyheader').default; var $ = require('jquery'); window.CMS = window.CMS || CMS; CMS.PageTreeStickyHeader = PageTreeStickyHeader; CMS.$ = $; describe('CMS.PageTreeStickyHeader', function() { fixture.setBase('cms/tests/frontend/unit/fixtures'); it('creates a PageTreeStickyHeader class', function() { expect(CMS.PageTreeStickyHeader).toBeDefined(); }); var sticky; var header1; var col1; var header2; var col2; beforeEach(function(done) { fixture.load('pagetree.html'); $(function() { var container = $('.cms-pagetree'); header1 = $('<div class="jstree-grid-header"></div>'); col1 = $('<div class="jstree-grid-column" style="width: 100px"></div>'); header2 = $('<div class="jstree-grid-header"></div>'); col2 = $('<div class="jstree-grid-column" style="width: 200px"></div>'); container.append(col1); container.append(col2); col1.append(header1); col2.append(header2); spyOn(CMS.API.Helpers, '_getWindow').and.returnValue({ CMS: CMS }); sticky = new CMS.PageTreeStickyHeader({ container: container }); done(); }); }); afterEach(function() { $(window).off(sticky.resize); $(window).off(sticky.scroll); fixture.cleanup(); }); describe('instance', function() { it('has default state', function() { expect(sticky.areClonesInDOM).toEqual(false); expect(sticky.resize).toEqual('resize.cms.pagetree.header'); expect(sticky.scroll).toEqual('scroll.cms.pagetree.header'); expect(sticky.options).toEqual({ container: $('.cms-pagetree') }); }); it('has ui', function() { expect(sticky.ui).toEqual({ container: $('.cms-pagetree'), window: $(window), headers: $('.cms-pagetree').find('.jstree-grid-header'), columns: [col1, col2], clones: jasmine.any(Array) }); }); }); describe('_saveSizes()', function() { it('saves headers top offset', function() { 
expect(sticky.headersTopOffset).toEqual(jasmine.any(Number)); spyOn($.fn, 'offset').and.returnValue({ top: 'MOCK' }); sticky._saveSizes(); expect(sticky.headersTopOffset).toEqual('MOCK'); }); it('saves toolbar height if in admin', function() { spyOn(sticky, '_isInSideframe').and.returnValue(false); $('<div id="branding" style="height: 200px"></div>').prependTo(sticky.ui.container); expect(sticky.toolbarHeight).toEqual(null); sticky._saveSizes(); expect(sticky.toolbarHeight).toEqual(200); }); it('saves toolbar height if in sideframe', function() { spyOn(sticky, '_isInSideframe').and.returnValue(true); CMS.API.Helpers._getWindow.and.returnValue({ parent: { CMS: CMS } }); $('<div class="cms-toolbar" style="height: 250px"></div>').prependTo(sticky.ui.container); expect(sticky.toolbarHeight).toEqual(null); sticky._saveSizes(); expect(sticky.toolbarHeight).toEqual(250); }); }); describe('_isInSideframe()', function() { it('returns true if we are in the sideframe', function() { CMS.API.Helpers._getWindow.and.returnValue({ parent: { CMS: CMS } }); expect(sticky._isInSideframe()).toEqual(true); }); it('returns false if not', function() { expect(sticky._isInSideframe()).toEqual(false); }); }); describe('_events()', function() { it('attaches events', function() { sticky.ui.window.off('resize scroll'); spyOn(sticky, '_handleResizeOrScroll'); expect(sticky.ui.window).not.toHandle(sticky.resize); expect(sticky.ui.window).not.toHandle(sticky.scroll); sticky._events(); expect(sticky.ui.window).toHandle(sticky.resize); expect(sticky.ui.window).toHandle(sticky.scroll); sticky.ui.window.trigger(sticky.resize); expect(sticky._handleResizeOrScroll).toHaveBeenCalledTimes(1); sticky.ui.window.trigger(sticky.scroll); expect(sticky._handleResizeOrScroll).toHaveBeenCalledTimes(2); }); }); describe('_handleResizeOrScroll()', function() { beforeEach(function() { spyOn(sticky, '_stickHeader'); spyOn(sticky, '_unstickHeader'); }); it('sticks headers based on scroll position', function() { 
// Jasmine specs for the sticky-header helper of the jstree-grid plugin.
// NOTE(review): this chunk begins inside an it() spec whose opening (and its
// enclosing describe) lie before the visible region; the leading fragment
// below is reproduced unchanged.

// Tail of a spec: when _shouldStick() reports true, the resize/scroll
// handler must stick the headers exactly once and never unstick them.
spyOn(sticky, '_shouldStick').and.returnValue(true);
sticky._handleResizeOrScroll();
expect(sticky._stickHeader).toHaveBeenCalledTimes(1);
expect(sticky._stickHeader).toHaveBeenCalledWith(jasmine.any(Number), 0);
expect(sticky._unstickHeader).not.toHaveBeenCalled();
});

it('unsticks headers based on scroll position', function() {
    // Mirror case: a false _shouldStick() must route to _unstickHeader only.
    spyOn(sticky, '_shouldStick').and.returnValue(false);
    sticky._handleResizeOrScroll();
    expect(sticky._unstickHeader).toHaveBeenCalledTimes(1);
    expect(sticky._unstickHeader).toHaveBeenCalledWith();
    expect(sticky._stickHeader).not.toHaveBeenCalled();
});
});

describe('_shouldStick()', function() {
    it('returns true/false if headers should stick or not', function() {
        // With toolbarHeight=10 and headersTopOffset=100 the assertions pin
        // the stick threshold at scroll position 90: at/below it sticks,
        // above it does not.
        sticky.toolbarHeight = 10;
        sticky.headersTopOffset = 100;
        expect(sticky._shouldStick(90)).toEqual(true);
        expect(sticky._shouldStick(91)).toEqual(true);
        expect(sticky._shouldStick(9000)).toEqual(true);
        expect(sticky._shouldStick(89)).toEqual(false);
        expect(sticky._shouldStick(0)).toEqual(false);
    });
});

describe('_stickHeader()', function() {
    it('inserts clones', function() {
        spyOn(sticky, '_insertClones');
        expect(sticky._insertClones).not.toHaveBeenCalled();
        sticky._stickHeader();
        expect(sticky._insertClones).toHaveBeenCalledTimes(1);
    });

    it('updates widths/left/top for the headers', function() {
        sticky.toolbarHeight = 218;
        spyOn($.fn, 'css').and.callThrough();
        spyOn($.fn, 'offset').and.returnValue({ left: 0 });
        sticky._stickHeader(10, -10);
        // One width read plus two css() writes per header (2 headers),
        // plus one top adjustment: 2 * 2 + 1 calls in total.
        expect($.fn.css).toHaveBeenCalledTimes(2 * 2 + 1);
        expect($.fn.css).toHaveBeenCalledWith('width'); // gets
        expect($.fn.css).toHaveBeenCalledWith({ width: '100px', left: 10 });
        expect($.fn.css).toHaveBeenCalledWith({ width: '200px', left: 10 });
        expect($.fn.css).toHaveBeenCalledWith({ width: '200px', left: 10 });
        expect($.fn.css).toHaveBeenCalledWith({ top: 218 });
    });

    it('adds a class to the headers', function() {
        expect(header1).not.toHaveClass('jstree-grid-header-fixed');
        expect(header2).not.toHaveClass('jstree-grid-header-fixed');
        sticky._stickHeader(10, -10);
        expect(header1).toHaveClass('jstree-grid-header-fixed');
        expect(header2).toHaveClass('jstree-grid-header-fixed');
    });
});

describe('_unstickHeader()', function() {
    it('detaches clones', function() {
        spyOn(sticky, '_detachClones');
        sticky._unstickHeader();
        expect(sticky._detachClones).toHaveBeenCalledTimes(1);
    });

    it('resets top, left and width', function() {
        spyOn($.fn, 'css');
        sticky._unstickHeader();
        expect($.fn.css).toHaveBeenCalledTimes(1);
        expect($.fn.css).toHaveBeenCalledWith({ top: 0, width: 'auto', left: 'auto' });
    });

    it('removes a class', function() {
        // Stick first so the removal is observable.
        sticky._stickHeader(0, 0);
        expect(header1).toHaveClass('jstree-grid-header-fixed');
        expect(header2).toHaveClass('jstree-grid-header-fixed');
        sticky._unstickHeader();
        expect(header1).not.toHaveClass('jstree-grid-header-fixed');
        expect(header2).not.toHaveClass('jstree-grid-header-fixed');
    });
});

describe('_insertClones()', function() {
    it('inserts clones in DOM', function() {
        expect(col1.children().length).toEqual(1);
        expect(col2.children().length).toEqual(1);
        sticky._insertClones();
        expect(col1.children().length).toEqual(2);
        expect(col2.children().length).toEqual(2);
    });

    it('sets the flag that nodes are inserted', function() {
        expect(sticky.areClonesInDOM).toEqual(false);
        sticky._insertClones();
        expect(sticky.areClonesInDOM).toEqual(true);
    });

    it('noop if flag is already set', function() {
        // Inserting twice must not duplicate the clones.
        sticky._insertClones();
        sticky._insertClones();
        expect(col1.children().length).toEqual(2);
        expect(col2.children().length).toEqual(2);
    });
});

describe('_detachClones()', function() {
    beforeEach(function() {
        sticky._insertClones();
    });

    it('removes clones from DOM', function() {
        expect(col1.children().length).toEqual(2);
        expect(col2.children().length).toEqual(2);
        sticky._detachClones();
        expect(col1.children().length).toEqual(1);
        expect(col2.children().length).toEqual(1);
    });

    it('sets the flag that nodes are not inserted', function() {
        expect(sticky.areClonesInDOM).toEqual(true);
        sticky._detachClones();
        expect(sticky.areClonesInDOM).toEqual(false);
    });

    it('noop if flag is already set', function() {
        // Detaching twice must not remove the real header as well.
        sticky._detachClones();
        sticky._detachClones();
        expect(col1.children().length).toEqual(1);
        expect(col2.children().length).toEqual(1);
    });
});
});
maximus-uk/SPFx_MultiList
node_modules/@microsoft/spfx-heft-plugins/node_modules/@microsoft/rush-lib/lib/cli/actions/ChangeAction.js
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
//
// Compiled output of rush-lib's ChangeAction: implements `rush change`, which
// interrogates the user (or --bulk parameters) about changed projects and
// writes <branchname>-<timestamp>.json change files under common/changes.
//
// TypeScript emit helpers (CommonJS interop) — standard tsc boilerplate.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChangeAction = void 0;
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const child_process = __importStar(require("child_process"));
const colors_1 = __importDefault(require("colors"));
const node_core_library_1 = require("@rushstack/node-core-library");
const ChangeManagement_1 = require("../../api/ChangeManagement");
const ChangeFile_1 = require("../../api/ChangeFile");
const BaseRushAction_1 = require("./BaseRushAction");
const ChangeFiles_1 = require("../../logic/ChangeFiles");
const VersionPolicy_1 = require("../../api/VersionPolicy");
const Git_1 = require("../../logic/Git");
// inquirer is loaded lazily because it is only needed in interactive mode.
const inquirer = node_core_library_1.Import.lazy('inquirer', require);
class ChangeAction extends BaseRushAction_1.BaseRushAction {
    constructor(parser) {
        const documentation = [
            'Asks a series of questions and then generates a <branchname>-<timestamp>.json file ' +
                'in the common folder. The `publish` command will consume these files and perform the proper ' +
                'version bumps. Note these changes will eventually be published in a changelog.md file in each package.',
            '',
            'The possible types of changes are: ',
            '',
            'MAJOR - these are breaking changes that are not backwards compatible. ' +
                'Examples are: renaming a public class, adding/removing a non-optional ' +
                'parameter from a public API, or renaming an variable or function that ' +
                'is exported.',
            '',
            'MINOR - these are changes that are backwards compatible (but not ' +
                'forwards compatible). Examples are: adding a new public API or adding an ' +
                'optional parameter to a public API',
            '',
            'PATCH - these are changes that are backwards and forwards compatible. ' +
                'Examples are: Modifying a private API or fixing a bug in the logic ' +
                'of how an existing API works.',
            '',
            'HOTFIX (EXPERIMENTAL) - these are changes that are hotfixes targeting a ' +
                'specific older version of the package. When a hotfix change is added, ' +
                'other changes will not be able to increment the version number. ' +
                "Enable this feature by setting 'hotfixChangeEnabled' in your rush.json.",
            ''
        ];
        super({
            actionName: 'change',
            summary: 'Records changes made to projects, indicating how the package version number should be bumped ' +
                'for the next publish.',
            documentation: documentation.join(os.EOL),
            safeForSimultaneousRushProcesses: true,
            parser
        });
        this._git = new Git_1.Git(this.rushConfiguration);
    }
    // Declares all CLI parameters for `rush change`.
    onDefineParameters() {
        const BULK_LONG_NAME = '--bulk';
        const BULK_MESSAGE_LONG_NAME = '--message';
        const BULK_BUMP_TYPE_LONG_NAME = '--bump-type';
        this._verifyParameter = this.defineFlagParameter({
            parameterLongName: '--verify',
            parameterShortName: '-v',
            description: 'Verify the change file has been generated and that it is a valid JSON file'
        });
        this._noFetchParameter = this.defineFlagParameter({
            parameterLongName: '--no-fetch',
            description: 'Skips fetching the baseline branch before running "git diff" to detect changes.'
        });
        this._targetBranchParameter = this.defineStringParameter({
            parameterLongName: '--target-branch',
            parameterShortName: '-b',
            argumentName: 'BRANCH',
            description: 'If this parameter is specified, compare the checked out branch with the specified branch to ' +
                'determine which projects were changed. If this parameter is not specified, the checked out branch ' +
                'is compared against the "master" branch.'
        });
        this._overwriteFlagParameter = this.defineFlagParameter({
            parameterLongName: '--overwrite',
            description: `If a changefile already exists, overwrite without prompting ` +
                `(or erroring in ${BULK_LONG_NAME} mode).`
        });
        this._changeEmailParameter = this.defineStringParameter({
            parameterLongName: '--email',
            argumentName: 'EMAIL',
            description: 'The email address to use in changefiles. If this parameter is not provided, the email address ' +
                'will be detected or prompted for in interactive mode.'
        });
        this._bulkChangeParameter = this.defineFlagParameter({
            parameterLongName: BULK_LONG_NAME,
            description: 'If this flag is specified, apply the same change message and bump type to all changed projects. ' +
                `The ${BULK_MESSAGE_LONG_NAME} and the ${BULK_BUMP_TYPE_LONG_NAME} parameters must be specified if the ` +
                `${BULK_LONG_NAME} parameter is specified`
        });
        this._bulkChangeMessageParameter = this.defineStringParameter({
            parameterLongName: BULK_MESSAGE_LONG_NAME,
            argumentName: 'MESSAGE',
            description: `The message to apply to all changed projects if the ${BULK_LONG_NAME} flag is provided.`
        });
        this._bulkChangeBumpTypeParameter = this.defineChoiceParameter({
            parameterLongName: BULK_BUMP_TYPE_LONG_NAME,
            alternatives: [...Object.keys(this._getBumpOptions()), ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none]],
            description: `The bump type to apply to all changed projects if the ${BULK_LONG_NAME} flag is provided.`
        });
    }
    // Entry point: dispatches to verify-only, bulk, or interactive mode and
    // finally writes the collected change files.
    async runAsync() {
        console.log(`The target branch is ${this._targetBranch}`);
        if (this._verifyParameter.value) {
            const errors = [
                this._bulkChangeParameter,
                this._bulkChangeMessageParameter,
                this._bulkChangeBumpTypeParameter,
                this._overwriteFlagParameter
            ]
                .map((parameter) => {
                // FIX: the original always reported this._bulkChangeParameter.longName
                // (with a stray "{") even when a different parameter conflicted with
                // --verify; name the actual offending parameter instead.
                return parameter.value
                    ? `The ${parameter.longName} parameter cannot be provided with the ` +
                        `${this._verifyParameter.longName} parameter`
                    : '';
            })
                .filter((error) => error !== '');
            if (errors.length > 0) {
                errors.forEach((error) => console.error(error));
                throw new node_core_library_1.AlreadyReportedError();
            }
            this._verify();
            return;
        }
        const sortedProjectList = this._getChangedPackageNames().sort();
        if (sortedProjectList.length === 0) {
            this._logNoChangeFileRequired();
            this._warnUncommittedChanges();
            return;
        }
        this._warnUncommittedChanges();
        const promptModule = inquirer.createPromptModule();
        let changeFileData = new Map();
        let interactiveMode = false;
        if (this._bulkChangeParameter.value) {
            // Bulk mode: one message + bump type applied to every changed project.
            if (!this._bulkChangeBumpTypeParameter.value ||
                (!this._bulkChangeMessageParameter.value &&
                    this._bulkChangeBumpTypeParameter.value !== ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none])) {
                throw new Error(`The ${this._bulkChangeBumpTypeParameter.longName} and ${this._bulkChangeMessageParameter.longName} ` +
                    `parameters must provided if the ${this._bulkChangeParameter.longName} flag is provided. If the value ` +
                    `"${ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none]}" is provided to the ${this._bulkChangeBumpTypeParameter.longName} ` +
                    `parameter, the ${this._bulkChangeMessageParameter.longName} parameter may be omitted.`);
            }
            const email = this._changeEmailParameter.value || this._detectEmail();
            if (!email) {
                throw new Error("Unable to detect Git email and an email address wasn't provided using the " +
                    `${this._changeEmailParameter.longName} paramter.`);
            }
            const errors = [];
            const comment = this._bulkChangeMessageParameter.value || '';
            const changeType = this._bulkChangeBumpTypeParameter.value;
            for (const packageName of sortedProjectList) {
                const allowedBumpTypes = Object.keys(this._getBumpOptions(packageName));
                let projectChangeType = changeType;
                if (allowedBumpTypes.length === 0) {
                    // Lockstep-versioned projects accept no bump type; force "none".
                    projectChangeType = ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none];
                }
                else if (projectChangeType !== ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none] &&
                    allowedBumpTypes.indexOf(projectChangeType) === -1) {
                    errors.push(`The "${projectChangeType}" change type is not allowed for package "${packageName}".`);
                }
                changeFileData.set(packageName, {
                    changes: [
                        {
                            comment,
                            type: projectChangeType,
                            packageName
                        }
                    ],
                    packageName,
                    email
                });
            }
            if (errors.length > 0) {
                for (const error of errors) {
                    console.error(error);
                }
                throw new node_core_library_1.AlreadyReportedError();
            }
        }
        else if (this._bulkChangeBumpTypeParameter.value || this._bulkChangeMessageParameter.value) {
            throw new Error(`The ${this._bulkChangeParameter.longName} flag must be provided with the ` +
                `${this._bulkChangeBumpTypeParameter.longName} and ${this._bulkChangeMessageParameter.longName} parameters.`);
        }
        else {
            // Interactive mode: prompt per project, then attach the author email.
            interactiveMode = true;
            const existingChangeComments = ChangeFiles_1.ChangeFiles.getChangeComments(this._getChangeFiles());
            changeFileData = await this._promptForChangeFileData(promptModule, sortedProjectList, existingChangeComments);
            const email = this._changeEmailParameter.value
                ? this._changeEmailParameter.value
                : await this._detectOrAskForEmail(promptModule);
            changeFileData.forEach((changeFile) => {
                changeFile.email = email;
            });
        }
        try {
            return await this._writeChangeFiles(promptModule, changeFileData, this._overwriteFlagParameter.value, interactiveMode);
        }
        catch (error) {
            throw new Error(`There was an error creating a change file: ${error.toString()}`);
        }
    }
    // Maps each project to the project whose change file should record it
    // (the main project for lockstepped version policies).
    _generateHostMap() {
        const hostMap = new Map();
        this.rushConfiguration.projects.forEach((project) => {
            let hostProjectName = project.packageName;
            if (project.versionPolicy && project.versionPolicy.isLockstepped) {
                const lockstepPolicy = project.versionPolicy;
                hostProjectName = lockstepPolicy.mainProject || project.packageName;
            }
            hostMap.set(project.packageName, hostProjectName);
        });
        return hostMap;
    }
    // --verify implementation: validate existing change files cover the changes.
    _verify() {
        const changedPackages = this._getChangedPackageNames();
        if (changedPackages.length > 0) {
            this._validateChangeFile(changedPackages);
        }
        else {
            this._logNoChangeFileRequired();
        }
    }
    // Lazily resolved comparison branch (--target-branch or the remote default).
    get _targetBranch() {
        if (!this._targetBranchName) {
            this._targetBranchName = this._targetBranchParameter.value || this._git.getRemoteDefaultBranch();
        }
        return this._targetBranchName;
    }
    // Returns the names of publishable projects with changes vs. the target
    // branch, mapped through the lockstep host map.
    _getChangedPackageNames() {
        const changedFolders = this._git.getChangedFolders(this._targetBranch, !this._noFetchParameter.value);
        if (!changedFolders) {
            return [];
        }
        const changedPackageNames = new Set();
        const git = new Git_1.Git(this.rushConfiguration);
        const repoRootFolder = git.getRepositoryRootPath();
        const projectHostMap = this._generateHostMap();
        this.rushConfiguration.projects
            .filter((project) => project.shouldPublish)
            .filter((project) => !project.versionPolicy || !project.versionPolicy.exemptFromRushChange)
            .filter((project) => {
            const projectFolder = repoRootFolder
                ? path.relative(repoRootFolder, project.projectFolder)
                : project.projectRelativeFolder;
            return this._hasProjectChanged(changedFolders, projectFolder);
        })
            .forEach((project) => {
            const hostName = projectHostMap.get(project.packageName);
            if (hostName) {
                changedPackageNames.add(hostName);
            }
        });
        return [...changedPackageNames];
    }
    _validateChangeFile(changedPackages) {
        const files = this._getChangeFiles();
        ChangeFiles_1.ChangeFiles.validate(files, changedPackages, this.rushConfiguration);
    }
    // Absolute paths of change files added on this branch under common/changes.
    _getChangeFiles() {
        return this._git.getChangedFiles(this._targetBranch, true, `common/changes/`).map((relativePath) => {
            return path.join(this.rushConfiguration.rushJsonFolder, relativePath);
        });
    }
    _hasProjectChanged(changedFolders, projectFolder) {
        for (const folder of changedFolders) {
            if (node_core_library_1.Path.isUnderOrEqual(folder, projectFolder)) {
                return true;
            }
        }
        return false;
    }
    /**
     * The main loop which prompts the user for information on changed projects.
     */
    async _promptForChangeFileData(promptModule, sortedProjectList, existingChangeComments) {
        const changedFileData = new Map();
        for (const projectName of sortedProjectList) {
            const changeInfo = await this._askQuestions(promptModule, projectName, existingChangeComments);
            if (changeInfo) {
                // Save the info into the change file
                let changeFile = changedFileData.get(changeInfo.packageName);
                if (!changeFile) {
                    changeFile = {
                        changes: [],
                        packageName: changeInfo.packageName,
                        email: undefined
                    };
                    changedFileData.set(changeInfo.packageName, changeFile);
                }
                changeFile.changes.push(changeInfo);
            }
        }
        return changedFileData;
    }
    /**
     * Asks all questions which are needed to generate changelist for a project.
     */
    async _askQuestions(promptModule, packageName, existingChangeComments) {
        console.log(`${os.EOL}${packageName}`);
        const comments = existingChangeComments.get(packageName);
        if (comments) {
            console.log(`Found existing comments:`);
            comments.forEach((comment) => {
                console.log(`    > ${comment}`);
            });
            const { appendComment } = await promptModule({
                name: 'appendComment',
                type: 'list',
                default: 'skip',
                message: 'Append to existing comments or skip?',
                choices: [
                    {
                        name: 'Skip',
                        value: 'skip'
                    },
                    {
                        name: 'Append',
                        value: 'append'
                    }
                ]
            });
            if (appendComment === 'skip') {
                return undefined;
            }
            else {
                return await this._promptForComments(promptModule, packageName);
            }
        }
        else {
            return await this._promptForComments(promptModule, packageName);
        }
    }
    // Prompts for a change comment and (when applicable) a bump type.
    async _promptForComments(promptModule, packageName) {
        const bumpOptions = this._getBumpOptions(packageName);
        const { comment } = await promptModule({
            name: 'comment',
            type: 'input',
            message: `Describe changes, or ENTER if no changes:`
        });
        if (Object.keys(bumpOptions).length === 0 || !comment) {
            // No comment or no allowed bump types means a "none" change record.
            return {
                packageName: packageName,
                comment: comment || '',
                type: ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.none]
            };
        }
        else {
            const { bumpType } = await promptModule({
                choices: Object.keys(bumpOptions).map((option) => {
                    return {
                        value: option,
                        name: bumpOptions[option]
                    };
                }),
                default: 'patch',
                message: 'Select the type of change:',
                name: 'bumpType',
                type: 'list'
            });
            return {
                packageName: packageName,
                comment: comment,
                type: bumpType
            };
        }
    }
    // Returns the bump types a package may select, filtered by version policy.
    _getBumpOptions(packageName) {
        let bumpOptions = this.rushConfiguration && this.rushConfiguration.hotfixChangeEnabled
            ? {
                [ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.hotfix]]: 'hotfix - for changes that need to be published in a separate hotfix package'
            }
            : {
                [ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.major]]: 'major - for changes that break compatibility, e.g. removing an API',
                [ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.minor]]: 'minor - for backwards compatible changes, e.g. adding a new API',
                [ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.patch]]: 'patch - for changes that do not affect compatibility, e.g. fixing a bug'
            };
        if (packageName) {
            const project = this.rushConfiguration.getProjectByName(packageName);
            const versionPolicy = project.versionPolicy;
            if (versionPolicy) {
                if (versionPolicy.definitionName === VersionPolicy_1.VersionPolicyDefinitionName.lockStepVersion) {
                    // No need to ask for bump types if project is lockstep versioned.
                    bumpOptions = {};
                }
                else if (versionPolicy.definitionName === VersionPolicy_1.VersionPolicyDefinitionName.individualVersion) {
                    const individualPolicy = versionPolicy;
                    if (individualPolicy.lockedMajor !== undefined) {
                        delete bumpOptions[ChangeManagement_1.ChangeType[ChangeManagement_1.ChangeType.major]];
                    }
                }
            }
        }
        return bumpOptions;
    }
    /**
     * Will determine a user's email by first detecting it from their Git config,
     * or will ask for it if it is not found or the Git config is wrong.
     */
    async _detectOrAskForEmail(promptModule) {
        return (await this._detectAndConfirmEmail(promptModule)) || (await this._promptForEmail(promptModule));
    }
    _detectEmail() {
        try {
            return child_process
                .execSync('git config user.email')
                .toString()
                .replace(/(\r\n|\n|\r)/gm, '');
        }
        catch (err) {
            console.log('There was an issue detecting your Git email...');
            return undefined;
        }
    }
    /**
     * Detects the user's email address from their Git configuration, prompts the user to approve the
     * detected email. It returns undefined if it cannot be detected.
     */
    async _detectAndConfirmEmail(promptModule) {
        const email = this._detectEmail();
        if (email) {
            const { isCorrectEmail } = await promptModule([
                {
                    type: 'confirm',
                    name: 'isCorrectEmail',
                    default: 'Y',
                    message: `Is your email address ${email}?`
                }
            ]);
            return isCorrectEmail ? email : undefined;
        }
        else {
            return undefined;
        }
    }
    /**
     * Asks the user for their email address
     */
    async _promptForEmail(promptModule) {
        const { email } = await promptModule([
            {
                type: 'input',
                name: 'email',
                message: 'What is your email address?',
                validate: (input) => {
                    return true; // @todo should be an email
                }
            }
        ]);
        return email;
    }
    _warnUncommittedChanges() {
        try {
            if (this._git.hasUncommittedChanges()) {
                console.log(os.EOL +
                    colors_1.default.yellow('Warning: You have uncommitted changes, which do not trigger prompting for change ' +
                        'descriptions.'));
            }
        }
        catch (error) {
            console.log(`An error occurred when detecting uncommitted changes: ${error}`);
        }
    }
    /**
     * Writes change files to the common/changes folder. Will prompt for overwrite if file already exists.
     */
    async _writeChangeFiles(promptModule, changeFileData, overwrite, interactiveMode) {
        // FIX: the original awaited Map.prototype.forEach, which returns
        // undefined, so the async callbacks were fired and forgotten and the
        // method could resolve before any change file was actually written
        // (and before overwrite prompts completed). Iterate sequentially.
        for (const changeFile of changeFileData.values()) {
            await this._writeChangeFile(promptModule, changeFile, overwrite, interactiveMode);
        }
    }
    async _writeChangeFile(promptModule, changeFileData, overwrite, interactiveMode) {
        const output = JSON.stringify(changeFileData, undefined, 2);
        const changeFile = new ChangeFile_1.ChangeFile(changeFileData, this.rushConfiguration);
        const filePath = changeFile.generatePath();
        const fileExists = node_core_library_1.FileSystem.exists(filePath);
        const shouldWrite = !fileExists ||
            overwrite ||
            (interactiveMode ? await this._promptForOverwrite(promptModule, filePath) : false);
        if (!interactiveMode && fileExists && !overwrite) {
            throw new Error(`Changefile ${filePath} already exists`);
        }
        if (shouldWrite) {
            this._writeFile(filePath, output, shouldWrite && fileExists);
        }
    }
    async _promptForOverwrite(promptModule, filePath) {
        // FIX: the original tested the inquirer *answers object* (always
        // truthy), making the "Not overwriting" branch unreachable.
        // Destructure the boolean answer, matching the other prompts here.
        const { overwrite } = await promptModule([
            {
                name: 'overwrite',
                type: 'confirm',
                message: `Overwrite ${filePath}?`
            }
        ]);
        if (overwrite) {
            return true;
        }
        else {
            console.log(`Not overwriting ${filePath}`);
            return false;
        }
    }
    /**
     * Writes a file to disk, ensuring the directory structure up to that point exists
     */
    _writeFile(fileName, output, isOverwrite) {
        node_core_library_1.FileSystem.writeFile(fileName, output, { ensureFolderExists: true });
        if (isOverwrite) {
            console.log(`Overwrote file: ${fileName}`);
        }
        else {
            console.log(`Created file: ${fileName}`);
        }
    }
    _logNoChangeFileRequired() {
        console.log('No changes were detected to relevant packages on this branch. Nothing to do.');
    }
}
exports.ChangeAction = ChangeAction;
//# sourceMappingURL=ChangeAction.js.map
Antique/libxenserver
src/xen_vm.c
<reponame>Antique/libxenserver<filename>src/xen_vm.c<gh_stars>0 /* * Copyright (c) <NAME>, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include <stddef.h> #include <stdlib.h> #include "xen_internal.h" #include "xen_on_crash_behaviour_internal.h" #include "xen_on_normal_exit_internal.h" #include "xen_string_vm_operations_map_internal.h" #include "xen_vm_operations_internal.h" #include "xen_vm_operations_string_map_internal.h" #include "xen_vm_power_state_internal.h" #include <xen/api/xen_blob.h> #include <xen/api/xen_common.h> #include <xen/api/xen_console.h> #include <xen/api/xen_crashdump.h> #include <xen/api/xen_host.h> #include <xen/api/xen_host_string_set_map.h> #include <xen/api/xen_pci.h> #include <xen/api/xen_sr.h> #include <xen/api/xen_string_blob_map.h> #include <xen/api/xen_string_string_map.h> #include <xen/api/xen_string_vm_operations_map.h> #include <xen/api/xen_vbd.h> #include <xen/api/xen_vdi.h> #include <xen/api/xen_vdi_sr_map.h> #include <xen/api/xen_vgpu.h> #include <xen/api/xen_vif.h> #include <xen/api/xen_vif_network_map.h> #include <xen/api/xen_vm.h> #include <xen/api/xen_vm_appliance.h> #include <xen/api/xen_vm_guest_metrics.h> #include <xen/api/xen_vm_metrics.h> #include <xen/api/xen_vm_operations.h> #include <xen/api/xen_vm_operations_string_map.h> #include <xen/api/xen_vm_xen_vm_record_map.h> #include <xen/api/xen_vmpp.h> #include <xen/api/xen_vtpm.h> XEN_FREE(xen_vm) XEN_SET_ALLOC_FREE(xen_vm) XEN_ALLOC(xen_vm_record) XEN_SET_ALLOC_FREE(xen_vm_record) XEN_ALLOC(xen_vm_record_opt) XEN_RECORD_OPT_FREE(xen_vm) XEN_SET_ALLOC_FREE(xen_vm_record_opt) static const struct_member xen_vm_record_struct_members[] = { { .key = "uuid", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, uuid) }, { .key = "allowed_operations", .type = &xen_vm_operations_set_abstract_type_, .offset = offsetof(xen_vm_record, allowed_operations) }, { .key = "current_operations", .type = &string_vm_operations_map_abstract_type_, .offset = offsetof(xen_vm_record, current_operations) }, { .key = "power_state", .type = &xen_vm_power_state_abstract_type_, .offset = offsetof(xen_vm_record, 
power_state) }, { .key = "name_label", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, name_label) }, { .key = "name_description", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, name_description) }, { .key = "user_version", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, user_version) }, { .key = "is_a_template", .type = &abstract_type_bool, .offset = offsetof(xen_vm_record, is_a_template) }, { .key = "suspend_VDI", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, suspend_vdi) }, { .key = "resident_on", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, resident_on) }, { .key = "affinity", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, affinity) }, { .key = "memory_overhead", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_overhead) }, { .key = "memory_target", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_target) }, { .key = "memory_static_max", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_static_max) }, { .key = "memory_dynamic_max", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_dynamic_max) }, { .key = "memory_dynamic_min", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_dynamic_min) }, { .key = "memory_static_min", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, memory_static_min) }, { .key = "VCPUs_params", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, vcpus_params) }, { .key = "VCPUs_max", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, vcpus_max) }, { .key = "VCPUs_at_startup", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, vcpus_at_startup) }, { .key = "actions_after_shutdown", .type = &xen_on_normal_exit_abstract_type_, .offset = offsetof(xen_vm_record, actions_after_shutdown) }, { .key = "actions_after_reboot", .type = &xen_on_normal_exit_abstract_type_, .offset = offsetof(xen_vm_record, 
actions_after_reboot) }, { .key = "actions_after_crash", .type = &xen_on_crash_behaviour_abstract_type_, .offset = offsetof(xen_vm_record, actions_after_crash) }, { .key = "consoles", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, consoles) }, { .key = "VIFs", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, vifs) }, { .key = "VBDs", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, vbds) }, { .key = "crash_dumps", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, crash_dumps) }, { .key = "VTPMs", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, vtpms) }, { .key = "PV_bootloader", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_bootloader) }, { .key = "PV_kernel", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_kernel) }, { .key = "PV_ramdisk", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_ramdisk) }, { .key = "PV_args", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_args) }, { .key = "PV_bootloader_args", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_bootloader_args) }, { .key = "PV_legacy_args", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pv_legacy_args) }, { .key = "HVM_boot_policy", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, hvm_boot_policy) }, { .key = "HVM_boot_params", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, hvm_boot_params) }, { .key = "HVM_shadow_multiplier", .type = &abstract_type_float, .offset = offsetof(xen_vm_record, hvm_shadow_multiplier) }, { .key = "platform", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, platform) }, { .key = "PCI_bus", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, pci_bus) }, { .key = "other_config", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, other_config) }, { .key = "domid", .type = 
&abstract_type_int, .offset = offsetof(xen_vm_record, domid) }, { .key = "domarch", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, domarch) }, { .key = "last_boot_CPU_flags", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, last_boot_cpu_flags) }, { .key = "is_control_domain", .type = &abstract_type_bool, .offset = offsetof(xen_vm_record, is_control_domain) }, { .key = "metrics", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, metrics) }, { .key = "guest_metrics", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, guest_metrics) }, { .key = "last_booted_record", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, last_booted_record) }, { .key = "recommendations", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, recommendations) }, { .key = "xenstore_data", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, xenstore_data) }, { .key = "ha_always_run", .type = &abstract_type_bool, .offset = offsetof(xen_vm_record, ha_always_run) }, { .key = "ha_restart_priority", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, ha_restart_priority) }, { .key = "is_a_snapshot", .type = &abstract_type_bool, .offset = offsetof(xen_vm_record, is_a_snapshot) }, { .key = "snapshot_of", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, snapshot_of) }, { .key = "snapshots", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, snapshots) }, { .key = "snapshot_time", .type = &abstract_type_datetime, .offset = offsetof(xen_vm_record, snapshot_time) }, { .key = "transportable_snapshot_id", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, transportable_snapshot_id) }, { .key = "blobs", .type = &abstract_type_string_ref_map, .offset = offsetof(xen_vm_record, blobs) }, { .key = "tags", .type = &abstract_type_string_set, .offset = offsetof(xen_vm_record, tags) }, { .key = "blocked_operations", .type = 
&vm_operations_string_map_abstract_type_, .offset = offsetof(xen_vm_record, blocked_operations) }, { .key = "snapshot_info", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, snapshot_info) }, { .key = "snapshot_metadata", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, snapshot_metadata) }, { .key = "parent", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, parent) }, { .key = "children", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, children) }, { .key = "bios_strings", .type = &abstract_type_string_string_map, .offset = offsetof(xen_vm_record, bios_strings) }, { .key = "protection_policy", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, protection_policy) }, { .key = "is_snapshot_from_vmpp", .type = &abstract_type_bool, .offset = offsetof(xen_vm_record, is_snapshot_from_vmpp) }, { .key = "appliance", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, appliance) }, { .key = "start_delay", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, start_delay) }, { .key = "shutdown_delay", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, shutdown_delay) }, { .key = "order", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, order) }, { .key = "VGPUs", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, vgpus) }, { .key = "attached_PCIs", .type = &abstract_type_ref_set, .offset = offsetof(xen_vm_record, attached_pcis) }, { .key = "suspend_SR", .type = &abstract_type_ref, .offset = offsetof(xen_vm_record, suspend_sr) }, { .key = "version", .type = &abstract_type_int, .offset = offsetof(xen_vm_record, version) }, { .key = "generation_id", .type = &abstract_type_string, .offset = offsetof(xen_vm_record, generation_id) } }; const abstract_type xen_vm_record_abstract_type_ = { .typename = STRUCT, .struct_size = sizeof(xen_vm_record), .member_count = sizeof(xen_vm_record_struct_members) / sizeof(struct_member), .members = 
xen_vm_record_struct_members }; static const struct struct_member xen_vm_xen_vm_record_members[] = { { .type = &abstract_type_string, .offset = offsetof(xen_vm_xen_vm_record_map_contents, key) }, { .type = &xen_vm_record_abstract_type_, .offset = offsetof(xen_vm_xen_vm_record_map_contents, val) } }; const abstract_type abstract_type_string_xen_vm_record_map = { .typename = MAP, .struct_size = sizeof(xen_vm_xen_vm_record_map_contents), .members = xen_vm_xen_vm_record_members }; void xen_vm_record_free(xen_vm_record *record) { if (record == NULL) { return; } free(record->handle); free(record->uuid); xen_vm_operations_set_free(record->allowed_operations); xen_string_vm_operations_map_free(record->current_operations); free(record->name_label); free(record->name_description); xen_vdi_record_opt_free(record->suspend_vdi); xen_host_record_opt_free(record->resident_on); xen_host_record_opt_free(record->affinity); xen_string_string_map_free(record->vcpus_params); xen_console_record_opt_set_free(record->consoles); xen_vif_record_opt_set_free(record->vifs); xen_vbd_record_opt_set_free(record->vbds); xen_crashdump_record_opt_set_free(record->crash_dumps); xen_vtpm_record_opt_set_free(record->vtpms); free(record->pv_bootloader); free(record->pv_kernel); free(record->pv_ramdisk); free(record->pv_args); free(record->pv_bootloader_args); free(record->pv_legacy_args); free(record->hvm_boot_policy); xen_string_string_map_free(record->hvm_boot_params); xen_string_string_map_free(record->platform); free(record->pci_bus); xen_string_string_map_free(record->other_config); free(record->domarch); xen_string_string_map_free(record->last_boot_cpu_flags); xen_vm_metrics_record_opt_free(record->metrics); xen_vm_guest_metrics_record_opt_free(record->guest_metrics); free(record->last_booted_record); free(record->recommendations); xen_string_string_map_free(record->xenstore_data); free(record->ha_restart_priority); xen_vm_record_opt_free(record->snapshot_of); 
/* Continuation of xen_vm_record_free: remaining owned fields, then the
 * record itself. */
xen_vm_record_opt_set_free(record->snapshots); free(record->transportable_snapshot_id); xen_string_blob_map_free(record->blobs); xen_string_set_free(record->tags); xen_vm_operations_string_map_free(record->blocked_operations); xen_string_string_map_free(record->snapshot_info); free(record->snapshot_metadata); xen_vm_record_opt_free(record->parent); xen_vm_record_opt_set_free(record->children); xen_string_string_map_free(record->bios_strings); xen_vmpp_record_opt_free(record->protection_policy); xen_vm_appliance_record_opt_free(record->appliance); xen_vgpu_record_opt_set_free(record->vgpus); xen_pci_record_opt_set_free(record->attached_pcis); xen_sr_record_opt_free(record->suspend_sr); free(record->generation_id); free(record); }
/* Fetch the complete record of a VM.  NOTE: the XEN_CALL_ macro appears
 * to pick up the locals param_values, result_type, session and result by
 * name — do not rename them.  On success the record's opaque handle is
 * set to a fresh copy of its UUID. */
bool xen_vm_get_record(xen_session *session, xen_vm_record **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_vm_record_abstract_type_; *result = NULL; XEN_CALL_("VM.get_record"); if (session->ok) { (*result)->handle = xen_strdup_((*result)->uuid); } return session->ok; }
/* Resolve a VM reference from its UUID. */
bool xen_vm_get_by_uuid(xen_session *session, xen_vm *result, char *uuid) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = uuid } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_by_uuid"); return session->ok; }
/* Create a VM from the given record; synchronous variant returns the new
 * VM ref, the async variant (whose closing brace is on the next source
 * line) returns a task ref. */
bool xen_vm_create(xen_session *session, xen_vm *result, xen_vm_record *record) { abstract_value param_values[] = { { .type = &xen_vm_record_abstract_type_, .u.struct_val = record } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.create"); return session->ok; }
bool xen_vm_create_async(xen_session *session, xen_task *result, xen_vm_record *record) { abstract_value param_values[] = { { .type = &xen_vm_record_abstract_type_, .u.struct_val = record } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.create"); return session->ok;
}
/* Destroy the VM.  No result value, so the lower-level xen_call_ is used
 * directly (1 parameter, no result marshalling). */
bool xen_vm_destroy(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.destroy", param_values, 1, NULL, NULL); return session->ok; }
/* Async destroy: returns a task ref instead. */
bool xen_vm_destroy_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.destroy"); return session->ok; }
/* All VMs whose name_label matches exactly (labels need not be unique). */
bool xen_vm_get_by_name_label(xen_session *session, struct xen_vm_set **result, char *label) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = label } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_by_name_label"); return session->ok; }
/* Field getters: one "VM.get_*" RPC each.  Pointer results are pre-set to
 * NULL; scalar results (enum/int/bool/...) are left untouched on failure. */
bool xen_vm_get_allowed_operations(xen_session *session, struct xen_vm_operations_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_vm_operations_set_abstract_type_; *result = NULL; XEN_CALL_("VM.get_allowed_operations"); return session->ok; }
bool xen_vm_get_current_operations(xen_session *session, xen_string_vm_operations_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = string_vm_operations_map_abstract_type_; *result = NULL; XEN_CALL_("VM.get_current_operations"); return session->ok; }
bool xen_vm_get_power_state(xen_session *session, enum xen_vm_power_state *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_vm_power_state_abstract_type_; XEN_CALL_("VM.get_power_state"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_name_label(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }
/* Tail of xen_vm_get_name_label, then further string/int/bool getters. */
}; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_name_label"); return session->ok; }
bool xen_vm_get_name_description(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_name_description"); return session->ok; }
bool xen_vm_get_user_version(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_user_version"); return session->ok; }
bool xen_vm_get_is_a_template(xen_session *session, bool *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_bool; XEN_CALL_("VM.get_is_a_template"); return session->ok; }
/* Reference-valued getters: the wire value is an opaque ref string. */
bool xen_vm_get_suspend_vdi(xen_session *session, xen_vdi *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_suspend_VDI"); return session->ok; }
bool xen_vm_get_resident_on(xen_session *session, xen_host *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_resident_on"); return session->ok; }
bool xen_vm_get_affinity(xen_session *session, xen_host *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_affinity"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_memory_overhead(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string,
/* Tail of xen_vm_get_memory_overhead, then the memory-limit getters
 * (all int64, in bytes per the XenAPI model). */
.u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_overhead"); return session->ok; }
bool xen_vm_get_memory_target(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_target"); return session->ok; }
bool xen_vm_get_memory_static_max(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_static_max"); return session->ok; }
bool xen_vm_get_memory_dynamic_max(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_dynamic_max"); return session->ok; }
bool xen_vm_get_memory_dynamic_min(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_dynamic_min"); return session->ok; }
bool xen_vm_get_memory_static_min(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_memory_static_min"); return session->ok; }
bool xen_vm_get_vcpus_params(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_VCPUs_params"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_vcpus_max(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type =
/* Tail of xen_vm_get_vcpus_max, then VCPU and lifecycle-action getters.
 * The action getters decode into enums via dedicated abstract types. */
&abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_VCPUs_max"); return session->ok; }
bool xen_vm_get_vcpus_at_startup(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_VCPUs_at_startup"); return session->ok; }
bool xen_vm_get_actions_after_shutdown(xen_session *session, enum xen_on_normal_exit *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_on_normal_exit_abstract_type_; XEN_CALL_("VM.get_actions_after_shutdown"); return session->ok; }
bool xen_vm_get_actions_after_reboot(xen_session *session, enum xen_on_normal_exit *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_on_normal_exit_abstract_type_; XEN_CALL_("VM.get_actions_after_reboot"); return session->ok; }
bool xen_vm_get_actions_after_crash(xen_session *session, enum xen_on_crash_behaviour *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = xen_on_crash_behaviour_abstract_type_; XEN_CALL_("VM.get_actions_after_crash"); return session->ok; }
/* Set-of-refs getters: decoded as sets of ref strings. */
bool xen_vm_get_consoles(xen_session *session, struct xen_console_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_consoles"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_vifs(xen_session *session, struct xen_vif_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_VIFs"); return
/* Tail of xen_vm_get_vifs, then more set-of-refs and PV string getters. */
session->ok; }
bool xen_vm_get_vbds(xen_session *session, struct xen_vbd_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_VBDs"); return session->ok; }
bool xen_vm_get_crash_dumps(xen_session *session, struct xen_crashdump_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_crash_dumps"); return session->ok; }
bool xen_vm_get_vtpms(xen_session *session, struct xen_vtpm_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_VTPMs"); return session->ok; }
/* Paravirtualised boot configuration (plain strings). */
bool xen_vm_get_pv_bootloader(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_bootloader"); return session->ok; }
bool xen_vm_get_pv_kernel(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_kernel"); return session->ok; }
bool xen_vm_get_pv_ramdisk(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_ramdisk"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_pv_args(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type =
/* Tail of xen_vm_get_pv_args, remaining PV strings, then HVM config. */
abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_args"); return session->ok; }
bool xen_vm_get_pv_bootloader_args(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_bootloader_args"); return session->ok; }
bool xen_vm_get_pv_legacy_args(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PV_legacy_args"); return session->ok; }
bool xen_vm_get_hvm_boot_policy(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_HVM_boot_policy"); return session->ok; }
bool xen_vm_get_hvm_boot_params(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_HVM_boot_params"); return session->ok; }
/* Only floating-point getter in this class (shadow memory multiplier). */
bool xen_vm_get_hvm_shadow_multiplier(xen_session *session, double *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_float; XEN_CALL_("VM.get_HVM_shadow_multiplier"); return session->ok; }
bool xen_vm_get_platform(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_platform"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_pci_bus(xen_session *session, char **result, xen_vm vm) {
/* Body of xen_vm_get_pci_bus (header on previous source line), then
 * domain-level getters. */
abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_PCI_bus"); return session->ok; }
bool xen_vm_get_other_config(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_other_config"); return session->ok; }
bool xen_vm_get_domid(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_domid"); return session->ok; }
bool xen_vm_get_domarch(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_domarch"); return session->ok; }
bool xen_vm_get_last_boot_cpu_flags(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_last_boot_CPU_flags"); return session->ok; }
bool xen_vm_get_is_control_domain(xen_session *session, bool *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_bool; XEN_CALL_("VM.get_is_control_domain"); return session->ok; }
bool xen_vm_get_metrics(xen_session *session, xen_vm_metrics *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_metrics"); return session->ok; }
/* (Declaration continues on the next source line.) */
bool
/* Continuation: guest metrics ref, record/advice strings, HA fields. */
xen_vm_get_guest_metrics(xen_session *session, xen_vm_guest_metrics *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_guest_metrics"); return session->ok; }
bool xen_vm_get_last_booted_record(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_last_booted_record"); return session->ok; }
bool xen_vm_get_recommendations(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_recommendations"); return session->ok; }
bool xen_vm_get_xenstore_data(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_xenstore_data"); return session->ok; }
bool xen_vm_get_ha_always_run(xen_session *session, bool *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_bool; XEN_CALL_("VM.get_ha_always_run"); return session->ok; }
bool xen_vm_get_ha_restart_priority(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_ha_restart_priority"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_is_a_snapshot(xen_session *session, bool *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type
/* Tail of xen_vm_get_is_a_snapshot, then the snapshot-related getters.
 * Note get_blobs decodes as a string->string map: blob values arrive as
 * opaque ref strings on the wire. */
result_type = abstract_type_bool; XEN_CALL_("VM.get_is_a_snapshot"); return session->ok; }
bool xen_vm_get_snapshot_of(xen_session *session, xen_vm *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_snapshot_of"); return session->ok; }
bool xen_vm_get_snapshots(xen_session *session, struct xen_vm_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_snapshots"); return session->ok; }
bool xen_vm_get_snapshot_time(xen_session *session, time_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_datetime; XEN_CALL_("VM.get_snapshot_time"); return session->ok; }
bool xen_vm_get_transportable_snapshot_id(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_transportable_snapshot_id"); return session->ok; }
bool xen_vm_get_blobs(xen_session *session, xen_string_blob_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_blobs"); return session->ok; }
bool xen_vm_get_tags(xen_session *session, struct xen_string_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_tags"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_blocked_operations(xen_session *session, xen_vm_operations_string_map **result, xen_vm vm) {
/* Body of xen_vm_get_blocked_operations (header on previous source
 * line), then snapshot metadata and parent/children getters. */
abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = vm_operations_string_map_abstract_type_; *result = NULL; XEN_CALL_("VM.get_blocked_operations"); return session->ok; }
bool xen_vm_get_snapshot_info(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_snapshot_info"); return session->ok; }
bool xen_vm_get_snapshot_metadata(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_snapshot_metadata"); return session->ok; }
bool xen_vm_get_parent(xen_session *session, xen_vm *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_parent"); return session->ok; }
bool xen_vm_get_children(xen_session *session, struct xen_vm_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_children"); return session->ok; }
bool xen_vm_get_bios_strings(xen_session *session, xen_string_string_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.get_bios_strings"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_protection_policy(xen_session *session, xen_vmpp *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result
/* Tail of xen_vm_get_protection_policy, then VMPP/appliance/boot-order
 * getters. */
= NULL; XEN_CALL_("VM.get_protection_policy"); return session->ok; }
bool xen_vm_get_is_snapshot_from_vmpp(xen_session *session, bool *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_bool; XEN_CALL_("VM.get_is_snapshot_from_vmpp"); return session->ok; }
bool xen_vm_get_appliance(xen_session *session, xen_vm_appliance *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_appliance"); return session->ok; }
bool xen_vm_get_start_delay(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_start_delay"); return session->ok; }
bool xen_vm_get_shutdown_delay(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_shutdown_delay"); return session->ok; }
bool xen_vm_get_order(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_order"); return session->ok; }
bool xen_vm_get_vgpus(xen_session *session, struct xen_vgpu_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_VGPUs"); return session->ok; }
/* (Continues on the next source line.) */
bool xen_vm_get_attached_pcis(xen_session *session, struct xen_pci_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type =
/* Tail of xen_vm_get_attached_pcis, last getters, then the field
 * setters.  Setters carry no result, so they use xen_call_ directly with
 * an explicit parameter count. */
abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_attached_PCIs"); return session->ok; }
bool xen_vm_get_suspend_sr(xen_session *session, xen_sr *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_suspend_SR"); return session->ok; }
bool xen_vm_get_version(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.get_version"); return session->ok; }
bool xen_vm_get_generation_id(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_generation_id"); return session->ok; }
bool xen_vm_set_name_label(xen_session *session, xen_vm vm, char *label) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = label } }; xen_call_(session, "VM.set_name_label", param_values, 2, NULL, NULL); return session->ok; }
bool xen_vm_set_name_description(xen_session *session, xen_vm vm, char *description) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = description } }; xen_call_(session, "VM.set_name_description", param_values, 2, NULL, NULL); return session->ok; }
bool xen_vm_set_user_version(xen_session *session, xen_vm vm, int64_t user_version) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_int, .u.int_val = user_version } }; xen_call_(session, "VM.set_user_version", param_values, 2, NULL, NULL); return session->ok; }
/* (Signature continues on the next source line.) */
bool xen_vm_set_is_a_template(xen_session *session, xen_vm vm,
/* Generated VM field setters plus add_to_* / remove_from_* helpers for the
 * string-map fields (VCPUs_params, HVM_boot_params, platform, other_config,
 * xenstore_data) and the tags set / blocked_operations map. These use the
 * fire-and-forget xen_call_(session, method, params, n, NULL, NULL) form --
 * no result is decoded; success is reported solely via session->ok.
 * Enum-valued parameters are marshalled through their *_abstract_type_
 * descriptors (e.g. xen_on_normal_exit_abstract_type_). */
bool is_a_template) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_bool, .u.bool_val = is_a_template } }; xen_call_(session, "VM.set_is_a_template", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_affinity(xen_session *session, xen_vm vm, xen_host affinity) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = affinity } }; xen_call_(session, "VM.set_affinity", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_vcpus_params(xen_session *session, xen_vm vm, xen_string_string_map *params) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)params } }; xen_call_(session, "VM.set_VCPUs_params", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_vcpus_params(xen_session *session, xen_vm vm, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_VCPUs_params", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_remove_from_vcpus_params(xen_session *session, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.remove_from_VCPUs_params", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_actions_after_shutdown(xen_session *session, xen_vm vm, enum xen_on_normal_exit after_shutdown) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &xen_on_normal_exit_abstract_type_, .u.enum_val = after_shutdown } }; xen_call_(session, 
"VM.set_actions_after_shutdown", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_actions_after_reboot(xen_session *session, xen_vm vm, enum xen_on_normal_exit after_reboot) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &xen_on_normal_exit_abstract_type_, .u.enum_val = after_reboot } }; xen_call_(session, "VM.set_actions_after_reboot", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_actions_after_crash(xen_session *session, xen_vm vm, enum xen_on_crash_behaviour after_crash) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &xen_on_crash_behaviour_abstract_type_, .u.enum_val = after_crash } }; xen_call_(session, "VM.set_actions_after_crash", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_bootloader(xen_session *session, xen_vm vm, char *bootloader) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = bootloader } }; xen_call_(session, "VM.set_PV_bootloader", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_kernel(xen_session *session, xen_vm vm, char *kernel) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = kernel } }; xen_call_(session, "VM.set_PV_kernel", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_ramdisk(xen_session *session, xen_vm vm, char *ramdisk) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = ramdisk } }; xen_call_(session, "VM.set_PV_ramdisk", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_args(xen_session *session, xen_vm vm, char *args) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = 
&abstract_type_string, .u.string_val = args } }; xen_call_(session, "VM.set_PV_args", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_bootloader_args(xen_session *session, xen_vm vm, char *bootloader_args) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = bootloader_args } }; xen_call_(session, "VM.set_PV_bootloader_args", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pv_legacy_args(xen_session *session, xen_vm vm, char *legacy_args) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = legacy_args } }; xen_call_(session, "VM.set_PV_legacy_args", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_hvm_boot_policy(xen_session *session, xen_vm vm, char *boot_policy) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = boot_policy } }; xen_call_(session, "VM.set_HVM_boot_policy", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_hvm_boot_params(xen_session *session, xen_vm vm, xen_string_string_map *boot_params) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)boot_params } }; xen_call_(session, "VM.set_HVM_boot_params", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_hvm_boot_params(xen_session *session, xen_vm vm, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_HVM_boot_params", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_remove_from_hvm_boot_params(xen_session 
*session, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.remove_from_HVM_boot_params", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_platform(xen_session *session, xen_vm vm, xen_string_string_map *platform) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)platform } }; xen_call_(session, "VM.set_platform", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_platform(xen_session *session, xen_vm vm, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_platform", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_remove_from_platform(xen_session *session, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.remove_from_platform", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_pci_bus(xen_session *session, xen_vm vm, char *pci_bus) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = pci_bus } }; xen_call_(session, "VM.set_PCI_bus", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_other_config(xen_session *session, xen_vm vm, xen_string_string_map *other_config) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)other_config } }; xen_call_(session, "VM.set_other_config", 
param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_other_config(xen_session *session, xen_vm vm, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_other_config", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_remove_from_other_config(xen_session *session, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.remove_from_other_config", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_recommendations(xen_session *session, xen_vm vm, char *recommendations) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = recommendations } }; xen_call_(session, "VM.set_recommendations", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_xenstore_data(xen_session *session, xen_vm vm, xen_string_string_map *xenstore_data) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)xenstore_data } }; xen_call_(session, "VM.set_xenstore_data", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_xenstore_data(xen_session *session, xen_vm vm, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_xenstore_data", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_remove_from_xenstore_data(xen_session *session, xen_vm vm, char *key) { abstract_value 
param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.remove_from_xenstore_data", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_tags(xen_session *session, xen_vm vm, struct xen_string_set *tags) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_set, .u.set_val = (arbitrary_set *)tags } }; xen_call_(session, "VM.set_tags", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_tags(xen_session *session, xen_vm vm, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_tags", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_remove_tags(xen_session *session, xen_vm vm, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.remove_tags", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_blocked_operations(xen_session *session, xen_vm vm, xen_vm_operations_string_map *blocked_operations) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &vm_operations_string_map_abstract_type_, .u.set_val = (arbitrary_set *)blocked_operations } }; xen_call_(session, "VM.set_blocked_operations", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_add_to_blocked_operations(xen_session *session, xen_vm vm, enum xen_vm_operations key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &xen_vm_operations_abstract_type_, .u.enum_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_blocked_operations", param_values, 3, 
/* VM lifecycle operations: snapshot / snapshot_with_quiesce / clone / copy /
 * revert / checkpoint / provision / start(_on) / pause / unpause /
 * clean_shutdown / shutdown / clean_reboot / hard_shutdown / hard_reboot /
 * power_state_reset / suspend / resume(_on). Most come in pairs: the
 * synchronous form, and an *_async form that invokes the "Async.VM.*" RPC
 * and returns a xen_task handle through *result (initialised to NULL first).
 * Snapshot/clone/copy/checkpoint return the new VM handle via *result. */
NULL, NULL); return session->ok; } bool xen_vm_remove_from_blocked_operations(xen_session *session, xen_vm vm, enum xen_vm_operations key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &xen_vm_operations_abstract_type_, .u.enum_val = key } }; xen_call_(session, "VM.remove_from_blocked_operations", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_suspend_sr(xen_session *session, xen_vm vm, xen_sr suspend_sr) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = suspend_sr } }; xen_call_(session, "VM.set_suspend_SR", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_snapshot(xen_session *session, xen_vm *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.snapshot"); return session->ok; } bool xen_vm_snapshot_async(xen_session *session, xen_task *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.snapshot"); return session->ok; } bool xen_vm_snapshot_with_quiesce(xen_session *session, xen_vm *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.snapshot_with_quiesce"); return session->ok; } bool xen_vm_snapshot_with_quiesce_async(xen_session *session, xen_task *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = 
&abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.snapshot_with_quiesce"); return session->ok; } bool xen_vm_clone(xen_session *session, xen_vm *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.clone"); return session->ok; } bool xen_vm_clone_async(xen_session *session, xen_task *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.clone"); return session->ok; } bool xen_vm_copy(xen_session *session, xen_vm *result, xen_vm vm, char *new_name, xen_sr sr) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name }, { .type = &abstract_type_string, .u.string_val = sr } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.copy"); return session->ok; } bool xen_vm_copy_async(xen_session *session, xen_task *result, xen_vm vm, char *new_name, xen_sr sr) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name }, { .type = &abstract_type_string, .u.string_val = sr } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.copy"); return session->ok; } bool xen_vm_revert(xen_session *session, xen_vm snapshot) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = snapshot } }; xen_call_(session, "VM.revert", param_values, 1, 
NULL, NULL); return session->ok; } bool xen_vm_revert_async(xen_session *session, xen_task *result, xen_vm snapshot) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = snapshot } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.revert"); return session->ok; } bool xen_vm_checkpoint(xen_session *session, xen_vm *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.checkpoint"); return session->ok; } bool xen_vm_checkpoint_async(xen_session *session, xen_task *result, xen_vm vm, char *new_name) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = new_name } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.checkpoint"); return session->ok; } bool xen_vm_provision(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.provision", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_provision_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.provision"); return session->ok; } bool xen_vm_start(xen_session *session, xen_vm vm, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; xen_call_(session, "VM.start", param_values, 3, NULL, NULL); return session->ok; } bool 
xen_vm_start_async(xen_session *session, xen_task *result, xen_vm vm, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.start"); return session->ok; } bool xen_vm_start_on(xen_session *session, xen_vm vm, xen_host host, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; xen_call_(session, "VM.start_on", param_values, 4, NULL, NULL); return session->ok; } bool xen_vm_start_on_async(xen_session *session, xen_task *result, xen_vm vm, xen_host host, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.start_on"); return session->ok; } bool xen_vm_pause(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.pause", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_pause_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.pause"); return session->ok; } bool xen_vm_unpause(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = 
&abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.unpause", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_unpause_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.unpause"); return session->ok; } bool xen_vm_clean_shutdown(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.clean_shutdown", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_clean_shutdown_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.clean_shutdown"); return session->ok; } bool xen_vm_shutdown(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.shutdown", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_shutdown_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.shutdown"); return session->ok; } bool xen_vm_clean_reboot(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.clean_reboot", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_clean_reboot_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; 
XEN_CALL_("Async.VM.clean_reboot"); return session->ok; } bool xen_vm_hard_shutdown(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.hard_shutdown", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_hard_shutdown_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.hard_shutdown"); return session->ok; } bool xen_vm_power_state_reset(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.power_state_reset", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_power_state_reset_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.power_state_reset"); return session->ok; } bool xen_vm_hard_reboot(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.hard_reboot", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_hard_reboot_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.hard_reboot"); return session->ok; } bool xen_vm_suspend(xen_session *session, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; xen_call_(session, "VM.suspend", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_suspend_async(xen_session *session, xen_task *result, 
xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.suspend"); return session->ok; } bool xen_vm_resume(xen_session *session, xen_vm vm, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; xen_call_(session, "VM.resume", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_resume_async(xen_session *session, xen_task *result, xen_vm vm, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.resume"); return session->ok; } bool xen_vm_resume_on(xen_session *session, xen_vm vm, xen_host host, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; xen_call_(session, "VM.resume_on", param_values, 4, NULL, NULL); return session->ok; } bool xen_vm_resume_on_async(xen_session *session, xen_task *result, xen_vm vm, xen_host host, bool start_paused, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_bool, .u.bool_val = start_paused }, { .type = &abstract_type_bool, .u.bool_val = force } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.resume_on"); return session->ok; } bool 
/* Migration, VCPU and memory tuning calls: pool_migrate(_async),
 * live VCPU count/param changes, HA settings, memory-overhead query, and the
 * memory_{dynamic,static}_{min,max}/range/limits/target setters plus shadow
 * multiplier and VCPU max/at-startup setters, ending with send_sysrq.
 * Same generated pattern as above: marshal args, one RPC, return session->ok.
 * Note the parameter named "self" here is the xen_vm handle (generated
 * naming), not a C++-style receiver. */
xen_vm_pool_migrate(xen_session *session, xen_vm vm, xen_host host, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; xen_call_(session, "VM.pool_migrate", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_pool_migrate_async(xen_session *session, xen_task *result, xen_vm vm, xen_host host, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.pool_migrate"); return session->ok; } bool xen_vm_set_vcpus_number_live(xen_session *session, xen_vm self, int64_t nvcpu) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = nvcpu } }; xen_call_(session, "VM.set_VCPUs_number_live", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_vcpus_number_live_async(xen_session *session, xen_task *result, xen_vm self, int64_t nvcpu) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = nvcpu } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_VCPUs_number_live"); return session->ok; } bool xen_vm_add_to_vcpus_params_live(xen_session *session, xen_vm self, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.add_to_VCPUs_params_live", param_values, 
3, NULL, NULL); return session->ok; } bool xen_vm_add_to_vcpus_params_live_async(xen_session *session, xen_task *result, xen_vm self, char *key, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = key }, { .type = &abstract_type_string, .u.string_val = value } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.add_to_VCPUs_params_live"); return session->ok; } bool xen_vm_set_ha_restart_priority(xen_session *session, xen_vm self, char *value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.set_ha_restart_priority", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_ha_always_run(xen_session *session, xen_vm self, bool value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_bool, .u.bool_val = value } }; xen_call_(session, "VM.set_ha_always_run", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_compute_memory_overhead(xen_session *session, int64_t *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.compute_memory_overhead"); return session->ok; } bool xen_vm_compute_memory_overhead_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.compute_memory_overhead"); return session->ok; } bool xen_vm_set_memory_dynamic_max(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value 
} }; xen_call_(session, "VM.set_memory_dynamic_max", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_memory_dynamic_min(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_memory_dynamic_min", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_memory_dynamic_range(xen_session *session, xen_vm self, int64_t min, int64_t max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = min }, { .type = &abstract_type_int, .u.int_val = max } }; xen_call_(session, "VM.set_memory_dynamic_range", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_set_memory_dynamic_range_async(xen_session *session, xen_task *result, xen_vm self, int64_t min, int64_t max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = min }, { .type = &abstract_type_int, .u.int_val = max } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_memory_dynamic_range"); return session->ok; } bool xen_vm_set_memory_static_max(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_memory_static_max", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_memory_static_min(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_memory_static_min", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_memory_static_range(xen_session 
*session, xen_vm self, int64_t min, int64_t max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = min }, { .type = &abstract_type_int, .u.int_val = max } }; xen_call_(session, "VM.set_memory_static_range", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_set_memory_static_range_async(xen_session *session, xen_task *result, xen_vm self, int64_t min, int64_t max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = min }, { .type = &abstract_type_int, .u.int_val = max } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_memory_static_range"); return session->ok; } bool xen_vm_set_memory_limits(xen_session *session, xen_vm self, int64_t static_min, int64_t static_max, int64_t dynamic_min, int64_t dynamic_max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = static_min }, { .type = &abstract_type_int, .u.int_val = static_max }, { .type = &abstract_type_int, .u.int_val = dynamic_min }, { .type = &abstract_type_int, .u.int_val = dynamic_max } }; xen_call_(session, "VM.set_memory_limits", param_values, 5, NULL, NULL); return session->ok; } bool xen_vm_set_memory_limits_async(xen_session *session, xen_task *result, xen_vm self, int64_t static_min, int64_t static_max, int64_t dynamic_min, int64_t dynamic_max) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = static_min }, { .type = &abstract_type_int, .u.int_val = static_max }, { .type = &abstract_type_int, .u.int_val = dynamic_min }, { .type = &abstract_type_int, .u.int_val = dynamic_max } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_memory_limits"); return session->ok; } bool 
xen_vm_set_memory_target_live(xen_session *session, xen_vm self, int64_t target) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = target } }; xen_call_(session, "VM.set_memory_target_live", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_memory_target_live_async(xen_session *session, xen_task *result, xen_vm self, int64_t target) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = target } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_memory_target_live"); return session->ok; } bool xen_vm_wait_memory_target_live(xen_session *session, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; xen_call_(session, "VM.wait_memory_target_live", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_wait_memory_target_live_async(xen_session *session, xen_task *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.wait_memory_target_live"); return session->ok; } bool xen_vm_get_cooperative(xen_session *session, bool *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_bool; XEN_CALL_("VM.get_cooperative"); return session->ok; } bool xen_vm_get_cooperative_async(xen_session *session, xen_task *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.get_cooperative"); return session->ok; } bool xen_vm_set_hvm_shadow_multiplier(xen_session *session, xen_vm self, double value) { 
abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_float, .u.float_val = value } }; xen_call_(session, "VM.set_HVM_shadow_multiplier", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_shadow_multiplier_live(xen_session *session, xen_vm self, double multiplier) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_float, .u.float_val = multiplier } }; xen_call_(session, "VM.set_shadow_multiplier_live", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_shadow_multiplier_live_async(xen_session *session, xen_task *result, xen_vm self, double multiplier) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_float, .u.float_val = multiplier } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_shadow_multiplier_live"); return session->ok; } bool xen_vm_set_vcpus_max(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_VCPUs_max", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_vcpus_at_startup(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_VCPUs_at_startup", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_send_sysrq(xen_session *session, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; xen_call_(session, "VM.send_sysrq", param_values, 2, NULL, NULL); return session->ok; } bool 
xen_vm_send_sysrq_async(xen_session *session, xen_task *result, xen_vm vm, char *key) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = key } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.send_sysrq"); return session->ok; } bool xen_vm_send_trigger(xen_session *session, xen_vm vm, char *trigger) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = trigger } }; xen_call_(session, "VM.send_trigger", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_send_trigger_async(xen_session *session, xen_task *result, xen_vm vm, char *trigger) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = trigger } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.send_trigger"); return session->ok; } bool xen_vm_maximise_memory(xen_session *session, int64_t *result, xen_vm self, int64_t total, bool approximate) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = total }, { .type = &abstract_type_bool, .u.bool_val = approximate } }; abstract_type result_type = abstract_type_int; XEN_CALL_("VM.maximise_memory"); return session->ok; } bool xen_vm_maximise_memory_async(xen_session *session, xen_task *result, xen_vm self, int64_t total, bool approximate) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = total }, { .type = &abstract_type_bool, .u.bool_val = approximate } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.maximise_memory"); return session->ok; } bool xen_vm_migrate_send(xen_session *session, xen_vm vm, 
xen_string_string_map *dest, bool live, xen_vdi_sr_map *vdi_map, xen_vif_network_map *vif_map, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)dest }, { .type = &abstract_type_bool, .u.bool_val = live }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vdi_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vif_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; xen_call_(session, "VM.migrate_send", param_values, 6, NULL, NULL); return session->ok; } bool xen_vm_migrate_send_async(xen_session *session, xen_task *result, xen_vm vm, xen_string_string_map *dest, bool live, xen_vdi_sr_map *vdi_map, xen_vif_network_map *vif_map, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)dest }, { .type = &abstract_type_bool, .u.bool_val = live }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vdi_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vif_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.migrate_send"); return session->ok; } bool xen_vm_assert_can_migrate(xen_session *session, xen_vm vm, xen_string_string_map *dest, bool live, xen_vdi_sr_map *vdi_map, xen_vif_network_map *vif_map, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)dest }, { .type = &abstract_type_bool, .u.bool_val = live }, { .type = &abstract_type_string_string_map, .u.set_val = 
(arbitrary_set *)vdi_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vif_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; xen_call_(session, "VM.assert_can_migrate", param_values, 6, NULL, NULL); return session->ok; } bool xen_vm_assert_can_migrate_async(xen_session *session, xen_task *result, xen_vm vm, xen_string_string_map *dest, bool live, xen_vdi_sr_map *vdi_map, xen_vif_network_map *vif_map, xen_string_string_map *options) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)dest }, { .type = &abstract_type_bool, .u.bool_val = live }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vdi_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)vif_map }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)options } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.assert_can_migrate"); return session->ok; } bool xen_vm_get_boot_record(xen_session *session, xen_vm_record **result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = xen_vm_record_abstract_type_; *result = NULL; XEN_CALL_("VM.get_boot_record"); if (session->ok) { (*result)->handle = xen_strdup_((*result)->uuid); } return session->ok; } bool xen_vm_record_data_source(xen_session *session, xen_vm self, char *data_source) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = data_source } }; xen_call_(session, "VM.record_data_source", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_query_data_source(xen_session *session, double *result, xen_vm self, char *data_source) { abstract_value param_values[] = { { .type = 
&abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = data_source } }; abstract_type result_type = abstract_type_float; XEN_CALL_("VM.query_data_source"); return session->ok; } bool xen_vm_forget_data_source_archives(xen_session *session, xen_vm self, char *data_source) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = data_source } }; xen_call_(session, "VM.forget_data_source_archives", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_assert_operation_valid(xen_session *session, xen_vm self, enum xen_vm_operations op) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &xen_vm_operations_abstract_type_, .u.enum_val = op } }; xen_call_(session, "VM.assert_operation_valid", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_assert_operation_valid_async(xen_session *session, xen_task *result, xen_vm self, enum xen_vm_operations op) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &xen_vm_operations_abstract_type_, .u.enum_val = op } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.assert_operation_valid"); return session->ok; } bool xen_vm_update_allowed_operations(xen_session *session, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; xen_call_(session, "VM.update_allowed_operations", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_update_allowed_operations_async(xen_session *session, xen_task *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.update_allowed_operations"); return session->ok; } bool xen_vm_get_allowed_vbd_devices(xen_session 
*session, struct xen_string_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_allowed_VBD_devices"); return session->ok; } bool xen_vm_get_allowed_vif_devices(xen_session *session, struct xen_string_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_allowed_VIF_devices"); return session->ok; } bool xen_vm_get_possible_hosts(xen_session *session, struct xen_host_set **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_possible_hosts"); return session->ok; } bool xen_vm_get_possible_hosts_async(xen_session *session, xen_task *result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.get_possible_hosts"); return session->ok; } bool xen_vm_assert_can_boot_here(xen_session *session, xen_vm self, xen_host host) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = host } }; xen_call_(session, "VM.assert_can_boot_here", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_assert_can_boot_here_async(xen_session *session, xen_task *result, xen_vm self, xen_host host) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = host } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.assert_can_boot_here"); return session->ok; } bool xen_vm_create_new_blob(xen_session 
*session, xen_blob *result, xen_vm vm, char *name, char *mime_type, bool pubblic) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = name }, { .type = &abstract_type_string, .u.string_val = mime_type }, { .type = &abstract_type_bool, .u.bool_val = pubblic } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.create_new_blob"); return session->ok; } bool xen_vm_create_new_blob_async(xen_session *session, xen_task *result, xen_vm vm, char *name, char *mime_type, bool pubblic) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = name }, { .type = &abstract_type_string, .u.string_val = mime_type }, { .type = &abstract_type_bool, .u.bool_val = pubblic } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.create_new_blob"); return session->ok; } bool xen_vm_assert_agile(xen_session *session, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; xen_call_(session, "VM.assert_agile", param_values, 1, NULL, NULL); return session->ok; } bool xen_vm_assert_agile_async(xen_session *session, xen_task *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.assert_agile"); return session->ok; } bool xen_vm_retrieve_wlb_recommendations(xen_session *session, xen_host_string_set_map **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string_string_set_map; *result = NULL; XEN_CALL_("VM.retrieve_wlb_recommendations"); return session->ok; } bool xen_vm_retrieve_wlb_recommendations_async(xen_session *session, xen_task *result, xen_vm vm) { 
abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.retrieve_wlb_recommendations"); return session->ok; } bool xen_vm_copy_bios_strings(xen_session *session, xen_vm vm, xen_host host) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host } }; xen_call_(session, "VM.copy_bios_strings", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_copy_bios_strings_async(xen_session *session, xen_task *result, xen_vm vm, xen_host host) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm }, { .type = &abstract_type_string, .u.string_val = host } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.copy_bios_strings"); return session->ok; } bool xen_vm_set_protection_policy(xen_session *session, xen_vm self, xen_vmpp value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.set_protection_policy", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_start_delay(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_start_delay", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_start_delay_async(xen_session *session, xen_task *result, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_start_delay"); return session->ok; } bool 
xen_vm_set_shutdown_delay(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_shutdown_delay", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_shutdown_delay_async(xen_session *session, xen_task *result, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_shutdown_delay"); return session->ok; } bool xen_vm_set_order(xen_session *session, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; xen_call_(session, "VM.set_order", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_order_async(xen_session *session, xen_task *result, xen_vm self, int64_t value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_int, .u.int_val = value } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_order"); return session->ok; } bool xen_vm_set_suspend_vdi(xen_session *session, xen_vm self, xen_vdi value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.set_suspend_VDI", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_suspend_vdi_async(xen_session *session, xen_task *result, xen_vm self, xen_vdi value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; abstract_type result_type = abstract_type_string; 
*result = NULL; XEN_CALL_("Async.VM.set_suspend_VDI"); return session->ok; } bool xen_vm_assert_can_be_recovered(xen_session *session, xen_vm self, xen_session *session_to) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = session_to->session_id } }; xen_call_(session, "VM.assert_can_be_recovered", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_assert_can_be_recovered_async(xen_session *session, xen_task *result, xen_vm self, xen_session *session_to) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = session_to->session_id } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.assert_can_be_recovered"); return session->ok; } bool xen_vm_get_srs_required_for_recovery(xen_session *session, struct xen_sr_set **result, xen_vm self, xen_session *session_to) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = session_to->session_id } }; abstract_type result_type = abstract_type_string_set; *result = NULL; XEN_CALL_("VM.get_SRs_required_for_recovery"); return session->ok; } bool xen_vm_get_srs_required_for_recovery_async(xen_session *session, xen_task *result, xen_vm self, xen_session *session_to) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = session_to->session_id } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.get_SRs_required_for_recovery"); return session->ok; } bool xen_vm_recover(xen_session *session, xen_vm self, xen_session *session_to, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = 
session_to->session_id }, { .type = &abstract_type_bool, .u.bool_val = force } }; xen_call_(session, "VM.recover", param_values, 3, NULL, NULL); return session->ok; } bool xen_vm_recover_async(xen_session *session, xen_task *result, xen_vm self, xen_session *session_to, bool force) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = session_to->session_id }, { .type = &abstract_type_bool, .u.bool_val = force } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.recover"); return session->ok; } bool xen_vm_import_convert(xen_session *session, char *type, char *username, char *password, xen_sr sr, xen_string_string_map *remote_config) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = type }, { .type = &abstract_type_string, .u.string_val = username }, { .type = &abstract_type_string, .u.string_val = password }, { .type = &abstract_type_string, .u.string_val = sr }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)remote_config } }; xen_call_(session, "VM.import_convert", param_values, 5, NULL, NULL); return session->ok; } bool xen_vm_import_convert_async(xen_session *session, xen_task *result, char *type, char *username, char *password, xen_sr sr, xen_string_string_map *remote_config) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = type }, { .type = &abstract_type_string, .u.string_val = username }, { .type = &abstract_type_string, .u.string_val = password }, { .type = &abstract_type_string, .u.string_val = sr }, { .type = &abstract_type_string_string_map, .u.set_val = (arbitrary_set *)remote_config } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.import_convert"); return session->ok; } bool xen_vm_set_appliance(xen_session *session, xen_vm self, xen_vm_appliance value) { abstract_value param_values[] = { 
{ .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; xen_call_(session, "VM.set_appliance", param_values, 2, NULL, NULL); return session->ok; } bool xen_vm_set_appliance_async(xen_session *session, xen_task *result, xen_vm self, xen_vm_appliance value) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self }, { .type = &abstract_type_string, .u.string_val = value } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.set_appliance"); return session->ok; } bool xen_vm_query_services(xen_session *session, xen_string_string_map **result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string_string_map; *result = NULL; XEN_CALL_("VM.query_services"); return session->ok; } bool xen_vm_query_services_async(xen_session *session, xen_task *result, xen_vm self) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = self } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("Async.VM.query_services"); return session->ok; } bool xen_vm_get_all(xen_session *session, struct xen_vm_set **result) { abstract_type result_type = abstract_type_string_set; *result = NULL; xen_call_(session, "VM.get_all", NULL, 0, &result_type, result); return session->ok; } bool xen_vm_get_all_records(xen_session *session, xen_vm_xen_vm_record_map **result) { abstract_type result_type = abstract_type_string_xen_vm_record_map; *result = NULL; xen_call_(session, "VM.get_all_records", NULL, 0, &result_type, result); return session->ok; } bool xen_vm_get_uuid(xen_session *session, char **result, xen_vm vm) { abstract_value param_values[] = { { .type = &abstract_type_string, .u.string_val = vm } }; abstract_type result_type = abstract_type_string; *result = NULL; XEN_CALL_("VM.get_uuid"); return session->ok; }
walmir/xare
core/src/test/java/wa/xare/core/node/LoggerNodeTest.java
<filename>core/src/test/java/wa/xare/core/node/LoggerNodeTest.java
package wa.xare.core.node;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;

import io.vertx.core.logging.JULLogDelegateFactory;
import io.vertx.core.logging.LoggerFactory;

import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.runners.MockitoJUnitRunner;

import wa.xare.core.packet.Packet;

/**
 * Unit tests for {@link LoggerNode}.
 *
 * The tests force Vert.x logging through java.util.logging (JUL), attach a
 * capturing {@link Handler} to the JUL logger used by {@link LoggerNode}, and
 * assert that each LoggerNode level maps to the expected JUL {@link Level}
 * and that the packet body appears in the logged message.
 */
@RunWith(MockitoJUnitRunner.class)
public class LoggerNodeTest {

  LoggerNode node;

  @Mock
  Packet packet;

  /** Capturing handler installed on the LoggerNode JUL logger per test. */
  LogHandler handler;

  private String oldLoggerClass;
  private Level oldLevel;
  private boolean oldUseParentHandlers;
  private java.util.logging.Logger logger;

  @Before
  public void prepare() {
    // Remember the previously configured delegate factory (may be unset) so
    // it can be restored after the test.
    try {
      oldLoggerClass = System
          .getProperty(LoggerFactory.LOGGER_DELEGATE_FACTORY_CLASS_NAME);
    } catch (SecurityException e) {
      // Best effort only: without permission to read the property we simply
      // cannot restore it afterwards.
    }
    // Route Vert.x logging through JUL so the capturing handler below sees
    // everything LoggerNode emits.
    System.setProperty(LoggerFactory.LOGGER_DELEGATE_FACTORY_CLASS_NAME,
        JULLogDelegateFactory.class.getName());

    MockitoAnnotations.initMocks(this);
    node = new LoggerNode();

    logger = java.util.logging.Logger
        .getLogger(LoggerNode.class.getName());
    handler = new LogHandler();
    oldLevel = logger.getLevel();
    oldUseParentHandlers = logger.getUseParentHandlers();
    logger.setLevel(Level.ALL);
    logger.setUseParentHandlers(false);
    logger.addHandler(handler);
  }

  @After
  public void restoreLogger() {
    if (oldLoggerClass != null) {
      System.setProperty(LoggerFactory.LOGGER_DELEGATE_FACTORY_CLASS_NAME,
          oldLoggerClass);
    } else {
      // The property was not set before this test ran; clear it again so our
      // JUL delegate choice does not leak into later tests.
      System.clearProperty(LoggerFactory.LOGGER_DELEGATE_FACTORY_CLASS_NAME);
    }
    // Fix: the capturing handler used to accumulate on the shared JUL logger
    // because it was never removed; detach it and restore the logger state
    // we changed in prepare().
    logger.removeHandler(handler);
    logger.setUseParentHandlers(oldUseParentHandlers);
    logger.setLevel(oldLevel);
  }

  @Test
  public void testStartProcessing() throws Exception {
    String msg = "msg:asfdasdf90234jhgf";
    when(packet.getBody()).thenReturn(msg);

    // INFO -> Level.INFO
    node.setLevel(LoggerNode.INFO);
    node.startProcessing(packet);
    assertThat(handler.checkLevel()).isNotNull();
    assertThat(handler.checkMessage()).isNotNull();
    assertThat(handler.checkLevel()).isEqualTo(Level.INFO);
    assertThat(handler.checkMessage()).contains(msg);

    // ERROR -> Level.SEVERE
    handler.reset();
    node.setLevel(LoggerNode.ERROR);
    node.startProcessing(packet);
    assertThat(handler.checkLevel()).isEqualTo(Level.SEVERE);
    assertThat(handler.checkMessage()).contains(msg);

    // TRACE -> Level.FINEST
    handler.reset();
    node.setLevel(LoggerNode.TRACE);
    node.startProcessing(packet);
    assertThat(handler.checkLevel()).isEqualTo(Level.FINEST);
    assertThat(handler.checkMessage()).contains(msg);

    // DEBUG -> Level.FINE
    handler.reset();
    node.setLevel(LoggerNode.DEBUG);
    node.startProcessing(packet);
    assertThat(handler.checkLevel()).isEqualTo(Level.FINE);
    assertThat(handler.checkMessage()).contains(msg);

    // WARN -> Level.WARNING
    handler.reset();
    node.setLevel(LoggerNode.WARN);
    node.startProcessing(packet);
    assertThat(handler.checkLevel()).isEqualTo(Level.WARNING);
    assertThat(handler.checkMessage()).contains(msg);
  }

  /**
   * JUL handler that remembers the level and message of the most recently
   * published record so tests can assert on them.
   */
  class LogHandler extends Handler {

    Level lastLevel = null;
    private String lastMessage = null;

    /** @return level of the last published record, or null if none. */
    public Level checkLevel() {
      return lastLevel;
    }

    /** @return message of the last published record, or null if none. */
    public String checkMessage() {
      return lastMessage;
    }

    @Override
    public void publish(LogRecord record) {
      lastMessage = record.getMessage();
      lastLevel = record.getLevel();
    }

    @Override
    public void close() {
    }

    @Override
    public void flush() {
    }

    /** Clears the captured state between assertions. */
    public void reset() {
      lastLevel = null;
      lastMessage = null;
    }
  }
}
sarang-apps/darshan_browser
third_party/blink/renderer/modules/webusb/usb_alternate_interface.h
<reponame>sarang-apps/darshan_browser
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_WEBUSB_USB_ALTERNATE_INTERFACE_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_WEBUSB_USB_ALTERNATE_INTERFACE_H_

#include "services/device/public/mojom/usb_device.mojom-blink.h"
#include "third_party/blink/renderer/platform/bindings/script_wrappable.h"
#include "third_party/blink/renderer/platform/heap/heap.h"

namespace blink {

class ExceptionState;
class USBEndpoint;
class USBInterface;

// Script-wrappable representation of one alternate setting of a USB
// interface (the WebUSB USBAlternateInterface IDL interface). It stores a
// reference to the owning USBInterface plus the index of the alternate it
// describes; every attribute getter reads through Info() into the underlying
// device::mojom::blink::UsbAlternateInterfaceInfo descriptor.
class USBAlternateInterface : public ScriptWrappable {
  DEFINE_WRAPPERTYPEINFO();

 public:
  // Creates a wrapper for the alternate at |alternate_index| in the
  // interface's list of alternates.
  static USBAlternateInterface* Create(const USBInterface*,
                                       wtf_size_t alternate_index);
  // Creates a wrapper by looking up a bAlternateSetting value; presumably
  // reports failure through ExceptionState when the setting does not exist --
  // confirm against the .cc implementation.
  static USBAlternateInterface* Create(const USBInterface*,
                                       uint8_t alternate_setting,
                                       ExceptionState&);

  USBAlternateInterface(const USBInterface*, wtf_size_t alternate_index);

  // Underlying mojom descriptor for this alternate setting.
  const device::mojom::blink::UsbAlternateInterfaceInfo& Info() const;

  // IDL attribute getters; names mirror the USB interface descriptor fields.
  uint8_t alternateSetting() const { return Info().alternate_setting; }
  uint8_t interfaceClass() const { return Info().class_code; }
  uint8_t interfaceSubclass() const { return Info().subclass_code; }
  uint8_t interfaceProtocol() const { return Info().protocol_code; }
  String interfaceName() const { return Info().interface_name; }
  HeapVector<Member<USBEndpoint>> endpoints() const;

  void Trace(Visitor*) override;

 private:
  Member<const USBInterface> interface_;
  // Index into the owning interface's alternates; fixed at construction.
  const wtf_size_t alternate_index_;
};

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_MODULES_WEBUSB_USB_ALTERNATE_INTERFACE_H_
kolyden/uut-rpg
UUT/RPG/Level.cpp
#include "Level.h"
#include "LevelLayer.h"

namespace uutRPG
{
	UUT_OBJECT_IMPLEMENT(Level)
	{}

	Level::Level()
	{
	}

	// Sets the level's 2D extent.
	void Level::SetSize(const Vector2& size)
	{
		_size = size;
	}

	// Returns the level's 2D extent.
	const Vector2& Level::GetSize() const
	{
		return _size;
	}

	// Appends a layer at the end of the layer list. Delegates to
	// InsertLayer, so the same validity checks apply.
	bool Level::AddLayer(const SharedPtr<LevelLayer>& layer)
	{
		return InsertLayer(_layers.Count(), layer);
	}

	// Inserts a layer at the given position and takes ownership of it.
	// Returns false for a null layer, or for a layer whose _level weak
	// reference is still alive -- i.e. one already attached to some level.
	// NOTE(review): |index| is passed straight to _layers.Insert with no
	// range check; behavior for out-of-range indices depends on the
	// container -- confirm.
	bool Level::InsertLayer(int index, const SharedPtr<LevelLayer>& layer)
	{
		if (!layer || !layer->_level.Expired())
			return false;

		// Back-link the layer to this level before storing it.
		layer->_level = GetSharedThis();
		_layers.Insert(index, layer);
		return true;
	}

	// Number of layers currently attached.
	uint Level::GetLayerCount() const
	{
		return _layers.Count();
	}

	// Returns the layer at |index|.
	// NOTE(review): no bounds check here; an invalid index is handled (or
	// not) by the container's operator[] -- verify callers stay in range.
	SharedPtr<LevelLayer> Level::GetLayer(int index) const
	{
		return _layers[index];
	}

	// Advances every layer by |deltaTime| seconds, in insertion order.
	void Level::Update(float deltaTime)
	{
		for (auto& layer : _layers)
			layer->Update(deltaTime);
	}
}
riyadth/2019-DeepSpace
src/main/java/com/spartronics4915/frc2019/auto/modes/CharacterizeDriveRemoteMode.java
package com.spartronics4915.frc2019.auto.modes; import com.spartronics4915.frc2019.auto.AutoModeBase; import com.spartronics4915.frc2019.auto.AutoModeEndedException; import com.spartronics4915.frc2019.auto.actions.FeedRemoteCharacterization; public class CharacterizeDriveRemoteMode extends AutoModeBase { @Override protected void routine() throws AutoModeEndedException { runAction(new FeedRemoteCharacterization()); } }
lewnelson/lewnelson-com
src/containers/rubixCube/index.js
<filename>src/containers/rubixCube/index.js import RubixCube from './RubixCube' export default RubixCube
ElArtista/AssetLoader
src/model/plyload.c
#include "assets/model/model.h" #include <assert.h> #include <stdlib.h> #include <string.h> #include <stdint.h> #include <stdio.h> #include <assets/model/postprocess.h> /*----------------------------------------------------------------- * Data iterator *-----------------------------------------------------------------*/ struct data_iterator { const unsigned char* cur; const unsigned char* lim; }; /* Initialize iterator from base for sz bytes */ #define it_init(it, data, sz) \ do { it->cur = data; it->lim = data + sz; } while(0) /* Remaining sz bytes exist */ #define it_rem(it, sz) \ (it->cur + sz < it->lim) #ifndef PARSE_SAFE /* Forward sz bytes */ #define it_fw(it, sz) \ do { it->cur += sz; } while (0) /* Forward byte */ #define it_fwb(it) \ ++(it->cur) #else #define it_fw(it, sz) \ do { assert(it_rem(it, sz)); it_fw(it, sz); } while (0) #define it_fwb(it) \ do { assert(it_rem(it, 1)); it_fwb(it); } while (0) #endif /* Forward line */ #define it_fwl(it) \ do { while (*it->cur != '\n') { it_fwb(it); }; it_fwb(it); } while (0) #define is_wordspace(c) \ ((c) == ' ' || (c) == '\t' || (c) == '\n') /* Forward space */ #define it_fws(it) \ do { while (is_wordspace(*it->cur)) it_fwb(it); } while (0) /* Forward word */ #define it_fww(it) \ do { while (!is_wordspace(*it->cur)) it_fwb(it); } while (0) /* Forward next word */ #define it_fwnw(it) \ do { it_fww(it); it_fws(it); } while (0) /* Forward till char */ #define it_fwtc(it, c) \ do { while (it->cur != c) it_fwb(it) it_fwb(it); } while (0) /* Count word */ static int it_cntw(struct data_iterator* it) { int cnt = 0; while (!is_wordspace(*(it->cur + cnt))) ++cnt; return cnt; } /* Compare word */ #define it_cmpw(it, w) \ (strncmp(w, (const char*)it->cur, strlen(w)) == 0) /*----------------------------------------------------------------- * Ply *-----------------------------------------------------------------*/ enum ply_prop_type { /* Name Type Number of bytes * ----------------------------------------- */ PLY_CHAR = 0, /* 
character 1 */ PLY_UCHAR, /* unsigned character 1 */ PLY_SHORT, /* short integer 2 */ PLY_USHORT, /* unsigned short integer 2 */ PLY_INT, /* integer 4 */ PLY_UINT, /* unsigned integer 4 */ PLY_FLOAT, /* single-precision float 4 */ PLY_DOUBLE, /* double-precision float 8 */ PLY_UNDEFINED }; static const char* ply_prop_type_names[] = { "char", "uchar", "short", "ushort", "int", "uint", "float", "double" }; static const size_t ply_prop_type_sizes[] = { 1, 1, 2, 2, 4, 4, 4, 8 }; struct ply_property { const char* name; enum ply_prop_type dtype; unsigned int is_list; enum ply_prop_type lsz_type; }; struct ply_element { const char* name; unsigned long nentries; struct ply_property* props; unsigned long nprops; }; struct ply_header { enum { PLY_ASCII, PLY_BINARY_LE, PLY_BINARY_BE } format; struct { int maj, min; } ver; struct ply_element* elems; unsigned long nelems; }; struct ply_data { void** elem_chunks; unsigned long nelems; }; static enum ply_prop_type ply_prop_dtype_read(struct data_iterator* it) { int i = 0; for (i = 0; i < PLY_UNDEFINED; ++i) { const char* tname = ply_prop_type_names[i]; size_t tname_sz = strlen(tname); if (strncmp(tname, (const char*)it->cur, tname_sz) == 0) return i; } return i; } static int ply_prop_read(struct ply_property* pp, struct data_iterator* it) { /* Skip "property" word */ it_fwnw(it); /* Check if list type */ if (it_cmpw(it, "list")) { pp->is_list = 1; it_fwnw(it); } /* Read data type */ pp->dtype = ply_prop_dtype_read(it); it_fwnw(it); /* List length dtype */ if (pp->is_list) { pp->lsz_type = ply_prop_dtype_read(it); it_fwnw(it); } else pp->lsz_type = PLY_UNDEFINED; /* Read property name */ int wsz = it_cntw(it); pp->name = calloc(1, wsz + 1); strncpy((char*)pp->name, (const char*)it->cur, wsz); it_fwl(it); return 0; } static int ply_elem_read(struct ply_element* pe, struct data_iterator* it) { /* Skip "element" word */ it_fwnw(it); /* Read element name */ int wsz = it_cntw(it); pe->name = calloc(1, wsz + 1); strncpy((char*)pe->name, 
(const char*)it->cur, wsz); it_fwnw(it); /* Read element count */ pe->nentries = atol((const char*)it->cur); it_fwl(it); /* Read properties */ while (it_cmpw(it, "property")) { /* Allocate and read property */ pe->nprops++; pe->props = realloc(pe->props, pe->nprops * sizeof(*pe->props)); struct ply_property* pp = pe->props + (pe->nprops - 1); memset(pp, 0, sizeof(*pp)); ply_prop_read(pp, it); } return 0; } static int ply_header_read(struct ply_header* ph, struct data_iterator* it) { /* Zero out */ memset(ph, 0, sizeof(*ph)); /* Read format */ if (!it_cmpw(it, "format")) return 1; it_fwnw(it); if (it_cmpw(it, "ascii")) ph->format = PLY_ASCII; else if (it_cmpw(it, "binary_little_endian")) ph->format = PLY_BINARY_LE; else if (it_cmpw(it, "binary_big_endian")) ph->format = PLY_BINARY_BE; else return 1; it_fwnw(it); /* Read version */ ph->ver.maj = atoi((const char*)it->cur); it_fwb(it); ph->ver.min = atoi((const char*)it->cur); it_fwl(it); /* Skip comments */ while (it_cmpw(it, "comment")) it_fwl(it); /* Read elements */ while (!it_cmpw(it, "end_header")) { if (!it_cmpw(it, "element")) return 1; /* Allocate and read element */ ph->nelems++; ph->elems = realloc(ph->elems, ph->nelems * sizeof(*ph->elems)); struct ply_element* pe = ph->elems + (ph->nelems - 1); memset(pe, 0, sizeof(*pe)); ply_elem_read(pe, it); } it_fwl(it); return 0; } static int ply_element_entries_are_variable_size(struct ply_element* pe) { for (unsigned long i = 0; i < pe->nprops; ++i) if (pe->props[i].is_list) return 1; return 0; } static size_t ply_element_entries_size(struct ply_element* pe) { size_t sz = 0; for (unsigned long i = 0; i < pe->nprops; ++i) sz += ply_prop_type_sizes[pe->props[i].dtype]; return sz * pe->nentries; } static unsigned long ply_read_list_size(enum ply_prop_type pt, void* data) { unsigned long sz = 0; switch (pt) { case PLY_CHAR: case PLY_UCHAR: sz = *(uint8_t*)data; break; case PLY_SHORT: case PLY_USHORT: sz = *(uint16_t*)data; break; case PLY_INT: case PLY_UINT: sz = 
*(uint32_t*)data; break; case PLY_FLOAT: sz = *(float*)data; break; case PLY_DOUBLE: sz = *(double*)data; break; default: break; } return sz; } static int ply_data_read(struct ply_data* pd, struct ply_header* ph, struct data_iterator* it) { pd->nelems = ph->nelems; pd->elem_chunks = calloc(pd->nelems, sizeof(void*)); for (unsigned long i = 0; i < ph->nelems; ++i) { struct ply_element* pe = ph->elems + i; if (ph->format != PLY_ASCII) { /* TODO: Take care of endianess */ pd->elem_chunks[i] = (void*) it->cur; if (!ply_element_entries_are_variable_size(pe)) it_fw(it, ply_element_entries_size(pe)); else { /* Step through entries */ for (unsigned long j = 0; j < pe->nentries; ++j) { for (unsigned long k = 0; k < pe->nprops; ++k) { struct ply_property* pp = pe->props + k; if (!pp->is_list) it_fw(it, ply_prop_type_sizes[pp->dtype]); else { /* Get variable list size */ unsigned long sz = ply_read_list_size(pp->lsz_type, (void*)it->cur); it_fw(it, ply_prop_type_sizes[pp->lsz_type]); /* Forward by the number of list properties we got */ it_fw(it, sz * ply_prop_type_sizes[pp->dtype]); } } } } } else { assert(0 && "Unimplemented"); } } return 0; } static void ply_header_free(struct ply_header* ph) { for (unsigned long i = 0; i < ph->nelems; ++i) { struct ply_element* pe = ph->elems + i; for (unsigned long j = 0; j < pe->nprops; ++j) free((void*)pe->props[j].name); free((void*)pe->name); free(pe->props); } free(ph->elems); } static void ply_data_free(struct ply_data* pd, int from_ascii) { if (from_ascii) { for (unsigned long i = 0; i < pd->nelems; ++i) { free(pd->elem_chunks[i]); } } free(pd->elem_chunks); } static struct mesh* ply_read_mesh(struct ply_header* ph, struct ply_data* pd) { struct mesh* mesh = mesh_new(); for (unsigned long i = 0; i < ph->nelems; ++i) { struct ply_element* pe = ph->elems + i; void* elem_chunk = pd->elem_chunks[i]; if (strcmp(pe->name, "vertex") == 0) { /* Vertices */ mesh->num_verts = pe->nentries; mesh->vertices = realloc(mesh->vertices, 
mesh->num_verts * sizeof(struct vertex)); memset(mesh->vertices, 0, mesh->num_verts * sizeof(struct vertex)); /* Speedup */ int entrysz_varies = ply_element_entries_are_variable_size(pe); if (!entrysz_varies) { size_t entry_sz = ply_element_entries_size(pe) / pe->nentries; size_t xyz_ofs[3] = {0, 0, 0}; size_t cur_prop_ofs = 0; for (unsigned long j = 0; j < pe->nprops; ++j) { struct ply_property* pp = pe->props + j; if (strcmp(pp->name, "x") == 0) xyz_ofs[0] = cur_prop_ofs; else if (strcmp(pp->name, "y") == 0) xyz_ofs[1] = cur_prop_ofs; else if (strcmp(pp->name, "z") == 0) xyz_ofs[2] = cur_prop_ofs; cur_prop_ofs += ply_prop_type_sizes[pp->dtype]; } for (unsigned long j = 0; j < pe->nentries; ++j) { void* entryd = elem_chunk + j * entry_sz; struct vertex* v = mesh->vertices + j; v->position[0] = *(float*)(entryd + xyz_ofs[0]); v->position[1] = *(float*)(entryd + xyz_ofs[1]); v->position[2] = *(float*)(entryd + xyz_ofs[2]); } } else { } } else if (strcmp(pe->name, "tristrips") == 0) { /* Triangle Strips */ mesh->num_indices = 0; mesh->indices = realloc(mesh->indices, mesh->num_indices * sizeof(uint32_t)); struct ply_property* ve_prop = 0; for (unsigned long j = 0; j < pe->nprops; ++j) { struct ply_property* pp = pe->props + j; if (strcmp(pp->name, "vertex_indices") == 0) ve_prop = pp; } void* curd = elem_chunk; for (unsigned long j = 0; j < pe->nentries; ++j) { /* Entry's list size */ unsigned long list_sz = ply_read_list_size(ve_prop->lsz_type, curd); curd += ply_prop_type_sizes[ve_prop->lsz_type]; /* List item size */ unsigned long dsz = ply_prop_type_sizes[ve_prop->dtype]; /* Grow indice array */ mesh->indices = realloc(mesh->indices, (3 * (mesh->num_indices + list_sz)) * sizeof(uint32_t)); /* Store indices */ int prev[2] = {-1, -1}; for (unsigned long k = 0; k < list_sz; ++k) { int32_t indice = *(int32_t*)(curd + k * dsz); if (indice == -1) { prev[0] = prev[1] = -1; continue; } if (prev[0] == -1) { prev[0] = indice; continue; } if (prev[1] == -1) { prev[1] = 
indice; continue; } mesh->indices[mesh->num_indices + 0] = prev[0]; mesh->indices[mesh->num_indices + 1] = prev[1]; mesh->indices[mesh->num_indices + 2] = indice; mesh->num_indices += 3; prev[0] = prev[1]; prev[1] = indice; } curd += list_sz * dsz; } } else if (strcmp(pe->name, "face") == 0) { /* Faces */ mesh->num_indices = 0; mesh->indices = realloc(mesh->indices, mesh->num_indices * sizeof(uint32_t)); struct ply_property* ve_prop = 0; for (unsigned long j = 0; j < pe->nprops; ++j) { struct ply_property* pp = pe->props + j; if (strcmp(pp->name, "vertex_indices") == 0) ve_prop = pp; } void* curd = elem_chunk; for (unsigned long j = 0; j < pe->nentries; ++j) { /* Entry's list size */ unsigned long list_sz = ply_read_list_size(ve_prop->lsz_type, curd); curd += ply_prop_type_sizes[ve_prop->lsz_type]; /* List item size */ unsigned long dsz = ply_prop_type_sizes[ve_prop->dtype]; /* Grow indice array */ mesh->indices = realloc(mesh->indices, (mesh->num_indices + list_sz) * sizeof(uint32_t)); /* Store indices */ for (unsigned long k = 0; k < list_sz; ++k) { int32_t indice = *(int32_t*)(curd + k * dsz); mesh->indices[mesh->num_indices++] = indice; } curd += list_sz * dsz; } } } return mesh; } struct model* model_from_ply(const unsigned char* data, size_t sz) { /* Data iterator */ struct data_iterator it; it_init((&it), data, sz); /* Check magic */ if (it_rem((&it), 4) && strncmp((const char*)data, "ply\n", 4) != 0) { printf("Invalid ply header!\n"); return 0; } it_fwl((&it)); /* Read header and data */ struct ply_header ply_header; ply_header_read(&ply_header, &it); struct ply_data ply_data; ply_data_read(&ply_data, &ply_header, &it); /* Read mesh */ struct mesh* mesh = ply_read_mesh(&ply_header, &ply_data); mesh_generate_normals(mesh); mesh->mgroup_idx = 0; /* Setup model struct */ struct model* m = model_new(); m->num_meshes++; m->meshes = realloc(m->meshes, m->num_meshes * sizeof(struct mesh*)); m->meshes[m->num_meshes - 1] = mesh; m->num_materials = 1; /* Create and 
append the root mesh group */ struct mesh_group* mgroup = mesh_group_new(); mgroup->name = strdup("root_group"); m->num_mesh_groups++; m->mesh_groups = realloc(m->mesh_groups, m->num_mesh_groups * sizeof(struct mesh_group*)); m->mesh_groups[m->num_mesh_groups - 1] = mgroup; /* Free header and data */ ply_data_free(&ply_data, ply_header.format == PLY_ASCII); ply_header_free(&ply_header); return m; }