text stringlengths 1 1.05M |
|---|
<reponame>opentaps/opentaps-1
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.tests.domains;
import org.opentaps.domain.DomainsLoader;
import org.opentaps.foundation.infrastructure.Infrastructure;
import org.opentaps.foundation.infrastructure.User;
/**
 * Domains loader used by tests to exercise loading extra domains from an XML
 * definition file. The registered domains can then be retrieved through a
 * DomainsDirectory obtained from this loader.
 */
public class TestDomainsLoader extends DomainsLoader {

    /** XML file defining the test domains directory beans. */
    public static final String TEST_DOMAINS_DIRECTORY = "test-domains-directory.xml";

    /** Bean name of the test domain. */
    public static final String TEST_DOMAIN = "testDomain";

    /**
     * Default constructor.
     */
    public TestDomainsLoader() {
        super();
    }

    /**
     * Creates a new <code>TestDomainsLoader</code> instance and registers the test domains.
     *
     * @param infrastructure an <code>Infrastructure</code> value
     * @param user an <code>User</code> value
     */
    public TestDomainsLoader(Infrastructure infrastructure, User user) {
        super(infrastructure, user);
        registerDomains(TEST_DOMAINS_DIRECTORY);
    }
}
|
<filename>TOJ/toj 618.cpp
#include <bits/stdc++.h>
using namespace std;
// Prints the contest greeting and exits successfully.
int main() {
    std::cout << "Hello 8th junior programming camp practice contest!" << std::endl;
    return 0;
}
|
'use strict';

// replyStory view module: registers the /replyStory route and its controller.
var replyStoryModule = angular.module('myApp.replyStory', ['ngRoute']);

replyStoryModule.config(['$routeProvider', function ($routeProvider) {
  // Map the /replyStory URL to its template/controller pair.
  $routeProvider.when('/replyStory', {
    templateUrl: 'replyStory/replyStory.html',
    controller: 'replyStoryCtrl'
  });
}]);

// Placeholder controller; no scope behavior yet.
replyStoryModule.controller('replyStoryCtrl', [function () {}]);
|
<reponame>ArcheSpace/Arche.js<filename>packages/core/src/shader/ShaderData.ts
import { IClone } from "../clone/IClone";
import { Color, Matrix, Vector2, Vector3, Vector4 } from "@arche-engine/math";
import { IRefObject } from "../asset";
import { ShaderDataGroup } from "./ShaderDataGroup";
import { Shader } from "./Shader";
import { ShaderMacro } from "./ShaderMacro";
import { ShaderMacroCollection } from "./ShaderMacroCollection";
import { ShaderProperty } from "./ShaderProperty";
import { SampledTexture2D } from "../texture";
import { SampledTexture } from "../texture/SampledTexture";
import { Buffer } from "../graphic";
import { Engine } from "../Engine";
import { MacroName } from "./InternalMacroName";
import { ignoreClone } from "../clone/CloneManager";
export type ShaderPropertyResourceType = Buffer | SampledTexture;
/**
 * Shader data collection: per-group storage for shader property resources
 * (uniform buffers and sampled textures) plus the macro state used to select
 * shader variants.
 */
export class ShaderData implements IRefObject, IClone {
  // Shared scratch arrays reused by the scalar/vector setters so uploads go
  // through a single preallocated typed array instead of allocating per call.
  private static _intArray1: Int32Array = new Int32Array(1);
  private static _floatArray1: Float32Array = new Float32Array(1);
  private static _floatArray2: Float32Array = new Float32Array(2);
  private static _floatArray3: Float32Array = new Float32Array(3);
  private static _floatArray4: Float32Array = new Float32Array(4);

  /** @internal */
  @ignoreClone
  _index: number = -1;
  /** @internal */
  _group: ShaderDataGroup;
  /** @internal Resources keyed by ShaderProperty._uniqueId. */
  _propertyResources: Record<number, ShaderPropertyResourceType> = Object.create(null);
  /** @internal Lazy buffer providers, consulted by _getDataBuffer when no resource is stored. */
  _propertyFunctors: Record<number, () => Buffer> = Object.create(null);
  /** @internal */
  _macroCollection: ShaderMacroCollection = new ShaderMacroCollection();

  private readonly _engine: Engine;
  private _refCount: number = 0;

  /**
   * @internal
   * @param group - Shader data group this collection belongs to
   * @param engine - Engine used to allocate GPU buffers
   */
  constructor(group: ShaderDataGroup, engine: Engine) {
    this._engine = engine;
    this._group = group;
  }

  /**
   * Set a buffer functor by shader property name.
   * @remarks The functor is evaluated lazily by the buffer getters while no
   * concrete resource is stored for the property.
   * @param propertyName - Shader property name
   * @param value - Functor returning the buffer
   */
  setBufferFunctor(propertyName: string, value: () => Buffer): void;

  /**
   * Set a buffer functor by shader property.
   * @param property - Shader property
   * @param value - Functor returning the buffer
   */
  setBufferFunctor(property: ShaderProperty, value: () => Buffer): void;

  setBufferFunctor(property: string | ShaderProperty, value: () => Buffer): void {
    property = this._resolveProperty(property);
    // A concrete resource takes precedence over a functor.
    if (this._propertyResources[property._uniqueId] == undefined) {
      this._propertyFunctors[property._uniqueId] = value;
    }
  }

  //--------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a float by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the float
   */
  getFloat(propertyID: number): Buffer;
  /**
   * Get the buffer storing a float by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the float
   */
  getFloat(propertyName: string): Buffer;
  /**
   * Get the buffer storing a float by shader property.
   * @param property - Shader property
   * @returns Buffer holding the float
   */
  getFloat(property: ShaderProperty): Buffer;
  getFloat(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set float by shader property name.
   * @remarks Corresponds to the float shader property type.
   * @param propertyName - Shader property name
   * @param value - Float
   */
  setFloat(propertyName: string, value: number): void;
  /**
   * Set float by shader property.
   * @remarks Corresponds to the float shader property type.
   * @param property - Shader property
   * @param value - Float
   */
  setFloat(property: ShaderProperty, value: number): void;
  setFloat(property: string | ShaderProperty, value: number): void {
    ShaderData._floatArray1[0] = value;
    this._setDataBuffer(property, ShaderData._floatArray1);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing an int by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the int
   */
  getInt(propertyID: number): Buffer;
  /**
   * Get the buffer storing an int by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the int
   */
  getInt(propertyName: string): Buffer;
  /**
   * Get the buffer storing an int by shader property.
   * @param property - Shader property
   * @returns Buffer holding the int
   */
  getInt(property: ShaderProperty): Buffer;
  getInt(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set int by shader property name.
   * @remarks Corresponds to the int and bool shader property types.
   * @param propertyName - Shader property name
   * @param value - Int
   */
  setInt(propertyName: string, value: number): void;
  /**
   * Set int by shader property.
   * @remarks Corresponds to the int and bool shader property types.
   * @param property - Shader property
   * @param value - Int
   */
  setInt(property: ShaderProperty, value: number): void;
  setInt(property: string | ShaderProperty, value: number): void {
    ShaderData._intArray1[0] = value;
    this._setDataBuffer(property, ShaderData._intArray1);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a float array by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the float array
   */
  getFloatArray(propertyID: number): Buffer;
  /**
   * Get the buffer storing a float array by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the float array
   */
  getFloatArray(propertyName: string): Buffer;
  /**
   * Get the buffer storing a float array by shader property.
   * @param property - Shader property
   * @returns Buffer holding the float array
   */
  getFloatArray(property: ShaderProperty): Buffer;
  getFloatArray(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set float array by shader property name.
   * @remarks Corresponds to the float/vec2/vec3/vec4/matrix array shader property types.
   * @param propertyName - Shader property name
   * @param value - Float array
   */
  setFloatArray(propertyName: string, value: Float32Array): void;
  /**
   * Set float array by shader property.
   * @remarks Corresponds to the float/vec2/vec3/vec4/matrix array shader property types.
   * @param property - Shader property
   * @param value - Float array
   */
  setFloatArray(property: ShaderProperty, value: Float32Array): void;
  setFloatArray(property: string | ShaderProperty, value: Float32Array): void {
    this._setDataBuffer(property, value);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing an int array by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the int array
   */
  getIntArray(propertyID: number): Buffer;
  /**
   * Get the buffer storing an int array by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the int array
   */
  getIntArray(propertyName: string): Buffer;
  /**
   * Get the buffer storing an int array by shader property.
   * @param property - Shader property
   * @returns Buffer holding the int array
   */
  getIntArray(property: ShaderProperty): Buffer;
  getIntArray(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set int array by shader property name.
   * @remarks Corresponds to the bool/int array and bvec/ivec array shader property types.
   * @param propertyName - Shader property name
   * @param value - Int array
   */
  setIntArray(propertyName: string, value: Int32Array): void;
  /**
   * Set int array by shader property.
   * @remarks Corresponds to the bool/int array and bvec/ivec array shader property types.
   * @param property - Shader property
   * @param value - Int array
   */
  setIntArray(property: ShaderProperty, value: Int32Array): void;
  setIntArray(property: string | ShaderProperty, value: Int32Array): void {
    this._setDataBuffer(property, value);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a two-dimensional vector by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the two-dimensional vector
   */
  getVector2(propertyID: number): Buffer;
  /**
   * Get the buffer storing a two-dimensional vector by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the two-dimensional vector
   */
  getVector2(propertyName: string): Buffer;
  /**
   * Get the buffer storing a two-dimensional vector by shader property.
   * @param property - Shader property
   * @returns Buffer holding the two-dimensional vector
   */
  getVector2(property: ShaderProperty): Buffer;
  getVector2(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set two-dimensional vector by shader property name.
   * @remarks Corresponds to the vec2/ivec2/bvec2 shader property types.
   * @param property - Shader property name
   * @param value - Two-dimensional vector
   */
  setVector2(property: string, value: Vector2): void;
  /**
   * Set two-dimensional vector by shader property.
   * @remarks Corresponds to the vec2/ivec2/bvec2 shader property types.
   * @param property - Shader property
   * @param value - Two-dimensional vector
   */
  setVector2(property: ShaderProperty, value: Vector2): void;
  setVector2(property: string | ShaderProperty, value: Vector2): void {
    ShaderData._floatArray2[0] = value.x;
    ShaderData._floatArray2[1] = value.y;
    this._setDataBuffer(property, ShaderData._floatArray2);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a three-dimensional vector by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the three-dimensional vector
   */
  getVector3(propertyID: number): Buffer;
  /**
   * Get the buffer storing a three-dimensional vector by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the three-dimensional vector
   */
  getVector3(propertyName: string): Buffer;
  /**
   * Get the buffer storing a three-dimensional vector by shader property.
   * @param property - Shader property
   * @returns Buffer holding the three-dimensional vector
   */
  getVector3(property: ShaderProperty): Buffer;
  getVector3(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set three-dimensional vector by shader property name.
   * @remarks Corresponds to the vec3/ivec3/bvec3 shader property types.
   * @param property - Shader property name
   * @param value - Three-dimensional vector
   */
  setVector3(property: string, value: Vector3): void;
  /**
   * Set three-dimensional vector by shader property.
   * @remarks Corresponds to the vec3/ivec3/bvec3 shader property types.
   * @param property - Shader property
   * @param value - Three-dimensional vector
   */
  setVector3(property: ShaderProperty, value: Vector3): void;
  setVector3(property: string | ShaderProperty, value: Vector3): void {
    ShaderData._floatArray3[0] = value.x;
    ShaderData._floatArray3[1] = value.y;
    ShaderData._floatArray3[2] = value.z;
    this._setDataBuffer(property, ShaderData._floatArray3);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a four-dimensional vector by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the four-dimensional vector
   */
  getVector4(propertyID: number): Buffer;
  /**
   * Get the buffer storing a four-dimensional vector by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the four-dimensional vector
   */
  getVector4(propertyName: string): Buffer;
  /**
   * Get the buffer storing a four-dimensional vector by shader property.
   * @param property - Shader property
   * @returns Buffer holding the four-dimensional vector
   */
  getVector4(property: ShaderProperty): Buffer;
  getVector4(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set four-dimensional vector by shader property name.
   * @remarks Corresponds to the vec4/ivec4/bvec4 shader property types.
   * @param property - Shader property name
   * @param value - Four-dimensional vector
   */
  setVector4(property: string, value: Vector4): void;
  /**
   * Set four-dimensional vector by shader property.
   * @remarks Corresponds to the vec4/ivec4/bvec4 shader property types.
   * @param property - Shader property
   * @param value - Four-dimensional vector
   */
  setVector4(property: ShaderProperty, value: Vector4): void;
  setVector4(property: string | ShaderProperty, value: Vector4): void {
    ShaderData._floatArray4[0] = value.x;
    ShaderData._floatArray4[1] = value.y;
    ShaderData._floatArray4[2] = value.z;
    ShaderData._floatArray4[3] = value.w;
    this._setDataBuffer(property, ShaderData._floatArray4);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a matrix by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the matrix
   */
  getMatrix(propertyID: number): Buffer;
  /**
   * Get the buffer storing a matrix by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the matrix
   */
  getMatrix(propertyName: string): Buffer;
  /**
   * Get the buffer storing a matrix by shader property.
   * @param property - Shader property
   * @returns Buffer holding the matrix
   */
  getMatrix(property: ShaderProperty): Buffer;
  getMatrix(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set matrix by shader property name.
   * @remarks Corresponds to the matrix shader property type.
   * @param propertyName - Shader property name
   * @param value - Matrix
   */
  setMatrix(propertyName: string, value: Matrix): void;
  /**
   * Set matrix by shader property.
   * @remarks Corresponds to the matrix shader property type.
   * @param property - Shader property
   * @param value - Matrix
   */
  setMatrix(property: ShaderProperty, value: Matrix): void;
  setMatrix(property: string | ShaderProperty, value: Matrix): void {
    // Matrix.elements is accepted by _setDataBuffer (Float32Array | Int32Array).
    this._setDataBuffer(property, value.elements);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the buffer storing a color by shader property id.
   * @param propertyID - Shader property id
   * @returns Buffer holding the color
   */
  getColor(propertyID: number): Buffer;
  /**
   * Get the buffer storing a color by shader property name.
   * @param propertyName - Shader property name
   * @returns Buffer holding the color
   */
  getColor(propertyName: string): Buffer;
  /**
   * Get the buffer storing a color by shader property.
   * @param property - Shader property
   * @returns Buffer holding the color
   */
  getColor(property: ShaderProperty): Buffer;
  getColor(property: number | string | ShaderProperty): Buffer {
    return this._getDataBuffer(property);
  }

  /**
   * Set color by shader property name.
   * @remarks Corresponds to the vec4 shader property type (r, g, b, a).
   * @param propertyName - Shader property name
   * @param value - Color
   */
  setColor(propertyName: string, value: Color): void;
  /**
   * Set color by shader property.
   * @remarks Corresponds to the vec4 shader property type (r, g, b, a).
   * @param property - Shader property
   * @param value - Color
   */
  setColor(property: ShaderProperty, value: Color): void;
  setColor(property: string | ShaderProperty, value: Color): void {
    ShaderData._floatArray4[0] = value.r;
    ShaderData._floatArray4[1] = value.g;
    ShaderData._floatArray4[2] = value.b;
    ShaderData._floatArray4[3] = value.a;
    this._setDataBuffer(property, ShaderData._floatArray4);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Get the texture view of a sampled texture by shader property id.
   * @param propertyID - Shader property id
   * @returns Texture view
   */
  getTextureView(propertyID: number): GPUTextureView;
  /**
   * Get the texture view of a sampled texture by shader property name.
   * @param propertyName - Shader property name
   * @returns Texture view
   */
  getTextureView(propertyName: string): GPUTextureView;
  /**
   * Get the texture view of a sampled texture by shader property.
   * @param property - Shader property
   * @returns Texture view
   */
  getTextureView(property: ShaderProperty): GPUTextureView;
  getTextureView(property: number | string | ShaderProperty): GPUTextureView {
    return this._getTextureView(property);
  }

  /**
   * Get the sampler of a sampled texture by shader property id.
   * @param propertyID - Shader property id
   * @returns Sampler
   */
  getSampler(propertyID: number): GPUSampler;
  /**
   * Get the sampler of a sampled texture by shader property name.
   * @param propertyName - Shader property name
   * @returns Sampler
   */
  getSampler(propertyName: string): GPUSampler;
  /**
   * Get the sampler of a sampled texture by shader property.
   * @param property - Shader property
   * @returns Sampler
   */
  getSampler(property: ShaderProperty): GPUSampler;
  getSampler(property: number | string | ShaderProperty): GPUSampler {
    return this._getSampler(property);
  }

  /**
   * Set a sampled texture by shader property names.
   * @param textureName - Shader property name bound to the texture view
   * @param samplerName - Shader property name bound to the sampler
   * @param value - Sampled texture
   */
  setSampledTexture(textureName: string, samplerName: string, value: SampledTexture): void;
  /**
   * Set a sampled texture by shader properties.
   * @param textureProperty - Shader property bound to the texture view
   * @param samplerProperty - Shader property bound to the sampler
   * @param value - Sampled texture
   */
  setSampledTexture(textureProperty: ShaderProperty, samplerProperty: ShaderProperty, value: SampledTexture): void;
  setSampledTexture(
    textureProperty: string | ShaderProperty,
    samplerProperty: string | ShaderProperty,
    value: SampledTexture
  ): void {
    this._setSampledTexture(textureProperty, samplerProperty, value);
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * Enable macro.
   * @param macroName - Macro name
   */
  enableMacro(macroName: MacroName): void;
  /**
   * Enable macro.
   * @param macroName - Macro name
   */
  enableMacro(macroName: string): void;
  /**
   * Enable macro.
   * @param macro - Shader macro
   */
  enableMacro(macro: ShaderMacro): void;
  /**
   * Enable macro.
   * @remarks Name and value combine into one macro, equal to the macro "name value".
   * @param name - Macro name
   * @param value - Macro value
   */
  enableMacro(name: string, value: string): void;
  // `string | null` keeps the default legal under strictNullChecks.
  enableMacro(macro: string | ShaderMacro, value: string | null = null): void {
    this._macroCollection.enableMacro(macro, value);
  }

  /**
   * Disable macro.
   * @param macroName - Macro name
   */
  disableMacro(macroName: MacroName): void;
  /**
   * Disable macro.
   * @param macroName - Macro name
   */
  disableMacro(macroName: string): void;
  /**
   * Disable macro.
   * @param macro - Shader macro
   */
  disableMacro(macro: ShaderMacro): void;
  disableMacro(macro: string | ShaderMacro): void {
    this._macroCollection.disableMacro(macro);
  }

  /**
   * Create a copy of this shader data in the same group and engine.
   * @returns The copy
   */
  clone(): ShaderData {
    const shaderData = new ShaderData(this._group, this._engine);
    this.cloneTo(shaderData);
    return shaderData;
  }

  /**
   * Copy this shader data into target.
   * @remarks NOTE(review): currently a no-op — the per-resource cloning below
   * is disabled; confirm the intended clone strategy for GPU-backed resources
   * before relying on clone()/cloneTo().
   * @param target - Destination shader data
   */
  cloneTo(target: ShaderData): void {
    // CloneManager.deepCloneObject(this._macroCollection, target._macroCollection);
    // Object.assign(target._variableMacros, this._variableMacros);
    //
    // const properties = this._propertyResources;
    // const targetProperties = target._propertyResources;
    // const keys = Object.keys(properties);
    // for (let i = 0, n = keys.length; i < n; i++) {
    //   const k = keys[i];
    //   const property: ShaderPropertyResourceType = properties[k];
    //   if (property != null) {
    //     if (typeof property === "number") {
    //       targetProperties[k] = property;
    //     } else if (property instanceof SamplerTexture2D) {
    //       targetProperties[k] = property;
    //     } else if (property instanceof Array || property instanceof Float32Array || property instanceof Int32Array) {
    //       targetProperties[k] = property.slice();
    //     } else {
    //       const targetProperty = targetProperties[k];
    //       if (targetProperty) {
    //         property.cloneTo(targetProperty);
    //       } else {
    //         targetProperties[k] = property.clone();
    //       }
    //     }
    //   } else {
    //     targetProperties[k] = property;
    //   }
    // }
  }

  //------------------------------------------------------------------------------------------------------------------
  /**
   * @internal
   * Upload a typed array into the uniform buffer bound to the property,
   * creating the buffer on first use.
   */
  _setDataBuffer(property: string | ShaderProperty, value: Float32Array | Int32Array): void {
    property = this._resolveProperty(property);
    if (this._propertyResources[property._uniqueId] == undefined) {
      this._propertyResources[property._uniqueId] = new Buffer(
        this._engine,
        value.byteLength,
        GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
      );
    }
    (<Buffer>this._propertyResources[property._uniqueId]).uploadData(value, 0, 0, value.length);
  }

  /**
   * @internal
   * Bind a sampled texture to both its texture and sampler properties.
   * NOTE(review): like the original, an already-stored resource is never
   * overwritten — confirm whether re-binding should replace it.
   */
  _setSampledTexture(
    texProperty: string | ShaderProperty,
    sampleProperty: string | ShaderProperty,
    value: SampledTexture
  ): void {
    // texture
    const tex = this._resolveProperty(texProperty);
    if (this._propertyResources[tex._uniqueId] == undefined) {
      this._propertyResources[tex._uniqueId] = value;
    }
    // sampler
    const sampler = this._resolveProperty(sampleProperty);
    if (this._propertyResources[sampler._uniqueId] == undefined) {
      this._propertyResources[sampler._uniqueId] = value;
    }
  }

  /**
   * @internal
   * Fetch the buffer stored for the property, falling back to the registered
   * functor (see setBufferFunctor) when no resource is stored.
   */
  _getDataBuffer(property: number | string | ShaderProperty): Buffer {
    const id = this._resolveUniqueId(property);
    let buffer = this._propertyResources[id] as Buffer;
    if (buffer == null) {
      const functor = this._propertyFunctors[id];
      if (functor !== undefined) {
        buffer = functor();
      }
    }
    return buffer;
  }

  /**
   * @internal
   * Fetch the sampler of the sampled texture stored for the property.
   */
  _getSampler(property: number | string | ShaderProperty): GPUSampler {
    return (<SampledTexture>this._propertyResources[this._resolveUniqueId(property)]).sampler;
  }

  /**
   * @internal
   * Fetch the texture view of the sampled texture stored for the property.
   */
  _getTextureView(property: number | string | ShaderProperty): GPUTextureView {
    return (<SampledTexture>this._propertyResources[this._resolveUniqueId(property)]).textureView;
  }

  /**
   * @internal
   */
  _getRefCount(): number {
    return this._refCount;
  }

  /**
   * @internal
   * Propagate reference counting to owned 2D sampled textures.
   */
  _addRefCount(value: number): void {
    this._refCount += value;
    const properties = this._propertyResources;
    for (const k in properties) {
      const property = properties[k];
      // @todo: Separate array to speed performance.
      if (property instanceof SampledTexture2D) {
        property._addRefCount(value);
      }
    }
  }

  /**
   * Resolve a property name or ShaderProperty and bind it to this data's
   * group; throws when it is already bound to a different group.
   */
  private _resolveProperty(property: string | ShaderProperty): ShaderProperty {
    if (typeof property === "string") {
      property = Shader.getPropertyByName(property);
    }
    if (property._group !== this._group) {
      if (property._group === undefined) {
        property._group = this._group;
      } else {
        // NOTE(review): throwing a raw string mirrors the original behavior;
        // consider `throw new Error(...)` in a follow-up.
        throw `Shader property ${property.name} has been used as ${ShaderDataGroup[property._group]} property.`;
      }
    }
    return property;
  }

  /**
   * Resolve a property id, name, or ShaderProperty to its unique id.
   */
  private _resolveUniqueId(property: number | string | ShaderProperty): number {
    if (typeof property === "number") {
      return property;
    }
    if (typeof property === "string") {
      return Shader.getPropertyByName(property)._uniqueId;
    }
    return property._uniqueId;
  }
}
|
/**
* Copyright 2017 iovation, Inc.
* <p>
* Licensed under the MIT License.
* You may not use this file except in compliance with the License.
* A copy of the License is located in the "LICENSE.txt" file accompanying
* this file. This file is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iovation.launchkey.sdk.client;
import com.iovation.launchkey.sdk.domain.PublicKey;
import com.iovation.launchkey.sdk.domain.organization.Directory;
import com.iovation.launchkey.sdk.error.*;
import java.security.interfaces.RSAPublicKey;
import java.util.Date;
import java.util.List;
import java.util.UUID;
public interface OrganizationClient extends ServiceManagingClient {
/**
 * Create a new Directory
 *
 * @param name Name of the Directory to create
 * @return ID of the Directory created
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshalled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
UUID createDirectory(String name)
throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
InvalidCredentialsException, CommunicationErrorException, MarshallingError,
CryptographyError;
/**
 * Update a Directory
 *
 * @param directoryId ID of the Directory you wish to update
 * @param active Should the Directory be active
 * @param androidKey GCM push key
 * @param iosP12 APNS push certificate in .P12 format that has been Base64 Encoded
 * @param denialContextInquiryEnabled Should the user be prompted for denial context when they deny authorization
 * requests for any and all child services.
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshalled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
void updateDirectory(UUID directoryId, Boolean active, String androidKey, String iosP12, Boolean denialContextInquiryEnabled)
throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
InvalidCredentialsException, CommunicationErrorException, MarshallingError,
CryptographyError;
/**
 * Update a Directory
 *
 * @param directoryId ID of the Directory you wish to update
 * @param active Should the Directory be active
 * @param androidKey GCM push key
 * @param iosP12 APNS push certificate in .P12 format that has been Base64 Encoded
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshalled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @deprecated Please use {@link #updateDirectory(UUID, Boolean, String, String, Boolean)}
 */
@Deprecated
void updateDirectory(UUID directoryId, Boolean active, String androidKey, String iosP12)
throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
InvalidCredentialsException, CommunicationErrorException, MarshallingError,
CryptographyError;
/**
 * Get a single Directory by its ID
 *
 * @param directoryId ID of the Directory you wish to get
 * @return Directory with the ID provided
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @see #getDirectories(List)
 */
Directory getDirectory(UUID directoryId)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Get a list of Directories
 *
 * @param directoryIds List of IDs of the Directories you wish to get
 * @return Directories with the IDs provided
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
List<Directory> getDirectories(List<UUID> directoryIds)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Get all Directories
 *
 * @return All Directories for the Organization
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @see #getDirectories(List)
 */
List<Directory> getAllDirectories()
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Request the Platform API to generate a new Authenticator SDK Key and add to the Directory identified by the
 * provided Directory ID. Once generated and added, it will be returned as the response.
 *
 * @param directoryId ID of the Directory you wish to generate and add an Authenticator SDK Key
 * @return The new Authenticator SDK Key that was generated and added to the Directory
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
UUID generateAndAddDirectorySdkKey(UUID directoryId)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Remove the provided Authenticator SDK Key from the Directory identified by the provided Directory ID.
 *
 * @param directoryId ID of the Directory from which you wish to remove an Authenticator SDK Key
 * @param sdkKey The Authenticator SDK Key to remove
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
void removeDirectorySdkKey(UUID directoryId, UUID sdkKey)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Get all Authenticator SDK Keys for the Directory identified by the provided Directory ID.
 *
 * @param directoryId ID of the Directory whose Authenticator SDK Keys you wish to retrieve
 * @return All Authenticator SDK Keys for the Directory
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
List<UUID> getAllDirectorySdkKeys(UUID directoryId)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Get a list of Public Keys for a Directory
 *
 * @param directoryId ID of the Directory for which you wish to retrieve Public Keys
 * @return Public Keys for the Directory whose ID was provided
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @see #addDirectoryPublicKey(UUID, RSAPublicKey, Boolean, Date)
 */
List<PublicKey> getDirectoryPublicKeys(UUID directoryId)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Add a Public Key for a Directory
 *
 * @param directoryId ID of the Directory for which you wish to add a Public Key
 * @param publicKey RSA Public key to be added
 * @param active Will the Public Key be active upon creation
 * @param expires When will the Public Key expire
 * @return Key ID for the created key. This will be used to identify the key in subsequent
 * calls for this Public Key.
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @see #updateDirectoryPublicKey(UUID, String, Boolean, Date)
 */
String addDirectoryPublicKey(UUID directoryId, RSAPublicKey publicKey, Boolean active, Date expires)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Update a Public Key for a Directory
 *
 * @param directoryId ID of the Directory for which you wish to update the Public Key
 * @param keyId MD5 fingerprint of the RSA public key used to identify the Public Key
 * @param active Will the Public Key be active
 * @param expires When will the Public Key expire
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 * @see #addDirectoryPublicKey(UUID, RSAPublicKey, Boolean, Date)
 */
void updateDirectoryPublicKey(UUID directoryId, String keyId, Boolean active, Date expires)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
/**
 * Remove a Public Key from a Directory. You may not remove the only Public Key from a Directory. You may use
 * {@link #updateDirectoryPublicKey(UUID, String, Boolean, Date)} to mark the key as inactive if it should no longer
 * be able to be used.
 *
 * @param directoryId ID of the Directory for which you wish to remove the Public Key
 * @param keyId MD5 fingerprint of the RSA public key used to identify the Public Key
 * @throws InvalidResponseException When the response JWT is missing or does not pass validation, when the response
 * content hash does not match the value in the JWT, or when the JWE in the body fails validation, or the decrypted
 * JWE in the body cannot be parsed or mapped to the expected data.
 * @throws InvalidRequestException When the Platform API returns a 400 Bad Request HTTP Status
 * @throws InvalidCredentialsException When the Platform API returns a 401 Unauthorized or 403 Forbidden HTTP Status
 * @throws PlatformErrorException When the Platform API returns an unexpected HTTP Status
 * @throws UnknownEntityException When the Platform API returns a 404 Not Found HTTP Status.
 * @throws CommunicationErrorException When the HTTP client is unable to connect to the Platform API, cannot
 * negotiate TLS with the Platform API, or is disconnected while sending or receiving a message from the
 * Platform API.
 * @throws InvalidStateException When the SDK does not have the proper resource to perform an action. This is most
 * often due to invalid dependencies being provided or algorithms not being supported by the JCE provider.
 * @throws MarshallingError When the response cannot be marshaled
 * @throws CryptographyError When there is an error encrypting and signing the request or decrypting and verifying
 * the signature of the response
 */
void removeDirectoryPublicKey(UUID directoryId, String keyId)
        throws PlatformErrorException, UnknownEntityException, InvalidResponseException, InvalidStateException,
        InvalidCredentialsException, CommunicationErrorException, MarshallingError,
        CryptographyError;
} |
package com.trackorjargh.javarepository;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import com.trackorjargh.javaclass.Shows;
public interface ShowRepository extends JpaRepository<Shows, Long>{

    /** @return the highest show id currently stored, or null when the table is empty */
    @Query(value="Select max(id) from Shows", nativeQuery=true)
    Long findLastId();

    /**
     * Most recently added shows, newest first.
     *
     * @param additions maximum number of shows to return
     * @return up to {@code additions} shows ordered by id descending
     */
    @Query(value = "Select * from Shows order by id desc limit ?1", nativeQuery = true)
    List<Shows> findByLastAdded(int additions);

    /**
     * Shows ordered by average user rating (best first).
     * FIX: the count query used GROUP BY, which yields one row per rated show
     * instead of a single total; COUNT(DISTINCT SHOW_ID) returns the real total
     * element count for pagination.
     */
    @Query(value = "SELECT SHOWS.* FROM POINT_SHOW INNER JOIN SHOWS ON POINT_SHOW.SHOW_ID=SHOWS.ID GROUP BY POINT_SHOW.SHOW_ID ORDER BY AVG(POINT_SHOW.POINTS) DESC \n-- #pageable\n",
            countQuery = "SELECT COUNT(DISTINCT SHOW_ID) FROM POINT_SHOW",
            nativeQuery = true)
    Page<Shows> findBestPointShow(Pageable pageable);

    /**
     * Shows belonging to the gender (genre) whose name matches, case-insensitively.
     *
     * @param gender gender name pattern (used with LIKE)
     */
    @Query(value = "SELECT SHOWS.* FROM SHOWS_GENDERS INNER JOIN SHOWS ON SHOWS_GENDERS.SHOWS_ID=SHOWS.ID WHERE SHOWS_GENDERS.GENDERS_ID IN (SELECT ID FROM GENDER WHERE LOWER(NAME) LIKE LOWER(?1)) \n-- #pageable\n",
            countQuery = "SELECT COUNT(SHOWS.ID) FROM SHOWS_GENDERS INNER JOIN SHOWS ON SHOWS_GENDERS.SHOWS_ID=SHOWS.ID WHERE SHOWS_GENDERS.GENDERS_ID IN (SELECT ID FROM GENDER WHERE LOWER(NAME) LIKE LOWER(?1))",
            nativeQuery = true)
    Page<Shows> findShowsByGender(String gender, Pageable pageable);

    /**
     * Shows that share at least one gender (genre) with the given show, excluding the show itself.
     * FIX: the sub-select previously read FILM_GENDERS/FILMS_ID (copy-paste from the
     * film repository) although {@code id} is a show id; related shows must be looked
     * up through SHOWS_GENDERS/SHOWS_ID.
     *
     * @param id id of the reference show
     */
    @Query(value = "SELECT SHOWS.* FROM SHOWS_GENDERS INNER JOIN SHOWS ON SHOWS_GENDERS.SHOWS_ID=SHOWS.ID WHERE SHOWS_GENDERS.GENDERS_ID IN (SELECT GENDERS_ID FROM SHOWS_GENDERS WHERE SHOWS_ID = ?1) AND SHOWS.ID != ?1 \n-- #pageable\n",
            countQuery = "SELECT COUNT(SHOWS.ID) FROM SHOWS_GENDERS INNER JOIN SHOWS ON SHOWS_GENDERS.SHOWS_ID=SHOWS.ID WHERE SHOWS_GENDERS.GENDERS_ID IN (SELECT GENDERS_ID FROM SHOWS_GENDERS WHERE SHOWS_ID = ?1) AND SHOWS.ID != ?1",
            nativeQuery = true)
    Page<Shows> findShowsRelationsById(long id, Pageable pageable);

    /** Substring search on the show name, case-insensitive. */
    Page<Shows> findByNameContainingIgnoreCase(String name, Pageable pageable);

    /** Lookup by primary key; null when no show has the id. */
    Shows findById(Long id);

    /** Exact name match, case-insensitive; null when no show has the name. */
    Shows findByNameIgnoreCase(String name);
}
|
#!/bin/bash
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Build out-of-source in ./build; create the directory on first use.
mkdir -p build
cd build || exit

# Locate the installed mindspore-ascend package so CMake can find MindSpore.
# Abort before running make if configuration fails.
cmake .. \
    -DMINDSPORE_PATH="$(pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath)" || exit
make
|
<reponame>LLcat1217/RxJava<filename>src/test/java/io/reactivex/observers/SafeObserverTest.java
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.observers;
import static org.junit.Assert.*;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.*;
import io.reactivex.*;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.*;
import io.reactivex.plugins.RxJavaPlugins;
public class SafeObserverTest {
@Test
public void onNextFailure() {
    // Without a SafeObserver wrapper, an exception thrown from onNext
    // propagates straight to the caller and onError is never invoked.
    AtomicReference<Throwable> onError = new AtomicReference<Throwable>();
    try {
        OBSERVER_ONNEXT_FAIL(onError).onNext("one");
        fail("expects exception to be thrown");
    } catch (Exception e) {
        assertNull(onError.get());
        assertTrue(e instanceof SafeObserverTestException);
        assertEquals("onNextFail", e.getMessage());
    }
}
@Test
public void onNextFailureSafe() {
    // Wrapped in SafeObserver, the same onNext failure is routed to the
    // downstream onError instead of being thrown at the caller.
    AtomicReference<Throwable> onError = new AtomicReference<Throwable>();
    try {
        SafeObserver<String> safeObserver = new SafeObserver<String>(OBSERVER_ONNEXT_FAIL(onError));
        safeObserver.onSubscribe(Disposables.empty());
        safeObserver.onNext("one");
        assertNotNull(onError.get());
        assertTrue(onError.get() instanceof SafeObserverTestException);
        assertEquals("onNextFail", onError.get().getMessage());
    } catch (Exception e) {
        fail("expects exception to be passed to onError");
    }
}
@Test
public void onCompleteFailure() {
    // An unwrapped observer rethrows an exception from onComplete;
    // onError is not invoked in that case.
    AtomicReference<Throwable> onError = new AtomicReference<Throwable>();
    try {
        OBSERVER_ONCOMPLETED_FAIL(onError).onComplete();
        fail("expects exception to be thrown");
    } catch (Exception e) {
        assertNull(onError.get());
        assertTrue(e instanceof SafeObserverTestException);
        assertEquals("onCompleteFail", e.getMessage());
    }
}
@Test
public void onErrorFailure() {
    // An unwrapped observer whose onError itself throws propagates
    // that secondary exception to the caller.
    try {
        OBSERVER_ONERROR_FAIL().onError(new SafeObserverTestException("error!"));
        fail("expects exception to be thrown");
    } catch (Exception e) {
        assertTrue(e instanceof SafeObserverTestException);
        assertEquals("onErrorFail", e.getMessage());
    }
}
@Test
@Ignore("Observers can't throw")
public void onErrorFailureSafe() {
    // Legacy behavior (pre-2.x): a SafeObserver whose downstream onError threw
    // was expected to surface a CompositeException carrying both the original
    // error and the onError failure. Kept @Ignore'd as documentation.
    try {
        new SafeObserver<String>(OBSERVER_ONERROR_FAIL()).onError(new SafeObserverTestException("error!"));
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        assertTrue(e instanceof RuntimeException);
        assertEquals("Error occurred when trying to propagate error to Observer.onError", e.getMessage());
        Throwable e2 = e.getCause();
        assertTrue(e2 instanceof CompositeException);
        List<Throwable> innerExceptions = ((CompositeException) e2).getExceptions();
        assertEquals(2, innerExceptions.size());
        Throwable e3 = innerExceptions.get(0);
        assertTrue(e3 instanceof SafeObserverTestException);
        assertEquals("error!", e3.getMessage());
        Throwable e4 = innerExceptions.get(1);
        assertTrue(e4 instanceof SafeObserverTestException);
        assertEquals("onErrorFail", e4.getMessage());
    }
}
@Test
@Ignore("Observers can't throw")
public void onErrorNotImplementedFailureSafe() {
    // Legacy behavior: an observer with no real onError implementation wrapped in
    // SafeObserver was expected to rethrow with the original error as the cause.
    try {
        new SafeObserver<String>(OBSERVER_ONERROR_NOTIMPLEMENTED()).onError(new SafeObserverTestException("error!"));
        fail("expects exception to be thrown");
    } catch (Exception e) {
        // assertTrue(e instanceof OnErrorNotImplementedException);
        assertTrue(e.getCause() instanceof SafeObserverTestException);
        assertEquals("error!", e.getCause().getMessage());
    }
}
@Test
public void onNextOnErrorFailure() {
    // Unwrapped observer where both onNext and onError throw: the onNext
    // exception reaches the caller directly (onError is never entered).
    try {
        OBSERVER_ONNEXT_ONERROR_FAIL().onNext("one");
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        assertTrue(e instanceof SafeObserverTestException);
        assertEquals("onNextFail", e.getMessage());
    }
}
@Test
@Ignore("Observers can't throw")
public void onNextOnErrorFailureSafe() {
    // Legacy behavior: SafeObserver around an observer whose onNext and onError
    // both throw was expected to surface a CompositeException with both failures.
    try {
        new SafeObserver<String>(OBSERVER_ONNEXT_ONERROR_FAIL()).onNext("one");
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        assertTrue(e instanceof RuntimeException);
        assertEquals("Error occurred when trying to propagate error to Observer.onError", e.getMessage());
        Throwable e2 = e.getCause();
        assertTrue(e2 instanceof CompositeException);
        List<Throwable> innerExceptions = ((CompositeException) e2).getExceptions();
        assertEquals(2, innerExceptions.size());
        Throwable e3 = innerExceptions.get(0);
        assertTrue(e3 instanceof SafeObserverTestException);
        assertEquals("onNextFail", e3.getMessage());
        Throwable e4 = innerExceptions.get(1);
        assertTrue(e4 instanceof SafeObserverTestException);
        assertEquals("onErrorFail", e4.getMessage());
    }
}
/**
 * Disposable that deliberately violates the Disposable contract by throwing
 * from dispose(); used to exercise the unsubscribe-failure test paths below.
 */
static final Disposable THROWING_DISPOSABLE = new Disposable() {

    @Override
    public boolean isDisposed() {
        // always reports "not disposed"
        return false;
    }

    @Override
    public void dispose() {
        // break contract by throwing exception
        throw new SafeObserverTestException("failure from unsubscribe");
    }
};
@Test
@Ignore("Observers can't throw")
public void onCompleteSuccessWithUnsubscribeFailure() {
    // Legacy behavior: onComplete succeeds but the subsequent dispose() throws;
    // the dispose failure was expected to surface to the caller.
    Observer<String> o = OBSERVER_SUCCESS();
    try {
        o.onSubscribe(THROWING_DISPOSABLE);
        new SafeObserver<String>(o).onComplete();
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        // FIXME no longer assertable
        // assertTrue(o.isUnsubscribed());
        // assertTrue(e instanceof UnsubscribeFailedException);
        assertTrue(e.getCause() instanceof SafeObserverTestException);
        assertEquals("failure from unsubscribe", e.getMessage());
        // expected since onError fails so SafeObserver can't help
    }
}
@Test
@Ignore("Observers can't throw")
public void onErrorSuccessWithUnsubscribeFailure() {
    // Legacy behavior: onError is delivered successfully, then dispose() throws;
    // the downstream must still have received the original error before the
    // dispose failure surfaces.
    AtomicReference<Throwable> onError = new AtomicReference<Throwable>();
    Observer<String> o = OBSERVER_SUCCESS(onError);
    try {
        o.onSubscribe(THROWING_DISPOSABLE);
        new SafeObserver<String>(o).onError(new SafeObserverTestException("failed"));
        fail("we expect the unsubscribe failure to cause an exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        // FIXME no longer assertable
        // assertTrue(o.isUnsubscribed());
        // we still expect onError to have received something before unsubscribe blew up
        assertNotNull(onError.get());
        assertTrue(onError.get() instanceof SafeObserverTestException);
        assertEquals("failed", onError.get().getMessage());
        // now assert the exception that was thrown
        RuntimeException onErrorFailedException = (RuntimeException) e;
        assertTrue(onErrorFailedException.getCause() instanceof SafeObserverTestException);
        assertEquals("failure from unsubscribe", e.getMessage());
    }
}
@Test
@Ignore("Observers can't throw")
public void onErrorFailureWithUnsubscribeFailure() {
    // Legacy behavior: onError throws AND dispose() throws; all three failures
    // (original error, onError failure, dispose failure) were expected inside
    // one CompositeException, in that order.
    Observer<String> o = OBSERVER_ONERROR_FAIL();
    try {
        o.onSubscribe(THROWING_DISPOSABLE);
        new SafeObserver<String>(o).onError(new SafeObserverTestException("onError failure"));
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        // FIXME no longer assertable
        // assertTrue(o.isUnsubscribed());
        // assertions for what is expected for the actual failure propagated to onError which then fails
        assertTrue(e instanceof RuntimeException);
        assertEquals("Error occurred when trying to propagate error to Observer.onError and during unsubscription.", e.getMessage());
        Throwable e2 = e.getCause();
        assertTrue(e2 instanceof CompositeException);
        List<Throwable> innerExceptions = ((CompositeException) e2).getExceptions();
        assertEquals(3, innerExceptions.size());
        Throwable e3 = innerExceptions.get(0);
        assertTrue(e3 instanceof SafeObserverTestException);
        assertEquals("onError failure", e3.getMessage());
        Throwable e4 = innerExceptions.get(1);
        assertTrue(e4 instanceof SafeObserverTestException);
        assertEquals("onErrorFail", e4.getMessage());
        Throwable e5 = innerExceptions.get(2);
        assertTrue(e5 instanceof SafeObserverTestException);
        assertEquals("failure from unsubscribe", e5.getMessage());
    }
}
@Test
@Ignore("Observers can't throw")
public void onErrorNotImplementedFailureWithUnsubscribeFailure() {
    // Legacy behavior: observer without a real onError plus a throwing dispose();
    // both failures were expected inside one CompositeException.
    Observer<String> o = OBSERVER_ONERROR_NOTIMPLEMENTED();
    try {
        o.onSubscribe(THROWING_DISPOSABLE);
        new SafeObserver<String>(o).onError(new SafeObserverTestException("error!"));
        fail("expects exception to be thrown");
    } catch (Exception e) {
        e.printStackTrace();
        // FIXME no longer assertable
        // assertTrue(o.isUnsubscribed());
        // assertions for what is expected for the actual failure propagated to onError which then fails
        assertTrue(e instanceof RuntimeException);
        assertEquals("Observer.onError not implemented and error while unsubscribing.", e.getMessage());
        Throwable e2 = e.getCause();
        assertTrue(e2 instanceof CompositeException);
        List<Throwable> innerExceptions = ((CompositeException) e2).getExceptions();
        assertEquals(2, innerExceptions.size());
        Throwable e3 = innerExceptions.get(0);
        assertTrue(e3 instanceof SafeObserverTestException);
        assertEquals("error!", e3.getMessage());
        Throwable e4 = innerExceptions.get(1);
        assertTrue(e4 instanceof SafeObserverTestException);
        assertEquals("failure from unsubscribe", e4.getMessage());
    }
}
/** Observer whose callbacks all succeed and do nothing. */
private static Observer<String> OBSERVER_SUCCESS() {
    return new DefaultObserver<String>() {

        @Override
        public void onComplete() {
        }

        @Override
        public void onError(Throwable e) {
        }

        @Override
        public void onNext(String args) {
        }
    };
}
/** Observer whose callbacks succeed; the error received by onError is recorded into {@code onError}. */
private static Observer<String> OBSERVER_SUCCESS(final AtomicReference<Throwable> onError) {
    return new DefaultObserver<String>() {

        @Override
        public void onComplete() {
        }

        @Override
        public void onError(Throwable e) {
            onError.set(e);
        }

        @Override
        public void onNext(String args) {
        }
    };
}
/** Observer whose onNext always throws; any error delivered to onError is recorded into {@code onError}. */
private static Observer<String> OBSERVER_ONNEXT_FAIL(final AtomicReference<Throwable> onError) {
    return new DefaultObserver<String>() {

        @Override
        public void onComplete() {
        }

        @Override
        public void onError(Throwable e) {
            onError.set(e);
        }

        @Override
        public void onNext(String args) {
            throw new SafeObserverTestException("onNextFail");
        }
    };
}
/**
 * Factory for an Observer that crashes in BOTH onNext and onError,
 * used to exercise SafeObserver's handling of secondary failures.
 */
private static Observer<String> OBSERVER_ONNEXT_ONERROR_FAIL() {
    return new DefaultObserver<String>() {
        @Override
        public void onComplete() {
        }
        @Override
        public void onError(Throwable e) {
            throw new SafeObserverTestException("onErrorFail");
        }
        @Override
        public void onNext(String args) {
            throw new SafeObserverTestException("onNextFail");
        }
    };
}
/** Factory for an Observer whose onError handler itself throws. */
private static Observer<String> OBSERVER_ONERROR_FAIL() {
    return new DefaultObserver<String>() {
        @Override
        public void onComplete() {
        }
        @Override
        public void onError(Throwable e) {
            throw new SafeObserverTestException("onErrorFail");
        }
        @Override
        public void onNext(String args) {
        }
    };
}
/**
 * Factory for an Observer simulating a "not implemented" onError:
 * it rethrows the incoming Throwable wrapped in a RuntimeException
 * (historically OnErrorNotImplementedException, see commented line).
 */
private static Observer<String> OBSERVER_ONERROR_NOTIMPLEMENTED() {
    return new DefaultObserver<String>() {
        @Override
        public void onComplete() {
        }
        @Override
        public void onError(Throwable e) {
            throw new RuntimeException(e);
            // throw new OnErrorNotImplementedException(e);
        }
        @Override
        public void onNext(String args) {
        }
    };
}
/**
 * Factory for an Observer whose onComplete throws; onError records the
 * received Throwable into the supplied holder for later assertions.
 */
private static Observer<String> OBSERVER_ONCOMPLETED_FAIL(final AtomicReference<Throwable> onError) {
    return new DefaultObserver<String>() {
        @Override
        public void onComplete() {
            throw new SafeObserverTestException("onCompleteFail");
        }
        @Override
        public void onError(Throwable e) {
            onError.set(e);
        }
        @Override
        public void onNext(String args) {
        }
    };
}
/** Marker exception type used by this test so failures are unambiguous. */
@SuppressWarnings("serial")
static class SafeObserverTestException extends RuntimeException {
    SafeObserverTestException(String message) {
        super(message);
    }
}
// When onComplete crashes, the exception propagates to the caller and the
// observer's onError must NOT be invoked (error holder stays null).
@Test
@Ignore("Observers can't throw")
public void testOnCompletedThrows() {
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    SafeObserver<Integer> s = new SafeObserver<Integer>(new DefaultObserver<Integer>() {
        @Override
        public void onNext(Integer t) {
        }
        @Override
        public void onError(Throwable e) {
            error.set(e);
        }
        @Override
        public void onComplete() {
            throw new TestException();
        }
    });
    try {
        s.onComplete();
        Assert.fail();
    } catch (RuntimeException e) {
        // the crash must not have been routed to onError
        assertNull(error.get());
    }
}
// SafeObserver must expose the exact downstream Observer it wraps via
// its 'actual' field.
@Test
public void testActual() {
    Observer<Integer> downstream = new DefaultObserver<Integer>() {
        @Override
        public void onNext(Integer value) {
        }

        @Override
        public void onError(Throwable t) {
        }

        @Override
        public void onComplete() {
        }
    };

    SafeObserver<Integer> safe = new SafeObserver<Integer>(downstream);
    assertSame(downstream, safe.actual);
}
// Disposing the downstream TestObserver must also dispose the upstream
// Disposable handed to the SafeObserver.
@Test
public void dispose() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    Disposable d = Disposables.empty();
    so.onSubscribe(d);
    to.dispose();
    assertTrue(d.isDisposed());
    assertTrue(so.isDisposed());
}
// After onComplete, subsequent onNext/onError/onComplete calls must be ignored.
@Test
public void onNextAfterComplete() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    Disposable d = Disposables.empty();
    so.onSubscribe(d);
    so.onComplete();
    so.onNext(1);
    so.onError(new TestException());
    so.onComplete();
    to.assertResult();
}
// A null onNext value is rejected and surfaces as a NullPointerException.
@Test
public void onNextNull() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    Disposable d = Disposables.empty();
    so.onSubscribe(d);
    so.onNext(null);
    to.assertFailure(NullPointerException.class);
}
// Events without a prior onSubscribe must fail with "Subscription not set!".
@Test
public void onNextWithoutOnSubscribe() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    so.onNext(1);
    to.assertFailureAndMessage(NullPointerException.class, "Subscription not set!");
}
// onError without onSubscribe composes the caller's error with the
// missing-subscription NullPointerException.
@Test
public void onErrorWithoutOnSubscribe() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    so.onError(new TestException());
    to.assertFailure(CompositeException.class);
    TestHelper.assertError(to, 0, TestException.class);
    TestHelper.assertError(to, 1, NullPointerException.class, "Subscription not set!");
}
// onComplete without onSubscribe also reports the missing subscription.
@Test
public void onCompleteWithoutOnSubscribe() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    so.onComplete();
    to.assertFailureAndMessage(NullPointerException.class, "Subscription not set!");
}
// Happy path: one value followed by completion passes through unchanged.
@Test
public void onNextNormal() {
    TestObserver<Integer> to = new TestObserver<Integer>();
    SafeObserver<Integer> so = new SafeObserver<Integer>(to);
    Disposable d = Disposables.empty();
    so.onSubscribe(d);
    so.onNext(1);
    so.onComplete();
    to.assertResult(1);
}
/**
 * A configurable crash-injecting Observer/Disposable used to drive
 * SafeObserver through its failure-composition paths. Each boolean flag
 * makes the corresponding callback throw a TestException; crashOnNext is a
 * countdown — onNext throws on exactly the crashOnNext-th call.
 */
static final class CrashDummy implements Observer<Object>, Disposable {
    boolean crashOnSubscribe;
    // countdown: throw when this many onNext calls have been made
    int crashOnNext;
    boolean crashOnError;
    boolean crashOnComplete;
    boolean crashDispose;
    // last Throwable delivered to a non-crashing onError
    Throwable error;
    CrashDummy(boolean crashOnSubscribe, int crashOnNext,
            boolean crashOnError, boolean crashOnComplete, boolean crashDispose) {
        this.crashOnSubscribe = crashOnSubscribe;
        this.crashOnNext = crashOnNext;
        this.crashOnError = crashOnError;
        this.crashOnComplete = crashOnComplete;
        this.crashDispose = crashDispose;
    }
    @Override
    public void dispose() {
        if (crashDispose) {
            throw new TestException("dispose()");
        }
    }
    @Override
    public boolean isDisposed() {
        return false;
    }
    @Override
    public void onSubscribe(Disposable d) {
        if (crashOnSubscribe) {
            throw new TestException("onSubscribe()");
        }
    }
    @Override
    public void onNext(Object value) {
        // pre-decrement: fires on the crashOnNext-th invocation
        if (--crashOnNext == 0) {
            throw new TestException("onNext(" + value + ")");
        }
    }
    @Override
    public void onError(Throwable e) {
        if (crashOnError) {
            throw new TestException("onError(" + e + ")");
        }
        error = e;
    }
    @Override
    public void onComplete() {
        if (crashOnComplete) {
            throw new TestException("onComplete()");
        }
    }
    /** Wraps this dummy in a fresh SafeObserver. */
    public SafeObserver<Object> toSafe() {
        return new SafeObserver<Object>(this);
    }
    /** Asserts the recorded error is an instance of the given class. */
    public CrashDummy assertError(Class<? extends Throwable> clazz) {
        if (!clazz.isInstance(error)) {
            throw new AssertionError("Different error: " + error);
        }
        return this;
    }
    /** Asserts the recorded CompositeException inner error at {@code index}. */
    public CrashDummy assertInnerError(int index, Class<? extends Throwable> clazz) {
        List<Throwable> cel = TestHelper.compositeList(error);
        TestHelper.assertError(cel, index, clazz);
        return this;
    }
    /** Same as above, additionally checking the inner error's message. */
    public CrashDummy assertInnerError(int index, Class<? extends Throwable> clazz, String message) {
        List<Throwable> cel = TestHelper.compositeList(error);
        TestHelper.assertError(cel, index, clazz, message);
        return this;
    }
}
// onNext crashes, then the resulting onError crashes too: both failures must
// reach the global error handler as one CompositeException.
@Test
public void onNextOnErrorCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, true, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onSubscribe(cd);
        so.onNext(1);
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, TestException.class, "onNext(1)");
        TestHelper.assertError(ce, 1, TestException.class, "onError(io.reactivex.exceptions.TestException: onNext(1))");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onNext crashes and the subsequent dispose() crashes: the composed error is
// delivered to the (working) onError.
@Test
public void onNextDisposeCrash() {
    CrashDummy cd = new CrashDummy(false, 1, false, false, true);
    SafeObserver<Object> so = cd.toSafe();
    so.onSubscribe(cd);
    so.onNext(1);
    cd.assertError(CompositeException.class);
    cd.assertInnerError(0, TestException.class, "onNext(1)");
    cd.assertInnerError(1, TestException.class, "dispose()");
}
// A second onSubscribe violates the protocol and is reported as
// IllegalStateException via the plugin error handler.
@Test
public void onSubscribeTwice() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, false, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onSubscribe(cd);
        so.onSubscribe(cd);
        TestHelper.assertError(list, 0, IllegalStateException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
// A crash inside onSubscribe is routed to the plugin handler as undeliverable.
@Test
public void onSubscribeCrashes() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(true, 1, false, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onSubscribe(cd);
        TestHelper.assertUndeliverable(list, 0, TestException.class, "onSubscribe()");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onSubscribe crashes AND the follow-up dispose crashes: both end up in one
// CompositeException at the plugin handler.
@Test
public void onSubscribeAndDisposeCrashes() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(true, 1, false, false, true);
        SafeObserver<Object> so = cd.toSafe();
        so.onSubscribe(cd);
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, TestException.class, "onSubscribe()");
        TestHelper.assertError(ce, 1, TestException.class, "dispose()");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onNext before onSubscribe triggers the missing-subscription NPE; the dummy's
// crashing onSubscribe (invoked during error handling) is composed with it.
@Test
public void onNextOnSubscribeCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(true, 1, false, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onNext(1);
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, NullPointerException.class, "Subscription not set!");
        TestHelper.assertError(ce, 1, TestException.class, "onSubscribe()");
    } finally {
        RxJavaPlugins.reset();
    }
}
// Null onNext plus a crashing dispose: NPE and dispose failure are composed.
@Test
public void onNextNullDisposeCrashes() {
    CrashDummy cd = new CrashDummy(false, 1, false, false, true);
    SafeObserver<Object> so = cd.toSafe();
    so.onSubscribe(cd);
    so.onNext(null);
    cd.assertInnerError(0, NullPointerException.class);
    cd.assertInnerError(1, TestException.class, "dispose()");
}
// Missing onSubscribe plus a crashing onError: the missing-subscription NPE
// and the onError crash are both reported via the plugin handler.
@Test
public void noSubscribeOnErrorCrashes() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, true, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onNext(1);
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, NullPointerException.class, "Subscription not set!");
        TestHelper.assertError(ce, 1, TestException.class, "onError(java.lang.NullPointerException: Subscription not set!)");
    } finally {
        RxJavaPlugins.reset();
    }
}
// A null Throwable passed to onError is replaced by a NullPointerException.
@Test
public void onErrorNull() {
    CrashDummy cd = new CrashDummy(false, 1, false, false, false);
    SafeObserver<Object> so = cd.toSafe();
    so.onSubscribe(cd);
    so.onError(null);
    cd.assertError(NullPointerException.class);
}
// onError without onSubscribe, with a crashing onSubscribe during recovery.
@Test
public void onErrorNoSubscribeCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(true, 1, false, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onError(new TestException());
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, TestException.class);
        TestHelper.assertError(ce, 1, NullPointerException.class, "Subscription not set!");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onError without onSubscribe where onError itself also crashes: three
// failures composed (original, missing subscription, onError crash).
@Test
public void onErrorNoSubscribeOnErrorCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, true, false, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onError(new TestException());
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, TestException.class);
        TestHelper.assertError(ce, 1, NullPointerException.class, "Subscription not set!");
        TestHelper.assertError(ce, 2, TestException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
// A crash inside onComplete is undeliverable and goes to the plugin handler.
// (Method name has a historical typo: "Completete".)
@Test
public void onCompleteteCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, false, true, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onSubscribe(cd);
        so.onComplete();
        TestHelper.assertUndeliverable(list, 0, TestException.class, "onComplete()");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onComplete without onSubscribe, with a crashing onSubscribe during recovery.
@Test
public void onCompleteteNoSubscribeCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(true, 1, false, true, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onComplete();
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, NullPointerException.class, "Subscription not set!");
        TestHelper.assertError(ce, 1, TestException.class, "onSubscribe()");
    } finally {
        RxJavaPlugins.reset();
    }
}
// onComplete without onSubscribe where the recovery onError crashes as well.
@Test
public void onCompleteteNoSubscribeOnErrorCrash() {
    List<Throwable> list = TestHelper.trackPluginErrors();
    try {
        CrashDummy cd = new CrashDummy(false, 1, true, true, false);
        SafeObserver<Object> so = cd.toSafe();
        so.onComplete();
        TestHelper.assertError(list, 0, CompositeException.class);
        List<Throwable> ce = TestHelper.compositeList(list.get(0));
        TestHelper.assertError(ce, 0, NullPointerException.class, "Subscription not set!");
        TestHelper.assertError(ce, 1, TestException.class);
    } finally {
        RxJavaPlugins.reset();
    }
}
}
|
// Copyright 2020 OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package collection
import (
"strings"
metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
resourcepb "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1"
quotav1 "github.com/openshift/api/quota/v1"
"go.opentelemetry.io/collector/translator/conventions"
corev1 "k8s.io/api/core/v1"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/utils"
)
// Cluster-wide hard limit for each resource covered by the quota,
// labeled by resource name.
var clusterResourceQuotaLimitMetric = &metricspb.MetricDescriptor{
	Name:        "openshift.clusterquota.limit",
	Description: "The configured upper limit for a particular resource.",
	Type:        metricspb.MetricDescriptor_GAUGE_INT64,
	LabelKeys: []*metricspb.LabelKey{{
		Key: "resource",
	}},
}

// Cluster-wide current usage for each resource with a configured limit.
var clusterResourceQuotaUsedMetric = &metricspb.MetricDescriptor{
	Name:        "openshift.clusterquota.used",
	Description: "The usage for a particular resource with a configured limit.",
	Type:        metricspb.MetricDescriptor_GAUGE_INT64,
	LabelKeys: []*metricspb.LabelKey{{
		Key: "resource",
	}},
}

// Per-namespace ("applied") hard limit; adds a namespace label key.
var appliedClusterResourceQuotaLimitMetric = &metricspb.MetricDescriptor{
	Name:        "openshift.appliedclusterquota.limit",
	Description: "The upper limit for a particular resource in a specific namespace.",
	Type:        metricspb.MetricDescriptor_GAUGE_INT64,
	LabelKeys: []*metricspb.LabelKey{
		{
			Key: "resource",
		},
		{
			Key: conventions.AttributeK8sNamespace,
		},
	},
}

// Per-namespace ("applied") current usage; adds a namespace label key.
var appliedClusterResourceQuotaUsedMetric = &metricspb.MetricDescriptor{
	Name:        "openshift.appliedclusterquota.used",
	Description: "The usage for a particular resource in a specific namespace.",
	Type:        metricspb.MetricDescriptor_GAUGE_INT64,
	LabelKeys: []*metricspb.LabelKey{
		{
			Key: "resource",
		},
		{
			Key: conventions.AttributeK8sNamespace,
		},
	},
}
// getMetricsForClusterResourceQuota builds a single resourceMetrics entry for
// an OpenShift ClusterResourceQuota: cluster-wide hard limits and usage from
// Status.Total, plus per-namespace ("applied") limits and usage for every
// namespace the quota is projected into.
func getMetricsForClusterResourceQuota(rq *quotav1.ClusterResourceQuota) []*resourceMetrics {
	metrics := make([]*metricspb.Metric, 0)
	// cluster-scoped totals carry no namespace label (empty namespace arg)
	metrics = appendClusterQuotaMetrics(metrics, clusterResourceQuotaLimitMetric, rq.Status.Total.Hard, "")
	metrics = appendClusterQuotaMetrics(metrics, clusterResourceQuotaUsedMetric, rq.Status.Total.Used, "")
	for _, ns := range rq.Status.Namespaces {
		metrics = appendClusterQuotaMetrics(metrics, appliedClusterResourceQuotaLimitMetric, ns.Status.Hard, ns.Namespace)
		metrics = appendClusterQuotaMetrics(metrics, appliedClusterResourceQuotaUsedMetric, ns.Status.Used, ns.Namespace)
	}
	return []*resourceMetrics{
		{
			resource: getResourceForClusterResourceQuota(rq),
			metrics:  metrics,
		},
	}
}
// appendClusterQuotaMetrics converts each entry of a ResourceList into a
// single-point gauge metric and appends it to metrics. Quantities whose
// resource name ends in ".cpu" are reported at milli-unit precision; a
// non-empty namespace adds a second label value for the namespace.
func appendClusterQuotaMetrics(metrics []*metricspb.Metric, metric *metricspb.MetricDescriptor, rl corev1.ResourceList, namespace string) []*metricspb.Metric {
	for resourceName, quantity := range rl {
		value := quantity.Value()
		// CPU quotas are exported in millicores.
		if strings.HasSuffix(string(resourceName), ".cpu") {
			value = quantity.MilliValue()
		}
		labelValues := []*metricspb.LabelValue{{Value: string(resourceName), HasValue: true}}
		// Only the per-namespace ("applied") metrics carry a namespace label.
		if namespace != "" {
			labelValues = append(labelValues, &metricspb.LabelValue{Value: namespace, HasValue: true})
		}
		series := []*metricspb.TimeSeries{
			utils.GetInt64TimeSeriesWithLabels(value, labelValues),
		}
		metrics = append(metrics, &metricspb.Metric{
			MetricDescriptor: metric,
			Timeseries:       series,
		})
	}
	return metrics
}
// getResourceForClusterResourceQuota builds the OpenCensus resource that
// identifies a ClusterResourceQuota: its UID, name and the cluster name.
func getResourceForClusterResourceQuota(rq *quotav1.ClusterResourceQuota) *resourcepb.Resource {
	return &resourcepb.Resource{
		Type: k8sType,
		Labels: map[string]string{
			k8sKeyClusterResourceQuotaUID:   string(rq.UID),
			k8sKeyClusterResourceQuotaName:  rq.Name,
			conventions.AttributeK8sCluster: rq.ClusterName,
		},
	}
}
|
//
// Copyright (C) 2015-2017 <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package dockerparser
import (
"testing"
"github.com/stretchr/testify/require"
)
// A bare "user/repo" reference defaults to the docker.io registry and the
// "latest" tag.
func TestShortParse(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "foo/bar")
	is.Equal("foo/bar:latest", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("latest", reference.Tag())
	is.Equal("docker.io", reference.Registry())
	is.Equal("docker.io/foo/bar", reference.Repository())
	is.Equal("docker.io/foo/bar:latest", reference.Remote())
}
// An explicit tag overrides the "latest" default.
func TestShortParseWithTag(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "foo/bar:1.1")
	is.Equal("foo/bar:1.1", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("1.1", reference.Tag())
	is.Equal("docker.io", reference.Registry())
	is.Equal("docker.io/foo/bar", reference.Repository())
	is.Equal("docker.io/foo/bar:1.1", reference.Remote())
}
// A digest reference keeps the "@sha256:..." part; Tag() returns the digest.
func TestShortParseWithDigest(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb")
	is.Equal("foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Tag())
	is.Equal("docker.io", reference.Registry())
	is.Equal("docker.io/foo/bar", reference.Repository())
	is.Equal("docker.io/foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Remote())
}
// A dotted first path component is treated as a custom registry host.
func TestRegistry(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain/foo/bar")
	is.Equal("foo/bar:latest", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("latest", reference.Tag())
	is.Equal("localhost.localdomain", reference.Registry())
	is.Equal("localhost.localdomain/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain/foo/bar:latest", reference.Remote())
}
// Custom registry with an explicit tag.
func TestRegistryWithTag(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain/foo/bar:1.1")
	is.Equal("foo/bar:1.1", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("1.1", reference.Tag())
	is.Equal("localhost.localdomain", reference.Registry())
	is.Equal("localhost.localdomain/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain/foo/bar:1.1", reference.Remote())
}
// Custom registry with a digest reference.
func TestRegistryWithDigest(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain/foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb")
	is.Equal("foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Tag())
	is.Equal("localhost.localdomain", reference.Registry())
	is.Equal("localhost.localdomain/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain/foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Remote())
}
// A registry host:port combination must be kept intact in the registry part.
func TestRegistryWithPort(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain:5000/foo/bar")
	is.Equal("foo/bar:latest", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("latest", reference.Tag())
	is.Equal("localhost.localdomain:5000", reference.Registry())
	is.Equal("localhost.localdomain:5000/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain:5000/foo/bar:latest", reference.Remote())
}
// Registry with port and explicit tag: the last colon separates the tag.
func TestRegistryWithPortAndTag(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain:5000/foo/bar:1.1")
	is.Equal("foo/bar:1.1", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("1.1", reference.Tag())
	is.Equal("localhost.localdomain:5000", reference.Registry())
	is.Equal("localhost.localdomain:5000/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain:5000/foo/bar:1.1", reference.Remote())
}
// Registry with port and a digest reference.
func TestRegistryWithPortAndDigest(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "localhost.localdomain:5000/foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb")
	is.Equal("foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Tag())
	is.Equal("localhost.localdomain:5000", reference.Registry())
	is.Equal("localhost.localdomain:5000/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain:5000/foo/bar@sha256:bc8813ea7b3603864987522f02a76101c17ad122e1c46d790efc0fca78ca7bfb", reference.Remote())
}
// An http:// scheme prefix is stripped from the registry host.
func TestHttpRegistryClean(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "http://localhost.localdomain:5000/foo/bar:latest")
	is.Equal("foo/bar:latest", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("latest", reference.Tag())
	is.Equal("localhost.localdomain:5000", reference.Registry())
	is.Equal("localhost.localdomain:5000/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain:5000/foo/bar:latest", reference.Remote())
}
// An https:// scheme prefix is stripped as well.
func TestHttpsRegistryClean(t *testing.T) {
	is := require.New(t)
	reference := parse(is, "https://localhost.localdomain:5000/foo/bar:latest")
	is.Equal("foo/bar:latest", reference.Name())
	is.Equal("foo/bar", reference.ShortName())
	is.Equal("latest", reference.Tag())
	is.Equal("localhost.localdomain:5000", reference.Registry())
	is.Equal("localhost.localdomain:5000/foo/bar", reference.Repository())
	is.Equal("localhost.localdomain:5000/foo/bar:latest", reference.Remote())
}
// Unsupported schemes (anything but http/https) must be rejected.
func TestParseError(t *testing.T) {
	is := require.New(t)
	reference, err := Parse("sftp://user:passwd@example.com/foo/bar:latest")
	is.Error(err)
	is.Nil(reference)
}
// parse is a test helper: it parses remote into a Reference and fails the
// test immediately if parsing errors or yields a nil/empty reference.
func parse(is *require.Assertions, remote string) *Reference {
	ref, err := Parse(remote)
	is.NoError(err, "parse error was not expected")
	is.NotNil(ref)
	is.NotEmpty(ref)
	return ref
}
|
#!/bin/sh
# Rebuild everything from scratch, then run the test binaries.
make clean
make all
# mixer unit test, then SBUS2 decoding test against a recorded capture
./mixer_test
./sbus2_test ../../../../data/sbus2/sbus2_r7008SB_gps_baro_tx_off.txt
package it.progess.core.pojo;
import it.progess.core.vo.Ivo;
/**
 * Stub implementation of the {@code Itbl} table-conversion contract.
 * NOTE(review): the method body is currently empty — only the intended
 * algorithm is sketched in comments. Confirm whether this is intentional
 * before relying on it.
 */
public class ConcreteTableConverter implements Itbl {
    @Override
    public void convertToTable(Ivo vo) {
        // Implement the conversion logic to generate the table representation
        // Example: Assuming vo has fields representing columns, create a table with rows and columns based on the vo data
        // Sample pseudo-code:
        // for each row in vo {
        //   create a new table row
        //   for each column in vo {
        //     add column value to the table row
        //   }
        //   add the table row to the table representation
        // }
        // Store or display the table representation as required
    }
}
/**
 * Joins path `parts` with `separator`, dropping empty segments so repeated,
 * leading or trailing separators never yield empty path pieces.
 * A leading separator on the first part is preserved.
 * e.g. makePath('/', '/a//b', 'c/') === '/a/b/c'
 */
const makePath = (separator: string, ...parts: string[]) => {
  // Split every part on the separator and keep only non-empty segments.
  const segments = parts.flatMap((part) =>
    part ? part.split(separator).filter((val) => val) : []
  );
  let path = segments.join(separator);
  // preserve the leading separator if it was provided
  if (parts[0] && parts[0].startsWith(separator)) {
    path = `${separator}${path}`;
  }
  return path;
};
/**
 * Normalizes a GUID string to uppercase, adding wrapping braces when the
 * input is a bare 36-character GUID (i.e. not already brace-wrapped).
 */
const formatGuid = (guid: string) => {
  // 36 characters === canonical GUID length without braces
  const needsBraces = guid.length === 36;
  const wrapped = needsBraces ? `{${guid}}` : guid;
  return wrapped.toUpperCase();
};
/**
 * Builds the fully-qualified key for a (possibly dynamic) placeholder.
 * When the rendering carries a uid, the key embeds the normalized uid and a
 * fixed zero index ("name-{UID}-0"); otherwise the plain name is used.
 */
export const getDynamicPlaceholderKey = (
  parentPlaceholderPath: string,
  rendering: { uid: string, [key: string]: any },
  placeholderName: string
) => {
  if (!rendering || !rendering.uid) {
    return makePath('/', parentPlaceholderPath, placeholderName);
  }
  // this could become dynamic if we wish to support "incrementing" dynamic placeholders as well
  const index = 0;
  const dynamicName = `${placeholderName}-${formatGuid(rendering.uid)}-${index}`;
  return makePath('/', parentPlaceholderPath, dynamicName);
};
|
package cloud
import (
"fmt"
"math/rand"
"sync"
)
// FakeProvider is an in-memory Provider suitable for tests.
type FakeProvider struct {
	instancesMutex sync.Mutex
	// instances is keyed by instance ID; lazily initialized by Create.
	instances map[string]Instance
}
// MarkRunning marks a VM as running and gives it a random IP address.
// NOTE(review): if id is unknown, the map lookup yields a zero-value
// Instance, which is then stored under its empty ID — confirm callers only
// pass IDs previously returned by Create.
func (p *FakeProvider) MarkRunning(id string) {
	p.instancesMutex.Lock()
	defer p.instancesMutex.Unlock()
	inst := p.instances[id]
	// fabricate a pseudo-random IPv4 address for the running instance
	ipAddress := make([]byte, 4)
	rand.Read(ipAddress)
	inst.IPAddress = fmt.Sprintf("%d.%d.%d.%d", ipAddress[0], ipAddress[1], ipAddress[2], ipAddress[3])
	inst.State = InstanceStateRunning
	p.instances[inst.ID] = inst
}
// List returns all the instances in the fake provider.
// It deliberately fails ~10% of the time to let tests exercise error paths.
func (p *FakeProvider) List() ([]Instance, error) {
	if rand.Intn(10) == 0 {
		return nil, fmt.Errorf("random error occurred")
	}
	p.instancesMutex.Lock()
	defer p.instancesMutex.Unlock()
	var instances []Instance
	for _, instance := range p.instances {
		instances = append(instances, instance)
	}
	return instances, nil
}
// Create creates an instance in the fake provider.
// It deliberately fails ~20% of the time, requires a non-empty image name,
// and only accepts the "standard-image" image.
func (p *FakeProvider) Create(id string, attrs CreateAttributes) (Instance, error) {
	if rand.Intn(5) == 0 {
		return Instance{}, fmt.Errorf("random error occurred")
	}
	p.instancesMutex.Lock()
	defer p.instancesMutex.Unlock()
	if attrs.ImageName == "" {
		return Instance{}, fmt.Errorf("image is required")
	}
	if attrs.ImageName == "standard-image" {
		inst := Instance{
			ID:    id,
			State: InstanceStateStarting,
		}
		// lazy map initialization: the zero FakeProvider is usable as-is
		if p.instances == nil {
			p.instances = make(map[string]Instance)
		}
		p.instances[inst.ID] = inst
		return inst, nil
	}
	return Instance{}, fmt.Errorf("unknown image")
}
// Get returns the instance with the given ID, or an error if the instance
// wasn't found
func (p *FakeProvider) Get(id string) (Instance, error) {
	p.instancesMutex.Lock()
	defer p.instancesMutex.Unlock()
	instance, ok := p.instances[id]
	if !ok {
		return Instance{}, fmt.Errorf("instance not found")
	}
	return instance, nil
}
// Destroy deletes the instance with the given ID. Returns an error if an
// instance with the given ID doesn't exist.
func (p *FakeProvider) Destroy(id string) error {
	p.instancesMutex.Lock()
	defer p.instancesMutex.Unlock()
	if _, ok := p.instances[id]; !ok {
		return fmt.Errorf("instance not found")
	}
	delete(p.instances, id)
	return nil
}
|
#! /usr/bin/bash
# Unstow (remove the symlinks of) each dotfiles folder listed below.
# Add or remove folders which do not need to be removed.
# Fixes: 'folders = (' is not a valid bash assignment (no spaces allowed
# around '='), and 'stow -D folder' passed the literal word "folder"
# instead of the loop variable.
folders=(
  "alacritty"
  "emacs"
  "fish"
  "i3"
  "kitty"
  "lazygit"
  "nvim"
  "polybar"
  "shell"
  "vim"
  "vscode"
)
# unstow each of the folders; quote expansions to survive odd names
for folder in "${folders[@]}"; do
  stow -D "$folder"
done
|
#!/usr/bin/env bash
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs some basic "hello world" logic to test parallelization, basic grouping
# functionality, persisting an RDD to the distributed filesystem, viewing the
# same files with "hadoop fs", reading it back in with Spark, and finally
# deleting the files with "hadoop fs".
#
# Usage: ./bdutil shell < spark-validate-setup.sh
#
# Warning: If the script returns a nonzero code, then there may be some test
# files which should be cleaned up; you can find these with
# hadoop fs -ls validate_spark_*
# Abort on the first failing command so partial test state is obvious.
set -e
# Find hadoop-config.sh
HADOOP_CONFIGURE_CMD=''
HADOOP_CONFIGURE_CMD=$(find ${HADOOP_LIBEXEC_DIR} ${HADOOP_PREFIX} \
    /home/hadoop /usr/*/hadoop* -name hadoop-config.sh | head -n 1)
# If hadoop-config.sh has been found source it
if [[ -n "${HADOOP_CONFIGURE_CMD}" ]]; then
  echo "Sourcing '${HADOOP_CONFIGURE_CMD}'"
  . ${HADOOP_CONFIGURE_CMD}
fi
HADOOP_CMD="${HADOOP_PREFIX}/bin/hadoop"
# Find the spark-shell command.
SPARK_SHELL=$(find /home/hadoop -name spark-shell | head -n 1)
# Create a unique directory for testing RDD persistence.
PARENT_DIR="/validate_spark_$(date +%s)"
# Get info about the cluster: worker count from the slaves file, CPU count
# from /proc, and total shard count = workers * cpus.
NUM_WORKERS=$(wc -l $(dirname ${SPARK_SHELL})/../conf/slaves \
    | cut -d ' ' -f 1)
NUM_CPUS=$(grep -c ^processor /proc/cpuinfo)
NUM_SHARDS=$((${NUM_WORKERS} * ${NUM_CPUS}))
echo "NUM_WORKERS: ${NUM_WORKERS}"
echo "NUM_CPUS: ${NUM_CPUS}"
echo "NUM_SHARDS: ${NUM_SHARDS}"
# Create an RDD, verify host/greeting cardinality, and persist it.
# (The heredoc below is Scala fed to spark-shell; shell variables are
# expanded before spark sees it.)
${SPARK_SHELL} << EOF
import java.net.InetAddress
val greetings = sc.parallelize(1 to ${NUM_SHARDS}).map({ i =>
(i, InetAddress.getLocalHost().getHostName(),
"Hello " + i + ", from host " + InetAddress.getLocalHost().getHostName())
})
val uniqueHostnames = greetings.map(tuple => tuple._2).distinct()
println("Got unique hostnames:")
for (hostname <- uniqueHostnames.collect()) {
println(hostname)
}
val uniqueGreetings = greetings.map(tuple => tuple._3).distinct()
println("Unique greetings:")
for (greeting <- uniqueGreetings.collect()) {
println(greeting)
}
val numHostnames = uniqueHostnames.count()
if (numHostnames != ${NUM_WORKERS}) {
println("Expected ${NUM_WORKERS} hosts, got " + numHostnames)
exit(1)
}
val numGreetings = uniqueGreetings.count()
if (numGreetings != ${NUM_SHARDS}) {
println("Expected ${NUM_SHARDS} greetings, got " + numGreetings)
exit(1)
}
greetings.saveAsObjectFile("${PARENT_DIR}/")
exit(0)
EOF
# Check it with "hadoop fs".
echo "Checking _SUCCESS marker with 'hadoop fs'..."
NUM_FILES=$(${HADOOP_CMD} fs -ls ${PARENT_DIR}/part-* | wc -l | cut -d ' ' -f 1)
echo "Found ${NUM_FILES} files."
${HADOOP_CMD} fs -stat "${PARENT_DIR}/_SUCCESS"
# Read the RDD back in and verify it.
${SPARK_SHELL} << EOF
val greetings = sc.objectFile[(Int, String, String)]("${PARENT_DIR}/")
val uniqueHostnames = greetings.map(tuple => tuple._2).distinct()
println("Got unique hostnames:")
for (hostname <- uniqueHostnames.collect()) {
println(hostname)
}
val uniqueGreetings = greetings.map(tuple => tuple._3).distinct()
println("Unique greetings:")
for (greeting <- uniqueGreetings.collect()) {
println(greeting)
}
val numHostnames = uniqueHostnames.count()
if (numHostnames != ${NUM_WORKERS}) {
println("Expected ${NUM_WORKERS} hosts, got " + numHostnames)
exit(1)
}
val numGreetings = uniqueGreetings.count()
if (numGreetings != ${NUM_SHARDS}) {
println("Expected ${NUM_SHARDS} greetings, got " + numGreetings)
exit(1)
}
exit(0)
EOF
# Remove the test directory so reruns start clean.
echo "Cleaning up ${PARENT_DIR}..."
${HADOOP_CMD} fs -rmr ${PARENT_DIR}
echo 'All done!'
import React, { useContext } from 'react'
import styled from 'styled-components'
import { Element } from './Element'
import { ElementsContext } from './index'
// Flexible container that fills the remaining space; position:relative so
// children can be absolutely positioned against the canvas.
const CanvasContainer = styled.div`
flex: 1;
position: relative;
`
// Renders every element from ElementsContext inside the canvas area.
// NOTE(review): `key={element}` uses the element value itself as the React
// key — fine only if elements are unique strings/numbers; confirm the
// context's element type. Also no props are passed to <Element /> — verify
// Element reads its data from context.
export const Canvas: React.FC = () => {
  const { elements } = useContext(ElementsContext)
  return (
    <CanvasContainer>
      {elements.map(element => {
        return <Element key={element} />
      })}
    </CanvasContainer>
  )
}
|
#!/bin/bash
# Wrapper script for restoring the configuration of the X-Road central server.
# See $COMMON_RESTORE_SCRIPT for details.
source /usr/share/xroad/scripts/_backup_restore_common.sh
COMMON_RESTORE_SCRIPT=/usr/share/xroad/scripts/_restore_xroad.sh
THIS_FILE=$(pwd)/$0
# Prints the command-line help for this wrapper to stdout.
# (Heredoc content is user-facing output and must not be altered.)
usage () {
cat << EOF
Usage: $0 -s <security server ID> -f <path of tar archive> [-F] [-R]
Restore the configuration (files and database) of the X-Road security server
from a tar archive.
OPTIONS:
-h Show this message and exit.
-b Treat all input values as encoded in base64.
-s ID of the security server. Mandatory if -F is not used.
-f Absolute path of the tar archive to be used for restoration. Mandatory.
-F Force restoration, taking only the type of server into account.
-R Skip removal of old files and just copy the backup on top of the existing configuration.
EOF
}
# Assemble the argument list from the parsed command-line options and run
# the common restore script as root; exit non-zero on any failure.
execute_restore () {
    if [ -x "${COMMON_RESTORE_SCRIPT}" ] ; then
        local args="-t security -f ${BACKUP_FILENAME}"
        # BUG FIX: the original used `[ -n ${FORCE_RESTORE} ]`, which collapses
        # to `[ -n ]` (always true) when the variable is unset. Quoting the
        # expansion (and folding the redundant double test into one [[ ]])
        # makes forced restore trigger only on an explicit FORCE_RESTORE=true.
        if [[ "${FORCE_RESTORE}" = true ]] ; then
            args="${args} -F"
        else
            args="${args} -s ${SECURITY_SERVER_ID}"
            if [[ "${USE_BASE_64}" = true ]] ; then
                args="${args} -b"
            fi
        fi
        if [[ "${SKIP_REMOVAL}" = true ]] ; then
            args="${args} -R"
        fi
        sudo -u root ${COMMON_RESTORE_SCRIPT} ${args} 2>&1
        if [ $? -ne 0 ] ; then
            echo "Failed to restore the configuration of the X-Road security server"
            exit 1
        fi
    else
        echo "Could not execute the restore script at ${COMMON_RESTORE_SCRIPT}"
        exit 1
    fi
}
# Parse command-line options; see usage() for their meaning.
while getopts ":RFs:f:bh" opt ; do
    case $opt in
        h)
            usage
            exit 0
            ;;
        R)
            SKIP_REMOVAL=true
            ;;
        F)
            FORCE_RESTORE=true
            ;;
        s)
            SECURITY_SERVER_ID=$OPTARG
            ;;
        f)
            BACKUP_FILENAME=$OPTARG
            ;;
        b)
            USE_BASE_64=true
            ;;
        \?)
            echo "Invalid option $OPTARG"
            usage
            exit 2
            ;;
        :)
            echo "Option -$OPTARG requires an argument"
            usage
            exit 2
            ;;
    esac
done

# Validate the caller and the parsed arguments (helpers sourced from
# _backup_restore_common.sh), then perform the actual restore.
check_user
check_security_server_id
check_backup_file_name
execute_restore

# vim: ts=2 sw=2 sts=2 et filetype=sh
|
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Copyright (c) 2014-2015 The Dash developers
# Copyright (c) 2015-2017 The PIVX developers
# Copyright (c) 2017 The Quotation developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Functions used by more than one test
# Print all arguments to stderr instead of stdout.
function echoerr {
    >&2 echo "$@"
}
# Usage: ExtractKey <key> "<json_object_string>"
# Warning: this will only work for the very-well-behaved
# JSON produced by quotationd, do NOT use it to try to
# parse arbitrary/nested/etc JSON.
function ExtractKey {
    # Strip spaces/quotes/braces/newlines, treat commas as record separators
    # and colons as field separators, then print the value whose key matches
    # the (regex) pattern in $1.
    echo $2 | tr -d ' "{}\n' | awk -v RS=',' -F: "\$1 ~ /$1/ { print \$2}"
}
# CreateDataDir <dir> [extra-conf-line ...]
# Create a regtest data directory with a standard quotation.conf; any
# remaining arguments are appended verbatim as extra configuration lines.
# Sets the globals $DIR and $CONF for the caller.
function CreateDataDir {
    DIR="$1"
    mkdir -p "$DIR"
    CONF="$DIR/quotation.conf"
    echo "regtest=1" >> "$CONF"
    echo "keypool=2" >> "$CONF"
    echo "rpcuser=rt" >> "$CONF"
    echo "rpcpassword=rt" >> "$CONF"
    echo "rpcwait=1" >> "$CONF"
    echo "walletnotify=${SENDANDWAIT} -STOP" >> "$CONF"
    shift
    while (( "$#" )); do
        # BUG FIX: `echo $1` word-split the extra config lines, collapsing
        # whitespace and expanding globs; quote to append them verbatim.
        echo "$1" >> "$CONF"
        shift
    done
}
# AssertEqual <actual> <expected>
# Abort the test run (invoking CleanUp first, if one is defined) when the
# two values are not numerically equal according to bc.
function AssertEqual {
    local equal
    equal=$( echo "$1 == $2" | bc )
    if (( equal == 0 )); then
        echoerr "AssertEqual: $1 != $2"
        if declare -f CleanUp > /dev/null 2>&1 ; then
            CleanUp
        fi
        exit 1
    fi
}
# CheckBalance -datadir=... amount [account] [minconf]
# Verify that `getbalance` reports the expected amount; on mismatch run
# CleanUp (if defined) and abort the test run.
function CheckBalance {
    # BUG FIX: `declare -i EXPECT` coerced the expected amount to an integer,
    # silently truncating fractional balances before the bc comparison.
    # Keep the raw string; bc handles the numeric comparison. (declare/local
    # inside a function both create a function-local variable.)
    local EXPECT="$2"
    B=$( $CLI $1 getbalance $3 $4 )
    if (( $( echo "$B == $EXPECT" | bc ) == 0 ))
    then
        echoerr "bad balance: $B (expected $2)"
        declare -f CleanUp > /dev/null 2>&1
        if [[ $? -eq 0 ]] ; then
            CleanUp
        fi
        exit 1
    fi
}
# Use: Address <datadir> [account]
# Prints a fresh receiving address from the given wallet.
function Address {
    $CLI $1 getnewaddress $2
}
# Send from to amount
# Send <from-datadir> <to-datadir> <amount>: sends <amount> to a freshly
# generated address in the destination wallet, via $SENDANDWAIT so the
# wallet notification is awaited. The transaction id is left in the global
# $txid for callers to inspect.
function Send {
    from=$1
    to=$2
    amount=$3
    address=$(Address $to)
    txid=$( ${SENDANDWAIT} $CLI $from sendtoaddress $address $amount )
}
# Use: Unspent <datadir> <n'th-last-unspent> <var>
# Prints field <var> (txid/amount/vout/...) of the n'th-last entry of
# `listunspent`, relying on the fixed key:value layout of its JSON output.
function Unspent {
    local r=$( $CLI $1 listunspent | awk -F'[ |:,"]+' "\$2 ~ /$3/ { print \$3 }" | tail -n $2 | head -n 1)
    echo $r
}
# Use: CreateTxn1 <datadir> <n'th-last-unspent> <destaddress>
# produces hex from signrawtransaction
# Builds a 1-input/1-output raw transaction that spends the selected
# unspent output in full (no explicit fee) and prints the signed hex.
function CreateTxn1 {
    TXID=$(Unspent $1 $2 txid)
    AMOUNT=$(Unspent $1 $2 amount)
    VOUT=$(Unspent $1 $2 vout)
    RAWTXN=$( $CLI $1 createrawtransaction "[{\"txid\":\"$TXID\",\"vout\":$VOUT}]" "{\"$3\":$AMOUNT}")
    ExtractKey hex "$( $CLI $1 signrawtransaction $RAWTXN )"
}
# Use: SendRawTxn <datadir> <hex_txn_data>
# Broadcasts the given signed transaction, waiting for the wallet
# notification via $SENDANDWAIT.
function SendRawTxn {
    ${SENDANDWAIT} $CLI $1 sendrawtransaction $2
}
# Use: GetBlocks <datadir>
# returns number of blocks from getinfo
# (implemented via getblockcount, which prints just the height)
function GetBlocks {
    $CLI $1 getblockcount
}
|
<reponame>ahangchen/WeFace
// 简历处理的js
$(function(){
// Parse the query string of the current page into an array that also maps
// each parameter name to its (still URL-encoded) value.
function getUrlVars(){
    var vars = [];
    var query = window.location.href.slice(window.location.href.indexOf('?') + 1);
    var pairs = query.split('&');
    for (var i = 0; i < pairs.length; i++) {
        var pair = pairs[i].split('=');
        vars.push(pair[0]);
        vars[pair[0]] = pair[1];
    }
    return vars;
}
// Return the value of a single query-string parameter by name.
function getUrlVar(name){
    var all = getUrlVars();
    return all[name];
}
// Map a numeric sex code to its display label: 1 -> male, 2 -> female,
// anything else -> "undecided". Loose equality (==) is kept deliberately so
// string codes coming from the server compare equal to their numbers.
function getSex(id){
    if (id == 1) {
        return "男";
    }
    return id == 2 ? "女" : "未定";
}
// 从URL中获取投递的id
var apply_id = getUrlVar('a_id');
var t_id = getUrlVar('t_id');
var postData ={
apply_id:apply_id
};
var wefaceBace_site = "http://wemeet.tech:8080/";
$('#resumeBtn').attr('href',wefaceBace_site + 'team/resumeManage/resume.html?t_id='+t_id);
$('#positionBtn').attr('href',wefaceBace_site + 'team/position/showPosition.html?t_id='+t_id);
$("#returnBtn").attr("href","resume.html?t_id="+t_id);
var resumeInfo = {
imgSrc:"",
name:"",
sex:"",
age:"",
mail:"",
tel:"",
school:"",
depart:"",
grade:"",
addr:"",
resumePath:""
};
$.ajax({
type:'post',
data:postData,
url:cur_site + "team/apply/info/",
dataType:"json",
success:function(data){
resumeInfo.imgSrc = cur_media + data.avatar_path;
resumeInfo.name = data.name;
resumeInfo.sex = getSex(data.sex);
resumeInfo.age = data.age;
resumeInfo.mail = data.mail;
resumeInfo.tel = data.tel;
resumeInfo.school = data.school;
resumeInfo.depart = data.major;
resumeInfo.addr = data.location;
resumeInfo.resumePath = data.resume_path;
resumeInfo.grade = data.grade;
initForm(resumeInfo);
},
error:function(data){
console.log(data.msg);
},
headers:{
"Access-Control-Allow-Origin":"*"
}
});
$('#send').click(function(){
$('.sendMail').css('display','block');
});
$('#unsend').click(function(){
$('.sendMail').css('display','none');
});
$("#solveBtn").click(function(){
var mailText = $('#mailContent').val();
var mailData = {
apply_id:apply_id,
text:mailText
};
var selectVar = $('input[name="group1"]:checked').val();
var solveData = {
apply_id:apply_id,
state:selectVar
};
var isSend = $('input[name="mailGroup"]:checked').val();
if(isSend) {
$.ajax({
type: "post",
url: cur_site + "team/apply/mail/",
data: mailData,
dataType: "json",
success: function (data) {
console.log("邮件发送成功");
},
error: function (data) {
console.log(data.msg);
},
headers: {
"Access-Control-Allow-Origin": "*"
}
});
}
$.ajax({
type:"post",
url:cur_site + "team/apply/handle/",
data:solveData,
dataType:"json",
success:function(data){
alert("处理成功");
window.location = wefaceBace_site + 'team/resumeManage/resume.html?t_id='+t_id;
},
error:function(data){
console.log(data.msg);
},
headers:{
"Access-Control-Allow-Origin":"*"
}
});
});
// 点击链接下载文件
$('.resumeName').click(function(){
window.location.href = resumeInfo.resumePath;
});
// Fill the resume card with the applicant's information. Chips with no
// data (school / address) are hidden instead of being left empty.
function initForm(info){
    $('.infoCard .imgBox img').attr('src', info.imgSrc);
    if (info.school == "") {
        $(".schoolChip").css('display', 'none');
    }
    else {
        $(".school").html(info.school);
    }
    if (info.addr == '') {
        // BUG FIX: the selector was $('addr') — an element selector matching
        // nothing — so an empty address was never hidden; target the .addr
        // class like the branch below does.
        $('.addr').css('display', 'none');
    }
    else {
        $(".addr").html(info.addr);
    }
    $(".name").html(info.name);
    $(".sex").html(info.sex);
    $(".age").html(info.age + "岁");
    $(".mail").html(info.mail);
    $(".tel").html(info.tel);
    $(".department").html(info.depart);
    $(".grade").html(info.grade);
    // The original computed the resume file name from info.resumePath here
    // but never used it; that dead code has been removed.
}
});
|
#!/bin/bash
# Run the OpenACC test-suite binaries produced by setup.sh and report a
# coloured PASSED/FAILED verdict for each one.
. ./setup.sh
# ANSI colour codes for the verdicts (B* variants are bold).
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
BRED='\033[1;31m'
BGREEN='\033[1;32m'
BBLUE='\033[1;34m'
NC='\033[0m'
echo -e "${BBLUE}Execution of OpenACC programs${NC}"
# report_status <label> <rc>
# Print the coloured PASSED/FAILED verdict for one test program, given its
# exit code. Factored out of the copy-pasted if/else blocks below.
report_status () {
    echo -n "$1"
    if [ "$2" != 0 ]; then
        echo -e " ${BRED}[FAILED]${NC}"
    else
        echo -e " ${BGREEN}[PASSED]${NC}"
    fi
}

./parallel
report_status "Parallel" $?

./jacobi
report_status "Jacobi" $?

./jacobi_data
report_status "Jacobi data" $?

./jacobi_data2
report_status "Jacobi data 2" $?

./householder3 256 256
report_status "Householder" $?

# The gang test passes when the number of workers that printed "toto"
# matches the number requested.
NB=4
STARTED=`./gang $NB | grep "toto" | wc -l`
echo -n "Gang:"
if [ $NB == $STARTED ]; then
    echo -e " ${BGREEN}[PASSED]${NC}"
else
    echo -e " ${BRED}[FAILED]${NC}", $STARTED, " instead of ", $NB, "requested"
fi

./inout
report_status "Data in/out:" $?

# BUG FIX: the original tested `$?` AFTER an intervening `echo -n`, so the
# check always saw the echo's status (0) and reported PASSED regardless of
# the program's result. The exit code is now captured at the call site.
./inout_data
report_status "Data in/out (2):" $?
# Offloading stuff
# Run each offloading test once per target architecture available on this
# machine (ppc64le: NVPTX + host; x86_64: currently host only — the NVPTX
# combination is commented out).
for SOURCE in "jacobi" "data" ; do
    if [ `arch` == "ppc64le" ]; then
        ARCHITECTURES=(nvptx64-nvidia-cuda powerpc64le-unknown-linux-gnu)
    fi
    if [ `arch` == "x86_64" ]; then
        # ARCHITECTURES=(nvptx64-nvidia-cuda x86_64-unknown-linux-gnu)
        ARCHITECTURES=(x86_64-unknown-linux-gnu)
    fi
    for TARGET in ${ARCHITECTURES[@]} ; do
        # Binaries are named <source>_<target-triple>.
        ./${SOURCE}_$TARGET
        RET=$?
        echo -n "Running " $SOURCE " with offloading on " $TARGET
        if [ $RET == 0 ] ; then
            echo -e " ${BGREEN}[PASSED]${NC}"
        else
            echo -e " ${BRED}[FAILED]${NC}"
        fi
    done
done

# Sub-suites with their own runners.
echo -e "${BBLUE}Profiling interface${NC}"
cd profiling
./run.sh
cd ..
echo -e "${BBLUE}TAU selective instrumentation plugin${NC}"
cd tau
./run.sh
cd ..
|
#!/bin/sh
# Regenerate the low-level CUDA driver FFI bindings from the system header
# and normalise their formatting. `set -ex` aborts on the first failure and
# echoes each command.
set -ex
bindgen /opt/cuda/include/cuda.h > src/driver/ll.rs
rustfmt src/driver/ll.rs
|
<reponame>JamesCrockford/grunt-patternlibrary<gh_stars>0
module.exports = function(grunt) {
/**
* Gets JSON from the provided patterns aray
*
* @param {array} patterns
*
* @returns {string} - Stringified JSON of data
*/
function getJSON(patterns) {
var _ = require('lodash');
var data = patterns;
return JSON.stringify(data);
}
/**
* Gets markup given the provided data
*
* @param {string} template - Path to the wrapper template
* @param {string} data.title - Title for the page
* @param {array} data.stylesheets - Array of stylesheets
* @param {array} data.javascripts - Array of javascripts
* @param {array} data.patterns - Array of patterns
*
* @returns {string} - Markup for the wrapper
*/
function getMarkup(template, data) {
return grunt.template.process(grunt.file.read(template), { data: data });
}
return {
getMarkup: getMarkup,
getJSON: getJSON
};
};
|
import pandas as pd
import matplotlib.pyplot as plt

# Sample access log: one row per (day, user) page view.
records = {
    'Date': ['2022-01-01', '2022-01-01', '2022-01-02', '2022-01-02', '2022-01-03', '2022-01-03'],
    'User': ['user1', 'user2', 'user3', 'user1', 'user2', 'user4'],
}
frame = pd.DataFrame(records)

# Parse the date strings so grouping happens on real timestamps.
frame['Date'] = pd.to_datetime(frame['Date'])

# Count distinct users per day and render the result as a bar chart.
daily_users = frame.groupby('Date')['User'].nunique()
plt.bar(daily_users.index, daily_users.values)
plt.xlabel('Date')
plt.ylabel('Number of Users')
plt.title('User Access by Day')
plt.show()
#!/bin/bash
# Build the React Native iOS bundle + sourcemap and upload the sourcemap to
# Instabug so crash reports can be symbolicated.

# BUG FIX: quote ${SRCROOT} — Xcode project paths regularly contain spaces
# and the unquoted cd would fail or land in the wrong directory.
cd "${SRCROOT}"
cd ..

# Make node available via nvm when it is installed directly or via brew.
if [[ -s "$HOME/.nvm/nvm.sh" ]]; then
    . "$HOME/.nvm/nvm.sh"
elif [[ -x "$(command -v brew)" && -s "$(brew --prefix nvm)/nvm.sh" ]]; then
    . "$(brew --prefix nvm)/nvm.sh"
fi
export NODE_BINARY=node

# Generate ios sourcemap
react-native bundle --platform ios \
    --entry-file index.js \
    --dev false \
    --bundle-output ./ios/main.jsbundle \
    --sourcemap-output ./ios-sourcemap.json &&
zip ./ios-sourcemap.zip ./ios-sourcemap.json

# When only the placeholder token is configured, try to grep the real token
# out of the JS sources (double- then single-quoted SDK init call).
# BUG FIX: the token comparison is quoted now — unquoted, an unset
# INSTABUG_APP_TOKEN degenerated to `[ == "YOUR_APP_TOKEN" ]`, a test error.
if [ "${INSTABUG_APP_TOKEN}" == "YOUR_APP_TOKEN" ]; then
    echo "Instabug: Looking for Token..."
    if [ ! "${INSTABUG_APP_TOKEN}" ]; then
        INSTABUG_APP_TOKEN=$(grep -r --exclude-dir={node_modules,ios,android} 'Instabug.startWithToken(\"[0-9a-zA-Z]*\"' ./ -m 1 | grep -o '\"[0-9a-zA-Z]*\"' | cut -d "\"" -f 2)
    fi
    if [ ! "${INSTABUG_APP_TOKEN}" ]; then
        INSTABUG_APP_TOKEN=$(grep -r --exclude-dir={node_modules,ios,android} "Instabug.startWithToken(\'[0-9a-zA-Z]*\'" ./ -m 1 | grep -o "\'[0-9a-zA-Z]*\'" | cut -d "\"" -f 2)
    fi
fi

if [ ! "${INSTABUG_APP_TOKEN}" ] || [ -z "${INSTABUG_APP_TOKEN}" ] || [ "${INSTABUG_APP_TOKEN}" == "YOUR_APP_TOKEN" ]; then
    echo "Instabug: err: INSTABUG_APP_TOKEN not found. Make sure you've added the SDK initialization line Instabug.startWithToken Or added it to the environment variable in the gradle"
    exit 0
else
    echo "Instabug: Uploading files..."
    # Upload ios sourcemap
    curl -X POST 'https://api.instabug.com/api/sdk/v3/symbols_files' -F "symbols_file=@./ios-sourcemap.json" -F "application_token=${INSTABUG_APP_TOKEN}" -F "platform=react_native" -F "os=ios"
    echo
fi
|
/**
 * Simple mutable data holder describing a building.
 */
public class Building {
    // Height of the building (units are not specified anywhere in this
    // snippet — TODO confirm with callers).
    int height;
    // Total number of rooms in the building.
    int numberOfRooms;
}
#!/bin/bash
#
# Emulates the LHS serial device: I/O through stdin and stdout
# One socat (backgrounded) receives UDP datagrams on port 16001 and writes
# them to stdout; the other forwards stdin as UDP to 127.0.0.1:16000.
(socat -u UDP4-RECV:16001 -&socat -u - UDP4-SENDTO:127.0.0.1:16000)
|
<gh_stars>0
package org.ringingmaster.util.javafx.grid.canvas;
/**
 * Listener notified on each tick of the blink timer, so implementers can
 * toggle the visible state of blinking artefacts in the grid canvas.
 *
 * @author <NAME>
 */
public interface BlinkTimerListener {
    /**
     * Called on every blink transition.
     *
     * @param blinkOn true when the blinking artefact should currently be shown
     */
    void blinkTimerManager_triggerBlink(boolean blinkOn);
}
|
#! /bin/bash
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_spec_dt_vs_M_run2/run_rexi_dt0000_2_m000327_t001_n0128_r2688_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_spec_dt_vs_M_run2/run_rexi_dt0000_2_m000327_t001_n0128_r2688_a1.err
#SBATCH -J rexi_dt0000_2_m000327_t001_n0128_r2688_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=2688
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=00:10:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# One OpenMP thread per MPI task, pinned compactly onto hardware threads.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the tool-chain this SWEET build expects (Intel 16, Intel MPI 5.1,
# gcc 5 runtime, Anaconda Python without MPI).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
# Run from the SWEET source root so relative paths resolve.
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_06_scalability_rexi_spec_dt_vs_M_run2
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
# 2688 ranks at 28 per node. NOTE(review): -C appears twice (0.3 and -0.2);
# presumably the trailing value wins — confirm with the solver's CLI parsing.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 2688 ./build/rexi_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 1 --rexi-h 0.2 --timestepping-mode 1 --staggering 0 --rexi-m=327 -C -0.2
|
#!/bin/bash -eux
# Strip provisioning tooling (chef / puppet) from the image, then purge the
# apt caches so the resulting artefact stays small.
apt-get -y remove chef chef-zero puppet puppet-common
apt-get -y autoremove
apt-get -y clean
apt-get -y autoclean
|
# Paths to the gromosXX source tree and the out-of-source build directory
# (placeholders — adjust before use).
SRCPATH=your/path/to/gromosXX
buildPATH=you/want/gromos/here/build_gromosXX

# Remove previous in-tree (LINUX/) and out-of-source build artefacts.
# BUG FIX: `rm dir/* -r; rmdir dir` missed dotfiles (leaving rmdir to fail)
# and the unquoted placeholder paths could word-split; `rm -rf` on the
# quoted directory handles both. ${var:?} aborts if the variable is empty,
# so a misconfigured path can never expand to a bare `rm -rf /*`.
cd "${SRCPATH:?}"
rm -rf LINUX
rm -rf "${buildPATH:?}"
|
<gh_stars>1-10
def resize_image(image, width, height):
    """Resize ``image`` to explicit pixel dimensions.

    A dimension of 0 means "derive this side from the other one while
    preserving the original aspect ratio".

    Args:
        image: A PIL-style image exposing ``width``, ``height`` and
            ``resize((w, h))``.
        width: Target width in pixels, or 0 to infer it from ``height``.
        height: Target height in pixels, or 0 to infer it from ``width``.

    Returns:
        The resized image produced by ``image.resize``.

    Raises:
        ValueError: If both ``width`` and ``height`` are 0 (previously this
            fell through to a nonsensical 0x0 resize).
    """
    width = int(width)
    height = int(height)
    if width == 0 and height == 0:
        raise ValueError("at least one of width/height must be non-zero")
    aspect_ratio = image.width / image.height
    if height == 0:
        new_width = width
        new_height = int(round(width / aspect_ratio, 0))
    elif width == 0:
        new_height = height
        new_width = int(round(height * aspect_ratio, 0))
    else:
        new_width = width
        new_height = height
    return image.resize((new_width, new_height))
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script is for production metadata store manipulation
# It is designed to run in hadoop CLI, both in sandbox or in real hadoop environment
#
# If you're a developer of Kylin and want to download sandbox's metadata into your dev machine,
# take a look at SandboxMetastoreCLI
source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
if [ "$1" == "backup" ]
then
mkdir -p ${KYLIN_HOME}/meta_backups
_now=$(date +"%Y_%m_%d_%H_%M_%S")
_file="${KYLIN_HOME}/meta_backups/meta_${_now}"
echo "Starting backup to ${_file}"
mkdir -p ${_file}
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool download ${_file}
echo "metadata store backed up to ${_file}"
elif [ "$1" == "fetch" ]
then
_file=$2
_now=$(date +"%Y_%m_%d_%H_%M_%S")
_fileDst="${KYLIN_HOME}/meta_backups/meta_${_now}"
echo "Starting restoring $_fileDst"
mkdir -p $_fileDst
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool fetch $_fileDst $_file
echo "metadata store backed up to $_fileDst"
elif [ "$1" == "restore" ]
then
_file=$2
echo "Starting restoring $_file"
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool upload $_file
elif [ "$1" == "list" ]
then
_file=$2
echo "Starting list $_file"
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool list $_file
elif [ "$1" == "remove" ]
then
_file=$2
echo "Starting remove $_file"
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool remove $_file
elif [ "$1" == "cat" ]
then
_file=$2
echo "Starting cat $_file"
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool cat $_file
elif [ "$1" == "reset" ]
then
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.common.persistence.ResourceTool reset
elif [ "$1" == "refresh-cube-signature" ]
then
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.cube.cli.CubeSignatureRefresher
elif [ "$1" == "clean" ]
then
${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.MetadataCleanupJob "${@:2}"
else
echo "usage: metastore.sh backup"
echo " metastore.sh fetch DATA"
echo " metastore.sh reset"
echo " metastore.sh refresh-cube-signature"
echo " metastore.sh restore PATH_TO_LOCAL_META"
echo " metastore.sh list RESOURCE_PATH"
echo " metastore.sh cat RESOURCE_PATH"
echo " metastore.sh remove RESOURCE_PATH"
echo " metastore.sh clean [--delete true]"
exit 1
fi
|
#!/bin/bash
# Print the first five elements of the array, space-separated.
array=(1 2 3 4 5 6 7 8 9 10)
first_five=("${array[@]:0:5}")
echo "${first_five[@]}"
def Ct_a(a: float, method: str) -> float:
    """Thrust coefficient as a function of the axial induction factor ``a``.

    Only the Spera empirical relation is implemented; any other ``method``
    raises ``ValueError``.
    """
    if method != 'Spera':
        raise ValueError("Invalid method. Supported methods: 'Spera'")
    return 0.3 + 0.4 * a
#!/bin/bash
# Create the nginx and Lua-script ConfigMaps for the media-frontend service
# in the social-network namespace. $CTL must point at the kubectl-compatible
# CLI to use; paths are relative to the repository root (two levels up).
cd $(dirname $0)/../..
$CTL create cm media-frontend-nginx --from-file=media-frontend-config/nginx.conf -n social-network
$CTL create cm media-frontend-lua --from-file=media-frontend-config/lua-scripts -n social-network
|
<filename>webfx-kit/webfx-kit-javafxbase-emul/src/main/java/com/sun/javafx/collections/ObservableListWrapper.java
/*
* Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.javafx.collections;
import javafx.collections.ModifiableObservableListBase;
import com.sun.javafx.collections.NonIterableChange.SimplePermutationChange;
import java.util.BitSet;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.collections.ObservableList;
import javafx.util.Callback;
/**
 * A List wrapper class that implements observability.
 *
 * Structural mutations go through ModifiableObservableListBase's
 * begin/endChange protocol so listeners receive batched change events.
 */
public class ObservableListWrapper<E> extends ModifiableObservableListBase<E> implements
        ObservableList<E>, SortableList<E>, RandomAccess {

    // The wrapped list that actually stores the elements.
    private final List<E> backingList;
    // Observes the Observables extracted from each element so element
    // mutations can be reported as update changes; null when no extractor
    // was supplied.
    private final ElementObserver elementObserver;

    /**
     * Wraps the given list without element observation: only structural
     * changes (add/set/remove) fire change events.
     */
    public ObservableListWrapper(List<E> list) {
        backingList = list;
        elementObserver = null;
    }

    /**
     * Wraps the given list and additionally listens to the Observables the
     * extractor reports for each element; when any of them invalidates, an
     * update change is fired for every index holding that element.
     */
    public ObservableListWrapper(List<E> list, Callback<E, Observable[]> extractor) {
        backingList = list;
        this.elementObserver = new ElementObserver(extractor, new Callback<E, InvalidationListener>() {
            @Override
            public InvalidationListener call(final E e) {
                return new InvalidationListener() {
                    @Override
                    public void invalidated(Observable observable) {
                        beginChange();
                        int i = 0;
                        final int size = size();
                        // Report an update for every occurrence of the
                        // invalidated element (identity comparison).
                        for (; i < size; ++i) {
                            if (get(i) == e) {
                                nextUpdate(i);
                            }
                        }
                        endChange();
                    }
                };
            }
        }, this);
        // Attach listeners to the elements already present in the list.
        final int sz = backingList.size();
        for (int i = 0; i < sz; ++i) {
            elementObserver.attachListener(backingList.get(i));
        }
    }

    @Override
    public E get(int index) {
        return backingList.get(index);
    }

    @Override
    public int size() {
        return backingList.size();
    }

    /** Inserts into the backing list, attaching the element listener first. */
    @Override
    protected void doAdd(int index, E element) {
        if (elementObserver != null)
            elementObserver.attachListener(element);
        backingList.add(index, element);
    }

    /** Replaces an element, moving the listener from the old to the new one. */
    @Override
    protected E doSet(int index, E element) {
        E removed = backingList.set(index, element);
        if (elementObserver != null) {
            elementObserver.detachListener(removed);
            elementObserver.attachListener(element);
        }
        return removed;
    }

    /** Removes an element, detaching its listener. */
    @Override
    protected E doRemove(int index) {
        E removed = backingList.remove(index);
        if (elementObserver != null)
            elementObserver.detachListener(removed);
        return removed;
    }

    @Override
    public int indexOf(Object o) {
        return backingList.indexOf(o);
    }

    @Override
    public int lastIndexOf(Object o) {
        return backingList.lastIndexOf(o);
    }

    @Override
    public boolean contains(Object o) {
        return backingList.contains(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return backingList.containsAll(c);
    }

    /**
     * Clears the list as a single change event, detaching element
     * listeners first.
     */
    @Override
    public void clear() {
        if (elementObserver != null) {
            final int sz = size();
            for (int i = 0; i < sz; ++i) {
                elementObserver.detachListener(get(i));
            }
        }
        if (hasListeners()) {
            beginChange();
            // NOTE(review): `this` is passed as the removed-elements view and
            // the backing list is cleared before endChange() fires the event —
            // presumably nextRemove snapshots the elements; confirm against
            // ModifiableObservableListBase.
            nextRemove(0, this);
        }
        backingList.clear();
        ++modCount;
        if (hasListeners()) {
            endChange();
        }
    }

    /** Removes the index range [fromIndex, toIndex) as one change event. */
    @Override
    public void remove(int fromIndex, int toIndex) {
        beginChange();
        // Each removal shifts the tail left, so repeatedly remove fromIndex.
        for (int i = fromIndex; i < toIndex; ++i) {
            remove(fromIndex);
        }
        endChange();
    }

    /**
     * Removes every element contained in c, batched into one change event.
     * Matching indices are collected first and then removed back-to-front,
     * so earlier removals do not shift the indices still pending.
     */
    @Override
    public boolean removeAll(Collection<?> c) {
        beginChange();
        // c.size() is only the BitSet's initial capacity hint; the bits set
        // below are indices into this list.
        BitSet bs = new BitSet(c.size());
        for (int i = 0; i < size(); ++i) {
            if (c.contains(get(i))) {
                bs.set(i);
            }
        }
        if (!bs.isEmpty()) {
            int cur = size();
            while ((cur = bs.previousSetBit(cur - 1)) >= 0) {
                remove(cur);
            }
        }
        endChange();
        return !bs.isEmpty();
    }

    /** Keeps only elements contained in c; mirror image of removeAll. */
    @Override
    public boolean retainAll(Collection<?> c) {
        beginChange();
        BitSet bs = new BitSet(c.size());
        for (int i = 0; i < size(); ++i) {
            if (!c.contains(get(i))) {
                bs.set(i);
            }
        }
        if (!bs.isEmpty()) {
            int cur = size();
            while ((cur = bs.previousSetBit(cur - 1)) >= 0) {
                remove(cur);
            }
        }
        endChange();
        return !bs.isEmpty();
    }

    // Lazily created helper implementing the permutation-reporting sorts.
    private SortHelper helper;

    /** Sorts by natural order, firing a single permutation change. */
    @Override
    @SuppressWarnings("unchecked")
    public void sort() {
        if (backingList.isEmpty()) {
            return;
        }
        int[] perm = getSortHelper().sort((List<? extends Comparable>)backingList);
        fireChange(new SimplePermutationChange<E>(0, size(), perm, this));
    }

    /** Sorts with the given comparator, firing a single permutation change. */
    @Override
    public void sort(Comparator<? super E> comparator) {
        if (backingList.isEmpty()) {
            return;
        }
        int[] perm = getSortHelper().sort(backingList, comparator);
        fireChange(new SimplePermutationChange<E>(0, size(), perm, this));
    }

    /** Returns the lazily instantiated SortHelper. */
    private SortHelper getSortHelper() {
        if (helper == null) {
            helper = new SortHelper();
        }
        return helper;
    }
}
|
import {$log} from 'ts-log-debug';
import {InputOutputServiceInterface, ByteOrder, PinMode} from '../InputOutputServiceInterface';
/**
 * InputOutputServiceInterface implementation that performs no real I/O.
 * Every operation is logged at debug level and otherwise ignored — useful
 * on hosts without GPIO hardware and in tests.
 */
export class NoOpInputOutputService implements InputOutputServiceInterface {

    constructor() {
    }

    /** Logs that the (non-existent) GPIO subsystem was initialised. */
    setup() {
        $log.info('Initialising NoOp GPIO.');
    }

    /** Records the requested pin mode; no hardware is touched. */
    public setPinMode(pin: number, mode: PinMode) {
        $log.debug('setPinMode ' + pin + ' ' + mode);
    }

    /** Records the requested digital write; no hardware is touched. */
    digitalWrite(pin: number, state: boolean) {
        $log.debug('digitalWrite ' + pin + ' ' + state);
    }

    /** Records the requested 8-bit shift-out; no hardware is touched. */
    public shiftOut(dataPin: number, clockPin: number, order: ByteOrder, value: number) {
        $log.debug('shiftOut ' + dataPin + ' ' + clockPin + ' ' + order + ' ' + value);
    }

    /**
     * Records the requested 16-bit shift-out; no hardware is touched.
     * The original implementation looped over all 16 bits computing a
     * value it never used; that dead code has been removed.
     */
    public shiftOut16(dataPin: number, clockPin: number, value: number) {
        $log.debug('shiftOut16');
    }
}
|
/* spellchecker: disable */
import { expect } from 'chai';
import { vec2, v2 } from '../source/vec2';
/* spellchecker: enable */
// Verifies component-wise clamp behaves like GLSL clamp(value, min, max).
describe('gl-matrix vec2 extensions clamp', () => {
    it('should clamp a vec2 as specified in GLSL', () => {
        let a: vec2 = vec2.fromValues(2, 2);
        const b: vec2 = vec2.create();
        // clamp writes into its `out` parameter (b) and returns it, so `a`
        // is expected to reference the same result as `b` afterwards.
        a = vec2.clamp(b, a, vec2.fromValues(0, 0), vec2.fromValues(1, 1));
        expect(vec2.equals(a, b)).to.be.true;
        expect(vec2.equals(a, vec2.fromValues(1, 1))).to.be.true;
        // Per-component min/max bounds: 3 clamps to [1,2], 4 clamps to [2,3].
        a[0] = 3;
        a[1] = 4;
        vec2.clamp(b, a, vec2.fromValues(1, 2), vec2.fromValues(2, 3));
        expect(vec2.equals(b, vec2.fromValues(2, 3))).to.be.true;
    });
});
// Verifies component-wise absolute value, in place.
describe('gl-matrix vec2 extensions abs', () => {
    it('should return the absolute of a vec2 as specified in GLSL', () => {
        const a: vec2 = vec2.fromValues(-2, 2);
        vec2.abs(a, a);
        expect(a[0]).to.equal(2);
        expect(a[1]).to.equal(2);
    });
});
// Covers the v2() shorthand constructor and string parsing (which accepts
// exactly two comma-separated numbers and rejects everything else).
describe('gl-matrix vec2 extensions', () => {
    it('should provide tinified empty vec2 constructors', () => {
        expect(vec2.equals(v2(), vec2.fromValues(0, 0))).to.be.true;
    });
    it('should parse vec2 from string', () => {
        expect(vec2.parse(undefined)).to.be.undefined;
        expect(vec2.parse('')).to.be.undefined;
        expect(vec2.parse('[')).to.be.undefined;
        expect(vec2.parse('[0.0, 0.0]')).to.be.undefined;
        expect(vec2.parse('0.0')).to.be.undefined;
        expect(vec2.parse('0.0, 0.0, 0.0')).to.be.undefined;
        expect(vec2.equals(vec2.parse('0.0, 0.0')!, v2())).to.be.true;
        expect(vec2.equals(vec2.parse('2.0, 4.0')!, vec2.fromValues(2.0, 4.0))).to.be.true;
    });
    it('should provide default initialized vec and mat abbreviations', () => {
        expect(vec2.equals(vec2.create(), v2())).to.be.true;
    });
});
|
export class WeeklyMenu {
id : Number;
time : String;
monday : String;
tuesday : String;
wednesday : String;
thursday : String;
friday : String;
saturday : String;
sunday : String;
} |
package cyclops.pure.instances.control;
import static cyclops.async.Future.narrowK;
import cyclops.function.higherkinded.DataWitness.future;
import cyclops.function.higherkinded.Higher;
import cyclops.pure.arrow.Cokleisli;
import cyclops.pure.arrow.Kleisli;
import cyclops.pure.arrow.MonoidK;
import cyclops.pure.arrow.MonoidKs;
import cyclops.container.control.Either;
import cyclops.async.Future;
import cyclops.container.control.Option;
import cyclops.function.combiner.Monoid;
import cyclops.pure.container.functional.Active;
import cyclops.pure.container.functional.Coproduct;
import cyclops.pure.container.functional.Nested;
import cyclops.pure.container.functional.Product;
import cyclops.pure.typeclasses.InstanceDefinitions;
import cyclops.pure.typeclasses.Pure;
import cyclops.pure.typeclasses.comonad.Comonad;
import cyclops.pure.typeclasses.foldable.Foldable;
import cyclops.pure.typeclasses.foldable.Unfoldable;
import cyclops.pure.typeclasses.functor.Functor;
import cyclops.pure.typeclasses.monad.Applicative;
import cyclops.pure.typeclasses.monad.Monad;
import cyclops.pure.typeclasses.monad.MonadPlus;
import cyclops.pure.typeclasses.monad.MonadRec;
import cyclops.pure.typeclasses.monad.MonadZero;
import cyclops.pure.typeclasses.monad.Traverse;
import cyclops.pure.typeclasses.monad.TraverseByTraverse;
import java.util.function.Function;
import lombok.AllArgsConstructor;
import lombok.experimental.UtilityClass;
/**
 * Companion class for creating Type Class instances for working with Futures
 *
 * @author johnmcclean
 */
@UtilityClass
public class FutureInstances {

    // Stateless singleton backing all the type-class accessors below.
    private final FutureTypeclasses INSTANCE = new FutureTypeclasses();

    /** Kleisli arrow (A -> Future&lt;B&gt;) into the future kind. */
    public static <T> Kleisli<future, Future<T>, T> kindKleisli() {
        return Kleisli.of(FutureInstances.monad(),
                          Future::widen);
    }

    /** Cokleisli arrow (Future&lt;A&gt; -> B) out of the future kind. */
    public static <T> Cokleisli<future, T, Future<T>> kindCokleisli() {
        return Cokleisli.of(Future::narrowK);
    }

    /** Wraps a Future of a higher-kinded value as a Nested structure. */
    public static <W1, T> Nested<future, W1, T> nested(Future<Higher<W1, T>> nested,
                                                       InstanceDefinitions<W1> def2) {
        return Nested.of(nested,
                         FutureInstances.definitions(),
                         def2);
    }

    /** Pairs a Future with another Active functor as a Product. */
    public static <W1, T> Product<future, W1, T> product(Future<T> f,
                                                         Active<W1, T> active) {
        return Product.of(allTypeclasses(f),
                          active);
    }

    /** Embeds a Future as the right side of a Coproduct. */
    public static <W1, T> Coproduct<W1, future, T> coproduct(Future<T> f,
                                                             InstanceDefinitions<W1> def2) {
        return Coproduct.right(f,
                               def2,
                               FutureInstances.definitions());
    }

    /** Bundles a Future with the full set of type-class definitions. */
    public static <T> Active<future, T> allTypeclasses(Future<T> f) {
        return Active.of(f,
                         FutureInstances.definitions());
    }

    /** Maps each value into another monad W2 and nests the result. */
    public static <W2, R, T> Nested<future, W2, R> mapM(Future<T> f,
                                                        Function<? super T, ? extends Higher<W2, R>> fn,
                                                        InstanceDefinitions<W2> defs) {
        return Nested.of(f.map(fn),
                         FutureInstances.definitions(),
                         defs);
    }

    /**
     * All type-class instances available for the future kind. Comonad and
     * Unfoldable are not supported and report Option.none().
     */
    public static InstanceDefinitions<future> definitions() {
        return new InstanceDefinitions<future>() {
            @Override
            public <T, R> Functor<future> functor() {
                return FutureInstances.functor();
            }

            @Override
            public <T> Pure<future> unit() {
                return FutureInstances.unit();
            }

            @Override
            public <T, R> Applicative<future> applicative() {
                return FutureInstances.applicative();
            }

            @Override
            public <T, R> Monad<future> monad() {
                return FutureInstances.monad();
            }

            @Override
            public <T, R> Option<MonadZero<future>> monadZero() {
                return Option.some(FutureInstances.monadZero());
            }

            @Override
            public <T> Option<MonadPlus<future>> monadPlus() {
                return Option.some(FutureInstances.monadPlus());
            }

            @Override
            public <T> MonadRec<future> monadRec() {
                return FutureInstances.monadRec();
            }

            @Override
            public <T> Option<MonadPlus<future>> monadPlus(MonoidK<future> m) {
                return Option.some(FutureInstances.monadPlus(m));
            }

            @Override
            public <C2, T> Traverse<future> traverse() {
                return FutureInstances.traverse();
            }

            @Override
            public <T> Foldable<future> foldable() {
                return FutureInstances.foldable();
            }

            @Override
            public <T> Option<Comonad<future>> comonad() {
                return Option.none();
            }

            @Override
            public <T> Option<Unfoldable<future>> unfoldable() {
                return Option.none();
            }
        };
    }

    // All accessors below return the shared INSTANCE; the generic
    // parameters only exist to satisfy call sites.

    public static <T, R> Functor<future> functor() {
        return INSTANCE;
    }

    public static <T> Pure<future> unit() {
        return INSTANCE;
    }

    public static <T, R> Applicative<future> applicative() {
        return INSTANCE;
    }

    public static <T, R> Monad<future> monad() {
        return INSTANCE;
    }

    public static <T, R> MonadZero<future> monadZero() {
        return INSTANCE;
    }

    public static <T, R> MonadRec<future> monadRec() {
        return INSTANCE;
    }

    public static <T> MonadPlus<future> monadPlus() {
        return INSTANCE;
    }

    // NOTE(review): the supplied MonoidK argument is ignored and the default
    // INSTANCE is returned — presumably withMonoidK (lombok @With) was
    // intended here; confirm against callers.
    public static <T> MonadPlus<future> monadPlus(MonoidK<future> m) {
        return INSTANCE;
    }

    public static <L> Traverse<future> traverse() {
        return INSTANCE;
    }

    public static <L> Foldable<future> foldable() {
        return INSTANCE;
    }

    /**
     * Single implementation of every supported type class for Future.
     * Stateless apart from the MonoidK used by MonadPlus (defaults to
     * first-successful-future).
     */
    @AllArgsConstructor
    @lombok.With
    public static class FutureTypeclasses implements MonadPlus<future>, MonadRec<future>, TraverseByTraverse<future>,
            Foldable<future> {

        private final MonoidK<future> monoidK;

        public FutureTypeclasses() {
            monoidK = MonoidKs.firstSuccessfulFuture();
        }

        @Override
        public <T> T foldRight(Monoid<T> monoid,
                               Higher<future, T> ds) {
            return narrowK(ds).fold(monoid);
        }

        @Override
        public <T> T foldLeft(Monoid<T> monoid,
                              Higher<future, T> ds) {
            return narrowK(ds).fold(monoid);
        }

        @Override
        public <T, R> Higher<future, R> flatMap(Function<? super T, ? extends Higher<future, R>> fn,
                                                Higher<future, T> ds) {
            return narrowK(ds).flatMap(t -> narrowK(fn.apply(t)));
        }

        /**
         * A successful Future traverses by applying fn to its value and
         * re-wrapping with Future.ofResult; a failed Future short-circuits
         * to a pure Future.ofError inside the target applicative.
         */
        @Override
        public <C2, T, R> Higher<C2, Higher<future, R>> traverseA(Applicative<C2> applicative,
                                                                  Function<? super T, ? extends Higher<C2, R>> fn,
                                                                  Higher<future, T> ds) {
            Future<T> future = narrowK(ds);
            return future.fold(right -> applicative.map(m -> Future.ofResult(m),
                                                        fn.apply(right)),
                               left -> applicative.unit(Future.ofError(left)));
        }

        @Override
        public <T, R> R foldMap(Monoid<R> mb,
                                Function<? super T, ? extends R> fn,
                                Higher<future, T> ds) {
            Future<R> opt = narrowK(ds).map(fn);
            return opt.fold(mb);
        }

        @Override
        public <T, R> Higher<future, R> ap(Higher<future, ? extends Function<T, R>> fn,
                                           Higher<future, T> apply) {
            return narrowK(apply).zip(narrowK(fn),
                                      (a, b) -> b.apply(a));
        }

        @Override
        public <T> Higher<future, T> unit(T value) {
            return Future.ofResult(value);
        }

        @Override
        public <T, R> Higher<future, R> map(Function<? super T, ? extends R> fn,
                                            Higher<future, T> ds) {
            return narrowK(ds).map(fn);
        }

        /** Stack-safe monadic recursion delegated to Future.tailRec. */
        @Override
        public <T, R> Higher<future, R> tailRec(T initial,
                                                Function<? super T, ? extends Higher<future, ? extends Either<T, R>>> fn) {
            return Future.tailRec(initial,
                                  t -> narrowK(fn.apply(t)));
        }

        @Override
        public <T> MonoidK<future> monoid() {
            return monoidK;
        }
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package controle;
import com.ginf.ginffinal.Usuario;
import com.ginf.ginffinal.Usuario;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.Transaction;
import util.HibernateUtil;
/**
*
* @author kalebe
*/
/**
 * CRUD helpers for {@link Usuario} entities, backed by Hibernate.
 * All methods obtain a fresh session from {@link HibernateUtil}.
 */
public class UsuarioControle {

    /**
     * Saves a new user or updates an existing one.
     *
     * @param usuario entity to persist
     * @return true on success, false when the operation failed (the
     *         transaction is rolled back in that case)
     */
    public static boolean salvar(Usuario usuario) {
        Transaction tr = null;
        try {
            Session sessionRecheio = HibernateUtil.getSession();
            tr = sessionRecheio.beginTransaction();
            sessionRecheio.saveOrUpdate(usuario);
            tr.commit();
            return true;
        } catch (Exception ex) {
            // Roll back so a failed save does not leave a dangling transaction.
            if (tr != null) {
                tr.rollback();
            }
            return false;
        }
    }

    /**
     * Finds a user by its primary key.
     *
     * @param id user id
     * @return the matching user, or null when none exists
     */
    public static Usuario buscar(Integer id) {
        Session sessionRecheio = HibernateUtil.getSession();
        Transaction tr = sessionRecheio.beginTransaction();
        // Bind the id as a query parameter instead of concatenating it into
        // the HQL string (avoids injection and string/number coercion issues).
        String hql = "from Usuario u where u.id = :id";
        Usuario usuario = (Usuario) sessionRecheio.createQuery(hql)
                .setParameter("id", id)
                .uniqueResult();
        tr.commit();
        return usuario;
    }

    /**
     * Lists every user in the system.
     *
     * @return all users
     */
    public static List<Usuario> listar() {
        Session sessionRecheio = HibernateUtil.getSession();
        Transaction tr = sessionRecheio.beginTransaction();
        String hql = "from Usuario u";
        @SuppressWarnings("unchecked")
        List<Usuario> lista = (List<Usuario>) sessionRecheio.createQuery(hql).list();
        tr.commit();
        return lista;
    }

    /**
     * Deletes a user.
     *
     * @param usuario entity to remove
     * @return true on success, false when the operation failed (the
     *         transaction is rolled back in that case)
     */
    public static boolean deletar(Usuario usuario) {
        Transaction tr = null;
        try {
            Session sessionRecheio = HibernateUtil.getSession();
            tr = sessionRecheio.beginTransaction();
            sessionRecheio.delete(usuario);
            tr.commit();
            return true;
        } catch (Exception ex) {
            if (tr != null) {
                tr.rollback();
            }
            return false;
        }
    }
}
|
<gh_stars>0
/**
* Final Project. Factions which extend the Computer class
*
* @author <NAME>
* @email <EMAIL>
* @version 1.0
* @since 2020-04-15
*/
package edu.bu.met.cs665;
/** Computer-controlled Bandits faction, configured via behavior strategies. */
class Bandits extends Computer {

    /** Wires the three behavior strategies into the base {@link Computer}. */
    public Bandits(
        ScavengeBehavior scavengeBehavior,
        TradeBehavior tradeBehavior,
        AttackBehavior attackBehavior) {
        super(scavengeBehavior, tradeBehavior, attackBehavior);
    }

    /** Prints this faction's label prefix (no trailing newline). */
    public void display() {
        System.out.print("\nBandits' sol: ");
    }
}
/** Computer-controlled Outcasts faction, configured via behavior strategies. */
class Outcasts extends Computer {

    /** Wires the three behavior strategies into the base {@link Computer}. */
    public Outcasts(
        ScavengeBehavior scavengeBehavior,
        TradeBehavior tradeBehavior,
        AttackBehavior attackBehavior) {
        super(scavengeBehavior, tradeBehavior, attackBehavior);
    }

    /** Prints this faction's label prefix (no trailing newline). */
    public void display() {
        System.out.print("\nOutcasts sol: ");
    }
}
/** Computer-controlled UltraWealthy faction, configured via behavior strategies. */
class UltraWealthy extends Computer {

    /** Wires the three behavior strategies into the base {@link Computer}. */
    public UltraWealthy(
        ScavengeBehavior scavengeBehavior,
        TradeBehavior tradeBehavior,
        AttackBehavior attackBehavior) {
        super(scavengeBehavior, tradeBehavior, attackBehavior);
    }

    /** Prints this faction's label prefix (no trailing newline). */
    public void display() {
        System.out.print("\nWealthy's sol: ");
    }
}
|
<gh_stars>0
package com.twitter.finatra.multiserver.Add1HttpServer
import com.twitter.adder.thriftscala.Adder
import com.twitter.adder.thriftscala.Adder._
import com.twitter.finagle.Filter
import com.twitter.inject.thrift.filters.ThriftClientFilterBuilder
import com.twitter.inject.thrift.modules.FilteredThriftClientModule
import com.twitter.util.Future
/**
 * Thrift client module for the Adder service: every generated method is
 * wrapped with a 3-minute overall timeout and a bounded exponential retry
 * (3 retries, 50ms start, 2x multiplier) on possibly-retryable exceptions.
 */
object AdderThriftClientModule
  extends FilteredThriftClientModule[Adder[Future], Adder.ServiceIface] {

  override val label = "adder-thrift"
  // Destination resolved from the "adder-thrift-server" flag.
  override val dest = "flag!adder-thrift-server"

  override def filterServiceIface(
    serviceIface: ServiceIface,
    filter: ThriftClientFilterBuilder): ServiceIface = {
    serviceIface.copy(
      add1 = filter.method(Add1)
        .exceptionFilter(Filter.identity[Add1.Args, Add1.Result]) // Example of replacing the default exception filter
        .timeout(3.minutes)
        .exponentialRetry(
          shouldRetryResponse = PossiblyRetryableExceptions,
          requestTimeout = 1.minute,
          start = 50.millis,
          multiplier = 2,
          retries = 3)
        .andThen(serviceIface.add1),
      add1String = filter.method(Add1String)
        .timeout(3.minutes)
        .exponentialRetry(
          shouldRetryResponse = PossiblyRetryableExceptions,
          requestTimeout = 1.minute,
          start = 50.millis,
          multiplier = 2,
          retries = 3)
        .andThen(serviceIface.add1String),
      add1Slowly = filter.method(Add1Slowly)
        .timeout(3.minutes)
        .exponentialRetry(
          shouldRetryResponse = PossiblyRetryableExceptions,
          requestTimeout = 1.millis, // We purposely set a very small timeout so that we can test handling IndividualRequestTimeoutException
          start = 50.millis,
          multiplier = 2,
          retries = 3)
        .andThen(serviceIface.add1Slowly),
      add1AlwaysError = filter.method(Add1AlwaysError)
        .timeout(3.minutes)
        .exponentialRetry(
          shouldRetryResponse = PossiblyRetryableExceptions,
          requestTimeout = 1.minute,
          start = 50.millis,
          multiplier = 2,
          retries = 3)
        .andThen(serviceIface.add1AlwaysError))
  }
}
|
def shortest_path(graph, start_vertex, end_vertex):
    """Return the shortest-path distance from start_vertex to end_vertex.

    ``graph`` is an adjacency matrix: ``graph[u][v]`` is the non-negative
    weight of the edge u -> v, with ``0`` meaning "no edge".  Returns
    ``float("inf")`` when ``end_vertex`` is unreachable.
    """
    from collections import deque

    # Distance from the start vertex to every vertex; start is 0 away.
    distances = [float("inf")] * len(graph)
    distances[start_vertex] = 0

    # SPFA-style relaxation queue; deque gives O(1) pops
    # (the original list.pop(0) was O(n) per pop).
    queue = deque([start_vertex])
    while queue:
        curr_vertex = queue.popleft()
        for neighbour, weight in enumerate(graph[curr_vertex]):
            # A zero matrix entry means "no edge".  Skipping it fixes the
            # original bug where missing edges were relaxed as zero-weight
            # edges, marking unreachable vertices as distance 0.
            if not weight:
                continue
            dist = distances[curr_vertex] + weight
            if dist < distances[neighbour]:
                # Found a shorter route; record it and re-explore from there.
                distances[neighbour] = dist
                queue.append(neighbour)
    return distances[end_vertex]
#!/bin/bash
# This script is used to finish a ios test automation.
# (Fixed shebang: the original "#!bin/bash" is not an absolute path and
# would fail when the script is executed directly.)
compileresult=0
cd ..
cp -r iOS_SikuliTest.sikuli ../../../samples/Cpp/TestCpp/proj.mac
cd ../../../samples/Cpp/TestCpp/proj.mac
sdk_num=$(sed -n '1p' sdk_num.txt)
for ((i = 1; i <= sdk_num; i++)); do
    # NOTE(review): this always reads line 1 of directory_name.txt; if each
    # SDK has its own build directory, "${i}p" was probably intended — confirm.
    a=$(sed -n '1p' directory_name.txt)
    echo "$a"
    # Launch the app in the background, then drive it with the Sikuli script.
    "$(pwd)/build/${a}/TestCpp.app/Contents/MacOS/TestCpp" &
    "$SIKULI_HOME/sikuli-ide.sh" -r "$(pwd)/iOS_SikuliTest.sikuli"
done
#Sikuli Test success!
echo "Sikuli Test Success!"
#git checkout -f
#git clean -df -x
exit 0
#End
// Cache-first service worker for the FullClock app.
const CACHE_VERSION = 'v4';
const CACHE_NAME = `FullClock#${CACHE_VERSION}`;
const urlsToCache = [
  '.',
  'clock.js',
];

self.addEventListener('install', event => {
  // Pre-populate the versioned cache with the app shell.
  event.waitUntil(
    caches.open(CACHE_NAME).then(cache => cache.addAll(urlsToCache))
  );
});

self.addEventListener('activate', event => {
  // Drop caches left over from previous versions of this worker.
  const cleanup = async () => {
    const names = await caches.keys();
    const stale = names.filter(name => name.startsWith('FullClock#') && name !== CACHE_NAME);
    await Promise.all(stale.map(name => caches.delete(name)));
  };
  event.waitUntil(cleanup());
});

self.addEventListener('fetch', event => {
  // Serve from cache when possible; otherwise fetch and cache successful
  // same-origin responses.
  const respond = async () => {
    const cached = await caches.match(event.request);
    if (cached) return cached;

    const requestCopy = event.request.clone();
    const response = await fetch(event.request);
    if (!response || response.status !== 200 || response.type !== 'basic') {
      return response;
    }

    const responseCopy = response.clone();
    // Fire-and-forget: populate the cache without delaying the response.
    caches.open(CACHE_NAME).then(cache => cache.put(requestCopy, responseCopy));
    return response;
  };
  event.respondWith(respond());
});
|
package com.meterware.servletunit;
/********************************************************************************************************************
* $Id: XMLUtils.java 604 2004-02-26 17:36:47Z russgold $
*
* Copyright (c) 2004, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*******************************************************************************************************************/
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import java.util.ArrayList;
/**
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
**/
abstract class XMLUtils {
static String getChildNodeValue( Element root, String childNodeName ) throws SAXException {
return getChildNodeValue( root, childNodeName, null );
}
static String getChildNodeValue( Element root, String childNodeName, String defaultValue ) throws SAXException {
NodeList nl = root.getElementsByTagName( childNodeName );
if (nl.getLength() == 1) {
return getTextValue( nl.item( 0 ) ).trim();
} else if (defaultValue == null) {
throw new SAXException( "Node <" + root.getNodeName() + "> has no child named <" + childNodeName + ">" );
} else {
return defaultValue;
}
}
static String getTextValue( Node node ) throws SAXException {
Node textNode = node.getFirstChild();
if (textNode == null) return "";
if (textNode.getNodeType() != Node.TEXT_NODE) throw new SAXException( "No text value found for <" + node.getNodeName() + "> node" );
return textNode.getNodeValue();
}
static boolean hasChildNode( Element root, String childNodeName ) {
NodeList nl = root.getElementsByTagName( childNodeName );
return (nl.getLength() > 0);
}
}
|
#!/usr/bin/env bash
# This file:
#
# - Demos BASH3 Boilerplate (change this for your script)
#
# Usage:
#
# LOG_LEVEL=7 ./main.sh -f /tmp/x -d (change this for your script)
#
# Based on a template by BASH3 Boilerplate v2.4.1
# http://bash3boilerplate.sh/#authors
#
# The MIT License (MIT)
# Copyright (c) 2013 Kevin van Zonneveld and contributors
# You are not obligated to bundle the LICENSE file with your b3bp projects as long
# as you leave these references intact in the header comments of your source files.
# Exit on error. Append "|| true" if you expect an error.
# Exit on error. Append "|| true" if you expect an error.
set -o errexit
# Exit on error inside any functions or subshells.
set -o errtrace
# Do not allow use of undefined vars. Use ${VAR:-} to use an undefined VAR
set -o nounset
# Catch the error in case mysqldump fails (but gzip succeeds) in `mysqldump |gzip`
set -o pipefail
# Turn on traces, useful while debugging but commented out by default
# set -o xtrace

# Detect whether this file is executed directly or sourced as a library
# (an external __usage marks library use that still wants option parsing).
if [[ "${BASH_SOURCE[0]}" != "${0}" ]]; then
  __i_am_main_script="0" # false

  if [[ "${__usage+x}" ]]; then
    if [[ "${BASH_SOURCE[1]}" = "${0}" ]]; then
      __i_am_main_script="1" # true
    fi

    __b3bp_external_usage="true"
    __b3bp_tmp_source_idx=1
  fi
else
  __i_am_main_script="1" # true
  [[ "${__usage+x}" ]] && unset -v __usage
  [[ "${__helptext+x}" ]] && unset -v __helptext
fi

# Set magic variables for current file, directory, os, etc.
__dir="$(cd "$(dirname "${BASH_SOURCE[${__b3bp_tmp_source_idx:-0}]}")" && pwd)"
__file="${__dir}/$(basename "${BASH_SOURCE[${__b3bp_tmp_source_idx:-0}]}")"
__base="$(basename "${__file}" .sh)"
# shellcheck disable=SC2034,SC2015
__invocation="$(printf %q "${__file}")$( (($#)) && printf ' %q' "$@" || true)"

# Define the environment variables (and their defaults) that this script depends on
LOG_LEVEL="${LOG_LEVEL:-6}" # 7 = debug -> 0 = emergency
NO_COLOR="${NO_COLOR:-}"    # true = disable color. otherwise autodetected
### Functions
##############################################################################

# Print each line of the message to stderr with a UTC timestamp and the log
# level, colorized when stderr is attached to a recognized terminal.
function __b3bp_log () {
  local log_level="${1}"
  shift

  # shellcheck disable=SC2034
  local color_debug="\\x1b[35m"
  # shellcheck disable=SC2034
  local color_info="\\x1b[32m"
  # shellcheck disable=SC2034
  local color_notice="\\x1b[34m"
  # shellcheck disable=SC2034
  local color_warning="\\x1b[33m"
  # shellcheck disable=SC2034
  local color_error="\\x1b[31m"
  # shellcheck disable=SC2034
  local color_critical="\\x1b[1;31m"
  # shellcheck disable=SC2034
  local color_alert="\\x1b[1;37;41m"
  # shellcheck disable=SC2034
  local color_emergency="\\x1b[1;4;5;37;41m"

  # Indirect lookup of the per-level color; unknown levels render as errors.
  local colorvar="color_${log_level}"

  local color="${!colorvar:-${color_error}}"
  local color_reset="\\x1b[0m"

  if [[ "${NO_COLOR:-}" = "true" ]] || { [[ "${TERM:-}" != "xterm"* ]] && [[ "${TERM:-}" != "screen"* ]]; } || [[ ! -t 2 ]]; then
    if [[ "${NO_COLOR:-}" != "false" ]]; then
      # Don't use colors on pipes or non-recognized terminals
      color=""; color_reset=""
    fi
  fi

  # all remaining arguments are to be printed
  local log_line=""

  while IFS=$'\n' read -r log_line; do
    echo -e "$(date -u +"%Y-%m-%d %H:%M:%S UTC") ${color}$(printf "[%9s]" "${log_level}")${color_reset} ${log_line}" 1>&2
  done <<< "${@:-}"
}
# Leveled logging helpers. Each checks LOG_LEVEL and ends with `true` so a
# suppressed message does not trip `set -o errexit`. emergency always exits.
function emergency () {                                __b3bp_log emergency "${@}"; exit 1; }
function alert ()     { [[ "${LOG_LEVEL:-0}" -ge 1 ]] && __b3bp_log alert "${@}"; true; }
function critical ()  { [[ "${LOG_LEVEL:-0}" -ge 2 ]] && __b3bp_log critical "${@}"; true; }
function error ()     { [[ "${LOG_LEVEL:-0}" -ge 3 ]] && __b3bp_log error "${@}"; true; }
function warning ()   { [[ "${LOG_LEVEL:-0}" -ge 4 ]] && __b3bp_log warning "${@}"; true; }
function notice ()    { [[ "${LOG_LEVEL:-0}" -ge 5 ]] && __b3bp_log notice "${@}"; true; }
function info ()      { [[ "${LOG_LEVEL:-0}" -ge 6 ]] && __b3bp_log info "${@}"; true; }
function debug ()     { [[ "${LOG_LEVEL:-0}" -ge 7 ]] && __b3bp_log debug "${@}"; true; }

# Print a message plus the usage/help text to stderr, then exit 1.
function help () {
  echo "" 1>&2
  echo " ${*}" 1>&2
  echo "" 1>&2
  echo "  ${__usage:-No usage available}" 1>&2
  echo "" 1>&2

  if [[ "${__helptext:-}" ]]; then
    echo " ${__helptext}" 1>&2
    echo "" 1>&2
  fi

  exit 1
}
### Parse commandline options
##############################################################################

# Commandline options. This defines the usage page, and is used to parse cli
# opts & defaults from. The parsing is unforgiving so be precise in your syntax
# - A short option must be preset for every long option; but every short option
#   need not have a long option
# - `--` is respected as the separator between options and arguments
# - We do not bash-expand defaults, so setting '~/app' as a default will not resolve to ${HOME}.
#   you can use bash variables to work around this (so use ${HOME} instead)

# shellcheck disable=SC2015
[[ "${__usage+x}" ]] || read -r -d '' __usage <<-'EOF' || true # exits non-zero when EOF encountered
  -t --tag [arg] Image name to use. Required.
  -o --out [arg] Output directory to save report. Default="."
  -c --cmd [arg] Testing command. Default="python -m pytest --html pytest-report.html -m 'not slowtest' -x"
  -v Enable verbose mode, print script as it is executed
  -d --debug Enables debug mode
  -h --help This page
  -n --no-color Disable color output
EOF

# shellcheck disable=SC2015
[[ "${__helptext+x}" ]] || read -r -d '' __helptext <<-'EOF' || true # exits non-zero when EOF encountered
 This script dockerized tests for caldera.
EOF

# Translate usage string -> getopts arguments, and set $arg_<flag> defaults
# Each "-x --long [arg] ..." line yields a getopts spec entry plus bookkeeping
# vars (__b3bp_tmp_has_arg_x, __b3bp_tmp_is_array_x, arg_x default).
while read -r __b3bp_tmp_line; do
  if [[ "${__b3bp_tmp_line}" =~ ^- ]]; then
    # fetch single character version of option string
    __b3bp_tmp_opt="${__b3bp_tmp_line%% *}"
    __b3bp_tmp_opt="${__b3bp_tmp_opt:1}"

    # fetch long version if present
    __b3bp_tmp_long_opt=""

    if [[ "${__b3bp_tmp_line}" = *"--"* ]]; then
      __b3bp_tmp_long_opt="${__b3bp_tmp_line#*--}"
      __b3bp_tmp_long_opt="${__b3bp_tmp_long_opt%% *}"
    fi

    # map opt long name to+from opt short name
    printf -v "__b3bp_tmp_opt_long2short_${__b3bp_tmp_long_opt//-/_}" '%s' "${__b3bp_tmp_opt}"
    printf -v "__b3bp_tmp_opt_short2long_${__b3bp_tmp_opt}" '%s' "${__b3bp_tmp_long_opt//-/_}"

    # check if option takes an argument ([arg] = optional, {arg} = required)
    if [[ "${__b3bp_tmp_line}" =~ \[.*\] ]]; then
      __b3bp_tmp_opt="${__b3bp_tmp_opt}:" # add : if opt has arg
      __b3bp_tmp_init=""  # it has an arg. init with ""
      printf -v "__b3bp_tmp_has_arg_${__b3bp_tmp_opt:0:1}" '%s' "1"
    elif [[ "${__b3bp_tmp_line}" =~ \{.*\} ]]; then
      __b3bp_tmp_opt="${__b3bp_tmp_opt}:" # add : if opt has arg
      __b3bp_tmp_init=""  # it has an arg. init with ""
      # remember that this option requires an argument
      printf -v "__b3bp_tmp_has_arg_${__b3bp_tmp_opt:0:1}" '%s' "2"
    else
      __b3bp_tmp_init="0" # it's a flag. init with 0
      printf -v "__b3bp_tmp_has_arg_${__b3bp_tmp_opt:0:1}" '%s' "0"
    fi
    __b3bp_tmp_opts="${__b3bp_tmp_opts:-}${__b3bp_tmp_opt}"

    if [[ "${__b3bp_tmp_line}" =~ ^Can\ be\ repeated\. ]] || [[ "${__b3bp_tmp_line}" =~ \.\ *Can\ be\ repeated\. ]]; then
      # remember that this option can be repeated
      printf -v "__b3bp_tmp_is_array_${__b3bp_tmp_opt:0:1}" '%s' "1"
    else
      printf -v "__b3bp_tmp_is_array_${__b3bp_tmp_opt:0:1}" '%s' "0"
    fi
  fi

  [[ "${__b3bp_tmp_opt:-}" ]] || continue

  if [[ "${__b3bp_tmp_line}" =~ ^Default= ]] || [[ "${__b3bp_tmp_line}" =~ \.\ *Default= ]]; then
    # ignore default value if option does not have an argument
    __b3bp_tmp_varname="__b3bp_tmp_has_arg_${__b3bp_tmp_opt:0:1}"

    if [[ "${!__b3bp_tmp_varname}" != "0" ]]; then
      # take default
      __b3bp_tmp_init="${__b3bp_tmp_line##*Default=}"
      # strip double quotes from default argument
      __b3bp_tmp_re='^"(.*)"$'
      if [[ "${__b3bp_tmp_init}" =~ ${__b3bp_tmp_re} ]]; then
        __b3bp_tmp_init="${BASH_REMATCH[1]}"
      else
        # strip single quotes from default argument
        __b3bp_tmp_re="^'(.*)'$"
        if [[ "${__b3bp_tmp_init}" =~ ${__b3bp_tmp_re} ]]; then
          __b3bp_tmp_init="${BASH_REMATCH[1]}"
        fi
      fi
    fi
  fi

  if [[ "${__b3bp_tmp_line}" =~ ^Required\. ]] || [[ "${__b3bp_tmp_line}" =~ \.\ *Required\. ]]; then
    # remember that this option requires an argument
    printf -v "__b3bp_tmp_has_arg_${__b3bp_tmp_opt:0:1}" '%s' "2"
  fi

  # Init var with value unless it is an array / a repeatable
  __b3bp_tmp_varname="__b3bp_tmp_is_array_${__b3bp_tmp_opt:0:1}"
  [[ "${!__b3bp_tmp_varname}" = "0" ]] && printf -v "arg_${__b3bp_tmp_opt:0:1}" '%s' "${__b3bp_tmp_init}"
done <<< "${__usage:-}"
# run getopts only if options were specified in __usage
if [[ "${__b3bp_tmp_opts:-}" ]]; then
  # Allow long options like --this
  __b3bp_tmp_opts="${__b3bp_tmp_opts}-:"

  # Reset in case getopts has been used previously in the shell.
  OPTIND=1

  # start parsing command line
  set +o nounset # unexpected arguments will cause unbound variables
                 # to be dereferenced
  # Overwrite $arg_<flag> defaults with the actual CLI options
  while getopts "${__b3bp_tmp_opts}" __b3bp_tmp_opt; do
    [[ "${__b3bp_tmp_opt}" = "?" ]] && help "Invalid use of script: ${*} "

    if [[ "${__b3bp_tmp_opt}" = "-" ]]; then
      # OPTARG is long-option-name or long-option=value
      if [[ "${OPTARG}" =~ .*=.* ]]; then
        # --key=value format
        __b3bp_tmp_long_opt=${OPTARG/=*/}
        # Set opt to the short option corresponding to the long option
        __b3bp_tmp_varname="__b3bp_tmp_opt_long2short_${__b3bp_tmp_long_opt//-/_}"
        printf -v "__b3bp_tmp_opt" '%s' "${!__b3bp_tmp_varname}"
        OPTARG=${OPTARG#*=}
      else
        # --key value format
        # Map long name to short version of option
        __b3bp_tmp_varname="__b3bp_tmp_opt_long2short_${OPTARG//-/_}"
        printf -v "__b3bp_tmp_opt" '%s' "${!__b3bp_tmp_varname}"
        # Only assign OPTARG if option takes an argument
        __b3bp_tmp_varname="__b3bp_tmp_has_arg_${__b3bp_tmp_opt}"
        __b3bp_tmp_varvalue="${!__b3bp_tmp_varname}"
        [[ "${__b3bp_tmp_varvalue}" != "0" ]] && __b3bp_tmp_varvalue="1"
        printf -v "OPTARG" '%s' "${@:OPTIND:${__b3bp_tmp_varvalue}}"
        # shift over the argument if argument is expected
        ((OPTIND+=__b3bp_tmp_varvalue))
      fi
      # we have set opt/OPTARG to the short value and the argument as OPTARG if it exists
    fi

    __b3bp_tmp_value="${OPTARG}"

    __b3bp_tmp_varname="__b3bp_tmp_is_array_${__b3bp_tmp_opt:0:1}"
    if [[ "${!__b3bp_tmp_varname}" != "0" ]]; then
      # repeatables
      # shellcheck disable=SC2016
      if [[ -z "${OPTARG}" ]]; then
        # repeatable flags, they increment
        __b3bp_tmp_varname="arg_${__b3bp_tmp_opt:0:1}"
        # NOTE(review): ${__b3bp_tmp_default} is not assigned on this branch;
        # with nounset relaxed above it expands empty in the debug line.
        debug "cli arg ${__b3bp_tmp_varname} = (${__b3bp_tmp_default}) -> ${!__b3bp_tmp_varname}"
        __b3bp_tmp_value=$((${!__b3bp_tmp_varname} + 1))
        printf -v "${__b3bp_tmp_varname}" '%s' "${__b3bp_tmp_value}"
      else
        # repeatable args, they get appended to an array
        __b3bp_tmp_varname="arg_${__b3bp_tmp_opt:0:1}[@]"
        debug "cli arg ${__b3bp_tmp_varname} append ${__b3bp_tmp_value}"
        declare -a "${__b3bp_tmp_varname}"='("${!__b3bp_tmp_varname}" "${__b3bp_tmp_value}")'
      fi
    else
      # non-repeatables
      __b3bp_tmp_varname="arg_${__b3bp_tmp_opt:0:1}"
      __b3bp_tmp_default="${!__b3bp_tmp_varname}"

      if [[ -z "${OPTARG}" ]]; then
        __b3bp_tmp_value=$((__b3bp_tmp_default + 1))
      fi

      printf -v "${__b3bp_tmp_varname}" '%s' "${__b3bp_tmp_value}"

      debug "cli arg ${__b3bp_tmp_varname} = (${__b3bp_tmp_default}) -> ${!__b3bp_tmp_varname}"
    fi
  done
  set -o nounset # no more unbound variable references expected

  shift $((OPTIND-1))

  if [[ "${1:-}" = "--" ]] ; then
    shift
  fi
fi
### Automatic validation of required option arguments
##############################################################################

for __b3bp_tmp_varname in ${!__b3bp_tmp_has_arg_*}; do
  # validate only options which required an argument
  [[ "${!__b3bp_tmp_varname}" = "2" ]] || continue

  __b3bp_tmp_opt_short="${__b3bp_tmp_varname##*_}"
  __b3bp_tmp_varname="arg_${__b3bp_tmp_opt_short}"
  [[ "${!__b3bp_tmp_varname}" ]] && continue

  # Resolve the long name (if any) purely for the error message.
  __b3bp_tmp_varname="__b3bp_tmp_opt_short2long_${__b3bp_tmp_opt_short}"
  printf -v "__b3bp_tmp_opt_long" '%s' "${!__b3bp_tmp_varname}"
  [[ "${__b3bp_tmp_opt_long:-}" ]] && __b3bp_tmp_opt_long=" (--${__b3bp_tmp_opt_long//_/-})"

  help "Option -${__b3bp_tmp_opt_short}${__b3bp_tmp_opt_long:-} requires an argument"
done

### Cleanup Environment variables
##############################################################################

for __tmp_varname in ${!__b3bp_tmp_*}; do
  unset -v "${__tmp_varname}"
done

unset -v __tmp_varname

### Externally supplied __usage. Nothing else to do here
##############################################################################
if [[ "${__b3bp_external_usage:-}" = "true" ]]; then
  unset -v __b3bp_external_usage
  # `return` is only valid because this branch is reached when sourced.
  return
fi

### Signal trapping and backtracing
##############################################################################

function __b3bp_cleanup_before_exit () {
  info "Cleaning up. Done"
}
trap __b3bp_cleanup_before_exit EXIT

# requires `set -o errtrace`
__b3bp_err_report() {
  local error_code=${?}
  error "Error in ${__file} in function ${1} on line ${2}"
  exit ${error_code}
}
# Uncomment the following line for always providing an error backtrace
# trap '__b3bp_err_report "${FUNCNAME:-.}" ${LINENO}' ERR
### Command-line argument switches (like -d for debugmode, -h for showing helppage)
##############################################################################

# debug mode
if [[ "${arg_d:?}" = "1" ]]; then
  set -o xtrace
  PS4='+(${BASH_SOURCE}:${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
  LOG_LEVEL="7"
  # Enable error backtracing
  trap '__b3bp_err_report "${FUNCNAME:-.}" ${LINENO}' ERR
fi

# verbose mode
if [[ "${arg_v:?}" = "1" ]]; then
  set -o verbose
fi

# no color mode
if [[ "${arg_n:?}" = "1" ]]; then
  NO_COLOR="true"
fi

# help mode
if [[ "${arg_h:?}" = "1" ]]; then
  # Help exits with code 1
  help "Help using ${0}"
fi

### Validation. Error out if the things required for your script are not present
##############################################################################

#[[ "${arg_f:-}" ]] || help "Setting a filename with -f or --file is required"
[[ "${LOG_LEVEL:-}" ]] || emergency "Cannot continue without LOG_LEVEL. "
### Runtime
##############################################################################

TAG="${arg_t}"
DEST="${arg_o}"
CMD="${arg_c}"

info "TAG: ${TAG}"
info "DEST: ${DEST}"
info "CMD: ${CMD}"

# Quote expansions so tags/paths/commands containing whitespace or glob
# characters do not word-split (the original left them unquoted).
docker build . -f docker/cpu/Dockerfile -t "${TAG}"

# Reserve a unique name for docker's --cidfile (docker refuses to overwrite
# an existing file): create the temp file, then remove it so only the
# reserved name remains.
tmpfile=$(mktemp /tmp/caldera-dock-tests.XXXXXX)
exec 3>"$tmpfile"
rm "$tmpfile"

info "Running tests"
info "IMAGE ID: ${TAG}"
docker run --cidfile "$tmpfile" "${TAG}" /bin/bash -c "${CMD}"
CID=$(cat "$tmpfile")
info "CONTAINER ID: $CID"
#docker cp $CID:/src/pytest-report.html $DEST
|
import { AfterContentInit, AfterViewInit, Component, ElementRef, EventEmitter, forwardRef, Input, OnInit, Output, ViewChild, ViewEncapsulation } from '@angular/core';
import { Tag } from '../../dto/Tag';
import { TagService } from '../../service/tag.service';
import { AbstractControl, ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
@Component({
  selector: 'app-tag-input',
  templateUrl: './tag-input.component.html',
  styleUrls: ['./tag-input.component.css'],
  encapsulation: ViewEncapsulation.None,
  providers: [
    {
      // Registers this component as a form control (usable with formControlName).
      provide: NG_VALUE_ACCESSOR,
      useExisting: forwardRef(() => TagInputComponent),
      multi: true,
    }]
})
export class TagInputComponent implements OnInit, ControlValueAccessor {
  /** Emits the currently selected tags whenever the selection changes. */
  @Output() selectedTags = new EventEmitter<Tag[]>();

  /** Optional label rendered by the template. */
  @Input() label?: string;

  /** CSS class for the inner input; overridable via the `inputStyleClass` input. */
  inputStyleClass = "FormInputTag";

  /** Currently selected tags. */
  tags: Tag[];
  /** Autocomplete suggestions for the current query. */
  results: Tag[];
  /** Full tag catalogue loaded from the service. */
  allTags: Tag[];

  private tagService: TagService;

  // ControlValueAccessor callbacks. Defaulting them to no-ops fixes a crash
  // when the component is used outside an Angular form: the original code
  // invoked `this.onChange(...)` even though registerOnChange never ran.
  private onChange: (tags: Tag[]) => void = () => {};
  private onTouched: () => void = () => {};

  constructor(tagService: TagService) {
    this.tagService = tagService;
  }

  @Input("inputStyleClass") set setInputStyleClass(clazz: string) {
    if (clazz) {
      this.inputStyleClass = clazz;
    }
  }

  ngOnInit() {
    // Refresh the catalogue and keep `allTags`/`results` in sync with it.
    this.tagService.reload();
    this.tagService.allTags.subscribe(tags => {
      this.allTags = tags;
      this.results = this.allTags;
    });
  }

  /** Pre-populates the selection (e.g. when editing an existing entity). */
  @Input()
  set originalTags(tags: Tag[] | undefined) {
    if (tags) {
      this.tags = tags;
    }
  }

  /** Filters suggestions by case-insensitive substring match on the query. */
  search(event: any) {
    const prefix = event.query;
    if (prefix === '' || prefix.trim() === '') {
      this.results = this.allTags.slice();
    } else {
      this.results = this.allTags.filter(tag => tag.name.toLocaleLowerCase().indexOf(prefix.toLocaleLowerCase()) >= 0);
      // Always offer the raw query as a brand-new tag.
      this.results.push(new Tag(prefix));
    }
  }

  onUnselect(text: string) {
    this.updateChanges();
  }

  /** Propagates the current selection to the form and to `selectedTags`. */
  private updateChanges() {
    this.onChange(this.tags);
    this.selectedTags.emit(this.tags);
  }

  onSelect(text: any) {
    this.updateChanges();
  }

  onKeyUp(event: any) {
    if (event.key && event.key === 'Escape') {
      this.reset();
    }
  }

  /** Clears the selection and notifies both the form and listeners. */
  reset() {
    this.tags = [];
    this.selectedTags.emit([]);
    this.onChange([]);
  }

  registerOnChange(fn: any): void {
    this.onChange = fn;
  }

  registerOnTouched(fn: any): void {
    this.onTouched = fn;
  }

  setDisabledState(isDisabled: boolean): void {
    // Intentionally empty: disabling is not supported by this control.
  }

  writeValue(obj: any): void {
    // Guard against null/undefined: Angular passes null on form resets,
    // which previously left `tags` null.
    this.tags = obj ?? [];
  }
}
|
const { session } = require('passport');
const passport = require('passport');
const UsuarioController = require('../controllers/PostController');
const middlewaresAutenticacao = require('../libs/middlewares-autenticacao');
const cors = require('cors');
module.exports = (app) => {
app.use(cors({ origin: 'http://localhost:3001' }));
app.get('/posts', UsuarioController.findAll);
app.get('/posts/:id', UsuarioController.findByID);
app.post('/posts', UsuarioController.criar);
app.put('/posts/:id', UsuarioController.atualizar);
app.delete('/posts/:id', UsuarioController.apagar);
};
|
package file
import (
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/pkg/errors"
"github.com/projectdiscovery/fileutil"
"github.com/projectdiscovery/folderutil"
"github.com/projectdiscovery/gologger"
)
// getInputPaths parses the specified input paths and returns a compiled
// list of finished absolute paths to the files evaluating any allowlist, denylist,
// glob, file or folders, etc.
//
// Every accepted path is reported exactly once through callback; the
// processed map deduplicates across the glob/file/directory strategies.
func (request *Request) getInputPaths(target string, callback func(string)) error {
	processed := make(map[string]struct{})

	// Template input includes a wildcard
	if strings.Contains(target, "*") && !request.NoRecursive {
		if err := request.findGlobPathMatches(target, processed, callback); err != nil {
			return errors.Wrap(err, "could not find glob matches")
		}
		return nil
	}

	// Template input is either a file or a directory
	file, err := request.findFileMatches(target, processed, callback)
	if err != nil {
		return errors.Wrap(err, "could not find file")
	}
	if file {
		return nil
	}
	if request.NoRecursive {
		return nil // we don't process dirs in no-recursive mode
	}

	// Recursively walk down the Templates directory and run all
	// the template file checks
	if err := request.findDirectoryMatches(target, processed, callback); err != nil {
		return errors.Wrap(err, "could not find directory matches")
	}
	return nil
}
// findGlobPathMatches returns the matched files from a glob path
// (filepath.Glob semantics), validating each match against the
// allow/deny rules before reporting it.
func (request *Request) findGlobPathMatches(absPath string, processed map[string]struct{}, callback func(string)) error {
	matches, err := filepath.Glob(absPath)
	if err != nil {
		return errors.Errorf("wildcard found, but unable to glob: %s\n", err)
	}
	for _, match := range matches {
		if !request.validatePath(absPath, match, false) {
			continue
		}
		// Skip paths already reported by another strategy.
		if _, ok := processed[match]; !ok {
			processed[match] = struct{}{}
			callback(match)
		}
	}
	return nil
}
// findFileMatches finds if a path is an absolute file. If the path
// is a file, it returns true otherwise false with no errors.
func (request *Request) findFileMatches(absPath string, processed map[string]struct{}, callback func(string)) (bool, error) {
	info, err := os.Stat(absPath)
	if err != nil {
		return false, err
	}
	// Directories, symlinks, devices, etc. are not handled here.
	if !info.Mode().IsRegular() {
		return false, nil
	}
	if _, ok := processed[absPath]; !ok {
		if !request.validatePath(absPath, absPath, false) {
			return false, nil
		}
		processed[absPath] = struct{}{}
		callback(absPath)
	}
	return true, nil
}
// findDirectoryMatches finds matches for templates from a directory,
// walking it recursively. Walk errors and sub-directories are skipped
// silently; only validated regular entries reach the callback.
func (request *Request) findDirectoryMatches(absPath string, processed map[string]struct{}, callback func(string)) error {
	err := filepath.WalkDir(
		absPath,
		func(path string, d fs.DirEntry, err error) error {
			// continue on errors
			if err != nil {
				return nil
			}
			if d.IsDir() {
				return nil
			}
			if !request.validatePath(absPath, path, false) {
				return nil
			}
			if _, ok := processed[path]; !ok {
				callback(path)
				processed[path] = struct{}{}
			}
			return nil
		},
	)
	return err
}
// validatePath validates a file path for blacklist and whitelist options.
// Order of checks: extension allow-list, allowed mime types, deny-list
// rules, denied mime types. inArchive skips the on-disk mime sniffing.
func (request *Request) validatePath(absPath, item string, inArchive bool) bool {
	extension := filepath.Ext(item)
	// extension check
	if len(request.extensions) > 0 {
		if _, ok := request.extensions[extension]; ok {
			return true
		} else if !request.allExtensions {
			return false
		}
	}

	var (
		fileExists bool
		dataChunk  []byte
	)
	if !inArchive && request.MimeType {
		// mime type check
		// read first bytes to infer runtime type
		fileExists = fileutil.FileExists(item)
		if fileExists {
			dataChunk, _ = readChunk(item)
			if len(request.mimeTypesChecks) > 0 && matchAnyMimeTypes(dataChunk, request.mimeTypesChecks) {
				return true
			}
		}
	}

	if matchingRule, ok := request.isInDenyList(absPath, item); ok {
		gologger.Verbose().Msgf("Ignoring path %s due to denylist item %s\n", item, matchingRule)
		return false
	}

	// denied mime type checks (reuses the chunk read above)
	if !inArchive && request.MimeType && fileExists {
		if len(request.denyMimeTypesChecks) > 0 && matchAnyMimeTypes(dataChunk, request.denyMimeTypesChecks) {
			return false
		}
	}
	return true
}
// isInDenyList reports whether item (rooted at absPath) matches any
// deny-list rule, returning the matching rule text when found. Rules can
// match by extension, full path, bare filename, relative path (with or
// without filename), parent directory, or any progressive path prefix.
func (request *Request) isInDenyList(absPath, item string) (string, bool) {
	extension := filepath.Ext(item)
	// check for possible deny rules
	// - extension is in deny list
	if _, ok := request.denyList[extension]; ok {
		return extension, true
	}
	// - full path is in deny list
	if _, ok := request.denyList[item]; ok {
		return item, true
	}

	// file is in a forbidden subdirectory
	filename := filepath.Base(item)
	fullPathWithoutFilename := strings.TrimSuffix(item, filename)
	relativePathWithFilename := strings.TrimPrefix(item, absPath)
	relativePath := strings.TrimSuffix(relativePathWithFilename, filename)

	// - filename is in deny list
	if _, ok := request.denyList[filename]; ok {
		return filename, true
	}

	// - relative path is in deny list
	if _, ok := request.denyList[relativePath]; ok {
		return relativePath, true
	}
	// relative path + filename are in the forbidden list
	if _, ok := request.denyList[relativePathWithFilename]; ok {
		return relativePathWithFilename, true
	}

	// root path + relative path are in the forbidden list
	if _, ok := request.denyList[fullPathWithoutFilename]; ok {
		return fullPathWithoutFilename, true
	}

	// check any progressive combined part of the relative and absolute path with filename for matches within rules prefixes
	if pathTreeItem, ok := request.isAnyChunkInDenyList(relativePath, false); ok {
		return pathTreeItem, true
	}
	if pathTreeItem, ok := request.isAnyChunkInDenyList(item, true); ok {
		return pathTreeItem, true
	}

	return "", false
}
func readChunk(fileName string) ([]byte, error) {
r, err := os.Open(fileName)
if err != nil {
return nil, err
}
defer r.Close()
var buff [1024]byte
if _, err = io.ReadFull(r, buff[:]); err != nil {
return nil, err
}
return buff[:], nil
}
// isAnyChunkInDenyList reports whether any progressively longer prefix of
// path matches a deny-list entry. When splitWithUtils is true the prefix
// candidates come from folderutil.PathInfo; otherwise the path is split on
// os.PathSeparator and rejoined piece by piece. In the manual branch the
// first candidate is the empty string (filepath.Join of zero elements) and
// the full path itself is never a candidate (pathTree[:i] excludes the last
// element) — the caller checks the full path separately.
func (request *Request) isAnyChunkInDenyList(path string, splitWithUtils bool) (string, bool) {
	var paths []string
	if splitWithUtils {
		// error deliberately ignored: paths just stays empty on failure
		pathInfo, _ := folderutil.NewPathInfo(path)
		paths, _ = pathInfo.Paths()
	} else {
		pathTree := strings.Split(path, string(os.PathSeparator))
		for i := range pathTree {
			paths = append(paths, filepath.Join(pathTree[:i]...))
		}
	}
	for _, pathTreeItem := range paths {
		if _, ok := request.denyList[pathTreeItem]; ok {
			return pathTreeItem, true
		}
	}
	return "", false
}
|
#!/usr/bin/env bash
set -e

# usage function: prints CLI help. The here-doc body is left-aligned because
# "<<EOF" (not "<<-EOF") requires the terminator at column zero and the text
# is emitted verbatim.
usage() {
    cat <<EOF
Usage: explain_compress_regex.sh [-h|--help] regex_model_checkpoint
Compress regex model(s) using a simplistic compression algorithm
Optional arguments:
-h, --help Show this help message and exit
Required arguments:
regex_model_checkpoint <glob_path> Path to regex model checkpoint(s)
with '.pt' extension
EOF
}

# check for help: scans every argument for -h/--help and exits 0 after usage
check_help() {
    for arg; do
        if [ "$arg" == "--help" ] || [ "$arg" == "-h" ]; then
            usage
            exit 0
        fi
    done
}

# define function: invokes the Python compression entry point with the given
# checkpoint path (only the first positional argument is used)
explain_compress_regex() {
    local regex_model_checkpoint
    regex_model_checkpoint="$1"
    python3 -m src.explain_compress_regex \
        --regex-model-checkpoint "$regex_model_checkpoint"
}

# execute function
check_help "$@"
explain_compress_regex "$@"
|
# Import the existing vSphere instance with ID 1 into the Terraform state
# under the resource address morpheus_vsphere_instance.tf_example_vsphere_instance
terraform import morpheus_vsphere_instance.tf_example_vsphere_instance 1
|
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*eslint-disable */
function contentTypeLibrary () {
    var TYPE_SEPARATOR = '/';
    var LIST_SEPARATOR = ',';
    // Recognized top-level media types, plus the '*' wildcard.
    var KNOWN_TOP_LEVEL_TYPES = [
        'application', 'audio', 'example', 'image', 'message',
        'model', 'multipart', 'text', 'video', '*'
    ];

    // Public factory: builds a matcher for the given accepted types.
    function Factory (contentTypes) {
        return new ContentType(contentTypes);
    }

    // Constructor: normalizes and stores the accepted content-type list.
    function ContentType (contentTypes) {
        this.setAccepted(contentTypes);
    }

    /**
     * Truthy when the candidate (a "type/subtype" string or a File) matches
     * one of the accepted entries; '*' in an accepted entry acts as a
     * wildcard on either side of the separator.
     */
    ContentType.prototype.accept = function (candidate) {
        var resolved = false;
        if (_isWellFormed(candidate)) {
            resolved = candidate;
        } else if (_isFile(candidate)) {
            resolved = candidate.type;
        }
        return resolved && this._contentTypes.some(function (accepted) {
            return _matches(resolved, accepted);
        });
    };

    /**
     * Replaces the accepted list. Takes an array of "type/subtype" strings
     * or one comma-separated string; anything else clears the list.
     * Malformed entries are silently dropped. Returns this for chaining.
     */
    ContentType.prototype.setAccepted = function (contentTypes) {
        var candidates = null;
        if (Array.isArray(contentTypes)) {
            candidates = contentTypes;
        } else if ($A.util.isString(contentTypes)) {
            candidates = contentTypes.split(LIST_SEPARATOR);
        }
        this._contentTypes = candidates
            ? candidates.filter(function (entry) { return _isWellFormed(entry); })
            : [];
        return this;
    };

    // Well-formed = a string with exactly one separator whose top-level type
    // (compared case-insensitively) is a known media type.
    function _isWellFormed (contentType) {
        if (!$A.util.isString(contentType)) {
            return false;
        }
        var parts = contentType.split(TYPE_SEPARATOR);
        return parts.length === 2
            && KNOWN_TOP_LEVEL_TYPES.indexOf(parts[0].toLowerCase()) !== -1;
    }

    // True when candidate falls inside accepted, honoring '*' wildcards.
    function _matches (candidate, accepted) {
        var a = _split(candidate);
        var b = _split(accepted);
        return (b.type === '*' || a.type === b.type)
            && (b.subType === '*' || a.subType === b.subType);
    }

    function _isFile (candidate) {
        return candidate instanceof File;
    }

    function _split (contentType) {
        var parts = contentType.split(TYPE_SEPARATOR);
        return { type: parts[0], subType: parts[1] };
    }

    return Factory;
} /*eslint-enable */
#! /bin/bash
# Loads the Fixer API key from ./.env and selects NODE_ENV.
# NOTE(review): this script exports variables and uses `return`, so it is
# presumably meant to be *sourced* (`. ./script.sh`), not executed — a bare
# `return` outside a function/sourced context is an error. Confirm usage.
if [ ! -f .env ]; then
    echo "No '.env' file found!"
    return 1
fi
echo "Setting Fixer API Key"
# .env is expected to contain only the key itself
export FIXER_KEY=$(<./.env)
if [ "$1" != "" ]; then
    echo "Setting environment to $1"
    export NODE_ENV=$1
else
    echo "Setting environment to dev"
    export NODE_ENV=dev
fi
#!/bin/bash
# Demo for "7.3.1 Specifying environment variables in a container definition":
# applies the manifests in the current directory, inspects the env vars of
# pod/fortune2s, curls the pod twice to show the generated page, and cleans up.

# Shared demo helpers; presumably defines $HR/$HR_TOP separators and the
# `enter` pause helper — TODO confirm against ~/src/common/setup.sh
. ~/src/common/setup.sh
FULLPATH=$(pwd)
echo "7.3.1 Specifying environment variables in a container definition"
echo $HR_TOP
echo "kubectl apply -f $FULLPATH"
kubectl apply -f $FULLPATH
sleep 1
echo $HR

# Show the manifest being demonstrated
value=$(<set731-1-fortune-pod-args.yaml)
echo "$value"
enter

echo "kubectl wait --for=condition=Ready=True pod/fortune2s -n=chp07-set731 --timeout=20s"
kubectl wait --for=condition=Ready=True pod/fortune2s -n=chp07-set731 --timeout=20s
echo ""
echo "kubectl get pods -n=chp07-set731 --show-labels"
kubectl get pods -n=chp07-set731 --show-labels
echo $HR

# Pull pod/container details out of the API object with jsonpath
POD_IP=$(kubectl get pod/fortune2s -n=chp07-set731 -o jsonpath='{.status.podIP}')
POD_NAME=$(kubectl get pod/fortune2s -n=chp07-set731 -o jsonpath='{.spec.containers[0].name}')
echo "Container with Env Var: $POD_NAME"
POD_ENV_NAME=$(kubectl get pod/fortune2s -n=chp07-set731 -o jsonpath='{.spec.containers[0].env[0].name}')
echo "Env Var Name: $POD_ENV_NAME"
POD_ENV_VALUE=$(kubectl get pod/fortune2s -n=chp07-set731 -o jsonpath='{.spec.containers[0].env[0].value}')
echo "Env Var Value: $POD_ENV_VALUE"
echo $HR

echo "kubectl exec -it pod/fortune2s -c=html-generator -n=chp07-set731 -- printenv"
kubectl exec -it pod/fortune2s -c=html-generator -n=chp07-set731 -- printenv
echo $HR

echo "Container script is generated every 1 second"
echo "curl $POD_IP"
curl $POD_IP
echo ""
sleep 1
echo "curl $POD_IP"
curl $POD_IP
echo $HR

echo "kubectl delete -f $FULLPATH"
kubectl delete -f $FULLPATH
|
import java.util.Date;
/**
 * A consultation/appointment with a start and an end timestamp. Finished
 * consultations are recorded into the attendance history
 * ({@code HistoricoAtendimento}).
 *
 * NOTE(review): the java.util.Date mutators used below are deprecated;
 * java.time would be the modern replacement, but the setters are kept as-is
 * for compatibility with existing callers.
 */
public class Consulta extends Controle_Atendimento {
    // Instance state (original comment: "INSTANCIAS")
    Date data_inicio = new Date();
    Date data_fim = new Date();
    HistoricoAtendimento ha = new HistoricoAtendimento();

    // Getters and setters (original comment: "GETTERS E SETTERS")

    /**
     * Sets the start timestamp.
     *
     * @param d  day of month
     * @param m  month; Date.setMonth is zero-based (0 = January), so callers
     *           presumably pass a zero-based value — TODO confirm
     * @param y  full calendar year (converted here to the 1900-based offset
     *           that Date.setYear expects)
     * @param h  hour of day
     * @param mm minutes
     */
    @SuppressWarnings("deprecation")
    public void setInicio(int d, int m, int y, int h, int mm) {
        data_inicio.setDate(d);
        data_inicio.setMonth(m);
        data_inicio.setYear(y-1900);
        data_inicio.setHours(h);
        data_inicio.setMinutes(mm);
    }

    /** @return the start timestamp */
    public Date getData_inicio() {
        return data_inicio;
    }

    /**
     * Sets the end timestamp; same parameter conventions as
     * {@link #setInicio(int, int, int, int, int)}.
     */
    @SuppressWarnings("deprecation")
    public void setFim(int d, int m, int y, int h, int mm) {
        data_fim.setDate(d);
        data_fim.setMonth(m);
        data_fim.setYear(y-1900);
        data_fim.setHours(h);
        data_fim.setMinutes(mm);
    }

    /** @return the end timestamp */
    public Date getData_fim() {
        return data_fim;
    }

    // Methods (original comment: "METODOS")

    /** Records a finished consultation in the attendance history. */
    public void finalizaConsulta(String nome_paciente, String nome_medico, Date h1, Date h2) {
        ha.addHist(nome_paciente, nome_medico, h1, h2);
    }

    /** @return a textual dump of the attendance history */
    public String mostraHist() {
        return ha.mostraHist();
    }
}
|
import {
IQuestionnaire,
IQuestionnaireResponse,
IQuestionnaireResponse_Answer,
IQuestionnaireResponse_Item,
IQuestionnaire_Item,
Questionnaire_ItemTypeKind,
} from "@ahryman40k/ts-fhir-types/lib/R4";
import { isISODateString } from "nav-datovelger";
/**
 * Returns the questionnaire items that are required but have no valid
 * (non-empty) answer in the given response.
 *
 * Fix: the previous implementation used Array.map purely for its side effect
 * of pushing into an accumulator array; filter expresses the intent directly
 * and avoids building a discarded array of undefined.
 */
export const getInvalidQuestionnaireResponseItems = (
  response: IQuestionnaireResponse,
  questionnaire: IQuestionnaire
): IQuestionnaire_Item[] =>
  (questionnaire.item ?? []).filter((item) => {
    if (!item.required || !item.type) {
      return false;
    }
    // Match the response item by linkId, then take its first truthy answer.
    const responseItem = response.item?.find((i) => i.linkId === item.linkId);
    const answer = responseItem?.answer?.find((a) => a);
    return !validateResponseItemNotEmpty(item.type, answer);
  });
/**
 * True when the answer carries a non-empty value for the given item type.
 * Unknown item types (and a missing answer) are treated as empty. Note the
 * reference case returns the reference string itself (possibly undefined),
 * mirroring the truthy/falsy contract of the other branches.
 */
const validateResponseItemNotEmpty = (
  type: Questionnaire_ItemTypeKind,
  answer: IQuestionnaireResponse_Answer | undefined
) => {
  if (!answer) {
    return false;
  }
  const kinds = Questionnaire_ItemTypeKind;
  if (type === kinds._boolean) {
    return answer.valueBoolean !== undefined;
  }
  // choice, string and text all store their value in valueString
  if (type === kinds._choice || type === kinds._string || type === kinds._text) {
    return answer.valueString !== "";
  }
  if (type === kinds._date) {
    return isISODateString(answer.valueDate);
  }
  if (type === kinds._integer) {
    return typeof answer.valueInteger === "number";
  }
  if (type === kinds._reference) {
    return answer.valueReference?.reference;
  }
  return false;
};
|
#!/bin/bash
# Downloads a docker client binary matching the locally installed docker
# version into tmp/bin (if not already cached) and exports DOCKER_BINARY
# and DATADIR.
DOCKER_VERSION=$(docker --version | grep -o "[0-9]*\.[0-9]*\.[0-9a-z\.-]*")
echo "DOCKER_VERSION: ${DOCKER_VERSION}"
BIN_DIR="tmp/bin"
mkdir -p ${BIN_DIR}
export DOCKER_BINARY="${PWD}/${BIN_DIR}/docker-${DOCKER_VERSION}"
echo "DOCKER_BINARY: ${DOCKER_BINARY}"
if [ ! -e ${DOCKER_BINARY} ] ; then
    # Fix: run the download in a subshell so the `cd` cannot leak out.
    # Previously the script stayed in tmp/bin after downloading, which made
    # the DATADIR computed below point at tmp/bin/data instead of ./data
    # (and, when the script is sourced, changed the caller's cwd too).
    (
        cd ${BIN_DIR}
        curl --progress -fsSLO https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz
        tar -xvzf docker-${DOCKER_VERSION}.tgz docker/docker
        mv docker/docker ${DOCKER_BINARY}
    )
fi
export DATADIR=${PWD}/data
echo "DATADIR: ${DATADIR}"
|
import { Notifications } from 'twilio-notifications';
import { Transport } from '../interfaces/transport';
import { Configuration } from '../configuration';
/** Dependencies injected into TypingIndicator. */
export interface TypingIndicatorServices {
  transport: Transport;
  notificationClient: Notifications;
}
/**
 * Sends and receives "member is typing" signals for channels.
 *
 * @class TypingIndicator
 *
 * @constructor
 * @private
 */
declare class TypingIndicator {
  private readonly services;
  private readonly config;
  // Tracks locally sent typing updates (exact shape not visible here).
  private sentUpdates;
  // Resolver used to look up a channel — see the constructor's getChannel arg.
  private getChannel;
  // Server-provided typing timeout; presumably overrides the configured
  // default — TODO confirm against the implementation.
  private serviceTypingTimeout;
  constructor(config: Configuration, services: TypingIndicatorServices, getChannel: any);
  /** Effective typing timeout (unit not visible here — confirm ms vs s). */
  readonly typingTimeout: number;
  /**
   * Initialize TypingIndicator controller.
   * Registers for needed message types and sets listeners.
   * @private
   */
  initialize(): void;
  /**
   * Remote members typing events handler.
   * @private
   */
  handleRemoteTyping(message: any): void;
  /**
   * Send typing event for the given channel sid.
   * @param {String} channelSid
   */
  send(channelSid: string): any;
  private _send;
}
export { TypingIndicator };
|
#!/usr/bin/env bash
set -e

# Bootstraps a machine with ansible (pacman-based, i.e. Arch), installs the
# required galaxy plugins, then runs the dotfiles playbook.

# Install ansible on new machine (skip when already on PATH)
if ! [ -x "$(command -v ansible)" ]; then
    sudo pacman -S ansible
fi

export ANSIBLE_NOCOWS=1

# Install Ansible plugins
ansible-galaxy collection install community.general
ansible-galaxy install kewlfft.aur

# Run playbook
ansible-playbook playbook.yml -i hosts --ask-become-pass

# Notify when terminal-notifier is available (no-op elsewhere)
if command -v terminal-notifier 1> /dev/null 2>&1; then
    terminal-notifier -title "dotfiles: Bootstrap complete" -message "Successfully set up dev environment."
fi
|
<reponame>mkmozgawa/luxmed-bot<gh_stars>10-100
package com.lbs.api.json.model
/**
{
"Cities": [
{
"Id": 5,
"Name": "Wrocław"
}
],
"Clinics": [
{
"Id": 1405,
"Name": "<NAME> - Legnicka 40"
},
{
"Id": 7,
"Name": "<NAME> - Kwidzyńska 6"
}
],
"DefaultPayer": {
"Id": 22222,
"Name": "FIRMA POLAND SP. Z O.O."
},
"Doctors": [
{
"Id": 38275,
"Name": "<NAME> lek. med."
},
{
"Id": 15565,
"Name": "<NAME> dr n. med."
}
],
"Languages": [
{
"Id": 11,
"Name": "english"
},
{
"Id": 10,
"Name": "polish"
}
],
"Payers": [
{
"Id": 22222,
"Name": "FIRMA POLAND SP. Z O.O."
}
],
"Services": [
{
"Id": 5857,
"Name": "Audiometr standardowy"
},
{
"Id": 7976,
"Name": "Audiometr standardowy - audiometria nadprogowa"
}
]
}
*/
/**
 * Available values for the reservation search filters (cities, clinics,
 * doctors, languages, payers, services) plus the account's default payer;
 * the sample JSON above shows the wire format this is parsed from.
 */
case class ReservationFilterResponse(cities: List[IdName], clinics: List[IdName], defaultPayer: Option[IdName],
                                     doctors: List[IdName], languages: List[IdName], payers: List[IdName],
                                     services: List[IdName]) extends SerializableJsonObject
|
module GeneralUnits
  # View helpers exposing unit lists as [label, code] option pairs for
  # Rails select tags.
  module ActionViewExtension
    extend ActiveSupport::Concern

    included do
    end

    # Option pairs for every selectable weight unit (full label form).
    def weight_units_for_select(*args)
      Weight.select_units(*args).map { |unit| [unit.to_s, unit.code] }
    end

    # Same as above, but with the short label form.
    def short_weight_units_for_select(*args)
      Weight.select_units(*args).map { |unit| [unit.to_s(format: :short), unit.code] }
    end

    # Option pairs for every selectable length unit (full label form).
    def length_units_for_select(*args)
      Length.select_units(*args).map { |unit| [unit.to_s, unit.code] }
    end

    # Same as above, but with the short label form.
    def short_length_units_for_select(*args)
      Length.select_units(*args).map { |unit| [unit.to_s(format: :short), unit.code] }
    end
  end
end
|
/** A 2-D point with component-wise arithmetic helpers. */
class Point {
    /**
     * @param {number} x
     * @param {number} y
     */
    constructor(x, y) {
        this.x = x;
        this.y = y;
    }

    /** Component-wise sum; returns a new Point, leaving both inputs intact. */
    add(other) {
        const { x, y } = other;
        return new Point(this.x + x, this.y + y);
    }

    /** Component-wise difference; returns a new Point. */
    subtract(other) {
        const { x, y } = other;
        return new Point(this.x - x, this.y - y);
    }

    /** Euclidean distance between this point and other. */
    distance(other) {
        const dx = this.x - other.x;
        const dy = this.y - other.y;
        return Math.sqrt(dx * dx + dy * dy);
    }
}
<gh_stars>100-1000
/*
* Copyright © 2020 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { Request, Response, NextFunction } from 'express';
import { BaseChannel, PluginCodec } from 'lisk-framework';
// Minimal shape of a decoded DPoS account: only the delegate fields this
// endpoint reads (username and total votes, both as strings on the wire).
interface DPoSAccountJSON {
  dpos: {
    delegate: {
      username: string;
      totalVotesReceived: string;
    };
  };
}
/**
 * Express handler factory: responds with the list of forgers, each entry
 * merging the framework's forging info (address, forging flag) with the
 * delegate's username and total votes decoded from the account blob.
 * Errors from either channel call are forwarded to the error middleware.
 */
export const getForgers = (channel: BaseChannel, codec: PluginCodec) => async (
	_req: Request,
	res: Response,
	next: NextFunction,
): Promise<void> => {
	let forgerInfoList: ReadonlyArray<{ address: string; forging: boolean }>;
	try {
		forgerInfoList = await channel.invoke('app:getForgers');
	} catch (err) {
		next(err);
		return;
	}
	try {
		const encodedAccounts = await channel.invoke<string[]>('app:getAccounts', {
			address: forgerInfoList.map(info => info.address),
		});
		// Accounts come back in the same order as the requested addresses,
		// so the i-th account pairs with the i-th forger info entry.
		const data = encodedAccounts.map((encoded, index) => {
			const account = codec.decodeAccount<DPoSAccountJSON>(encoded);
			return {
				username: account.dpos.delegate.username,
				totalVotesReceived: account.dpos.delegate.totalVotesReceived,
				...forgerInfoList[index],
			};
		});
		res.status(200).json({ data, meta: { count: encodedAccounts.length } });
	} catch (err) {
		next(err);
	}
};
|
import torch
from tqdm import tqdm
def train_neural_network(model, train_loader, optimizer, criterion, lr_scheduler, num_epochs):
    """Run a standard supervised classification training loop.

    Moves ``model`` to CUDA when available, then for each epoch performs one
    pass over ``train_loader`` (forward, loss, backward, optimizer step, and
    an LR-scheduler step *per batch*), printing the epoch's mean loss and
    accuracy at the end.

    Args:
        model: network to train; moved to the selected device in place.
        train_loader: yields ``(inputs, targets)`` batches and must expose
            ``.dataset`` (used as the mean-loss denominator).
        optimizer: optimizer over ``model``'s parameters.
        criterion: loss function ``(outputs, targets) -> scalar tensor``.
        lr_scheduler: scheduler stepped once per batch.
        num_epochs: number of full passes over the loader.
    """
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.to(device)

    for epoch in range(num_epochs):
        model.train()
        running_loss = 0.0
        correct = 0
        seen = 0

        progress = tqdm(train_loader, desc=f'Epoch {epoch + 1}/{num_epochs}', unit='batches')
        for inputs, targets in progress:
            inputs = inputs.to(device)
            targets = targets.to(device)

            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            lr_scheduler.step()

            # Weight the batch loss by batch size so the epoch mean stays
            # exact even when the final batch is smaller.
            running_loss += loss.item() * inputs.size(0)
            correct += (outputs.argmax(dim=1) == targets).sum().item()
            seen += targets.size(0)

        epoch_loss = running_loss / len(train_loader.dataset)
        epoch_accuracy = correct / seen
        print(f"Epoch {epoch + 1}/{num_epochs}, Loss: {epoch_loss:.4f}, Accuracy: {epoch_accuracy:.4f}")
//
// Copyright 2019-2021 Nestybox, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Unit tests for idShiftUtils package
package idShiftUtils
import (
"io/ioutil"
"os"
"testing"
aclLib "github.com/joshlf/go-acl"
)
// TestShiftAclIds exercises shiftAclIds() in both directions: it sets a
// known access ACL and default ACL on a temp dir, shifts the IDs down by
// 1000 and verifies the shifted qualifiers, then shifts back up and
// verifies the original qualifiers are restored.
//
// Fix: the verification loops previously reported mismatches with t.Logf,
// which only logs and never fails the test; they now use t.Errorf (via the
// checkAclEntries helper) so a mismatch actually fails the run.
func TestShiftAclIds(t *testing.T) {
	testDir, err := ioutil.TempDir("", "shiftAclTest")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(testDir)

	// Access ACL entries to be added on testDir
	aclUserEntry := aclLib.Entry{
		Tag:       aclLib.TagUser,
		Qualifier: "1001",
		Perms:     7,
	}
	aclGroupEntry := aclLib.Entry{
		Tag:       aclLib.TagGroup,
		Qualifier: "1005",
		Perms:     4,
	}
	aclMaskEntry := aclLib.Entry{
		Tag:   aclLib.TagMask,
		Perms: 7,
	}

	// Default ACL to be set on testDir
	aclDef := aclLib.ACL{
		aclLib.Entry{Tag: aclLib.TagUserObj, Perms: 7},
		aclLib.Entry{Tag: aclLib.TagGroupObj, Perms: 0},
		aclLib.Entry{Tag: aclLib.TagOther, Perms: 0},
		aclLib.Entry{Tag: aclLib.TagUser, Qualifier: "1002", Perms: 5},
		aclLib.Entry{Tag: aclLib.TagGroup, Qualifier: "1005", Perms: 4},
		aclLib.Entry{Tag: aclLib.TagMask, Perms: 7},
	}

	acl, err := aclLib.Get(testDir)
	if err != nil {
		t.Fatalf("failed to get ACL on %s: %s", testDir, err)
	}
	acl = append(acl, aclUserEntry, aclGroupEntry, aclMaskEntry)

	if err := aclLib.Set(testDir, acl); err != nil {
		t.Fatalf("failed to set ACL %v on %s: %s", acl, testDir, err)
	}
	if err := aclLib.SetDefault(testDir, aclDef); err != nil {
		t.Fatalf("failed to set default ACL %v on %s: %s", aclDef, testDir, err)
	}

	// Shift ACLs by subtracting the offset.
	uidOffset := uint32(1000)
	gidOffset := uint32(1000)
	if err := shiftAclIds(testDir, true, uidOffset, gidOffset, OffsetSub); err != nil {
		t.Fatalf("shiftAclIds() failed: %s", err)
	}

	// Verify the ACLs on the dir were modified as expected.
	newAcl, err := aclLib.Get(testDir)
	if err != nil {
		t.Fatalf("failed to get ACL on %s: %s", testDir, err)
	}
	newDefAcl, err := aclLib.GetDefault(testDir)
	if err != nil {
		t.Fatalf("failed to get default ACL on %s: %s", testDir, err)
	}

	checkAclEntries(t, newAcl,
		aclLib.Entry{Tag: aclLib.TagUser, Qualifier: "1", Perms: 7},  // 1001 - 1000
		aclLib.Entry{Tag: aclLib.TagGroup, Qualifier: "5", Perms: 4}, // 1005 - 1000
	)
	checkAclEntries(t, newDefAcl,
		aclLib.Entry{Tag: aclLib.TagUser, Qualifier: "2", Perms: 5},  // 1002 - 1000
		aclLib.Entry{Tag: aclLib.TagGroup, Qualifier: "5", Perms: 4}, // 1005 - 1000
	)

	// Shift ACLs by adding the offset (revert back to the original values).
	if err := shiftAclIds(testDir, true, uidOffset, gidOffset, OffsetAdd); err != nil {
		t.Fatalf("shiftAclIds() failed: %s", err)
	}

	newAcl, err = aclLib.Get(testDir)
	if err != nil {
		t.Fatalf("failed to get ACL on %s: %s", testDir, err)
	}
	newDefAcl, err = aclLib.GetDefault(testDir)
	if err != nil {
		t.Fatalf("failed to get default ACL on %s: %s", testDir, err)
	}

	checkAclEntries(t, newAcl, aclUserEntry, aclGroupEntry)
	checkAclEntries(t, newDefAcl,
		aclLib.Entry{Tag: aclLib.TagUser, Qualifier: "1002", Perms: 5},
		aclLib.Entry{Tag: aclLib.TagGroup, Qualifier: "1005", Perms: 4},
	)
}

// checkAclEntries fails the test if any TagUser/TagGroup entry in acl does
// not match the corresponding expected entry (other tags are ignored).
func checkAclEntries(t *testing.T, acl aclLib.ACL, wantUser, wantGroup aclLib.Entry) {
	t.Helper()
	for _, e := range acl {
		if e.Tag == aclLib.TagUser && e != wantUser {
			t.Errorf("acl mismatch: want %v, got %v", wantUser, e)
		}
		if e.Tag == aclLib.TagGroup && e != wantGroup {
			t.Errorf("acl mismatch: want %v, got %v", wantGroup, e)
		}
	}
}
|
#!/bin/bash -e
# Build the upspin binary with the `gendoc` build tag, run its documentation
# generator, then remove the temporary binary.
go build -tags gendoc -o upspin.gendoc
./upspin.gendoc gendoc
rm upspin.gendoc
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# makeRoot ALIAS DNAME PSWD
# Generates a self-signed root keypair in ALIAS.jks and exports the
# certificate as PEM to ALIAS.pem. "-ext bc:c" marks it as a CA
# (BasicConstraints, critical).
function makeRoot() {
    ALIAS=$1
    DNAME=$2
    PSWD=$3
    if [[ ${ALIAS} == "" ]] || [[ ${DNAME} == "" ]] || [[ ${PSWD} == "" ]]
    then
        error "makeRoot: Need ALIAS, DNAME, PSWD"
    fi
    rm -f "${ALIAS}.jks"
    rm -f "${ALIAS}.pem"
    keytool -genkeypair -keystore "${ALIAS}.jks" -alias "${ALIAS}" -dname "${DNAME}" -ext bc:c -storepass "${PSWD}" \
        -keypass "${PSWD}" -noprompt -v
    keytool -keystore "${ALIAS}.jks" -storepass "${PSWD}" -keypass "${PSWD}" -alias "${ALIAS}" -exportcert \
        -rfc -file "${ALIAS}.pem" -v
}

# makeCA ROOT ALIAS DNAME PSWD
# Generates an intermediate CA keypair in ALIAS.jks, signs it with ROOT
# ("-ext BC=0": CA that cannot issue further sub-CAs), and imports the
# root + own certificate back into ALIAS.jks.
function makeCA() {
    ROOT=$1
    ALIAS=$2
    DNAME=$3
    PSWD=$4
    if [[ "${ROOT}" == "" ]] || [[ "${ALIAS}" == "" ]] || [[ "${DNAME}" == "" ]] || [[ "${PSWD}" == "" ]]
    then
        error "makeCA: Need CA, ALIAS, DNAME, PSWD"
    fi
    rm -f "${ALIAS}.jks"
    rm -f "${ALIAS}.pem"
    keytool -genkeypair -keystore "${ALIAS}.jks" -alias "${ALIAS}" -dname "${DNAME}" -ext bc:c -storepass "${PSWD}" \
        -keypass "${PSWD}" -noprompt -v
    # CSR from the new CA is piped straight into the root's signer
    keytool -storepass "${PSWD}" -keypass "${PSWD}" -keystore "${ALIAS}.jks" -certreq -alias "${ALIAS}" \
        | keytool -storepass "${PSWD}" -keypass "${PSWD}" -keystore "${ROOT}.jks" -gencert -alias "${ROOT}" \
            -ext BC=0 -rfc -outfile "${ALIAS}.pem" -v
    keytool -keystore "${ALIAS}.jks" -storepass "${PSWD}" -keypass "${PSWD}" -importcert -alias "${ROOT}" \
        -file "${ROOT}.pem" -noprompt -v
    keytool -keystore "${ALIAS}.jks" -storepass "${PSWD}" -keypass "${PSWD}" -importcert -alias "${ALIAS}" \
        -file "${ALIAS}.pem" -noprompt -v
}

# mkCert CA ALIAS DNAME PSWD
# Generates a 2048-bit RSA leaf keypair, has it signed by CA, imports the
# signed certificate, and removes the temporary .csr/.pem files.
function mkCert() {
    CA=$1
    ALIAS=$2
    DNAME=$3
    PSWD=$4
    if [[ ${CA} == "" ]] || [[ ${ALIAS} == "" ]] || [[ ${DNAME} == "" ]] || [[ ${PSWD} == "" ]]
    then
        error "mkCert: Need CA, ALIAS, DNAME, PSWD"
    fi
    rm -f "${ALIAS}.jks"
    rm -f "${ALIAS}.pem"
    rm -f "${ALIAS}.csr"
    keytool -genkeypair -keystore "${ALIAS}.jks" -alias "${ALIAS}" -dname "${DNAME}" -keyalg RSA -keysize 2048 \
        -keypass "${PSWD}" -storepass "${PSWD}" -noprompt -v || error
    keytool -storepass "${PSWD}" -keystore "${ALIAS}.jks" -certreq -alias "${ALIAS}" -file "${ALIAS}.csr" -v || error
    keytool -gencert -infile "${ALIAS}.csr" -keystore "${CA}.jks" -alias "${CA}" -storepass "${PSWD}" -rfc \
        -outfile "${ALIAS}.pem" -v || error
    keytool -keystore "${ALIAS}.jks" -importcert -alias "${ALIAS}" -storepass "${PSWD}" -file "${ALIAS}.pem" \
        -noprompt -v || error
    rm -f "${ALIAS}.csr"
    rm -f "${ALIAS}.pem"
}

# makeTruststore CERT...
# Builds truststore.jks from the given PEM certificates (one alias per cert).
# NOTE(review): the store password is hard-coded to 123456 here while every
# other helper takes PSWD as a parameter -- confirm this asymmetry is intended.
function makeTruststore() {
    rm -f truststore.jks
    # shellcheck disable=SC2068
    for cert in $@ ; do
        keytool -keystore truststore.jks -importcert -alias "${cert}" -storepass 123456 -file "${cert}.pem" \
            -noprompt -v || error
    done
}

# error [MSG...]
# Prints a failure banner (possibly with no message, when invoked bare via
# `|| error`) and aborts the script.
function error() {
    # shellcheck disable=SC2145
    echo "¯\_(ツ)_/¯ Something went wrong: $@"
    exit 1
}
|
#!/bin/bash
# Run a single dieharder test (-d 207) against generator 41 with a fixed
# seed for reproducibility. NOTE(review): numeric test/generator ids depend
# on the installed dieharder build -- confirm against `dieharder -l`.
dieharder -d 207 -g 41 -S 1435483028
|
#!/usr/bin/env bash
# Copyright (C) 2020 IBM Corp.
# This program is Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
set -xe
cd $HOME

# Library builds need GMP and NTL. On Ubuntu bionic the packaged NTL is not
# used; NTL 11.4.3 is built from source with threading enabled instead.
if [ "${PACKAGE_BUILD}" == "OFF" ]; then
    if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
        if [ "${TRAVIS_DIST}" == "bionic" ]; then
            sudo apt-get -yq --no-install-suggests --no-install-recommends $(travis_apt_get_options) install m4 libgmp-dev
            curl -O "https://www.shoup.net/ntl/ntl-11.4.3.tar.gz"
            tar --no-same-owner -xf ntl-11.4.3.tar.gz
            cd "$HOME/ntl-11.4.3/src"
            ./configure SHARED=on NTL_GMP_LIP=on NTL_THREADS=on NTL_THREAD_BOOST=on
            make -j4
            sudo make install
        else
            sudo apt-get -yq --no-install-suggests --no-install-recommends $(travis_apt_get_options) install libgmp-dev libntl-dev
        fi
    elif [ "${TRAVIS_OS_NAME}" == "osx" ]; then
        # GMP will be installed as a dependency to NTL (if it is not already present)
        brew install ntl
    fi
else
    # Package builds only need patching/build tooling, not the libraries
    if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
        sudo apt-get -yq --no-install-suggests --no-install-recommends $(travis_apt_get_options) install patchelf m4
    elif [ "${TRAVIS_OS_NAME}" == "osx" ]; then
        brew install m4
    fi
fi
cd "$HOME"
|
<reponame>surfliner/surfliner-mirror<gh_stars>0
# frozen_string_literal: true
module Lark
  ##
  # Persists objects to the index through a Valkyrie metadata adapter.
  #
  # @example
  #   indexer = Indexer.new
  #
  #   indexer.index(id: 'an_id')
  #
  class Indexer
    ##
    # @!attribute [rw] adapter
    #   @return [Valkyrie::MetadataAdapter]
    attr_accessor :adapter

    ##
    # @param adapter [Valkyrie::MetadataAdapter] defaults to the adapter
    #   configured under +Lark.config.index_adapter+
    def initialize(adapter: Valkyrie::MetadataAdapter.find(Lark.config.index_adapter))
      self.adapter = adapter
    end

    ##
    # Looks up a previously indexed resource.
    #
    # @param id [#to_s]
    #
    # @return [Valkyrie::Resource]
    # @raise [Valkyrie::Persistence::ObjectNotFoundError]
    def find(id)
      query_service.find_by(id: Valkyrie::ID.new(id.to_s))
    end

    ##
    # Saves the resource to the index; the resource must already carry an id.
    #
    # @param data [Valkyrie::Resource]
    def index(data:)
      raise ArgumentError, "ID missing: #{data.inspect}" unless data.id

      persister.save(resource: data)
    end

    private

    # The adapter's persister, used for writes.
    def persister
      adapter.persister
    end

    # The adapter's query service, used for reads.
    def query_service
      adapter.query_service
    end
  end
end
|
#!/bin/bash
# WARNING: this script is used by https://github.com/google/oss-fuzz/blob/master/projects/gdal/build.sh
# and should not be renamed or moved without updating the above
set -e

# All corpus zips are written into $OUT, which the oss-fuzz build exports.
if [ "$OUT" == "" ]; then
    echo "OUT env var not defined"
    exit 1
fi

# gdal_translate: a fuzzer-friendly tar containing the CLI arguments
# (cmd.txt) followed by the input raster ("in"). Here-doc contents are
# emitted verbatim -- do not re-indent them.
echo "Building gdal_translate_fuzzer_seed_corpus.zip"
cat > test.tar <<EOF
FUZZER_FRIENDLY_ARCHIVE
***NEWFILE***:cmd.txt
-outsize
20
20
-of
GTiff
-b
1
-ot
Byte
-r
nearest
-a_srs
EPSG:26711
-stats
-scale
-mo
FOO=BAR
-co
COMPRESS=NONE
-srcwin
0
0
20
20
***NEWFILE***:in
EOF
cat $(dirname $0)/../../autotest/gcore/data/byte.tif >> test.tar
rm -f $OUT/gdal_translate_fuzzer_seed_corpus.zip
zip -r $OUT/gdal_translate_fuzzer_seed_corpus.zip test.tar >/dev/null
rm test.tar

# Vector translate: CLI arguments plus two small CSV layers.
echo "Building gdal_vector_translate_fuzzer_seed_corpus.zip"
cat > test.tar <<EOF
FUZZER_FRIENDLY_ARCHIVE
***NEWFILE***:cmd.txt
non_significant_output_name
-f
Memory
-s_srs
EPSG:4326
-t_srs
EPSG:32631
first
second
***NEWFILE***:in/first.csv
int_field,float_field,string_field,WKT
1,2.34,\"foo\",\"POINT(1 2)\"
***NEWFILE***:in/first.csvt
Integer,Real,String,WKT
***NEWFILE***:in/second.csv
int_field,float_field,string_field,WKT
1,2.34,\"foo\",\"POINT(1 2)\"
***NEWFILE***:in/second.csvt
Integer,Real,String,WKT
EOF
rm -f $OUT/gdal_vector_translate_fuzzer_seed_corpus.zip
zip -r $OUT/gdal_vector_translate_fuzzer_seed_corpus.zip test.tar >/dev/null
rm test.tar

# Single-file format corpora: just zip every sample of the format.
echo "Building gtiff_fuzzer_seed_corpus.zip"
rm -f $OUT/gtiff_fuzzer_seed_corpus.zip
cd $(dirname $0)/../../autotest/gcore/data
zip -r $OUT/gtiff_fuzzer_seed_corpus.zip ./*.tif >/dev/null
cd $OLDPWD
cd $(dirname $0)/../../autotest/gdrivers/data
zip -r $OUT/gtiff_fuzzer_seed_corpus.zip ./*.tif >/dev/null
cd $OLDPWD

echo "Building hfa_fuzzer_seed_corpus.zip"
rm -f $OUT/hfa_fuzzer_seed_corpus.zip
cd $(dirname $0)/../../autotest/gcore/data
zip -r $OUT/hfa_fuzzer_seed_corpus.zip ./*.img >/dev/null
cd $OLDPWD
cd $(dirname $0)/../../autotest/gdrivers/data/hfa
zip -r $OUT/hfa_fuzzer_seed_corpus.zip ./*.img >/dev/null
cd $OLDPWD

# Multi-file formats: bundle the sibling files into one archive each.
echo "Building adrg_fuzzer_seed_corpus.zip"
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:ABCDEF01.GEN\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/adrg/SMALL_ADRG/ABCDEF01.GEN
    printf "***NEWFILE***:ABCDEF01.IMG\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/adrg/SMALL_ADRG/ABCDEF01.IMG
} > adrg.tar
rm -f $OUT/adrg_fuzzer_seed_corpus.zip
zip -r $OUT/adrg_fuzzer_seed_corpus.zip adrg.tar >/dev/null
rm adrg.tar

echo "Building srp_fuzzer_seed_corpus.zip"
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:FKUSRP01.GEN\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/srp/USRP_PCB0/FKUSRP01.GEN
    printf "***NEWFILE***:FKUSRP01.IMG\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/srp/USRP_PCB0/FKUSRP01.IMG
    printf "***NEWFILE***:FKUSRP01.QAL\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/srp/USRP_PCB0/FKUSRP01.QAL
} > srp.tar
rm -f $OUT/srp_fuzzer_seed_corpus.zip
zip -r $OUT/srp_fuzzer_seed_corpus.zip srp.tar >/dev/null
rm srp.tar

# MRF: one tar per sample subdirectory, containing all of its files.
echo "Building mrf_fuzzer_seed_corpus.zip"
rm -f $OUT/mrf_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/gdrivers/data/mrf
for subdir in *; do
    # subshell so the per-subdir cd does not leak into the outer loop
    (cd $subdir
    printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/mrf_$subdir.tar
    for file in *; do
        printf "***NEWFILE***:%s\\n" "$file" >> $CUR_DIR/mrf_$subdir.tar
        cat $file >> $CUR_DIR/mrf_$subdir.tar
    done
    )
done
cd $CUR_DIR
zip -r $OUT/mrf_fuzzer_seed_corpus.zip mrf_*.tar >/dev/null
rm mrf_*.tar
echo "Building envi_fuzzer_seed_corpus.zip"
rm -f $OUT/envi_fuzzer_seed_corpus.zip
# Assemble hdr+dat pairs into fuzzer-friendly archives, like every other
# stanza: all bytes flow through the command group's single redirect.
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:my.hdr\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/envi/aea.hdr
    # Fix: the next printf previously carried a stray ">> aea.tar" redirect.
    # That wrote the marker through a second appending fd while the group's
    # own fd offset stayed put, so the following cat overwrote the marker
    # bytes and corrupted the seed archive.
    printf "***NEWFILE***:my.dat\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/envi/aea.dat
} > aea.tar
zip -r $OUT/envi_fuzzer_seed_corpus.zip aea.tar >/dev/null
rm aea.tar
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:my.hdr\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/envi/aea_compressed.hdr
    printf "***NEWFILE***:my.dat\\n"
    cat $(dirname $0)/../../autotest/gdrivers/data/envi/aea_compressed.dat
} > aea_compressed.tar
zip -r $OUT/envi_fuzzer_seed_corpus.zip aea_compressed.tar >/dev/null
rm aea_compressed.tar
echo "Building ehdr_fuzzer_seed_corpus.zip"
rm -f $OUT/ehdr_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:my.hdr\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/ehdr/ehdr11.hdr
printf "***NEWFILE***:my.dat\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/ehdr/ehdr11.flt
} > ehdr11.tar
zip -r $OUT/ehdr_fuzzer_seed_corpus.zip ehdr11.tar >/dev/null
rm ehdr11.tar
echo "Building genbin_fuzzer_seed_corpus.zip"
rm -f $OUT/genbin_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:my.hdr\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/genbin/tm4628_96.hdr
printf "***NEWFILE***:my.bil\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/genbin/tm4628_96.bil
} > genbin.tar
zip -r $OUT/genbin_fuzzer_seed_corpus.zip genbin.tar >/dev/null
rm genbin.tar
echo "Building isce_fuzzer_seed_corpus.zip"
rm -f $OUT/isce_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:isce.slc\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/isce/isce.slc
printf "***NEWFILE***:isce.slc.xml\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/isce/isce.slc.xml
} > isce.tar
zip -r $OUT/isce_fuzzer_seed_corpus.zip isce.tar >/dev/null
rm isce.tar
echo "Building roipac_fuzzer_seed_corpus.zip"
rm -f $OUT/roipac_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:srtm.dem\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/roipac/srtm.dem
printf "***NEWFILE***:srtm.dem.rsc\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/roipac/srtm.dem.rsc
} > roipac.tar
zip -r $OUT/roipac_fuzzer_seed_corpus.zip roipac.tar >/dev/null
rm roipac.tar
echo "Building rraster_fuzzer_seed_corpus.zip"
rm -f $OUT/rraster_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:my.grd\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/rraster/byte_rraster.grd
printf "***NEWFILE***:my.gri\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/rraster/byte_rraster.gri
} > rraster.tar
zip -r $OUT/rraster_fuzzer_seed_corpus.zip rraster.tar >/dev/null
rm rraster.tar
echo "Building gdal_vrt_fuzzer_seed_corpus.zip"
rm -f $OUT/gdal_vrt_fuzzer_seed_corpus.zip
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:byte.tif\\n"
cat $(dirname $0)/../../autotest/gcore/data/byte.tif
printf "***NEWFILE***:test.vrt\\n"
cat $(dirname $0)/../../autotest/gcore/data/byte.vrt
} > gdal_vrt.tar
zip -r $OUT/gdal_vrt_fuzzer_seed_corpus.zip gdal_vrt.tar >/dev/null
rm gdal_vrt.tar
{
printf "FUZZER_FRIENDLY_ARCHIVE\\n"
printf "***NEWFILE***:small.raw\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/small.raw
printf "***NEWFILE***:test.vrt\\n"
cat $(dirname $0)/../../autotest/gdrivers/data/small.vrt
} > gdal_vrt_rawlink.tar
zip -r $OUT/gdal_vrt_fuzzer_seed_corpus.zip gdal_vrt_rawlink.tar >/dev/null
rm gdal_vrt_rawlink.tar
# AIG (Arc/Info binary grid): a grid is a directory of .adf members, so pack
# the full set into one fuzzer tar.
echo "Building aig_fuzzer_seed_corpus.zip"
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > aig.tar
for x in hdr.adf sta.adf dblbnd.adf vat.adf w001001.adf abc3x1.clr prj.adf w001001x.adf; do
    printf "***NEWFILE***:%s\\n" "$x" >> aig.tar
    cat $(dirname $0)/../../autotest/gdrivers/data/aigrid/abc3x1/$x >> aig.tar
done
rm -f $OUT/aig_fuzzer_seed_corpus.zip
zip -r $OUT/aig_fuzzer_seed_corpus.zip aig.tar >/dev/null
rm aig.tar

# JPEG2000 inputs are single files: zip them directly.
echo "Building get_jpeg2000_structure_fuzzer_seed_corpus.zip"
rm -f $OUT/get_jpeg2000_structure_fuzzer_seed_corpus.zip
cd $(dirname $0)/../../autotest/gdrivers/data/jpeg2000
zip -r $OUT/get_jpeg2000_structure_fuzzer_seed_corpus.zip ./*.jp2 ./*.j2k >/dev/null
cd $OLDPWD

# Generic raster corpus: every test file from gcore and gdrivers, flattened
# into one zip (-j strips directories).
echo "Building gdal_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/gcore/data
rm -f $OUT/gdal_fuzzer_seed_corpus.zip
find . -type f -exec zip -j $OUT/gdal_fuzzer_seed_corpus.zip {} \; >/dev/null
cd $OLDPWD
cd $(dirname $0)/../../autotest/gdrivers/data
find . -type f -exec zip -j $OUT/gdal_fuzzer_seed_corpus.zip {} \; >/dev/null
cd $OLDPWD

# The filesystem fuzzer reuses the raster corpus unchanged.
echo "Building gdal_filesystem_fuzzer_seed_corpus.zip"
cp $OUT/gdal_fuzzer_seed_corpus.zip $OUT/gdal_filesystem_fuzzer_seed_corpus.zip

# SDTS raster: a dataset is a set of .DDF files.
echo "Building gdal_sdts_fuzzer_seed_corpus.zip"
rm -f $OUT/gdal_sdts_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/gdrivers/data/STDS_1107834_truncated
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/gdal_sdts.tar
for file in *.DDF; do
    printf "***NEWFILE***:%s\\n" "$file" >> $CUR_DIR/gdal_sdts.tar
    cat $file >> $CUR_DIR/gdal_sdts.tar
done
cd $CUR_DIR
zip -r $OUT/gdal_sdts_fuzzer_seed_corpus.zip gdal_sdts.tar >/dev/null
rm gdal_sdts.tar

# ERS: header (.ers) + raw payload pairs, two sample datasets.
echo "Building ers_fuzzer_seed_corpus.zip"
rm -f $OUT/ers_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/gdrivers/data/ers
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:%s\\n" "test.ers"
    cat srtm.ers
    printf "***NEWFILE***:%s\\n" "test"
    cat srtm
} > $CUR_DIR/srtm.tar
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:%s\\n" "test.ers"
    cat ers_dem.ers
    printf "***NEWFILE***:%s\\n" "test"
    cat ers_dem
} > $CUR_DIR/ers_dem.tar
cd $CUR_DIR
zip -r $OUT/ers_fuzzer_seed_corpus.zip srtm.tar ers_dem.tar >/dev/null
rm srtm.tar ers_dem.tar

# SDTS vector variant of the corpus above.
echo "Building ogr_sdts_fuzzer_seed_corpus.zip"
rm -f $OUT/ogr_sdts_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/ogr/data/sdts/D3607551_rd0s_1_sdts_truncated
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/ogr_sdts.tar
for file in *.DDF; do
    printf "***NEWFILE***:%s\\n" "$file" >> $CUR_DIR/ogr_sdts.tar
    cat $file >> $CUR_DIR/ogr_sdts.tar
done
cd $CUR_DIR
zip -r $OUT/ogr_sdts_fuzzer_seed_corpus.zip ogr_sdts.tar >/dev/null
rm ogr_sdts.tar

# Generic OGR vector corpus: everything under ogr/data, flattened.
echo "Building ogr_fuzzer_seed_corpus.zip"
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/ogr/data
rm -f $OUT/ogr_fuzzer_seed_corpus.zip
find . -type f -exec zip -j $OUT/ogr_fuzzer_seed_corpus.zip {} \; >/dev/null
cd $CUR_DIR

# Single-file vector formats: zip the matching test files directly.
echo "Building cad_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/cad
rm -f $OUT/cad_fuzzer_seed_corpus.zip
zip -r $OUT/cad_fuzzer_seed_corpus.zip . >/dev/null
cd $OLDPWD
echo "Building csv_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/csv
rm -f $OUT/csv_fuzzer_seed_corpus.zip
zip -r $OUT/csv_fuzzer_seed_corpus.zip ./*.csv >/dev/null
cd $OLDPWD
echo "Building bna_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/bna
rm -f $OUT/bna_fuzzer_seed_corpus.zip
zip -r $OUT/bna_fuzzer_seed_corpus.zip ./*.bna >/dev/null
cd $OLDPWD

# XLSX: each workbook is itself a zip; unpack it and re-pack its members into
# a fuzzer tar (the subshell keeps find scoped to the unpacked tree).
echo "Building xlsx_fuzzer_seed_corpus.zip"
rm -f $OUT/xlsx_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/ogr/data/xlsx
for filename in *.xlsx; do
    mkdir tmpxlsx
    (cd tmpxlsx
    unzip ../$filename >/dev/null
    printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/xlsx_$filename.tar
    find . -type f | while read -r i ; do
        printf "***NEWFILE***:%s\\n" "$i" >> $CUR_DIR/xlsx_$filename.tar
        cat $i >> $CUR_DIR/xlsx_$filename.tar
    done
    )
    rm -rf tmpxlsx
done
cd $CUR_DIR
zip -r $OUT/xlsx_fuzzer_seed_corpus.zip xlsx_*.tar >/dev/null
rm xlsx_*.tar
# ODS: each spreadsheet is itself a zip; unpack it and re-pack its members.
echo "Building ods_fuzzer_seed_corpus.zip"
rm -f $OUT/ods_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/ogr/data/ods
for filename in *.ods; do
    mkdir tmpods
    unzip -d tmpods $filename >/dev/null
    printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/ods_$filename.tar
    # Bug fix: the original ran "find ." from the ods/ source directory, so
    # every original .ods file (and the tmpods/ path prefix) leaked into the
    # generated tar. Scope the scan to the unpacked archive, mirroring the
    # xlsx section above.
    (cd tmpods
    find . -type f | while read -r i ; do
        printf "***NEWFILE***:%s\\n" "$i" >> $CUR_DIR/ods_$filename.tar
        cat $i >> $CUR_DIR/ods_$filename.tar
    done
    )
    rm -rf tmpods
done
cd $CUR_DIR
zip -r $OUT/ods_fuzzer_seed_corpus.zip ods_*.tar >/dev/null
rm ods_*.tar
# REC format: single-file inputs, zipped directly.
echo "Building rec_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/rec
rm -f $OUT/rec_fuzzer_seed_corpus.zip
zip -r $OUT/rec_fuzzer_seed_corpus.zip ./*.rec >/dev/null
cd $OLDPWD
# Shapefile: .shp/.shx/.dbf/.prj members packed into one fuzzer tar.
echo "Building shape_fuzzer_seed_corpus.zip"
# Bug fix: the first two printf lines carried their own "> poly_shp.tar" /
# ">> poly_shp.tar" redirects while the enclosing group was itself redirected
# to the same file; the two file descriptors wrote at independent offsets and
# clobbered each other's output. All writes now go through the single group
# redirect.
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:my.shp\\n"
    cat $(dirname $0)/../../autotest/ogr/data/poly.shp
    printf "***NEWFILE***:my.shx\\n"
    cat $(dirname $0)/../../autotest/ogr/data/poly.shx
    printf "***NEWFILE***:my.dbf\\n"
    cat $(dirname $0)/../../autotest/ogr/data/poly.dbf
    printf "***NEWFILE***:my.prj\\n"
    cat $(dirname $0)/../../autotest/ogr/data/poly.PRJ
} > poly_shp.tar
rm -f $OUT/shape_fuzzer_seed_corpus.zip
zip -r $OUT/shape_fuzzer_seed_corpus.zip poly_shp.tar >/dev/null
rm poly_shp.tar
# MapInfo TAB: a dataset needs tab/map/dat/id (and for indexed tables ind)
# members.
echo "Building mitab_tab_fuzzer_seed_corpus.zip"
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > all_geoms_tab.tar
for ext in tab map dat id; do
    printf "***NEWFILE***:my.%s\\n" "$ext" >> all_geoms_tab.tar
    cat $(dirname $0)/../../autotest/ogr/data/mitab/all_geoms.$ext >> all_geoms_tab.tar
done
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > poly_indexed.tar
for ext in tab map dat id; do
    printf "***NEWFILE***:my.%s\\n" "$ext" >> poly_indexed.tar
    cat $(dirname $0)/../../autotest/ogr/data/mitab/poly_indexed.$ext >> poly_indexed.tar
done
# A TAB view referencing two underlying tables: both tables keep their real
# names (the view file refers to them) and ship with all side files.
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > view.tar
printf "***NEWFILE***:my.tab\\n" >> view.tar
cat $(dirname $0)/../../autotest/ogr/data/mitab/view_first_table_second_table.tab >> view.tar
for ext in tab map dat id ind; do
    printf "***NEWFILE***:first_table.%s\\n" "$ext" >> view.tar
    cat $(dirname $0)/../../autotest/ogr/data/mitab/first_table.$ext >> view.tar
done
for ext in tab map dat id ind; do
    printf "***NEWFILE***:second_table.%s\\n" "$ext" >> view.tar
    cat $(dirname $0)/../../autotest/ogr/data/mitab/second_table.$ext >> view.tar
done
rm -f $OUT/mitab_tab_fuzzer_seed_corpus.zip
zip -r $OUT/mitab_tab_fuzzer_seed_corpus.zip all_geoms_tab.tar poly_indexed.tar view.tar >/dev/null
rm all_geoms_tab.tar poly_indexed.tar view.tar
# MapInfo MIF/MID pair.
echo "Building mitab_mif_fuzzer_seed_corpus.zip"
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:my.mif\\n"
    cat $(dirname $0)/../../autotest/ogr/data/mitab/small.mif
    # Bug fix: this printf carried a stray ">> small_mif.tar" redirect while
    # the whole group is already redirected to the same file, so two file
    # descriptors wrote at independent offsets and corrupted the header line.
    # Let the group redirect handle all output.
    printf "***NEWFILE***:my.mid\\n"
    cat $(dirname $0)/../../autotest/ogr/data/mitab/small.mid
} > small_mif.tar
rm -f $OUT/mitab_mif_fuzzer_seed_corpus.zip
zip -r $OUT/mitab_mif_fuzzer_seed_corpus.zip small_mif.tar >/dev/null
rm small_mif.tar
# OpenFileGDB: unzip the two reference geodatabases and re-pack every member.
echo "Building openfilegdb_fuzzer_seed_corpus.zip"
rm -rf testopenfilegdb.gdb
unzip $(dirname $0)/../../autotest/ogr/data/filegdb/testopenfilegdb.gdb.zip >/dev/null
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > testopenfilegdb.gdb.tar
for f in testopenfilegdb.gdb/*; do
    printf "***NEWFILE***:%s\\n" "$f" >> testopenfilegdb.gdb.tar
    cat $f >> testopenfilegdb.gdb.tar
done
rm -rf testopenfilegdb92.gdb
unzip $(dirname $0)/../../autotest/ogr/data/filegdb/testopenfilegdb92.gdb.zip >/dev/null
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > testopenfilegdb92.gdb.tar
for f in testopenfilegdb92.gdb/*; do
    printf "***NEWFILE***:%s\\n" "$f" >> testopenfilegdb92.gdb.tar
    cat $f >> testopenfilegdb92.gdb.tar
done
rm -f $OUT/openfilegdb_fuzzer_seed_corpus.zip
zip -r $OUT/openfilegdb_fuzzer_seed_corpus.zip testopenfilegdb.gdb.tar testopenfilegdb92.gdb.tar >/dev/null
rm -r testopenfilegdb.gdb
rm testopenfilegdb.gdb.tar
rm -r testopenfilegdb92.gdb
rm testopenfilegdb92.gdb.tar

# AVC binary coverage: pack the whole testavc directory tree.
echo "Building avcbin_fuzzer_seed_corpus.zip"
rm -f $OUT/avcbin_fuzzer_seed_corpus.zip
CUR_DIR=$PWD
cd $(dirname $0)/../../autotest/ogr/data/avc/testavc
printf "FUZZER_FRIENDLY_ARCHIVE\\n" > $CUR_DIR/avcbin.tar
find . -type f | while read -r f ; do
    printf "***NEWFILE***:%s\\n" "$f" >> $CUR_DIR/avcbin.tar
    cat $f >> $CUR_DIR/avcbin.tar
done
cd $CUR_DIR
zip -r $OUT/avcbin_fuzzer_seed_corpus.zip avcbin.tar >/dev/null
rm avcbin.tar

# AVC E00: single-file text format.
echo "Building avce00_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/avc
rm -f $OUT/avce00_fuzzer_seed_corpus.zip
zip -r $OUT/avce00_fuzzer_seed_corpus.zip ./*.e00 >/dev/null
cd $OLDPWD

# GML: document plus its XSD schema.
# NOTE(review): this relies on $CUR_DIR (set in the avcbin step above) still
# matching the current directory — confirm before reordering sections.
echo "Building gml_fuzzer_seed_corpus.zip"
rm -f $OUT/gml_fuzzer_seed_corpus.zip
{
    printf "FUZZER_FRIENDLY_ARCHIVE\\n"
    printf "***NEWFILE***:test.gml\\n"
    cat $(dirname $0)/../../autotest/ogr/data/gml/archsites.gml
    printf "***NEWFILE***:test.xsd\\n"
    cat $(dirname $0)/../../autotest/ogr/data/gml/archsites.xsd
} > $CUR_DIR/archsites_gml.tar
zip -r $OUT/gml_fuzzer_seed_corpus.zip archsites_gml.tar >/dev/null
rm archsites_gml.tar

# FlatGeobuf and LVBAG: single-file inputs.
echo "Building fgb_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/testfgb
rm -f $OUT/fgb_fuzzer_seed_corpus.zip
zip -r $OUT/fgb_fuzzer_seed_corpus.zip ./*.fgb >/dev/null
cd $OLDPWD
echo "Building lvbag_fuzzer_seed_corpus.zip"
cd $(dirname $0)/../../autotest/ogr/data/lvbag
rm -f $OUT/lvbag_fuzzer_seed_corpus.zip
zip -r $OUT/lvbag_fuzzer_seed_corpus.zip ./*.xml >/dev/null
cd $OLDPWD

# Finally, copy the static seed files shipped next to the fuzzers.
echo "Copying data to $OUT"
cp $(dirname $0)/../data/* $OUT
|
<filename>client/shared/src/search/query/diagnostics.ts
import * as Monaco from 'monaco-editor'
import { SearchPatternType } from '../../graphql-operations'
import { toMonacoRange } from './decoratedToken'
import { validateFilter } from './filters'
import { Token } from './token'
/**
 * Builds Monaco marker data for a scanned search query, so the query input
 * can underline filters whose values fail validation.
 */
export function getDiagnostics(tokens: Token[], patternType: SearchPatternType): Monaco.editor.IMarkerData[] {
    const markers: Monaco.editor.IMarkerData[] = []
    for (const token of tokens) {
        // Only filter tokens are validated; every other token kind is skipped.
        if (token.type !== 'filter') {
            continue
        }
        const result = validateFilter(token.field.value, token.value)
        if (!result.valid) {
            // Anchor the error marker on the filter's field range.
            markers.push({
                severity: Monaco.MarkerSeverity.Error,
                message: result.reason,
                ...toMonacoRange(token.field.range),
            })
        }
    }
    return markers
}
|
<gh_stars>0
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.auth.api.impl;
import io.dropwizard.Application;
import io.dropwizard.cli.Cli;
import io.dropwizard.configuration.ResourceConfigurationSourceProvider;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import io.dropwizard.util.JarLocation;
import io.stargate.auth.AuthenticationService;
import io.stargate.auth.api.AuthApiActivator;
import io.stargate.auth.api.resources.AuthResource;
import io.stargate.auth.api.swagger.SwaggerUIResource;
import io.stargate.core.metrics.api.HttpMetricsTagProvider;
import io.stargate.core.metrics.api.Metrics;
import io.stargate.metrics.jersey.MetricsBinder;
import io.swagger.config.ScannerFactory;
import io.swagger.jaxrs.config.BeanConfig;
import io.swagger.jaxrs.config.DefaultJaxrsScanner;
import io.swagger.jaxrs.listing.ApiListingResource;
import io.swagger.jaxrs.listing.SwaggerSerializers;
import java.util.EnumSet;
import javax.servlet.DispatcherType;
import javax.servlet.FilterRegistration;
import org.eclipse.jetty.servlets.CrossOriginFilter;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ServerProperties;
import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkUtil;
/**
 * Dropwizard {@link Application} serving the Stargate authentication REST API
 * along with Swagger metadata and the Swagger UI.
 */
public class AuthApiServer extends Application<AuthApiServerConfiguration> {

  // Backend used by AuthResource to validate credentials.
  // NOTE(review): package-private and non-final — confirm whether tests rely
  // on replacing it before tightening visibility.
  AuthenticationService authenticationService;
  private final Metrics metrics;
  private final HttpMetricsTagProvider httpMetricsTagProvider;

  /**
   * Creates the server and configures the global Swagger scanner.
   *
   * @param authenticationService service bound into the Jersey resources
   * @param metrics provider of this module's metric registry
   * @param httpMetricsTagProvider supplies per-request metric tags
   */
  public AuthApiServer(
      AuthenticationService authenticationService,
      Metrics metrics,
      HttpMetricsTagProvider httpMetricsTagProvider) {
    this.authenticationService = authenticationService;
    this.metrics = metrics;
    this.httpMetricsTagProvider = httpMetricsTagProvider;

    // Swagger bean config: advertise plain-HTTP endpoints rooted at "/".
    BeanConfig beanConfig = new BeanConfig();
    beanConfig.setSchemes(new String[] {"http"});
    beanConfig.setBasePath("/");
    ScannerFactory.setScanner(new DefaultJaxrsScanner());
  }

  /**
   * The only reason we override this is to remove the call to {@code bootstrap.registerMetrics()}.
   *
   * <p>JVM metrics are registered once at the top level in the health-checker module.
   */
  @Override
  public void run(String... arguments) {
    final Bootstrap<AuthApiServerConfiguration> bootstrap = new Bootstrap<>(this);
    addDefaultCommands(bootstrap);
    initialize(bootstrap);
    final Cli cli = new Cli(new JarLocation(getClass()), bootstrap, System.out, System.err);
    // only exit if there's an error running the command
    cli.run(arguments).ifPresent(this::onFatalError);
  }

  /** Wires resources, DI bindings, CORS and request metrics into Jersey. */
  @Override
  public void run(
      final AuthApiServerConfiguration authApiServerConfiguration, final Environment environment) {
    // Make the AuthenticationService injectable into resources.
    environment
        .jersey()
        .register(
            new AbstractBinder() {
              @Override
              protected void configure() {
                bind(authenticationService).to(AuthenticationService.class);
              }
            });
    // REST endpoint plus Swagger listing/serialisers and the Swagger UI.
    environment.jersey().register(AuthResource.class);
    environment.jersey().register(ApiListingResource.class);
    environment.jersey().register(SwaggerSerializers.class);
    environment.jersey().register(SwaggerUIResource.class);
    // The Swagger UI resource needs the OSGi bundle to locate static assets.
    environment
        .jersey()
        .register(
            new AbstractBinder() {
              @Override
              protected void configure() {
                bind(FrameworkUtil.getBundle(AuthApiActivator.class)).to(Bundle.class);
              }
            });
    enableCors(environment);
    MetricsBinder metricsBinder =
        new MetricsBinder(metrics, httpMetricsTagProvider, AuthApiActivator.MODULE_NAME);
    metricsBinder.register(environment.jersey());
    // no html content
    environment.jersey().property(ServerProperties.RESPONSE_SET_STATUS_OVER_SEND_ERROR, true);
  }

  /** Loads configuration from the classpath and scopes metrics to this module. */
  @Override
  public void initialize(final Bootstrap<AuthApiServerConfiguration> bootstrap) {
    super.initialize(bootstrap);
    bootstrap.setConfigurationSourceProvider(new ResourceConfigurationSourceProvider());
    bootstrap.setMetricRegistry(metrics.getRegistry(AuthApiActivator.MODULE_NAME));
  }

  /** Installs a permissive CORS filter covering every dispatch type and path. */
  private void enableCors(Environment environment) {
    FilterRegistration.Dynamic filter =
        environment.servlets().addFilter("cors", CrossOriginFilter.class);
    filter.setInitParameter(
        CrossOriginFilter.ALLOWED_METHODS_PARAM, "POST,GET,OPTIONS,PUT,DELETE,PATCH");
    filter.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
    filter.setInitParameter(CrossOriginFilter.ACCESS_CONTROL_ALLOW_ORIGIN_HEADER, "*");
    filter.setInitParameter(CrossOriginFilter.ALLOWED_HEADERS_PARAM, "*");
    filter.setInitParameter(CrossOriginFilter.ALLOW_CREDENTIALS_PARAM, "true");
    filter.setInitParameter(CrossOriginFilter.EXPOSED_HEADERS_PARAM, "Date");
    filter.addMappingForUrlPatterns(EnumSet.allOf(DispatcherType.class), true, "/*");
  }

  @Override
  protected void bootstrapLogging() {
    // disable dropwizard logging, it will use external logback
  }
}
|
#!/bin/bash
# Builds the Pelican-generated site and optionally rsyncs it to production.

# Step 1: Activate the virtual environment
source ./bin/activate
# Step 2: Install Python 2 dependencies
pip2 install -r requirements.txt
# Step 3: Build the content using Pelican; abort on failure ("exit" with no
# argument propagates the failed build's status code).
./bin/pelican content -s pelicanconf.py || exit
# Step 4: Deploy the output to production server if REALLY_DEPLOY is set to "1"
# NOTE(review): "<EMAIL>" looks like a redacted user@host placeholder — it
# must be filled in before this step can work.
if [ "$REALLY_DEPLOY" = "1" ]; then
    rsync -e "ssh -i $HOME/.ssh/twir_deploy_key" -razvP --delete-after output/ <EMAIL>:production
fi
from django.urls import path, include
from cradmin_legacy.apps.cradmin_authenticate.views import logout
from devilry.devilry_authenticate.views import CustomLoginView, allauth_views
# Authentication URL routes: cradmin login/logout endpoints plus
# django-allauth routes wrapped in project-specific views.
urlpatterns = [
    # Primary login/logout used by cradmin.
    path('login', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
    path('logout', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
    # Allauth login/logout overridden with custom views but keeping the
    # standard allauth URL names so reverse() keeps working.
    path('allauth/login/',
         allauth_views.AllauthLoginView.as_view(),
         name='account_login'),
    path('allauth/logout/',
         allauth_views.AllauthLogoutView.as_view(),
         name='account_logout'),
    # Remaining allauth routes are included unchanged.
    path('allauth/', include('allauth.urls')),
]
# from django.conf.urls import url, include
# from cradmin_legacy.apps.cradmin_authenticate.views import logout
# from devilry.devilry_authenticate.views import CustomLoginView, allauth_views
# urlpatterns = [
# url(r'^login$', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
# url(r'^logout$', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
# url(r"^allauth/login/$",
# allauth_views.AllauthLoginView.as_view(),
# name="account_login"),
# url(r"^allauth/logout/$",
# allauth_views.AllauthLogoutView.as_view(),
# name="account_logout"),
# url(r'^allauth/', include('allauth.urls')),
# ]
|
// Small wrapper around a Bootstrap modal: reuses an existing element with the
// given id or creates one from a template, and exposes show()/hide().
function BsModal(id) {
    var instance = document.getElementById(id);
    var template = '<div class="modal fade" id="'+id+'" tabindex="-1" role="dialog"> \
<div class="modal-dialog" role="document"> \
<div class="modal-content"> \
<div class="modal-header"> \
<h5 class="modal-title">Modal title</h5> \
<button type="button" class="close" data-dismiss="modal" aria-label="Close"> \
<span aria-hidden="true">×</span> \
</button> \
</div> \
<div class="modal-body"> \
</div> \
<div class="modal-footer"> \
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button> \
</div> \
</div> \
</div> \
</div>';
    var $element;
    if (!instance) {
        // Bug fix: the original appended the template string to the body and
        // then wrapped a SECOND call to $(template), which builds a fresh,
        // detached DOM tree — show()/hide() therefore targeted an element
        // that was never inserted into the document. Build the element once
        // and append that same element.
        $element = $(template);
        $(document.body).append($element);
    } else {
        $element = $(instance);
    }
    // Fill the title/body and display the modal.
    this.show = function(title, body) {
        $element.find('.modal-body').html(body);
        $element.find('.modal-title').html(title);
        $element.modal('show');
    };
    this.hide = function() {
        $element.modal('hide');
    };
}
|
<filename>core/src/main/java/org/lint/azzert/command/ExemptDisabledMethodsCommand.java
package org.lint.azzert.command;
import org.lint.azzert.LintCommand;
import org.lint.azzert.context.Context;
/**
 * Lint command that removes every disabled test method from the
 * {@link Context}, so later commands only analyse tests that actually run.
 */
public class ExemptDisabledMethodsCommand implements LintCommand<Void> {
    @Override
    // Remove all disabled tests; how "disabled" is detected is delegated to
    // each method's TestFramework (framework-specific annotations).
    public Void execute(final Context context){
        context.getMethods().removeIf(method ->
            method.getTestFramework().isDisabledMethod(method)
        );
        return null;
    }
}
|
# Connection settings consumed by the calling scripts; fill in before sourcing.
# NOTE(review): the m_ prefix and "room alias" suggest Matrix chat credentials —
# confirm against the scripts that source this file.
export m_room_alias=""
export m_username=""
export m_password=""
# Python 3 program to check if an element
# exists in the given array
def search(arr, item):
    """Return True if ``item`` occurs in ``arr``, else False.

    Linear scan using equality, equivalent to the explicit loop version.
    """
    return any(element == item for element in arr)
# Driver code
# Driver code: demo lookup of a value that is present in the array.
arr = [1, 2, 3, 4, 5]

# Function Call
item = 5
if search(arr,item):
    print("Element is present")
else:
    print("Element is not present")
<reponame>godfat/rest-gw2
# frozen_string_literal: true
# Top-level namespace for the rest-gw2 gem. Client and Server are lazily
# loaded on first reference via autoload.
module RestGW2
  autoload :Client, 'rest-gw2/client'
  autoload :Server, 'rest-gw2/server'
end
|
<reponame>larshelge/httpcomponents-core
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.util;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
* Unit tests for {@link Args}.
*/
public class TestArgs {

    @Test
    public void testArgCheckPass() {
        Args.check(true, "All is well");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testArgCheckFail() {
        Args.check(false, "Oopsie");
    }

    @Test
    public void testArgNotNullPass() {
        final String stuff = "stuff";
        Assert.assertSame(stuff, Args.notNull(stuff, "Stuff"));
    }

    @Test(expected=NullPointerException.class)
    public void testArgNotNullFail() {
        Args.notNull(null, "Stuff");
    }

    @Test
    public void testArgNotEmptyPass() {
        final String stuff = "stuff";
        Assert.assertSame(stuff, Args.notEmpty(stuff, "Stuff"));
    }

    @Test(expected=NullPointerException.class)
    public void testArgNotEmptyFail1() {
        Args.notEmpty((String) null, "Stuff");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testArgNotEmptyFail2() {
        Args.notEmpty("", "Stuff");
    }

    @Test(expected=NullPointerException.class)
    public void testArgNotBlankFail1() {
        Args.notBlank((String) null, "Stuff");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testArgNotBlankFail2() {
        Args.notBlank("", "Stuff");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testArgNotBlankFail3() {
        Args.notBlank(" \t \n\r", "Stuff");
    }

    @Test
    public void testArgCollectionNotEmptyPass() {
        final List<String> list = Arrays.asList("stuff");
        Assert.assertSame(list, Args.notEmpty(list, "List"));
    }

    @Test(expected=NullPointerException.class)
    public void testArgCollectionNotEmptyFail1() {
        Args.notEmpty((List<?>) null, "List");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testArgCollectionNotEmptyFail2() {
        Args.notEmpty(Collections.emptyList(), "List");
    }

    @Test
    public void testPositiveIntPass() {
        Assert.assertEquals(1, Args.positive(1, "Number"));
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPositiveIntFail1() {
        Args.positive(-1, "Number");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPositiveIntFail2() {
        Args.positive(0, "Number");
    }

    @Test
    public void testPositiveLongPass() {
        Assert.assertEquals(1L, Args.positive(1L, "Number"));
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPositiveLongFail1() {
        Args.positive(-1L, "Number");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPositiveLongFail2() {
        Args.positive(0L, "Number");
    }

    @Test
    public void testNotNegativeIntPass1() {
        Assert.assertEquals(1, Args.notNegative(1, "Number"));
    }

    @Test
    public void testNotNegativeIntPass2() {
        Assert.assertEquals(0, Args.notNegative(0, "Number"));
    }

    @Test(expected=IllegalArgumentException.class)
    public void testNotNegativeIntFail1() {
        Args.notNegative(-1, "Number");
    }

    @Test
    public void testNotNegativeLongPass1() {
        Assert.assertEquals(1L, Args.notNegative(1L, "Number"));
    }

    @Test
    public void testNotNegativeLongPass2() {
        Assert.assertEquals(0L, Args.notNegative(0L, "Number"));
    }

    @Test(expected=IllegalArgumentException.class)
    public void testNotNegativeLongFail1() {
        Args.notNegative(-1L, "Number");
    }

    // int range checks

    // FIX: this method was missing @Test and never ran.
    @Test
    public void testIntSmallestRangeOK() {
        Args.checkRange(0, 0, 0, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIntSmallestRangeFailLow() {
        Args.checkRange(-1, 0, 0, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIntRangeFailLow() {
        Args.checkRange(-101, -100, 100, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIntRangeFailHigh() {
        Args.checkRange(101, -100, 100, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testIntSmallestRangeFailHigh() {
        Args.checkRange(1, 0, 0, "Number");
    }

    // FIX: this method was missing @Test and never ran.
    @Test
    public void testIntFullRangeOK() {
        Args.checkRange(0, Integer.MIN_VALUE, Integer.MAX_VALUE, "Number");
    }

    // long range checks

    // FIX: this method was missing @Test and never ran.
    @Test
    public void testLongSmallestRangeOK() {
        Args.checkRange(0L, 0L, 0L, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLongSmallestRangeFailLow() {
        Args.checkRange(-1L, 0L, 0L, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLongRangeFailLow() {
        Args.checkRange(-101L, -100L, 100L, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLongRangeFailHigh() {
        Args.checkRange(101L, -100L, 100L, "Number");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testLongSmallestRangeFailHigh() {
        Args.checkRange(1L, 0L, 0L, "Number");
    }

    // FIX: this method was missing @Test and never ran.
    @Test
    public void testLongFullRangeOK() {
        Args.checkRange(0L, Long.MIN_VALUE, Long.MAX_VALUE, "Number");
    }
}
|
<reponame>seekcx/egg-yup
'use strict';

const Controller = require('egg').Controller;

/**
 * Sample controller demonstrating egg-yup request validation.
 */
class UserController extends Controller {
  /**
   * Registers a user after asynchronous schema validation
   * (ctx.validate awaits async .test() rules before continuing).
   */
  async register() {
    const { ctx, app: { yup } } = this;

    await ctx.validate({
      nickname: yup.string()
        .required('nickname is required')
        .matches(/^[a-z0-9]{4,12}$/, 'nickname invalid')
        // Async uniqueness check; resolves true only for 'test'
        // (deliberate fixture behaviour for the test-suite).
        .test('uniqueNickname', 'nickname already taken', value => {
          return new Promise(resolve => {
            setTimeout(() => {
              resolve(value === 'test');
            }, 0);
          });
        }),
      password: yup.string()
        .required('password is required')
        .matches(/^[a-z]{6,16}$/, 'password invalid'),
      age: yup.number().min(18).max(80),
    });

    ctx.body = 'user created';
  }

  /**
   * Same as register() but uses the synchronous validator; no async
   * test rules are allowed in this variant.
   */
  async registerSync() {
    const { ctx, app: { yup } } = this;

    await ctx.validateSync({
      nickname: yup.string()
        .required('nickname is required')
        .matches(/^[a-z0-9]{4,12}$/, 'nickname invalid'),
      password: yup.string()
        .required('password is required')
        .matches(/^[a-z]{6,16}$/, 'password invalid'),
      age: yup.number().min(18).max(80),
    });

    ctx.body = 'user created';
  }
}

module.exports = UserController;
|
#!/bin/bash
# Installs the latest gkup CLI + core binaries under /opt/Miguel-Dorta/gkup
# and exposes them system-wide via /etc/profile.d/gkup.sh.

TMP_DIR="/tmp/Miguel-Dorta_gkup_install"
INSTALL_DIR="/opt/Miguel-Dorta/gkup"
ARCH="$(uname -m)"
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"

# Map uname output to the architecture names used by the release assets.
case $ARCH in
x86_64)
    ARCH="amd64"
    ;;
i?86)
    ARCH="386"
    ;;
esac

# Download binaries
mkdir $TMP_DIR || exit 1
wget -qO- "https://github.com/Miguel-Dorta/gkup-cli/releases/latest/download/gkup_${OS}_${ARCH}.gz" | gzip -d > $TMP_DIR/gkup-cli || exit 1
wget -qO- "https://github.com/Miguel-Dorta/gkup-core/releases/latest/download/gkup-core_${OS}_${ARCH}.gz" | gzip -d > $TMP_DIR/gkup-core || exit 1

# Put binaries
sudo mkdir -p $INSTALL_DIR || exit 1
sudo mv $TMP_DIR/gkup-core $TMP_DIR/gkup-cli $INSTALL_DIR || exit 1
sudo ln -s $INSTALL_DIR/gkup-cli $INSTALL_DIR/gkup
sudo chown -R root:root $INSTALL_DIR || exit 1
sudo chmod -R 0755 $INSTALL_DIR || exit 1

# Clean up tmp dir
rm -Rf $TMP_DIR

# Set env vars and path. (Fix: removed the "gkupPath" variable the original
# assigned here and never read.)
sudo bash -c "echo -e 'export GKUP_PATH=$INSTALL_DIR/gkup-core\nexport PATH=\$PATH:$INSTALL_DIR' > /etc/profile.d/gkup.sh"
sudo chmod 0755 $INSTALL_DIR/gkup-core
|
<gh_stars>0
package main
import (
"bufio"
"flag"
"io"
"os"
"github.com/skatsuta/gdisasm/disasm"
"github.com/skatsuta/gdisasm/log"
)
// bsize is a buffer block size, in bytes.
// NOTE(review): not referenced in this file — confirm it is used elsewhere in
// the package before removing.
const bsize = 2

// logger is a logging object shared by the whole program.
var logger log.Logger

// init installs the package logger before main runs.
func init() {
	logger = log.NewLogger()
}
// main disassembles the file named by the first CLI argument and streams the
// parsed statements to stdout, one per line.
func main() {
	flag.Parse()
	// Bug fix: flag.Args()[0] panicked with index-out-of-range when no
	// argument was given.
	if flag.NArg() < 1 {
		logger.Err("no input file specified")
		return
	}
	file := flag.Arg(0)
	fp, err := os.Open(file)
	if err != nil {
		logger.Err("os.Open(%v) failed: %v", file, err)
		// Bug fix: the original fell through and used the nil file handle.
		return
	}
	// Bug fix: the file handle was never closed.
	defer fp.Close()
	r := bufio.NewReader(fp)
	w := bufio.NewWriter(os.Stdout)
	d := disasm.New(r, w)
	for {
		s, err := d.Parse()
		if err == io.EOF {
			break
		}
		// NOTE(review): non-EOF parse errors are silently skipped here, as in
		// the original — confirm disasm.Parse cannot return a persistent
		// error with an empty string, which would loop forever.
		if s == "" {
			continue
		}
		// Fix: the log message said WriteByte but the call is WriteString.
		if _, e := w.WriteString(s + "\n"); e != nil {
			logger.Err("Writer#WriteString(%v) failed: %v", s, e)
			return
		}
		// write out per line
		if e := w.Flush(); e != nil {
			logger.Err("Writer#Flush() failed: %v", e)
			return
		}
	}
	if e := w.Flush(); e != nil {
		logger.Err("Writer#Flush() failed: %v", e)
	}
}
|
#include <iostream>
// Prints my_array[n], my_array[n-1], ..., my_array[0], each followed by a
// space. n is the index of the last element to print.
// Bug fix: the original recursed BEFORE printing, which emitted the elements
// in FORWARD order despite the function's name. Printing first and then
// recursing yields the reverse order the name promises.
void printInReverse(int my_array[], int n){
    if (n < 0){
        return;
    }
    std::cout << my_array[n] << " ";
    printInReverse(my_array, n-1);
}
// Demo driver: prints the contents of a fixed sample array via
// printInReverse, then a trailing newline.
int main(){
    int my_array[] = {1, 5, 7, 10, 15};
    // Element count; printInReverse takes the index of the last element.
    int n = sizeof(my_array) / sizeof(my_array[0]);
    printInReverse(my_array, n-1);
    std::cout << std::endl;
    return 0;
}
import { ObjectId } from 'mongodb';
import { config } from 'dotenv';
import { PhotosRepository } from '../../../src/api/photos/repository';
import { MongoDB } from '../../../src/database/MongoDB';
import { photos } from '../../../src/database/mongo/fixtures/photos';
import { PhotoStatus } from '../../../src/models/Photo';
import { getRandomString } from '../../utils';
config();

// These are integration tests against a real MongoDB, so the connection
// string is mandatory.
if (!process.env.DATABASE_URI) {
  throw new Error('Missing DATABASE_URI env variable');
}

const database = new MongoDB(process.env.DATABASE_URI);
let repository: PhotosRepository;

// Fixture aliases: photos[0] is consumed by the delete tests, photos[1] is
// only ever read, keeping read and delete tests independent.
const photoToBeDeleted = { ...photos[0] };
const existingPhoto = { ...photos[1] };

// One shared connection for the whole suite.
beforeAll((ready) => {
  database.connect().then(() => {
    repository = new PhotosRepository(database.getCollections().photos);
    ready();
  }).catch((err) => {
    ready(err);
  });
});

afterAll((finish) => {
  database.disconnect().then(() => {
    finish();
  }).catch((err) => {
    finish(err);
  });
});

describe('Photos repository methods', () => {
  it('getById()', async () => {
    const alreadyExistentPhoto = existingPhoto;
    // Repository entities expose string ids where the raw documents hold
    // ObjectId instances.
    const expected = {
      ...alreadyExistentPhoto,
      id: alreadyExistentPhoto._id.toString(),
      deviceId: alreadyExistentPhoto.deviceId.toString(),
      userId: alreadyExistentPhoto.userId.toString()
    };
    const photo = await repository.getById(alreadyExistentPhoto._id.toString());
    expect(photo).not.toBeNull();
    const received = { ...photo, _id: alreadyExistentPhoto._id };
    expect(received).toStrictEqual(expected);
  });

  it('get()', async () => {
    const alreadyExistentPhoto = existingPhoto;
    const expected = {
      ...alreadyExistentPhoto,
      id: alreadyExistentPhoto._id.toString(),
      deviceId: alreadyExistentPhoto.deviceId.toString(),
      userId: alreadyExistentPhoto.userId.toString()
    };
    // Filter by the fixture's (unique) name.
    const [photo] = await repository.get({ name: alreadyExistentPhoto.name });
    expect(photo).not.toBeNull();
    const received = { ...photo, _id: alreadyExistentPhoto._id };
    expect(received).toStrictEqual(expected);
  });

  it('create()', async () => {
    const alreadyExistentPhoto = { ...photos[0] };
    // Clone the fixture but give it a random name so it does not collide,
    // and clean it up at the end.
    const received = await repository.create({
      deviceId: alreadyExistentPhoto.deviceId.toString(),
      fileId: alreadyExistentPhoto.fileId,
      height: alreadyExistentPhoto.height,
      name: getRandomString(10),
      previewId: alreadyExistentPhoto.previewId,
      size: alreadyExistentPhoto.size,
      type: alreadyExistentPhoto.type,
      userId: alreadyExistentPhoto.userId.toString(),
      width: alreadyExistentPhoto.width,
      hash: alreadyExistentPhoto.hash,
      status: PhotoStatus.Exists,
      takenAt: new Date(),
      statusChangedAt: new Date()
    });
    expect(received).not.toBeNull();
    // The generated id must be a valid ObjectId.
    expect(() => new ObjectId(received.id)).not.toThrow();
    await repository.deleteById(received.id);
  });

  it('update()', () => {
    // update() is intentionally unimplemented in the repository for now.
    expect(repository.update()).rejects.toEqual('Not implemented yet');
  });

  it('deleteById()', async () => {
    const alreadyExistentPhoto = { ...photos[0] };
    await repository.deleteById(alreadyExistentPhoto._id.toString('hex'));
    const received = await repository.getById(alreadyExistentPhoto._id.toString());
    expect(received).toBeNull();
  });

  it('delete()', async () => {
    const alreadyExistentPhoto = photoToBeDeleted;
    await repository.delete({ name: alreadyExistentPhoto.name });
    const received = await repository.getById(alreadyExistentPhoto._id.toString());
    expect(received).toBeNull();
  });
});
|
'use strict';
//load dependencies
var gulp = require('gulp'),
git = require('gulp-git'),
bump = require('gulp-bump'),
del= require('del'),
semver = require('semver'),
less = require('gulp-less');
//get current app version
var version = require('./package.json').version;
//function for version lookup and tagging
// Bumps the version in the project manifests, commits, and tags the release.
// `release` is a semver release type: 'patch', 'minor', or 'major'.
function increment(release) {
    // Compute the next version string up front so commit and tag agree.
    var nextVersion = semver.inc(version, release);

    // Single stream: bump manifests -> write back -> commit -> tag.
    return gulp.src(['./package.json', './bower.json'])
        .pipe(bump({ type: release }))
        .pipe(gulp.dest('./'))
        .pipe(git.commit('Release v' + nextVersion))
        .pipe(git.tag('v' + nextVersion, 'Version message', function (err) {
            if (err) throw err;
        }));
}
//tasks for version tags
// NOTE(review): the gulp.task(name, [deps], fn) array-dependency form is
// gulp 3.x only; gulp 4 requires gulp.series/gulp.parallel — confirm the
// installed gulp version before upgrading.
gulp.task('patch', ['dist'], function () { return increment('patch'); });
gulp.task('feature', ['dist'], function () { return increment('minor'); });
gulp.task('release', ['dist'], function () { return increment('major'); });

// Pushes master plus all tags to the 'upstream' remote.
gulp.task('push', function () {
    console.info('Pushing...');
    return git.push('upstream', 'master', { args: " --tags" }, function (err) {
        if (err) {
            console.error(err);
            throw err;
        } else {
            console.info('done pushing to github!');
        }
    });
});

//less compilation
gulp.task('less-template', function () {
    return gulp.src(['template/less/base.less'])
        .pipe(less())
        .pipe(gulp.dest('template/css'))
});
gulp.task('less-core', function () {
    return gulp.src(['core/less/base.less'])
        .pipe(less())
        .pipe(gulp.dest('core/css'))
});

// Clean
// NOTE(review): del >= 2.0 returns a promise and no longer takes a node-style
// callback — verify the installed del version supports this (pattern, cb) form.
gulp.task('clean', function (cb) {
    del([
        'css/**'
    ], cb);
});

// build dist
gulp.task('dist', ['less-template', 'less-core']);

// Default task: clean first, then kick off the dist build.
gulp.task('default', ['clean'], function () {
    gulp.start('dist');
});
<filename>src/runtime/openfin/Window.js
/* global fin */
import windowmanager from './global';
import readySync from '../ready';
import { EventHandler, getUniqueWindowName, SyncCallback } from '../../utils/index';
import { BoundingBox, Position, Size, Vector } from '../../geometry/index';
// OpenFin window options applied when the caller omits them.
const defaultConfig = {
    defaultWidth: 600,
    defaultHeight: 600,
    frame: false,
    resizable: true,
    saveWindowState: false,
    autoShow: true,
    icon: location.href + 'favicon.ico'
};

// Maps windowmanager config keys to their OpenFin option names.
const configMap = {
    title: 'name',
    left: 'defaultLeft',
    top: 'defaultTop',
    width: 'defaultWidth',
    height: 'defaultHeight'
};

// Event names a Window instance is allowed to emit/subscribe to
// (enforced by the EventHandler base class).
const acceptedEventHandlers = [
    'ready',
    'drag-start', 'drag-before', 'drag-stop',
    'dock-before',
    'move', 'move-before',
    'resize-before',
    'close',
    'show', 'hide', 'restore', 'minimize', 'maximize',
    'focus', 'blur'];

// The OpenFin window backing the current runtime context;
// assigned inside fin.desktop.main() near the bottom of this file.
let currentWin;
// Wires a freshly-created (or adopted) OpenFin window into the windowmanager
// runtime: shares the global registries with the child document, mirrors the
// window bounds, hooks lifecycle events, and installs a <title> element.
// Invoked with `this` bound to a Window instance once the OpenFin window exists.
function _setupDOM(config) {
    let thisWindow = this;

    // TODO: Rewrite to remove setTimeout for the following:
    function setWindows() {
        if (thisWindow._window.contentWindow.windowmanager) {
            // Share the launcher, window registry, and message bus so both
            // contexts observe the same state.
            thisWindow._window.contentWindow.windowmanager._launcher = windowmanager._launcher;
            thisWindow._window.contentWindow.windowmanager._windows = windowmanager._windows;
            thisWindow._window.contentWindow.windowmanager._internalBus = windowmanager._internalBus;
        } else {
            // Child document hasn't loaded its windowmanager yet; poll every 5ms.
            setTimeout(setWindows, 5);
        }
    }
    setWindows();

    this._window.getBounds(function (bounds) {
        // OpenFin reports left/top/width/height; derive right/bottom for BoundingBox.
        bounds.right = bounds.left + bounds.width;
        bounds.bottom = bounds.top + bounds.height;
        thisWindow._bounds.set(new BoundingBox(bounds));
    });

    // Setup _window event listeners:
    // TODO: look into moving these elsewhere, might not work if currentWin is closed, and thisWindow is not.
    function onBoundsChange(event) {
        event.right = event.left + event.width;
        event.bottom = event.top + event.height;
        thisWindow._bounds.set(new BoundingBox(event));
        // NOTE(review): changeType appears to encode 0 = position-only,
        // 1 = size-only — confirm against the OpenFin bounds-event docs.
        if (event.changeType !== 0) {
            thisWindow.undock(); // Undock on resize. TODO: Allow resize with docking
        }
        if (event.changeType !== 1) {
            thisWindow.emit('move'); // TODO: Pass what position it is at.
        }
    }
    this._window.addEventListener('bounds-changing', onBoundsChange);
    this._window.addEventListener('bounds-changed', onBoundsChange);

    function onClose() {
        // TODO: Is it possible that onClose might not be called when the window is closed?
        // What if this event is set up on a window that has closed already, and then this window closes?
        thisWindow._isClosed = true;
        windowmanager._windows.delete(thisWindow._id);
        // Undock:
        thisWindow.undock();
        // Move children to parent:
        const parent = thisWindow.getParent();

        for (const child of thisWindow.getChildren()) {
            // We use getChildren to have a copy of the list, so child.setParent doesn't modify this loop's list!
            // TODO: Optimize this loop, by not making a copy of children, and not executing splice in each setParent!
            child.setParent(parent);
        }
        thisWindow.setParent(undefined); // Remove from parent
        thisWindow.emit('close');
        windowmanager._internalBus.emit('window-close', thisWindow);
        thisWindow._window = undefined;
        // TODO: Clean up ALL listeners
    }
    this._window.addEventListener('closed', onClose);

    // Setup event listeners: re-emit OpenFin lifecycle events under the
    // windowmanager event names declared in acceptedEventHandlers.
    this._window.addEventListener('shown', () => {
        thisWindow.emit('show');
    });
    this._window.addEventListener('hidden', () => {
        thisWindow.emit('hide');
    });
    this._window.addEventListener('restored', () => {
        thisWindow.emit('restore');
    });
    this._window.addEventListener('minimized', () => {
        thisWindow.emit('minimize');
    });
    this._window.addEventListener('maximized', () => {
        thisWindow.emit('maximize');
    });
    this._window.addEventListener('focused', () => {
        thisWindow.emit('focus');
    });
    this._window.addEventListener('blurred', () => {
        thisWindow.emit('blur');
    });

    // Setup title element in the child document so the tab/taskbar shows it:
    this._titleEl = this._window.contentWindow.document.createElement('title');
    this._titleEl.innerText = this._title;
    this._window.contentWindow.document.head.appendChild(this._titleEl);

    this._isFramed = config.frame;
    this._ready = true;
    this.emit('ready');
    windowmanager._internalBus.emit('window-create', this);
};
/**
 * OpenFin-backed window wrapper for windowmanager.
 *
 * Maintains parent/child relationships, docking groups (windows that move
 * together), cached bounds, and re-emits OpenFin lifecycle events through the
 * EventHandler base class. Constructed either from a plain config object or
 * by adopting an existing fin.desktop.Window (detected via app_uuid).
 */
class Window extends EventHandler {
    constructor(config) {
        // Call the parent constructor:
        super(acceptedEventHandlers);

        config = config || {}; // If no arguments are passed, assume we are creating a default blank window
        // A raw fin.desktop.Window carries app_uuid; a plain config does not.
        const isArgConfig = (config.app_uuid === undefined);

        // Setup private variables:
        this._bounds = new BoundingBox();
        this._ready = false;
        // TODO: Identify current states.
        this._isClosed = false;
        this._isHidden = false;
        this._isMinimized = false;
        this._isMaximized = false;
        this._dockedGroup = [this];
        this._children = [];
        this._parent = undefined;
        this._title = undefined;

        if (isArgConfig) {
            // Translate windowmanager config keys to OpenFin option names:
            for (const prop in config) {
                if (config.hasOwnProperty(prop) && configMap[prop] !== undefined) {
                    config[configMap[prop]] = config[prop];
                    delete config[prop];
                }
            }
            // Fill in defaults for anything the caller omitted:
            for (const prop in defaultConfig) {
                if (defaultConfig.hasOwnProperty(prop)) {
                    config[prop] = (config[prop] != null ? config[prop] : defaultConfig[prop]);
                }
            }
            this._id = getUniqueWindowName();
            this._title = config.name == null ? this._id : config.name;
            config.name = this._id; // Need name to be unique

            if (config.parent) {
                config.parent._children.push(this);
                this._parent = config.parent;
                // TODO: Emit event 'child-added' on parent
                delete config.parent;
            }

            windowmanager._windows.set(this._id, this);
            this._window = new fin.desktop.Window(config, _setupDOM.bind(this, config), function (err) {
                console.error(err, config);
            });
        } else {
            // Adopt an already-existing OpenFin window:
            this._id = config._id || config.name;
            this._title = this._id;
            this._window = config;
            windowmanager._windows.set(this._id, this);
            this._window.getOptions(_setupDOM.bind(this), function (err) {
                console.error(err);
            });
        }
        // TODO: Ensure docking system
    }

    /** True once _setupDOM has finished wiring this window. */
    isReady() {
        return this._ready;
    }

    /** Runs callback when ready (immediately if already ready). Throws if closed. */
    onReady(callback) {
        if (this.isClosed()) { throw new Error('onReady can\'t be called on a closed window'); }
        if (this.isReady()) { return callback.call(this); }
        this.once('ready', callback);
    }

    isClosed() {
        return this._isClosed;
    }

    // Bounds accessors read the locally-cached bounds, not the live OpenFin state.
    getPosition() {
        return this._bounds.getPosition();
    }

    getWidth() {
        return this._bounds.getWidth();
    }

    getHeight() {
        return this._bounds.getHeight();
    }

    getSize() {
        return this._bounds.getSize();
    }

    getBounds() {
        return this._bounds.clone();
    }

    getParent() {
        return this._parent;
    }

    /** Re-parents this window, keeping both parents' child lists consistent. */
    setParent(parent) {
        // TODO: Execute appropriate checks (if not closed, and is this new parent a window)
        if (parent === this._parent) { return; }

        if (this._parent) {
            const index = this._parent._children.indexOf(this);

            if (index >= 0) { this._parent._children.splice(index, 1); }
            // TODO: Emit event 'child-removed' on current parent.
        }

        if (parent) {
            this._parent = parent;
            this._parent._children.push(this);
            // TODO: Emit event 'child-added on parent'.
        }
    }

    /** Returns a shallow copy so callers can't mutate the internal list. */
    getChildren() {
        return this._children.slice();
    }

    addChild(child) {
        child.setParent(this);
    }

    getTitle() {
        return this._title;
    }

    /** Updates both the cached title and the <title> element in the child DOM. */
    setTitle(newTitle) {
        if (!newTitle) { throw new Error('setTitle requires one argument of type String'); }
        this._titleEl.innerText = this._title = newTitle;
    }

    isHidden() {
        return this._isHidden;
    }

    isShown() {
        return !this._isHidden;
    }

    isMinimized() {
        return this._isMinimized;
    }

    isMaximized() {
        return this._isMaximized;
    }

    isRestored() {
        return this.isShown() && !this.isMinimized() && !this.isMaximized();
    }

    close(callback) {
        if (this.isClosed()) { return callback && callback(); }
        this._window.close(callback);
    }

    /** Minimizes every window in the docked group; callback fires once all are done. */
    minimize(callback) {
        if (!this._ready) { throw new Error('minimize can\'t be called on an unready window'); }

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            window._isMinimized = true;
            window._window.minimize(callback.ref());
        }
    }

    /** Maximizes only this window (not its docked group). */
    maximize(callback) {
        if (!this._ready) { throw new Error('maximize can\'t be called on an unready window'); }
        this._isMaximized = true;
        this._window.maximize(callback);
    }

    show(callback) {
        if (!this._ready) { throw new Error('show can\'t be called on an unready window'); }

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            window._isHidden = false;
            window._window.show(callback.ref());
        }
    }

    hide(callback) {
        if (!this._ready) { throw new Error('hide can\'t be called on an unready window'); }

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            window._isHidden = true;
            window._window.hide(callback.ref());
        }
    }

    restore(callback) {
        if (!this._ready) { throw new Error('restore can\'t be called on an unready window'); }

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            window._isHidden = false;
            window._isMinimized = false;
            window._isMaximized = false;
            window._window.restore(callback.ref());
        }
    }

    resizable(resizable, callback) {
        // BUG FIX: the error message previously said 'restore'.
        if (!this._ready) { throw new Error('resizable can\'t be called on an unready window'); }
        this._window.updateOptions({
            resizable: resizable
        }, callback);
    }

    /** Raises the whole docked group, with this window ending up on top. */
    bringToFront(callback) {
        if (!this._ready) { throw new Error('bringToFront can\'t be called on an unready window'); }
        let thisWindow = this;
        let beforeCallback = new SyncCallback(function () {
            thisWindow._window.bringToFront(callback);
        });

        for (let window of this._dockedGroup) {
            if (window !== this) {
                window._window.bringToFront(beforeCallback.ref());
            }
        }
    }

    /** Focuses the docked group, focusing this window last. */
    focus(callback) {
        if (!this._ready) { throw new Error('focus can\'t be called on an unready window'); }
        let thisWindow = this;
        let beforeCallback = new SyncCallback(function () {
            thisWindow._window.focus(callback);
        });

        for (let window of this._dockedGroup) {
            if (window !== this) {
                window._window.focus(beforeCallback.ref());
            }
        }
    }

    resizeTo(width, height, callback) {
        if (!this._ready) { throw new Error('resizeTo can\'t be called on an unready window'); }
        if (!this.emit('resize-before')) { return; } // Allow preventing resize
        // NOTE(review): width/height are packed into a Position whose left/top
        // feed the underlying resizeTo(width, height, anchor) call — confirm
        // against the geometry module (setSize uses Size the same way).
        let size = new Position(width, height);

        this._window.resizeTo(size.left, size.top, 'top-left', callback);
    }

    /** Moves this window to (left, top), dragging its docked group along. */
    moveTo(left, top, callback) {
        if (!this._ready) { throw new Error('moveTo can\'t be called on an unready window'); }
        if (!this.emit('move-before')) { return; } // Allow preventing move
        let deltaPos = (new Position(left, top)).subtract(this.getPosition());

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            let pos = window.getPosition().add(deltaPos);

            window._bounds.moveTo(pos);
            window._window.moveTo(pos.left, pos.top, callback.ref());
        }
    }

    /** Moves this window by a delta, dragging its docked group along. */
    moveBy(deltaLeft, deltaTop, callback) {
        if (!this._ready) { throw new Error('moveBy can\'t be called on an unready window'); }
        if (!this.emit('move-before')) { return; } // Allow preventing move
        let deltaPos = new Position(deltaLeft, deltaTop);

        callback = new SyncCallback(callback);
        for (let window of this._dockedGroup) {
            let pos = window.getPosition().add(deltaPos);

            window._bounds.moveTo(pos);
            window._window.moveTo(pos.left, pos.top, callback.ref());
        }
    }

    setSize(width, height, callback) {
        if (!this._ready) { throw new Error('setSize can\'t be called on an unready window'); }
        const size = new Size(width, height);

        this._window.resizeTo(size.left, size.top, 'top-left', callback);
    }

    setBounds(left, top, right, bottom, callback) {
        // BUG FIX: the error message previously said 'resizeTo'.
        if (!this._ready) { throw new Error('setBounds can\'t be called on an unready window'); }
        let bounds = new BoundingBox(left, top, right, bottom);

        this._window.setBounds(bounds.left, bounds.top, bounds.right, bounds.bottom, callback);
    }

    /** Merges `other`'s docked group into this one so they move together. */
    dock(other) {
        if (!this.emit('dock-before')) { return; } // Allow preventing dock
        if (other == null) { return; } // Failed to find other. TODO: Return error
        if (this._isFramed || other._isFramed) return; // If window is framed, don't support dock system.

        // If other is already in the group, return:
        if (this._dockedGroup.indexOf(other) >= 0) { return; }

        // Loop through all windows in otherGroup and add them to this's group:
        for (let otherWin of other._dockedGroup) {
            this._dockedGroup.push(otherWin);
            // Sharing the array between window objects makes it easier to manage:
            otherWin._dockedGroup = this._dockedGroup;
        }

        // TODO: Check if otherGroup is touching
    }

    /** Removes this window from its docked group (the `other` argument is unused). */
    undock(other) {
        // Check to see if window is already undocked:
        if (this._dockedGroup.length === 1) { return; }

        // Undock this:
        this._dockedGroup.splice(this._dockedGroup.indexOf(this), 1);
        this._dockedGroup = [this];

        // TODO: Redock those still touching, EXCEPT 'this'.
    }

    /** Records the drag-start position for every window in the docked group. */
    _dragStart() {
        if (!this.emit('drag-start')) { return; } // Allow preventing drag
        for (let window of this._dockedGroup) {
            window._dragStartPos = window.getPosition();
        }
    }

    /** Moves the docked group by the drag delta, snapping to nearby windows. */
    _dragBy(deltaLeft, deltaTop) {
        if (!this.emit('drag-before')) { return; } // Allow preventing drag
        // Perform Snap:
        const thisBounds = this.getBounds().moveTo(this._dragStartPos.left + deltaLeft,
            this._dragStartPos.top + deltaTop);
        let snapDelta = new Vector(NaN, NaN);

        if (!this._isFramed) {
            // If window is framed, don't support snap system.
            for (const other of windowmanager._windows.values()) {
                if (!other._isFramed && other._dockedGroup !== this._dockedGroup) {
                    snapDelta.setMin(thisBounds.getSnapDelta(other.getBounds()));
                }
            }
        }
        deltaLeft += snapDelta.left || 0;
        deltaTop += snapDelta.top || 0;

        for (let other of this._dockedGroup) {
            let pos = other._dragStartPos;

            // If other doesn't have a drag position, start it:
            if (pos === undefined) {
                pos = other._dragStartPos = other.getPosition();
                pos.left -= deltaLeft;
                pos.top -= deltaTop;
            }

            other._window.moveTo(pos.left + deltaLeft, pos.top + deltaTop);
        }
    }

    /** Finishes a drag: docks to touching windows and clears drag state. */
    _dragStop() {
        // Dock to those it snapped to:
        const thisBounds = this.getBounds();

        if (!this._isFramed) {
            // If window is framed, don't support dock system.
            for (const other of windowmanager._windows.values()) {
                if (!other._isFramed && thisBounds.isTouching(other.getBounds())) {
                    this.dock(other);
                }
            }
        }

        for (let window of this._dockedGroup) {
            delete window._dragStartPos;
        }

        this.emit('drag-stop');
    }

    static getAll() {
        return Array.from(windowmanager._windows.values());
    }

    static getByID(id) {
        return windowmanager._windows.get(id);
    }

    /** Returns the launcher (main) window; the `id` argument is unused. */
    static getMain(id) {
        return windowmanager._windows.get(windowmanager._launcher.name);
    }

    static getCurrent() {
        return Window.current;
    }
}
// Registers the current runtime's window and wires mouse/touch handlers so
// elements carrying the 'window-drag' CSS class act as drag handles for the
// whole docked group.
function setupCurrentWindow() {
    Window.current = windowmanager._windows.get(currentWin.name) || new Window(currentWin);

    // Setup handlers on this window:
    let wX = 0;           // screen coords where the drag began
    let wY = 0;
    let dragging = false; // true while a drag-handle press is active

    window.addEventListener('focus', function () {
        Window.current.bringToFront();
    });

    window.addEventListener('mousedown', function onDragStart(event) {
        if (event.target.classList && event.target.classList.contains('window-drag')) {
            dragging = true;
            wX = event.screenX;
            wY = event.screenY;
            Window.current._dragStart();
        }
    });

    window.addEventListener('touchstart', function (event) {
        if (event.target.classList && event.target.classList.contains('window-drag')) {
            // preventDefault stops the browser's default touch gestures (e.g. scrolling).
            event.preventDefault();
            dragging = true;
            wX = event.touches[0].screenX;
            wY = event.touches[0].screenY;
            Window.current._dragStart();
        }
    });

    window.addEventListener('mousemove', function (event) {
        if (dragging) {
            // Stop text selection:
            window.getSelection().removeAllRanges();
            // Drag:
            Window.current._dragBy(event.screenX - wX, event.screenY - wY);
        }
    });

    window.addEventListener('touchmove', function (event) {
        if (dragging) {
            event.preventDefault();
            // Stop text selection:
            window.getSelection().removeAllRanges();
            // Drag:
            Window.current._dragBy(event.touches[0].screenX - wX, event.touches[0].screenY - wY);
        }
    });

    window.addEventListener('mouseup', function (event) {
        if (dragging) {
            dragging = false;
            Window.current._dragStop();
        }
    });

    window.addEventListener('touchend', function (event) {
        if (dragging) {
            event.preventDefault();
            dragging = false;
            Window.current._dragStop();
        }
    });
}
// Handle current window in this context:
// TODO: Rewrite to remove setTimeout for the following:
fin.desktop.main(readySync.ref(function () {
    currentWin = fin.desktop.Window.getCurrent();
    let currentReady = readySync.ref(setupCurrentWindow);

    // Poll until the windowmanager registry exists before wiring the current window.
    function getCurrent() {
        if (windowmanager._windows) {
            currentReady();
        } else {
            setTimeout(getCurrent, 5);
        }
    }
    getCurrent();
}));

windowmanager.Window = Window;

export default Window;
|
#! /bin/sh
#
# SPDX-License-Identifier: BSD-2-Clause
#
# Copyright (c) 2018-2021 Gavin D. Howard and contributors.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
script="$0"
testdir=$(dirname "$script")

. "$testdir/../scripts/functions.sh"

# usage: history.sh dir -a|idx [exe args...]

# If Python does not exist, then just skip.
py=$(command -v python3)
err=$?

if [ "$err" -ne 0 ]; then
	py=$(command -v python)
	err=$?

	if [ "$err" -ne 0 ]; then
		printf 'Could not find Python 3.\n'
		# BUG FIX: d is not assigned until after this check, so "$d" was
		# always empty here; $1 still holds the calculator name ("bc"/"dc").
		printf 'Skipping %s history tests...\n' "$1"
		exit 0
	fi
fi

# d is "bc" or "dc"
d="$1"
shift

# idx is either an index of the test to run or "-a". If it is "-a", then all
# tests are run.
idx="$1"
shift

if [ "$#" -gt 0 ]; then
	# exe is the executable to run.
	exe="$1"
	shift
else
	exe="$testdir/../bin/$d"
fi

# Expression templates used by the history test driver.
if [ "$d" = "bc" ]; then
	flip="! %s"
	addone="%s + 1"
else
	flip="%s Np"
	addone="%s 1+p"
fi

# Set the test range correctly for all tests or one test. st is the start index.
if [ "$idx" = "-a" ]; then
	# Ask the driver how many tests exist, then convert count -> last index.
	idx=$("$py" "$testdir/history.py" "$d" -a)
	idx=$(printf '%s - 1\n' "$idx" | bc)
	st=0
else
	st="$idx"
fi

# Run all of the tests, retrying each flaky history test up to 5 times.
for i in $(seq "$st" "$idx"); do
	printf 'Running %s history test %d...' "$d" "$i"

	for j in $(seq 1 5); do
		"$py" "$testdir/history.py" "$d" "$i" "$exe" "$@"
		err="$?"
		if [ "$err" -eq 0 ]; then
			break
		fi
	done

	checktest_retcode "$d" "$err" "$d history test $i"

	printf 'pass\n'
done
|
"""
Develop a tool to detect plagiarism in python programs
"""
import difflib


def detect_plagiarism(file1, file2):
    """Compare two source files line-by-line and report how similar they are.

    Prints the similarity percentage and a verdict ("Highly Plagiarised",
    "Moderately Plagiarised", or "Unique").

    Args:
        file1: Path to the first file.
        file2: Path to the second file.

    Returns:
        The difflib similarity ratio, a float in [0.0, 1.0].
    """
    # Context managers close the handles; the original leaked them via bare open().
    with open(file1) as f1, open(file2) as f2:
        file1_lines = f1.readlines()
        file2_lines = f2.readlines()
    similarity = difflib.SequenceMatcher(None, file1_lines, file2_lines).ratio()
    print("Similarity between the two files is : " + str(similarity*100) + "%")
    if similarity > 0.7:
        print("Highly Plagiarised code!")
    elif similarity > 0.5:  # the previous 'similarity <= 0.7' guard was redundant
        print("Moderately Plagiarised Code")
    else:
        print("Unique code")
    return similarity
# Driver Code
# NOTE(review): runs at import time and requires program1.py / program2.py to
# exist in the working directory — consider an `if __name__ == "__main__":` guard.
detect_plagiarism("program1.py", "program2.py")
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Reset getopts state in case the shell environment already touched it.
OPTIND=1

# Test variables
VERBOSE=0  # set by -v; triggers the parameter dump before running
DEFAULT_SEQUENCER=localhost:10000
SEQUENCER=$DEFAULT_SEQUENCER  # overridden by -s hostname:port
DEFAULT_CLIENT_COUNT=1
CLIENT_COUNT=$DEFAULT_CLIENT_COUNT  # overridden by -c
DEFAULT_SERVER_COUNT=5
SERVER_COUNT=$DEFAULT_SERVER_COUNT  # overridden by -i
# Prints usage information for this wrapper script.
show_help() {
  echo "Usage: $0 [-h] [-v] [-s hostname:port] [-c client_cnt] [-i server_cnt]"
  echo "  Run the stochastic test RPC pattern"
  echo
  echo "  -h  Display the usage help (this)"
  echo "  -s hostname:port  Connect to the test sequencer located at hostname:port"
  echo "      default: $DEFAULT_SEQUENCER"
  echo "  -c client_cnt  Indicate the number of client nodes"
  echo "      default: $DEFAULT_CLIENT_COUNT"
  echo "  -i server_cnt  Indicate the number of server nodes"
  echo "      default: $DEFAULT_SERVER_COUNT"
  echo "  Note: you will need server_cnt+client_cnt node managers"
  echo
}
# Parse command-line flags (see show_help above for meanings).
while getopts "h?vs:c:i:" opt; do
  case "$opt" in
    h|\?)
      show_help
      exit 0
      ;;
    v) VERBOSE=1
      ;;
    s) SEQUENCER=$OPTARG
      ;;
    c) CLIENT_COUNT=$OPTARG
      ;;
    i) SERVER_COUNT=$OPTARG
      ;;
  esac
done

shift $((OPTIND-1))
# Drop the conventional "--" end-of-options marker if present.
[ "${1:-}" = "--" ] && shift

if [[ "${VERBOSE}" = "1" ]]; then
  echo Running the stochastic test RPC pattern
  echo "  VERBOSE=$VERBOSE"
  echo "  SEQUENCER=$SEQUENCER"
  echo "  CLIENT_COUNT=$CLIENT_COUNT"
  echo "  SERVER_COUNT=$SERVER_COUNT"
  echo
fi

# Feed the test definition to distbench over stdin. The stochastic fanout
# filter sends 1 RPC with probability 0.7 and 4 with probability 0.3.
../bazel-bin/distbench run_tests --test_sequencer=$SEQUENCER \
<<EOF
tests {
  services {
    name: "client"
    count: $CLIENT_COUNT
  }
  services {
    name: "server"
    count: $SERVER_COUNT
  }
  rpc_descriptions {
    name: "client_server_rpc"
    client: "client"
    server: "server"
    request_payload_name: "request_payload"
    response_payload_name: "response_payload"
    fanout_filter: "stochastic{0.7:1,0.3:4}"
  }
  payload_descriptions {
    name: "request_payload"
    size: 196
  }
  payload_descriptions {
    name: "response_payload"
    size: 262144
  }
  action_lists {
    name: "client"
    action_names: "run_queries"
  }
  actions {
    name: "run_queries"
    rpc_name: "client_server_rpc"
    iterations {
      max_iteration_count: 100
    }
  }
  action_lists {
    name: "client_server_rpc"
    # No action on the server; just send the response
  }
}
EOF
|
#!/bin/bash
# ========== Experiment Seq. Idx. 694 / 34.3.5 / N. 53/2/4 - _S=34.3.5 D1_N=53 a=-1 b=1 c=-1 d=1 e=-1 f=1 D3_N=2 g=-1 h=1 i=-1 D4_N=4 j=4 ==========
set -u

# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 694 / 34.3.5 / N. 53/2/4 - _S=34.3.5 D1_N=53 a=-1 b=1 c=-1 d=1 e=-1 f=1 D3_N=2 g=-1 h=1 i=-1 D4_N=4 j=4 ==========\n\n'

# Generated-script sanity check: this treatment must include an SVM layer.
if [[ "Yes" == "No" ]]; then
    echo 'FATAL: This treatment did not include an SVM layer.'>&2
    echo '       Something very wrong happened!'>&2
    exit 161
fi

# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/edra-clinical-seg.305.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.53"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.53.layer.2.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.53.layer.2.test.4.index.1847.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.53.layer.2.test.4.index.1847.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No

# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi

# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi

# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # BUG FIX: `IFS=':' arr=( $var )` permanently changed IFS for the rest of
    # the script (an assignment prefixing another assignment persists).
    # `read -a` scopes IFS to the read command only.
    IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
    input_missing=No
    for input_to_check in ${tokens_of_input[*]}; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi

# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# EXIT trap: releases the lock, classifies the outcome, and propagates the
# status code recorded in FINISH_STATUS (160 = success, 165 = lock contention,
# anything else = failure, which also removes the finish marker).
function finish_trap {
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    elif [[ "$FINISH_STATUS" == "160" ]]; then
        echo 'INFO: experiment finished successfully' >&2
    else
        [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
        echo 'ERROR: an error occurred while executing the experiment' >&2
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT

# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex).
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi

# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi

#...gets closest checkpoint file
# Picks the checkpoint whose step number is nearest to 15000 (awk tracks the
# minimal absolute distance d to target t across the sorted step numbers).
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
    sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
    sort -n | \
    awk -v c=1 -v t=15000 \
    'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
# NOTE(review): this write is not guarded by the START_PATH != "-" check above.
echo "$MODEL_PATH" >> "$START_PATH"

if [[ ! -f "$SEMIFINISH_PATH" ]]; then
    #...performs preliminary feature extraction
    echo Extracting SVM test features with "$MODEL_PATH"
    python \
        "$SOURCES_GIT_DIR/predict_image_classifier.py" \
        --model_name="resnet_v2_101_seg" \
        --checkpoint_path="$MODEL_PATH" \
        --dataset_name=skin_lesions \
        --task_name=label \
        --dataset_split_name=test \
        --preprocessing_name=dermatologic \
        --aggressive_augmentation="True" \
        --add_rotations="True" \
        --minimum_area_to_crop="0.20" \
        --normalize_per_image="0" \
        --batch_size=1 \
        --id_field_name=id \
        --pool_features=avg \
        --extract_features \
        --output_format=pickle \
        --add_scores_to_features=none \
        --eval_replicas="1" \
        --output_file="$TEST_FEATURES_PATH" \
        --dataset_dir="$DATASET_DIR"
    # Tip: leave last the arguments that make the command fail if they're absent,
    # so if there's a typo or forgotten \ the entire thing fails
    EXPERIMENT_STATUS="$?"
    if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
        exit
    fi
    # Mark the expensive extraction as done so restarts skip straight to the SVM.
    date -u >> "$SEMIFINISH_PATH"
    echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
    echo Reloading features from "$TEST_FEATURES_PATH"
fi

#...performs prediction with SVM model
python \
    "$SOURCES_GIT_DIR/predict_svm_layer.py" \
    --output_file "$RESULTS_PATH" \
    --input_test "$TEST_FEATURES_PATH" \
    --input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
# Success path: only write the finish marker when the expected output exists.
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
package com.nepxion.discovery.common.util;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author <NAME>
* @version 1.0
*/
import org.apache.commons.lang3.StringUtils;
import org.springframework.web.client.ResponseErrorHandler;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.core.type.TypeReference;
import com.nepxion.discovery.common.handler.RestErrorHandler;
public class RestUtil {
public static <T> T fromJson(RestTemplate restTemplate, String result, TypeReference<T> typeReference) {
try {
return JsonUtil.fromJson(result, typeReference);
} catch (Exception e) {
String cause = getCause(restTemplate);
if (StringUtils.isNotEmpty(cause)) {
throw new IllegalArgumentException(cause);
}
throw e;
}
}
public static String getCause(RestTemplate restTemplate) {
ResponseErrorHandler responseErrorHandler = restTemplate.getErrorHandler();
if (responseErrorHandler instanceof RestErrorHandler) {
RestErrorHandler errorHandler = (RestErrorHandler) responseErrorHandler;
return errorHandler.getCause();
}
return null;
}
} |
#!/bin/bash
# Flashes a zipped disk image (argument $1) onto the single attached USB mass
# storage device, then drops wpa_supplicant.conf and an 'ssh' marker onto the
# boot partition (Raspberry Pi headless setup).
RETURN_OK=0
TOO_MANY_USB_DRIVE=1
NO_USB_DRIVE=2
#Colors setting
RED='\033[0;31m'
GREEN='\33[0;32m'
NC='\033[0m' # No Color
#set -x
# NOTE(review): the password is later passed via `echo $sudoPW | sudo -S`,
# which can expose it in process listings — consider `sudo -v` up front instead.
read -s -p "Enter Password for sudo: " sudoPW
#Check how many USB Mass Storage are connected
printf "Step 1: check no many than one USB Mass Storage is connected --> "
# /sys/block entries whose symlink target mentions 'usb' are USB-backed disks.
NUMBER_OF_DEVICES=$(file /sys/block/sd* | grep usb | sed 's|.*/||' | wc -l)
if [ "$NUMBER_OF_DEVICES" -gt 1 ]; then
    printf "${RED}Error: Too many USB drive. Detected $NUMBER_OF_DEVICES.${NC}\n"; exit $TOO_MANY_USB_DRIVE
else
    printf "${GREEN}Passed.${NC}\n"
fi
#Check at least one USB Mass Storage is connected
printf "Step 2: check at least one USB Mass Storage is connected --> "
if [ "$NUMBER_OF_DEVICES" -gt 0 ]; then
    printf "${GREEN}Passed.${NC}\n"
else
    printf "${RED}Error: NO USB drive detected.${NC}\n"; exit $NO_USB_DRIVE
fi
MASS_STORAGE_DRIVE_NAME=$(file /sys/block/sd* | grep usb | sed 's|.*/||')
echo $MASS_STORAGE_DRIVE_NAME
# Unmount any auto-mounted partitions before writing the raw image.
echo $sudoPW | sudo -S umount /dev/$MASS_STORAGE_DRIVE_NAME*
# NOTE(review): '-e' is not a documented Info-ZIP unzip option — verify the
# intended flag (plain `unzip "$1" -d /tmp/` likely suffices).
unzip -e $1 -d /tmp/
# pv shows progress while dd writes the image to the raw device. DESTRUCTIVE.
echo $sudoPW | pv -tpreb /tmp/$(basename $1 .zip).img | sudo -S dd of=/dev/$MASS_STORAGE_DRIVE_NAME bs=4M conv=notrunc,noerror
rm /tmp/$(basename $1 .zip).img
mkdir /tmp/boot_sd
# Mount partition 1 (the boot partition) and drop in Wi-Fi config + ssh marker.
echo $sudoPW | sudo -S mount -o rw /dev/$(echo $MASS_STORAGE_DRIVE_NAME)$(echo '1') /tmp/boot_sd
echo $sudoPW | sudo -S cp /media/sf_shared_folder/RPI/scripts/wpa_supplicant.conf /tmp/boot_sd
echo $sudoPW | sudo -S touch /tmp/boot_sd/ssh
echo $sudoPW | sudo -S umount /tmp/boot_sd
rm -rf /tmp/boot_sd
|
<filename>src/model/my-module.rt.d.ts
/* GENERATED FILE - DO NOT EDIT */
// Ambient type declarations emitted by deploy-my-schema.js. Via declaration
// merging they augment './my-module' with the workspace entity model:
//   - a user-facing interface per entity (plain typed properties),
//   - a merged metadata interface exposing the library's reactive accessors
//     (`<prop>_val` for primitive values, `<prop>_ref` for references), and
//   - a namespace per entity holding its class-spec id and Property specs.
// Regenerate with deploy-my-schema.js rather than editing by hand.
import { Stage, Entity, ClassSpec, Property, PrimitiveValue, ReferenceValue, InversedSet } from '@frusal/library-for-browser';
declare module './my-module' {
    /** Created and maintained by deploy-my-schema.js */
    interface NamedEntity extends Entity {
        /** The entity name. (e.g., Product Name, Order Number, etc) */
        name: string;
    }
    // NamedEntity instance metadata
    interface NamedEntity extends Entity {
        readonly name_val: PrimitiveValue<string>;
    }
    // NamedEntity class metadata
    namespace NamedEntity {
        /** Named Entity class spec ID (b0o8l2). */
        const id: string;
        function classSpec(stage: Stage): ClassSpec;
        const name_prop: Property;
    }
    /** Created and maintained by deploy-my-schema.js */
    interface Product extends NamedEntity {
        /** The entity name. (e.g., Product Name, Order Number, etc) */
        name: string;
        /** The price per unit. */
        price: number;
    }
    // Product instance metadata
    interface Product extends NamedEntity {
        readonly name_val: PrimitiveValue<string>;
        readonly price_val: PrimitiveValue<number>;
    }
    // Product class metadata
    namespace Product {
        /** Product class spec ID (b0o8l3). */
        const id: string;
        function classSpec(stage: Stage): ClassSpec;
        const name_prop: Property;
        const price_prop: Property;
    }
    /** Created and maintained by deploy-my-schema.js */
    interface Order extends NamedEntity {
        /** The entity name. (e.g., Product Name, Order Number, etc) */
        name: string;
        /** Collection of order lines, which links the products and quantities to this order. */
        orderLines: InversedSet<OrderLine>;
        /** Delivery address for the order. */
        deliveryAddress: string;
    }
    // Order instance metadata
    // (orderLines is an InversedSet, maintained from OrderLine.order, so it
    // has no _val/_ref accessor here)
    interface Order extends NamedEntity {
        readonly name_val: PrimitiveValue<string>;
        readonly deliveryAddress_val: PrimitiveValue<string>;
    }
    // Order class metadata
    namespace Order {
        /** Order class spec ID (b0o8l4). */
        const id: string;
        function classSpec(stage: Stage): ClassSpec;
        const name_prop: Property;
        const orderLines_prop: Property;
        const deliveryAddress_prop: Property;
    }
    /** Created and maintained by deploy-my-schema.js */
    interface OrderLine extends NamedEntity {
        /** The entity name. (e.g., Product Name, Order Number, etc) */
        name: string;
        /** Order this line belongs to. */
        order: Order;
        /** Product this line orders with quantity. */
        product: Product;
        /** Quantity of product to be delivered. */
        quantity: number;
    }
    // OrderLine instance metadata
    interface OrderLine extends NamedEntity {
        readonly name_val: PrimitiveValue<string>;
        readonly order_ref: ReferenceValue<Order>;
        readonly product_ref: ReferenceValue<Product>;
        readonly quantity_val: PrimitiveValue<number>;
    }
    // OrderLine class metadata
    namespace OrderLine {
        /** Order Line class spec ID (b0o8l5). */
        const id: string;
        function classSpec(stage: Stage): ClassSpec;
        const name_prop: Property;
        const order_prop: Property;
        const product_prop: Property;
        const quantity_prop: Property;
    }
}
|
def _add_decoration(self, cursor):
    """Decorate the word under ``cursor`` as a clickable link.

    When a word can be selected at the cursor position, it is drawn in
    blue with an underline and the mouse cursor becomes a pointing hand.
    Otherwise the mouse cursor reverts to the regular I-beam.
    """
    if not self.select_word(cursor):
        # nothing selectable under the cursor: restore the normal text cursor
        self.editor.set_mouse_cursor(Qt.IBeamCursor)
        return
    self._deco.set_foreground(Qt.blue)
    self._deco.set_as_underlined()
    self.editor.decorations.append(self._deco)
    self.editor.set_mouse_cursor(Qt.PointingHandCursor)
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.painless.action.PainlessContextClassBindingInfo;
import org.elasticsearch.painless.action.PainlessContextClassInfo;
import org.elasticsearch.painless.action.PainlessContextConstructorInfo;
import org.elasticsearch.painless.action.PainlessContextFieldInfo;
import org.elasticsearch.painless.action.PainlessContextInfo;
import org.elasticsearch.painless.action.PainlessContextInstanceBindingInfo;
import org.elasticsearch.painless.action.PainlessContextMethodInfo;
import java.io.IOException;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * The gradle task generateContextDoc uses ContextDocGenerator to rebuild
 * the Painless API documentation from a clean state after the
 * existing documentation is deleted. The following pages are generated:
 * <ul>
 * <li>An index page with each context and links to the APIs</li>
 * <li>A high-level overview page of shared API for all contexts</li>
 * <li>A detailed page per package per context of shared API for all contexts</li>
 * <li>A high-level overview page of specialized API for each context</li>
 * <li>A detailed page per package per context of specialized API for each context</li>
 * </ul>
 * Use the docs build to generate HTML pages from the resultant asciidoc files.
 */
public final class ContextDocGenerator {

    private static final String SHARED_HEADER = "painless-api-reference-shared";
    private static final String SHARED_NAME = "Shared";

    /**
     * Entry point. Fetches the context metadata from a running cluster, splits it
     * into API shared by every context versus context-specific API, and writes the
     * asciidoc index and per-package pages under a freshly reset docs directory.
     */
    public static void main(String[] args) throws IOException {
        List<PainlessContextInfo> contextInfos = getContextInfos();
        Set<Object> sharedStaticInfos = createSharedStatics(contextInfos);
        Set<PainlessContextClassInfo> sharedClassInfos = createSharedClasses(contextInfos);

        Path rootDir = resetRootDir();
        Path sharedDir = createSharedDir(rootDir);
        List<Object> staticInfos = sortStaticInfos(Collections.emptySet(), new ArrayList<>(sharedStaticInfos));
        List<PainlessContextClassInfo> classInfos = sortClassInfos(Collections.emptySet(), new ArrayList<>(sharedClassInfos));
        Map<String, String> javaNamesToDisplayNames = getDisplayNames(classInfos);
        printSharedIndexPage(sharedDir, javaNamesToDisplayNames, staticInfos, classInfos);
        printSharedPackagesPages(sharedDir, javaNamesToDisplayNames, classInfos);

        Set<PainlessContextInfo> isSpecialized = new HashSet<>();

        for (PainlessContextInfo contextInfo : contextInfos) {
            staticInfos = createContextStatics(contextInfo);
            staticInfos = sortStaticInfos(sharedStaticInfos, staticInfos);
            classInfos = sortClassInfos(sharedClassInfos, new ArrayList<>(contextInfo.getClasses()));

            // only contexts that add API beyond the shared set get their own pages
            if (staticInfos.isEmpty() == false || classInfos.isEmpty() == false) {
                Path contextDir = createContextDir(rootDir, contextInfo);
                isSpecialized.add(contextInfo);
                javaNamesToDisplayNames = getDisplayNames(contextInfo.getClasses());
                printContextIndexPage(contextDir, javaNamesToDisplayNames, contextInfo, staticInfos, classInfos);
                printContextPackagesPages(contextDir, javaNamesToDisplayNames, sharedClassInfos, contextInfo, classInfos);
            }
        }

        printRootIndexPage(rootDir, contextInfos, isSpecialized);
    }

    /**
     * Retrieves every Painless context's metadata from the cluster named by the
     * {@code cluster.uri} system property, sorted by context name.
     */
    @SuppressForbidden(reason = "retrieving data from an internal API not exposed as part of the REST client")
    private static List<PainlessContextInfo> getContextInfos() throws IOException {
        URLConnection getContextNames = new URL(
                "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context").openConnection();

        List<String> contextNames;
        // fix: use try-with-resources; the original closed this parser manually
        // and leaked it when parsing threw
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(null, null, getContextNames.getInputStream())) {
            parser.nextToken();
            parser.nextToken();
            @SuppressWarnings("unchecked")
            List<String> names = (List<String>)(Object)parser.list();
            contextNames = names;
        }
        ((HttpURLConnection)getContextNames).disconnect();

        List<PainlessContextInfo> contextInfos = new ArrayList<>();

        for (String contextName : contextNames) {
            URLConnection getContextInfo = new URL(
                    "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context?context=" + contextName).openConnection();
            // fix: the original reassigned the same parser variable on every
            // iteration without ever closing it (resource leak)
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(null, null, getContextInfo.getInputStream())) {
                contextInfos.add(PainlessContextInfo.fromXContent(parser));
            }
            ((HttpURLConnection)getContextInfo).disconnect();
        }

        contextInfos.sort(Comparator.comparing(PainlessContextInfo::getName));

        return contextInfos;
    }

    /**
     * Returns the static API entries (imported methods, class bindings, instance
     * bindings) that appear in every context.
     */
    private static Set<Object> createSharedStatics(List<PainlessContextInfo> contextInfos) {
        Map<Object, Integer> staticInfoCounts = new HashMap<>();

        for (PainlessContextInfo contextInfo : contextInfos) {
            for (PainlessContextMethodInfo methodInfo : contextInfo.getImportedMethods()) {
                staticInfoCounts.merge(methodInfo, 1, Integer::sum);
            }

            for (PainlessContextClassBindingInfo classBindingInfo : contextInfo.getClassBindings()) {
                staticInfoCounts.merge(classBindingInfo, 1, Integer::sum);
            }

            for (PainlessContextInstanceBindingInfo instanceBindingInfo : contextInfo.getInstanceBindings()) {
                staticInfoCounts.merge(instanceBindingInfo, 1, Integer::sum);
            }
        }

        // an entry is "shared" when its occurrence count equals the number of contexts
        return staticInfoCounts.entrySet().stream().filter(
                e -> e.getValue() == contextInfos.size()
        ).map(Map.Entry::getKey).collect(Collectors.toSet());
    }

    /** Collects every static API entry (of all three kinds) for one context. */
    private static List<Object> createContextStatics(PainlessContextInfo contextInfo) {
        List<Object> staticInfos = new ArrayList<>();
        staticInfos.addAll(contextInfo.getImportedMethods());
        staticInfos.addAll(contextInfo.getClassBindings());
        staticInfos.addAll(contextInfo.getInstanceBindings());

        return staticInfos;
    }

    /** Returns the classes that appear in every context (the shared class API). */
    private static Set<PainlessContextClassInfo> createSharedClasses(List<PainlessContextInfo> contextInfos) {
        Map<PainlessContextClassInfo, Integer> classInfoCounts = new HashMap<>();

        for (PainlessContextInfo contextInfo : contextInfos) {
            for (PainlessContextClassInfo classInfo : contextInfo.getClasses()) {
                classInfoCounts.merge(classInfo, 1, Integer::sum);
            }
        }

        return classInfoCounts.entrySet().stream().filter(
                e -> e.getValue() == contextInfos.size()
        ).map(Map.Entry::getKey).collect(Collectors.toSet());
    }

    /** Deletes and recreates the API reference docs directory, returning its path. */
    @SuppressForbidden(reason = "resolve api docs directory with environment")
    private static Path resetRootDir() throws IOException {
        Path rootDir = PathUtils.get("../../docs/painless/painless-api-reference");
        IOUtils.rm(rootDir);
        Files.createDirectories(rootDir);

        return rootDir;
    }

    /** Creates the directory for the shared (all-contexts) API pages. */
    private static Path createSharedDir(Path rootDir) throws IOException {
        Path sharedDir = rootDir.resolve(SHARED_HEADER);
        Files.createDirectories(sharedDir);

        return sharedDir;
    }

    /** Creates the per-context directory for a specialized context's pages. */
    private static Path createContextDir(Path rootDir, PainlessContextInfo info) throws IOException {
        Path contextDir = rootDir.resolve(getContextHeader(info));
        Files.createDirectories(contextDir);

        return contextDir;
    }

    /** Writes the standard "auto-generated, do not edit" banner to a page. */
    private static void printAutomatedMessage(PrintStream stream) {
        stream.println("// This file is auto-generated. Do not edit.");
        stream.println();
    }

    /** Writes the index page for the API shared across all contexts. */
    private static void printSharedIndexPage(Path sharedDir, Map<String, String> javaNamesToDisplayNames,
            List<Object> staticInfos, List<PainlessContextClassInfo> classInfos) throws IOException {

        Path sharedIndexPath = sharedDir.resolve("index.asciidoc");

        try (PrintStream sharedIndexStream = new PrintStream(
                Files.newOutputStream(sharedIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {

            printAutomatedMessage(sharedIndexStream);

            sharedIndexStream.println("[[" + SHARED_HEADER + "]]");
            sharedIndexStream.println("=== " + SHARED_NAME + " API");
            sharedIndexStream.println();
            sharedIndexStream.println("The following API is available in all contexts.");

            printIndex(sharedIndexStream, SHARED_HEADER, javaNamesToDisplayNames, staticInfos, classInfos);
        }
    }

    /** Writes the index page for one specialized context's API. */
    private static void printContextIndexPage(Path contextDir, Map<String, String> javaNamesToDisplayNames,
            PainlessContextInfo contextInfo, List<Object> staticInfos, List<PainlessContextClassInfo> classInfos) throws IOException {

        Path contextIndexPath = contextDir.resolve("index.asciidoc");

        try (PrintStream contextIndexStream = new PrintStream(
                Files.newOutputStream(contextIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {

            printAutomatedMessage(contextIndexStream);

            contextIndexStream.println("[[" + getContextHeader(contextInfo) + "]]");
            contextIndexStream.println("=== " + getContextName(contextInfo) + " API");
            contextIndexStream.println();
            contextIndexStream.println("The following specialized API is available in the " + getContextName(contextInfo) + " context.");
            contextIndexStream.println();
            contextIndexStream.println(
                    "* See the <<" + SHARED_HEADER + ", " + SHARED_NAME + " API>> for further API available in all contexts.");

            printIndex(contextIndexStream, getContextHeader(contextInfo), javaNamesToDisplayNames, staticInfos, classInfos);
        }
    }

    /**
     * Writes the body of an index page: the static methods section followed by the
     * classes grouped by package, each linking to the detailed package pages.
     */
    private static void printIndex(PrintStream indexStream, String contextHeader, Map<String, String> javaNamesToDisplayNames,
            List<Object> staticInfos, List<PainlessContextClassInfo> classInfos) {

        String currentPackageName = null;

        if (staticInfos.isEmpty() == false) {
            indexStream.println();
            indexStream.println("==== Static Methods");
            indexStream.println("The following methods are directly callable without a class/instance qualifier. " +
                    "Note parameters denoted by a (*) are treated as read-only values.");
            indexStream.println();

            for (Object staticInfo : staticInfos) {
                if (staticInfo instanceof PainlessContextMethodInfo) {
                    printMethod(indexStream, javaNamesToDisplayNames, false, (PainlessContextMethodInfo)staticInfo);
                } else if (staticInfo instanceof PainlessContextClassBindingInfo) {
                    printClassBinding(indexStream, javaNamesToDisplayNames, (PainlessContextClassBindingInfo)staticInfo);
                } else if (staticInfo instanceof PainlessContextInstanceBindingInfo) {
                    printInstanceBinding(indexStream, javaNamesToDisplayNames, (PainlessContextInstanceBindingInfo)staticInfo);
                } else {
                    throw new IllegalArgumentException("unexpected static info type");
                }
            }
        }

        if (classInfos.isEmpty() == false) {
            indexStream.println();
            indexStream.println("==== Classes By Package");
            indexStream.println("The following classes are available grouped by their respective packages. Click on a class " +
                    "to view details about the available methods and fields.");
            indexStream.println();

            for (PainlessContextClassInfo classInfo : classInfos) {
                String classPackageName = classInfo.getName().substring(0, classInfo.getName().lastIndexOf('.'));

                // classInfos are sorted by package, so emit a heading whenever it changes
                if (classPackageName.equals(currentPackageName) == false) {
                    currentPackageName = classPackageName;

                    indexStream.println();
                    indexStream.println("==== " + currentPackageName);
                    indexStream.println("<<" + getPackageHeader(contextHeader, currentPackageName) + ", " +
                            "Expand details for " + currentPackageName + ">>");
                    indexStream.println();
                }

                String className = getType(javaNamesToDisplayNames, classInfo.getName());
                indexStream.println("* <<" + getClassHeader(contextHeader, className) + ", " + className + ">>");
            }
        }

        indexStream.println();
        indexStream.println("include::packages.asciidoc[]");
        indexStream.println();
    }

    /** Writes the detailed per-package pages for the shared API. */
    private static void printSharedPackagesPages(
            Path sharedDir, Map<String, String> javaNamesToDisplayNames, List<PainlessContextClassInfo> classInfos) throws IOException {

        Path sharedClassesPath = sharedDir.resolve("packages.asciidoc");

        try (PrintStream sharedPackagesStream = new PrintStream(
                Files.newOutputStream(sharedClassesPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {

            printAutomatedMessage(sharedPackagesStream);
            printPackages(sharedPackagesStream, SHARED_NAME, SHARED_HEADER, javaNamesToDisplayNames, Collections.emptySet(), classInfos);
        }
    }

    /** Writes the detailed per-package pages for one context, excluding shared classes. */
    private static void printContextPackagesPages(Path contextDir, Map<String, String> javaNamesToDisplayNames,
            Set<PainlessContextClassInfo> excludes, PainlessContextInfo contextInfo, List<PainlessContextClassInfo> classInfos)
            throws IOException {

        Path contextPackagesPath = contextDir.resolve("packages.asciidoc");

        try (PrintStream contextPackagesStream = new PrintStream(
                Files.newOutputStream(contextPackagesPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {

            printAutomatedMessage(contextPackagesStream);
            printPackages(contextPackagesStream,
                    getContextName(contextInfo), getContextHeader(contextInfo), javaNamesToDisplayNames, excludes, classInfos);
        }
    }

    /**
     * Writes the package pages body: for each non-excluded class, its static
     * fields/methods, instance fields, constructors, and methods, grouped under
     * per-package headings.
     */
    private static void printPackages(PrintStream packagesStream, String contextName, String contextHeader,
            Map<String, String> javaNamesToDisplayNames, Set<PainlessContextClassInfo> excludes, List<PainlessContextClassInfo> classInfos)
    {

        String currentPackageName = null;

        for (PainlessContextClassInfo classInfo : classInfos) {
            if (excludes.contains(classInfo)) {
                continue;
            }

            String classPackageName = classInfo.getName().substring(0, classInfo.getName().lastIndexOf('.'));

            if (classPackageName.equals(currentPackageName) == false) {
                currentPackageName = classPackageName;

                packagesStream.println();
                packagesStream.println("[role=\"exclude\",id=\"" + getPackageHeader(contextHeader, currentPackageName) + "\"]");
                packagesStream.println("=== " + contextName + " API for package " + currentPackageName);
                packagesStream.println("See the <<" + contextHeader + ", " + contextName + " API>> " +
                        "for a high-level overview of all packages and classes.");
            }

            String className = getType(javaNamesToDisplayNames, classInfo.getName());
            packagesStream.println();
            packagesStream.println("[[" + getClassHeader(contextHeader, className) + "]]");
            packagesStream.println("==== " + className);

            for (PainlessContextFieldInfo fieldInfo : classInfo.getStaticFields()) {
                printField(packagesStream, javaNamesToDisplayNames, true, fieldInfo);
            }

            for (PainlessContextMethodInfo methodInfo : classInfo.getStaticMethods()) {
                printMethod(packagesStream, javaNamesToDisplayNames, true, methodInfo);
            }

            for (PainlessContextFieldInfo fieldInfo : classInfo.getFields()) {
                printField(packagesStream, javaNamesToDisplayNames, false, fieldInfo);
            }

            for (PainlessContextConstructorInfo constructorInfo : classInfo.getConstructors()) {
                printConstructor(packagesStream, javaNamesToDisplayNames, className, constructorInfo);
            }

            for (PainlessContextMethodInfo methodInfo : classInfo.getMethods()) {
                printMethod(packagesStream, javaNamesToDisplayNames, false, methodInfo);
            }

            packagesStream.println();
        }

        packagesStream.println();
    }

    /** Writes the root index: a contexts table plus includes for every generated page. */
    private static void printRootIndexPage(Path rootDir,
            List<PainlessContextInfo> contextInfos, Set<PainlessContextInfo> isSpecialized) throws IOException {

        Path rootIndexPath = rootDir.resolve("index.asciidoc");

        try (PrintStream rootIndexStream = new PrintStream(
                Files.newOutputStream(rootIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {

            printAutomatedMessage(rootIndexStream);

            rootIndexStream.println("[cols=\"<3,^3,^3\"]");
            rootIndexStream.println("|====");

            for (PainlessContextInfo contextInfo : contextInfos) {
                String contextName = getContextName(contextInfo);
                String contextHeader = getContextHeader(contextInfo);

                rootIndexStream.print("|" + contextName + " ");
                rootIndexStream.print("| <<" + SHARED_HEADER + ", " + SHARED_NAME + " API>> ");

                if (isSpecialized.contains(contextInfo)) {
                    rootIndexStream.println("| <<" + contextHeader + ", Specialized API>>");
                } else {
                    rootIndexStream.println("| ");
                }
            }

            rootIndexStream.println("|====");
            rootIndexStream.println();

            rootIndexStream.println("include::" + SHARED_HEADER + "/index.asciidoc[]");

            for (PainlessContextInfo contextInfo : contextInfos) {
                if (isSpecialized.contains(contextInfo)) {
                    rootIndexStream.println("include::" + getContextHeader(contextInfo) + "/index.asciidoc[]");
                }
            }
        }
    }

    /** Writes one constructor bullet, linking into the JDK javadoc for java.* classes. */
    private static void printConstructor(
            PrintStream stream, Map<String, String> javaNamesToDisplayNames,
            String className, PainlessContextConstructorInfo constructorInfo) {

        stream.print("* ");

        if (constructorInfo.getDeclaring().startsWith("java.")) {
            stream.print(getConstructorJavaDocLink(constructorInfo) + "[" + className + "]");
        } else {
            stream.print(className);
        }

        stream.print("(");

        for (int parameterIndex = 0;
             parameterIndex < constructorInfo.getParameters().size();
             ++parameterIndex) {

            stream.print(getType(javaNamesToDisplayNames, constructorInfo.getParameters().get(parameterIndex)));

            if (parameterIndex + 1 < constructorInfo.getParameters().size()) {
                stream.print(", ");
            }
        }

        stream.println(")");
    }

    /** Writes one method bullet, linking into the JDK javadoc for java.* declarers. */
    private static void printMethod(
            PrintStream stream, Map<String, String> javaNamesToDisplayNames,
            boolean isStatic, PainlessContextMethodInfo methodInfo) {

        stream.print("* " + (isStatic ? "static " : ""));
        stream.print(getType(javaNamesToDisplayNames, methodInfo.getRtn()) + " ");

        if (methodInfo.getDeclaring().startsWith("java.")) {
            stream.print(getMethodJavaDocLink(methodInfo) + "[" + methodInfo.getName() + "]");
        } else {
            stream.print(methodInfo.getName());
        }

        stream.print("(");

        for (int parameterIndex = 0;
             parameterIndex < methodInfo.getParameters().size();
             ++parameterIndex) {

            stream.print(getType(javaNamesToDisplayNames, methodInfo.getParameters().get(parameterIndex)));

            if (parameterIndex + 1 < methodInfo.getParameters().size()) {
                stream.print(", ");
            }
        }

        stream.println(")");
    }

    /** Writes one class-binding bullet; read-only parameters are marked with (*). */
    private static void printClassBinding(
            PrintStream stream, Map<String, String> javaNamesToDisplayNames, PainlessContextClassBindingInfo classBindingInfo) {

        stream.print("* " + getType(javaNamesToDisplayNames, classBindingInfo.getRtn()) + " " + classBindingInfo.getName() + "(");

        for (int parameterIndex = 0; parameterIndex < classBindingInfo.getParameters().size(); ++parameterIndex) {
            // temporary fix to not print org.elasticsearch.script.ScoreScript parameter until
            // class instance bindings are created and the information is appropriately added to the context info classes
            if ("org.elasticsearch.script.ScoreScript".equals(
                    getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex)))) {
                continue;
            }

            stream.print(getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex)));

            if (parameterIndex < classBindingInfo.getReadOnly()) {
                stream.print(" *");
            }

            if (parameterIndex + 1 < classBindingInfo.getParameters().size()) {
                stream.print(", ");
            }
        }

        stream.println(")");
    }

    /** Writes one instance-binding bullet. */
    private static void printInstanceBinding(
            PrintStream stream, Map<String, String> javaNamesToDisplayNames, PainlessContextInstanceBindingInfo instanceBindingInfo) {

        stream.print("* " + getType(javaNamesToDisplayNames, instanceBindingInfo.getRtn()) + " " + instanceBindingInfo.getName() + "(");

        for (int parameterIndex = 0; parameterIndex < instanceBindingInfo.getParameters().size(); ++parameterIndex) {
            stream.print(getType(javaNamesToDisplayNames, instanceBindingInfo.getParameters().get(parameterIndex)));

            if (parameterIndex + 1 < instanceBindingInfo.getParameters().size()) {
                stream.print(", ");
            }
        }

        stream.println(")");
    }

    /** Writes one field bullet, linking into the JDK javadoc for java.* declarers. */
    private static void printField(
            PrintStream stream, Map<String, String> javaNamesToDisplayNames,
            boolean isStatic, PainlessContextFieldInfo fieldInfo) {

        stream.print("* " + (isStatic ? "static " : ""));
        stream.print(getType(javaNamesToDisplayNames, fieldInfo.getType()) + " ");

        if (fieldInfo.getDeclaring().startsWith("java.")) {
            stream.println(getFieldJavaDocLink(fieldInfo) + "[" + fieldInfo.getName() + "]");
        } else {
            stream.println(fieldInfo.getName());
        }
    }

    /**
     * Converts a Java type name (possibly a JVM array descriptor such as
     * {@code [Ljava.lang.String;}) to its Painless display name, appending
     * {@code []} per array dimension.
     */
    private static String getType(Map<String, String> javaNamesToDisplayNames, String javaType) {
        int arrayDimensions = 0;

        while (javaType.charAt(arrayDimensions) == '[') {
            ++arrayDimensions;
        }

        if (arrayDimensions > 0) {
            if (javaType.charAt(javaType.length() - 1) == ';') {
                javaType = javaType.substring(arrayDimensions + 1, javaType.length() - 1);
            } else {
                javaType = javaType.substring(arrayDimensions);
            }
        }

        String primitiveType = toPrimitiveName(javaType);

        if (primitiveType != null) {
            javaType = primitiveType;
        } else if ("org.elasticsearch.painless.lookup.def".equals(javaType)) {
            javaType = "def";
        } else {
            javaType = javaNamesToDisplayNames.get(javaType);
        }

        while (arrayDimensions-- > 0) {
            javaType += "[]";
        }

        return javaType;
    }

    /**
     * Maps a JVM descriptor letter or primitive type name to the canonical
     * primitive name, or returns null when the argument is not a primitive.
     * Shared by {@link #getType} and {@link #getLinkType}, which previously
     * duplicated this nine-branch mapping.
     */
    private static String toPrimitiveName(String javaType) {
        switch (javaType) {
            case "Z": case "boolean": return "boolean";
            case "V": case "void":    return "void";
            case "B": case "byte":    return "byte";
            case "S": case "short":   return "short";
            case "C": case "char":    return "char";
            case "I": case "int":     return "int";
            case "J": case "long":    return "long";
            case "F": case "float":   return "float";
            case "D": case "double":  return "double";
            default: return null;
        }
    }

    /** Builds the JDK javadoc URL fragment for a field. */
    private static String getFieldJavaDocLink(PainlessContextFieldInfo fieldInfo) {
        return "{java11-javadoc}/java.base/" + fieldInfo.getDeclaring().replace('.', '/') + ".html#" + fieldInfo.getName();
    }

    /** Builds the JDK javadoc URL fragment for a constructor, including its parameter list. */
    private static String getConstructorJavaDocLink(PainlessContextConstructorInfo constructorInfo) {
        StringBuilder javaDocLink = new StringBuilder();

        javaDocLink.append("{java11-javadoc}/java.base/");
        javaDocLink.append(constructorInfo.getDeclaring().replace('.', '/'));
        javaDocLink.append(".html#<init>(");

        for (int parameterIndex = 0;
             parameterIndex < constructorInfo.getParameters().size();
             ++parameterIndex) {

            javaDocLink.append(getLinkType(constructorInfo.getParameters().get(parameterIndex)));

            if (parameterIndex + 1 < constructorInfo.getParameters().size()) {
                javaDocLink.append(",");
            }
        }

        javaDocLink.append(")");

        return javaDocLink.toString();
    }

    /** Builds the JDK javadoc URL fragment for a method, including its parameter list. */
    private static String getMethodJavaDocLink(PainlessContextMethodInfo methodInfo) {
        StringBuilder javaDocLink = new StringBuilder();

        javaDocLink.append("{java11-javadoc}/java.base/");
        javaDocLink.append(methodInfo.getDeclaring().replace('.', '/'));
        javaDocLink.append(".html#");
        javaDocLink.append(methodInfo.getName());
        javaDocLink.append("(");

        for (int parameterIndex = 0;
             parameterIndex < methodInfo.getParameters().size();
             ++parameterIndex) {

            javaDocLink.append(getLinkType(methodInfo.getParameters().get(parameterIndex)));

            if (parameterIndex + 1 < methodInfo.getParameters().size()) {
                javaDocLink.append(",");
            }
        }

        javaDocLink.append(")");

        return javaDocLink.toString();
    }

    /**
     * Converts a Java type name to the form used inside javadoc link anchors:
     * primitives by name, {@code def} as {@code java.lang.Object}, and array
     * brackets URL-encoded as {@code %5B%5D}.
     */
    private static String getLinkType(String javaType) {
        int arrayDimensions = 0;

        while (javaType.charAt(arrayDimensions) == '[') {
            ++arrayDimensions;
        }

        if (arrayDimensions > 0) {
            if (javaType.charAt(javaType.length() - 1) == ';') {
                javaType = javaType.substring(arrayDimensions + 1, javaType.length() - 1);
            } else {
                javaType = javaType.substring(arrayDimensions);
            }
        }

        String primitiveType = toPrimitiveName(javaType);

        if (primitiveType != null) {
            javaType = primitiveType;
        } else if ("org.elasticsearch.painless.lookup.def".equals(javaType)) {
            javaType = "java.lang.Object";
        }

        while (arrayDimensions-- > 0) {
            javaType += "%5B%5D";
        }

        return javaType;
    }

    /** Derives the anchor/directory name for a context ("painless-api-reference-<name>"). */
    private static String getContextHeader(PainlessContextInfo contextInfo) {
        return "painless-api-reference-" + contextInfo.getName().replace(" ", "-").replace("_", "-");
    }

    /** Derives the anchor name for a package within a context. */
    private static String getPackageHeader(String contextHeader, String packageName) {
        return contextHeader + "-" + packageName.replace('.', '-');
    }

    /** Derives the anchor name for a class within a context. */
    private static String getClassHeader(String contextHeader, String className) {
        return contextHeader + "-" + className.replace('.', '-');
    }

    /** Converts a context id like "score_script" to a display name like "Score Script". */
    private static String getContextName(PainlessContextInfo contextInfo) {
        String[] split = contextInfo.getName().split("[_-]");
        StringBuilder contextNameBuilder = new StringBuilder();

        for (String part : split) {
            contextNameBuilder.append(Character.toUpperCase(part.charAt(0)));
            contextNameBuilder.append(part.substring(1));
            contextNameBuilder.append(' ');
        }

        return contextNameBuilder.substring(0, contextNameBuilder.length() - 1);
    }

    /**
     * Returns a copy of {@code staticInfos} with {@code staticExcludes} removed,
     * sorted by each info's sort value.
     */
    private static List<Object> sortStaticInfos(Set<Object> staticExcludes, List<Object> staticInfos) {
        staticInfos = new ArrayList<>(staticInfos);
        staticInfos.removeIf(staticExcludes::contains);

        staticInfos.sort((si1, si2) -> {
            String sv1;
            String sv2;

            if (si1 instanceof PainlessContextMethodInfo) {
                sv1 = ((PainlessContextMethodInfo)si1).getSortValue();
            } else if (si1 instanceof PainlessContextClassBindingInfo) {
                sv1 = ((PainlessContextClassBindingInfo)si1).getSortValue();
            } else if (si1 instanceof PainlessContextInstanceBindingInfo) {
                sv1 = ((PainlessContextInstanceBindingInfo)si1).getSortValue();
            } else {
                throw new IllegalArgumentException("unexpected static info type");
            }

            if (si2 instanceof PainlessContextMethodInfo) {
                sv2 = ((PainlessContextMethodInfo)si2).getSortValue();
            } else if (si2 instanceof PainlessContextClassBindingInfo) {
                sv2 = ((PainlessContextClassBindingInfo)si2).getSortValue();
            } else if (si2 instanceof PainlessContextInstanceBindingInfo) {
                sv2 = ((PainlessContextInstanceBindingInfo)si2).getSortValue();
            } else {
                throw new IllegalArgumentException("unexpected static info type");
            }

            return sv1.compareTo(sv2);
        });

        return staticInfos;
    }

    /**
     * Returns a copy of {@code classInfos} with primitives, def, internal classes
     * and {@code classExcludes} removed, sorted by package, then with imported
     * (short-named) classes before fully-qualified ones within a package.
     */
    private static List<PainlessContextClassInfo> sortClassInfos(
            Set<PainlessContextClassInfo> classExcludes, List<PainlessContextClassInfo> classInfos) {

        classInfos = new ArrayList<>(classInfos);
        classInfos.removeIf(v ->
                "void".equals(v.getName()) || "boolean".equals(v.getName()) || "byte".equals(v.getName()) ||
                "short".equals(v.getName()) || "char".equals(v.getName()) || "int".equals(v.getName()) ||
                "long".equals(v.getName()) || "float".equals(v.getName()) || "double".equals(v.getName()) ||
                "org.elasticsearch.painless.lookup.def".equals(v.getName()) ||
                isInternalClass(v.getName()) || classExcludes.contains(v)
        );

        classInfos.sort((c1, c2) -> {
            String n1 = c1.getName();
            String n2 = c2.getName();
            boolean i1 = c1.isImported();
            boolean i2 = c2.isImported();

            String p1 = n1.substring(0, n1.lastIndexOf('.'));
            String p2 = n2.substring(0, n2.lastIndexOf('.'));

            int compare = p1.compareTo(p2);

            if (compare == 0) {
                if (i1 && i2) {
                    compare = n1.substring(n1.lastIndexOf('.') + 1).compareTo(n2.substring(n2.lastIndexOf('.') + 1));
                } else if (i1 == false && i2 == false) {
                    compare = n1.compareTo(n2);
                } else {
                    // imported classes sort before non-imported ones
                    compare = Boolean.compare(i1, i2) * -1;
                }
            }

            return compare;
        });

        return classInfos;
    }

    /**
     * Maps each Java class name to its display name: the simple name for
     * imported classes, the fully-qualified name otherwise; nested-class
     * {@code $} separators become {@code .}.
     */
    private static Map<String, String> getDisplayNames(List<PainlessContextClassInfo> classInfos) {
        Map<String, String> javaNamesToDisplayNames = new HashMap<>();

        for (PainlessContextClassInfo classInfo : classInfos) {
            String className = classInfo.getName();

            if (classInfo.isImported()) {
                javaNamesToDisplayNames.put(className,
                        className.substring(className.lastIndexOf('.') + 1).replace('$', '.'));
            } else {
                javaNamesToDisplayNames.put(className, className.replace('$', '.'));
            }
        }

        return javaNamesToDisplayNames;
    }

    /** Classes deliberately hidden from the generated documentation. */
    private static boolean isInternalClass(String javaName) {
        return javaName.equals("org.elasticsearch.script.ScoreScript") ||
               javaName.equals("org.elasticsearch.script.ScoreScript$ExplanationHolder");
    }

    /** Utility class; not instantiable. */
    private ContextDocGenerator() {
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.