text
stringlengths 1
1.05M
|
|---|
#!/bin/sh -l
# Greet using the environment variable. The previous version expanded the
# variable in the outer shell and then re-evaluated the result inside a nested
# `sh -c`, which double-expands its contents (a shell-injection hazard) for no
# benefit. Echo it directly, quoted, instead — output is identical.
echo "Hello world my name is $That_1_Bull"
|
#!/usr/bin/env sh
# Run luacheck over the current directory, forwarding caller-supplied flags.
echo "[luacheck]"
echo
# Quote "$@" so forwarded arguments containing spaces survive word splitting
# (unquoted $@ re-splits every argument).
luacheck "$@" .
|
module.exports = {
defineTemplateBodyVisitor: require("./utils/defineTemplateBodyVisitor"),
getAttributeName: require("./utils/getAttributeName"),
getAttributeValue: require("./utils/getAttributeValue"),
getElementAttribute: require("./utils/getElementAttribute"),
getElementAttributeValue: require("./utils/getElementAttributeValue"),
getElementType: require("./utils/getElementType"),
getLiteralAttributeValue: require("./utils/getLiteralAttributeValue"),
hasAccessibleChild: require("./utils/hasAccessibleChild"),
hasAriaLabel: require("./utils/hasAriaLabel"),
hasContent: require("./utils/hasContent"),
hasOnDirective: require("./utils/hasOnDirective"),
hasOnDirectives: require("./utils/hasOnDirectives"),
isAttribute: require("./utils/isAttribute"),
isHiddenFromScreenReader: require("./utils/isHiddenFromScreenReader"),
isInteractiveElement: require("./utils/isInteractiveElement"),
isPresentationRole: require("./utils/isPresentationRole"),
makeDocsURL: require("./utils/makeDocsURL"),
makeKebabCase: require("./utils/makeKebabCase"),
matchesElementRole: require("./utils/matchesElementRole")
};
|
<filename>data/brand/colorData.js
import Image from 'next/image'
import BrandBadExample from '@/components/brand/BrandBadExample'
import ColourExample from '@/components/brand/ColourExample'
import { primaryColors, secondaryColors, spotColors } from '@/data/colors'
// "Don't" examples for the brand colour page. The shared fields (path prefix,
// alt text, id pattern, 318px width) live in the factory below; each record
// carries only what varies.
const badexamples = [
  {
    file: 'do-not.webp',
    height: 394,
    note: 'Avoid improper usage of the primary colours (especially green - as a background colour).'
  },
  {
    file: 'do-not-2.webp',
    height: 394,
    note: 'Do not create effects or new colour variations.'
  },
  {
    file: 'do-not-3.webp',
    height: 394,
    note: 'Do not use too many secondary colours in a component. Never use a secondary or spot colour as a background colour.'
  },
  {
    file: 'do-not-4.webp',
    height: 395,
    note: 'Do not use not enough contrast between text and background.'
  },
  {
    file: 'do-not-5.webp',
    height: 395,
    note: 'Avoid coloured headings or texts.'
  }
].map(({ file, height, note }, index) => ({
  src: `/images/brand/colors/${file}`,
  alt: 'omen css do not with colors',
  id: `greenCSS-do-not-${index + 1}`,
  width: 318,
  height,
  childTwo: note
}))
// "Do" examples for the brand colour page, built the same way as the
// bad-example records: shared fields in the factory, per-item data inline.
const goodexamples = [
  { file: 'ok.webp', width: 273, height: 323, note: 'Color in photography.' },
  { file: 'ok-2.webp', width: 273, height: 323, note: 'Color in illustrations / vector files.' },
  {
    file: 'ok-3.webp',
    width: 318,
    height: 395,
    note: 'Grey text (black-10) or the primary greencss/green in a header to highlight certain words or subjects.'
  }
].map(({ file, width, height, note }, index) => ({
  src: `/images/brand/colors/${file}`,
  alt: 'omen css design with colors',
  id: `greenCSS-do-${index + 1}`,
  width,
  height,
  childTwo: note
}))
// Section descriptors consumed by the brand "Colours" page. Each entry may
// carry a title/description and optional pre-rendered JSX `children`;
// `isLight` presumably toggles a light section background and `className`
// overrides the section wrapper — TODO confirm against the section renderer.
export const brandColorSections = [
  {
    title: 'Colours',
    description:
      'Colors determine charisma, emotion, feelings, and memories. In total, greenCSS offers a color palette of over 11 colors, whereby these are further broken down into 100 different lighter tones. The primary colours are - black, white and greencss/green - build on our existing intensity and represent the elegance of greenCSS.'
  },
  {
    isLight: true,
    title: 'Primary brand colors',
    description:
      'The primary brand colours are black, white and green. As part of the corporate identity, it should be used in, texts, illustrations or as background colours. Descending black colors components can be used as gray shades. Whereby a greyish colour can also be used to emphasise text passages or headlines but must be applied with sufficient contrast difference. As green color variations, both creations of green can be used or combined, since the colour originates from the same shades.',
    // Swatch grid for the primary palette.
    children: (
      <>
        <div className='grid grid-col-3 gap-30px sm:gap-0px sm:grid-col-1 md:grid-col-2 w-100per overflow-x-hidden pt-100px'>
          {primaryColors.map((item, index) => {
            return (
              <ColourExample
                key={index}
                className={item.className}
                colorName={item.colorName}
                colorHex={item.colorHex}
              />
            )
          })}
        </div>
      </>
    )
  },
  {
    title: 'Secondary colors',
    description:
      'The secondary colours contain a high spectrum of diversity. They should be used wisely in illustrations, images and publications to maintain their significance and impact.'
  },
  {
    isLight: true,
    className: 'max-w-90rem py-100px m-auto',
    // Swatch grid for the secondary palette.
    children: (
      <div className='grid grid-col-3 gap-30px sm:gap-0px sm:grid-col-1 md:grid-col-2 w-100per overflow-x-hidden'>
        {secondaryColors.map((item, index) => {
          return (
            <ColourExample key={index} className={item.className} colorName={item.colorName} colorHex={item.colorHex} />
          )
        })}
      </div>
    )
  },
  {
    title: 'Utilisation',
    description:
      'White plays a very important role in all brand communication and should balance black and green. Secondary colours are only used in sensitive cases where caution is needed or where their use enhances the composition.'
  },
  {
    title: 'Spot Colours',
    description:
      'Spot colours are intended for images or illustrations that require tone-on-tone pairings, product designs that demand variations in hue and opacity, or buttons with active, focus or hover states. Spot colours are lightened secondary colours.'
  },
  {
    isLight: true,
    className: 'max-w-90rem py-100px m-auto',
    // Swatch grid for the spot palette (wider: 5 columns).
    children: (
      <div className='grid grid-col-5 gap-30px sm:gap-0px sm:grid-col-1 md:grid-col-2 w-100per overflow-x-hidden'>
        {spotColors.map((item, index) => {
          return (
            <ColourExample key={index} className={item.className} colorName={item.colorName} colorHex={item.colorHex} />
          )
        })}
      </div>
    )
  },
  {
    // NOTE(review): displayed heading reads 'Dont’s'; 'Don’ts' would be the
    // conventional spelling — confirm with content owners before changing.
    title: 'Dont’s'
  },
  {
    isLight: true,
    // Gallery of the "don't" examples defined above.
    children: (
      <div className='m-auto grid grid-col-2 gap-30px sm:gap-0px sm:grid-col-1 md:grid-col-1'>
        {badexamples.map((bad, index) => {
          return (
            <BrandBadExample
              key={index}
              childOne={
                <Image
                  quality={100}
                  height={bad.height}
                  width={bad.width}
                  src={`${bad.src}`}
                  alt={`${bad.alt}`}
                  className='w-100per'
                  placeholder='blur'
                  // Low-res (16px, q=1) variant of the same asset as blur placeholder.
                  blurDataURL={`/_next/image?url=${bad.src}&w=16&q=1`}
                />
              }
              classOne='py-10px'
              childTwo={bad.childTwo}
            />
          )
        })}
      </div>
    )
  },
  {
    title: 'Ways to apply colour'
  },
  {
    isLight: true,
    // Gallery of the "do" examples; reuses BrandBadExample as a generic
    // image+caption card.
    children: (
      <div className='m-auto grid grid-col-2 gap-30px sm:gap-0px sm:grid-col-1 md:grid-col-1'>
        {goodexamples.map((good, index) => {
          return (
            <BrandBadExample
              key={index}
              childOne={
                <Image
                  quality={100}
                  height={good.height}
                  width={good.width}
                  src={`${good.src}`}
                  alt={`${good.alt}`}
                  className='w-100per'
                  placeholder='blur'
                  blurDataURL={`/_next/image?url=${good.src}&w=16&q=1`}
                />
              }
              classOne='py-10px'
              childTwo={good.childTwo}
            />
          )
        })}
      </div>
    )
  }
]
|
module RailsTypedSettings
  module Types
    # Float setting type: matches native Floats and coerces Strings/Integers.
    class Float < Base
      def self.===(value)
        ::Float === value
      end

      # Coerces +value+ to a ::Float.
      # nil stays nil; Strings and Integers are converted with #to_f;
      # anything else is returned unchanged.
      #
      # BUG FIX: the previous implementation checked
      # `[::String, ::Integer, Fixnum].include? value.class` — Fixnum was
      # removed in Ruby 3.2 (deprecated since 2.4) and raises NameError there.
      # `case/when` uses ===, so ::Integer matches Fixnum values on old
      # Rubies too, where `value.class` equality did not.
      def self.coerce(value)
        return nil if value.nil?
        case value
        when ::String, ::Integer
          value.to_f
        else
          value
        end
      end
    end
  end
end
|
const DrawCard = require('../../../drawcard.js');

class BearIsland extends DrawCard {
  setupCardAbilities(ability) {
    // Fires when this card's controller marshals one of their loyal,
    // non-plot cards.
    const controllerMarshalsLoyalCard = (event) =>
      event.card.getType() !== 'plot' &&
      event.card.controller === this.controller &&
      event.card.isLoyal() &&
      event.playingType === 'marshal';

    this.reaction({
      when: {
        onCardEntersPlay: controllerMarshalsLoyalCard
      },
      // At most twice per phase.
      limit: ability.limit.perPhase(2),
      handler: () => {
        this.game.addGold(this.controller, 1);
        this.game.addMessage('{0} uses {1} to gain 1 gold', this.controller, this);
      }
    });
  }
}

BearIsland.code = '04042';

module.exports = BearIsland;
|
const request = require('request');

// Fetch real-time stock prices and print "ticker: price" lines.
// The previous version ignored the request `error` and parsed `body`
// unguarded, so network failures and non-JSON responses crashed the script.
request('https://financialmodelingprep.com/api/v3/stock/real-time-price', function (error, response, body) {
  if (error) {
    console.error('Request failed: ' + error.message);
    return;
  }
  let data;
  try {
    data = JSON.parse(body);
  } catch (parseError) {
    console.error('Response was not valid JSON: ' + parseError.message);
    return;
  }
  // Assumes the endpoint returns an array of { ticker, price } objects —
  // TODO confirm against the API (some versions wrap the list in an object).
  console.log('Current Stock Prices');
  data.forEach(function (company) {
    console.log(company.ticker + ': ' + company.price);
  });
});
|
#!/bin/bash
# Report whether a filename carries the expected .txt extension.
filename='myfile.txt'
# Everything after the last dot is the extension.
extension="${filename##*.}"
case "${extension}" in
  txt)
    echo "File extension is valid"
    ;;
  *)
    echo "File extension is invalid"
    ;;
esac
|
package solidtxsample;
import org.binarybabel.solidtx.TxException;
import org.binarybabel.solidtx.TxFn;
import org.binarybabel.solidtx.TxStack;
public class Main {
public static void main(String[] args) {
TxStack.debug = true;
Stack s = new Stack();
s.callObject(Person.class, 1, new TxFn() {
@Override
public void run(TxStack stack, Object obj, TxException e) {
Person p = (Person) obj;
TxStack.debug(p.firstName, null);
TxStack.debug(p.lastName, null);
TxStack.debug(p.hometown.name, null);
}
});
s.callObject(Person.class, 2, new TxFn() {
@Override
public void run(TxStack stack, Object obj, TxException e) {
Person p = (Person) obj;
TxStack.debug(p.firstName, null);
TxStack.debug(p.lastName, null);
for (String c : p.favorite_colors) {
TxStack.debug(c, null);
}
for (Thing t : p.favorite_things) {
TxStack.debug(t.name, null);
}
}
});
s.callObject(Person.class, 99, new TxFn() {
@Override
public void run(TxStack stack, Object obj, TxException e) {
if (obj == null) {
TxStack.debug(e.getMessage(), null);
}
}
});
s.callObject(Thing.class, "8639d2c5-94ff-4350-8239-358839a5c0fa", new TxFn() {
@Override
public void run(TxStack stack, Object obj, TxException e) {
Thing t = (Thing) obj;
TxStack.debug(t.name, null);
}
});
try {
s.sync();
} catch (TxException e) {
e.printStackTrace();
}
s.run();
}
}
|
<reponame>FrankT-WP/nodejs-boilerplate
import { createController, generateRequiredSchemaItems } from "./helper"
import { JobModel } from "../models/job.schema"
import { Collection } from "../utilities/database"
import { tokenKey } from "../config"
import { pipe, zip, of } from "rxjs"
import { mergeMap } from "rxjs/operators"
import { generatePassword } from "../utilities/security"
import { sendMessage } from "../utilities/messaging"
// NOTE(review): `jwt` — like the tokenKey/generatePassword/sendMessage
// imports above — is never used in this module; confirm before removing.
const jwt = require('jsonwebtoken');
// Shared data-access wrapper around the Job collection.
const jobs = new Collection(JobModel)
// add a Job CRUD operation
// CRUD: create a Job from the validated request body (Name + Description).
const AddJobOperation = {
  requestValidationSchema: generateRequiredSchemaItems([
    'body.Name',
    'body.Description',
  ]),
  request_mapper: (req) => req.body,
  // Persist only the whitelisted fields from the request body.
  processor: mergeMap(({ Name, Description }) => jobs.ADD({ Name, Description })),
  response_mapper: (req, res) => (val) => {
    res.send({
      data: val,
      message: "Successfully added a Job!",
    })
  }
}
// GET a particular Job's details operation
// CRUD: read a single Job, looked up by its Name.
const GetJobOperation = {
  requestValidationSchema: generateRequiredSchemaItems(['body.Name']),
  request_mapper: (req) => req.body,
  processor: mergeMap(({ Name }) => jobs.GET_ONE({ Name })),
  // The document is sent back as-is, without an envelope.
  response_mapper: (req, res) => (val) => {
    res.send(val)
  }
}
// GET all Jobs operation
// CRUD: list all Jobs. No validation/request mapping — the processor
// ignores its input.
const GetJobsOperation = {
  // Example of fetching one named job instead of all of them:
  // processor: mergeMap(props => jobs.GET({ Name: "Full Stack Developer" })),
  // Fetch every Job document.
  processor: mergeMap(props => jobs.GET()),
  response_mapper: (req, res) => (val) => {
    res.send(val)
  }
}
// Update a Job operation
// CRUD: update a Job by _id, then re-read and return the fresh document.
// Cleanup: removed a stray debug `console.log(val)` and dead commented-out
// code from the response mapper.
const UpdateJobOperation = {
  requestValidationSchema: generateRequiredSchemaItems([
    'body._id',
    'body.Name',
    'body.Description',
  ]),
  request_mapper: (req) => req.body,
  processor: pipe(
    // Run the update and carry the original props along for the re-read.
    mergeMap(props => {
      return zip(
        jobs.UPDATE({
          identifier: {
            _id: props._id
          },
          data: {
            Name: props.Name,
            Description: props.Description
          }
        }),
        of(props)
      )
    }),
    // Re-read the updated document so the client sees the stored state.
    mergeMap(([update_status, props]) => jobs.GET_ONE({ _id: props._id }))
  ),
  response_mapper: (req, res) => (val) => {
    // NOTE(review): the other operations use `data` as the payload key; `val`
    // is kept here so existing clients don't break — confirm before unifying.
    res.send({
      val: val,
      message: "Successfully updated a Job!",
    })
  }
}
// delete A Job Operation
// CRUD: delete a single Job identified by Name.
const DeleteJobOperation = {
  requestValidationSchema: generateRequiredSchemaItems(['body.Name']),
  request_mapper: (req) => req.body,
  processor: mergeMap(({ Name }) => jobs.DELETE_ONE({ Name })),
  response_mapper: (req, res) => (val) => {
    res.send({
      data: val,
      message: "Successfully Deleted a Job!",
    })
  }
}
// delete All Jobs Operation
// CRUD: delete every Job. No validation — the processor needs no input fields.
const DeleteJobsOperation = {
  processor: mergeMap(() => jobs.DELETE()),
  response_mapper: (req, res) => (val) => {
    res.send({
      data: val,
      message: "Successfully Deleted all Jobs!",
    })
  }
}
// Wire each operation description into a request handler via the shared
// createController helper.
export const addJobController = createController(AddJobOperation)
export const getJobsController = createController(GetJobsOperation)
export const getJobController = createController(GetJobOperation)
export const updateJobController = createController(UpdateJobOperation)
export const deleteJobController = createController(DeleteJobOperation)
export const deleteJobsController = createController(DeleteJobsOperation)
|
<filename>src/store.js<gh_stars>0
import Vue from "vue";
import Vuex from "vuex";
// Install the Vuex plugin before the store is instantiated below.
Vue.use(Vuex);
// RFC 4122 version-4 UUID using cryptographically secure randomness.
// The original built the template via numeric-coercion golf
// ([1e7]+-1e3+-4e3+-8e3+-1e11); spelled out, it is the string below.
// Each '0'/'1'/'8' placeholder is replaced with a hex digit derived from a
// random byte masked so the version nibble stays 4 and the variant nibble
// lands in 8..b.
function uuidv4() {
  const template = "10000000-1000-4000-8000-100000000000";
  return template.replace(/[018]/g, c => {
    const byte = crypto.getRandomValues(new Uint8Array(1))[0];
    return (c ^ (byte & (15 >> (c / 4)))).toString(16);
  });
}
// Root Vuex state: the list of todo items, each shaped
// { id, state, value } (see the addNewTodo action below).
const state = {
  todos: []
};
const mutations = {
  // Updates the value/state of the todo matching payload.id.
  // BUG FIX: the previous `filter(...)[0]` yielded undefined for an unknown
  // id and then threw a TypeError on assignment; `find` plus a guard makes
  // an unknown id a harmless no-op.
  setTodoDataById(state, payload) {
    const todo = state.todos.find(item => item.id === payload.id);
    if (!todo) return; // unknown id: ignore rather than crash
    todo.value = payload.value;
    todo.state = payload.state;
  },
  // Appends an already-constructed todo object (see the addNewTodo action).
  addNewTodo(state, payload) {
    state.todos.push(payload);
  }
};
const actions = {
  // payload is the todo's text; a fresh id is generated and new todos start
  // uncompleted (state: false).
  addNewTodo(context, payload) {
    const newTodo = { id: uuidv4(), state: false, value: payload };
    context.commit("addNewTodo", newTodo);
  }
};
const getters = {
  // The full todo list.
  todos(state) {
    return state.todos;
  },
  // Returns a lookup function: id -> todo (undefined when absent).
  // Idiom: `find` replaces the previous `filter(...)[0]`, which scanned the
  // whole array only to keep the first match; behavior is identical.
  getTodoById(state) {
    return function(id) {
      return state.todos.find(todo => todo.id === id);
    };
  }
};
// Assemble and export the singleton store from the pieces defined above.
export default new Vuex.Store({
  state,
  mutations,
  actions,
  getters
});
|
<filename>source/infrastructure/lib/custom-resource/custom-resources.ts
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
import { CfnCondition, CfnCustomResource, CustomResource, Duration, Stack } from 'aws-cdk-lib';
import { Effect, Policy, PolicyDocument, PolicyStatement, Role, ServicePrincipal } from 'aws-cdk-lib/aws-iam';
import { CfnPolicyPrincipalAttachment } from 'aws-cdk-lib/aws-iot';
import { Code, Function as LambdaFunction, Runtime } from 'aws-cdk-lib/aws-lambda';
import { IBucket } from 'aws-cdk-lib/aws-s3';
import { NagSuppressions } from 'cdk-nag';
import { Construct } from 'constructs';
import { addCfnSuppressRules } from '../../utils/utils';
export interface CustomResourcesConstructProps {
readonly cloudWatchLogsPolicy: PolicyDocument;
readonly existingKinesisStream: string;
readonly existingTimestreamDatabase: string;
readonly sendAnonymousUsageCondition: CfnCondition;
readonly solutionConfig: {
loggingLevel: string;
solutionId: string;
solutionVersion: string;
sourceCodeBucket: IBucket;
sourceCodePrefix: string;
};
}
interface CustomResourceSetupUiProps {
apiEndpoint: string;
identityPoolId: string;
loggingLevel: string;
resourceS3Bucket: IBucket;
uiBucket: IBucket;
userPoolId: string;
webClientId: string;
}
interface CustomResourceSetupGreengrassV2Props {
greengrassIoTPolicyName: string;
greengrassV2ResourceBucket: IBucket;
iotCredentialsRoleArn: string;
iotPolicyName: string;
iotRoleAliasName: string;
}
/**
* Creates a custom resource Lambda function, a solution UUID, a custom resource to send anonymous usage, and a role.
*/
/**
 * Creates a custom resource Lambda function, a solution UUID, a custom resource to send anonymous usage, and a role.
 */
export class CustomResourcesConstruct extends Construct {
  public customResourceFunction: LambdaFunction;
  public customResourceFunctionRole: Role;
  // Populated by setupGreengrassV2(): ARN of the certificate created by the
  // installation-scripts custom resource.
  public iotCertificateArn: string;
  // Populated in the constructor from the DescribeIoTEndpoint custom resource.
  public iotCredentialProviderEndpoint: string;
  public iotDataAtsEndpoint: string;
  private sourceCodeBucket: IBucket;
  private sourceCodePrefix: string;
  // Solution UUID generated by the CreateUUID custom resource.
  public uuid: string;

  /**
   * @param scope construct scope
   * @param id construct id
   * @param props solution configuration, existing resource names, and the
   *              CloudWatch Logs policy shared with the Lambda role
   */
  constructor(scope: Construct, id: string, props: CustomResourcesConstructProps) {
    super(scope, id);
    this.sourceCodeBucket = props.solutionConfig.sourceCodeBucket;
    this.sourceCodePrefix = props.solutionConfig.sourceCodePrefix;
    // Execution role: CloudWatch logging plus the IoT actions the custom
    // resource handlers call.
    this.customResourceFunctionRole = new Role(this, 'CustomResourceFunctionRole', {
      assumedBy: new ServicePrincipal('lambda.amazonaws.com'),
      path: '/',
      inlinePolicies: {
        CloudWatchPolicy: props.cloudWatchLogsPolicy,
        GreengrassIoTPolicy: new PolicyDocument({
          statements: [
            new PolicyStatement({
              // These IoT certificate/endpoint actions do not support
              // resource-level permissions, hence the wildcard.
              actions: [
                'iot:CreateKeysAndCertificate',
                'iot:DescribeEndpoint',
                'iot:UpdateCertificate',
                'iot:UpdateThingShadow',
                'iot:DeleteCertificate'
              ],
              effect: Effect.ALLOW,
              resources: ['*']
            }),
            new PolicyStatement({
              actions: ['iot:CreateRoleAlias', 'iot:DeleteRoleAlias'],
              effect: Effect.ALLOW,
              resources: [
                Stack.of(this).formatArn({
                  service: 'iot',
                  resource: 'rolealias',
                  resourceName: '*'
                })
              ]
            })
          ]
        })
      }
    });
    addCfnSuppressRules(this.customResourceFunctionRole, [
      { id: 'W11', reason: 'IoT actions cannot specify the resource.' }
    ]);
    // NOTE(review): NODEJS_14_X has reached end-of-support as a Lambda
    // runtime — confirm the custom-resource bundle runs on a newer runtime
    // before upgrading.
    this.customResourceFunction = new LambdaFunction(this, 'CustomResourceFunction', {
      description: 'Machine to Cloud Connectivity custom resource function',
      handler: 'custom-resource/index.handler',
      runtime: Runtime.NODEJS_14_X,
      code: Code.fromBucket(this.sourceCodeBucket, `${this.sourceCodePrefix}/custom-resource.zip`),
      timeout: Duration.seconds(240),
      role: this.customResourceFunctionRole,
      environment: {
        LOGGING_LEVEL: props.solutionConfig.loggingLevel,
        SOLUTION_ID: props.solutionConfig.solutionId,
        SOLUTION_VERSION: props.solutionConfig.solutionVersion
      }
    });
    this.sourceCodeBucket.grantRead(this.customResourceFunction, `${this.sourceCodePrefix}/*`);
    // One UUID per deployment; reused below to correlate anonymous metrics.
    const customUuid = new CustomResource(this, 'UUID', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'CreateUUID'
      }
    });
    this.uuid = customUuid.getAtt('UUID').toString();
    const sendAnonymousMetrics = new CustomResource(this, 'SendAnonymousMetrics', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'SendAnonymousMetrics',
        ExistingKinesisStream: props.existingKinesisStream,
        ExistingTimestreamDatabase: props.existingTimestreamDatabase,
        SolutionUUID: this.uuid
      }
    });
    // Metrics resource only exists when the opt-in condition holds.
    const cfnSendAnonymousMetrics = <CfnCustomResource>sendAnonymousMetrics.node.defaultChild;
    cfnSendAnonymousMetrics.cfnOptions.condition = props.sendAnonymousUsageCondition;
    const describeIoTEndpoint = new CustomResource(this, 'DescribeIoTEndpoint', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'DescribeIoTEndpoint'
      }
    });
    this.iotCredentialProviderEndpoint = describeIoTEndpoint.getAttString('CredentialProviderEndpoint');
    this.iotDataAtsEndpoint = describeIoTEndpoint.getAttString('DataAtsEndpoint');
    // cdk-nag suppressions
    NagSuppressions.addResourceSuppressions(
      this.customResourceFunctionRole,
      [
        {
          id: 'AwsSolutions-IAM5',
          reason: 'IoT actions cannot specify the resource. It does not allow wildcard permissions either.'
        }
      ],
      true
    );
  }

  /**
   * Sets up the UI assets and UI configuration.
   * @param props Custom resource setup UI props
   */
  public setupUi(props: CustomResourceSetupUiProps): void {
    props.uiBucket.grantPut(this.customResourceFunction);
    // Copy the prebuilt UI from the solution bucket into the UI bucket.
    new CustomResource(this, 'CopyUiAssets', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'CopyUIAssets',
        DestinationBucket: props.uiBucket.bucketName,
        ManifestFile: 'manifest.json',
        SourceBucket: this.sourceCodeBucket.bucketName,
        SourcePrefix: this.sourceCodePrefix
      }
    });
    // Generate the runtime config (aws-exports.js) the UI loads at boot.
    new CustomResource(this, 'CreateUiConfig', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'CreateUIConfig',
        ApiEndpoint: props.apiEndpoint,
        ConfigFileName: 'aws-exports.js',
        DestinationBucket: props.uiBucket.bucketName,
        IdentityPoolId: props.identityPoolId,
        LoggingLevel: props.loggingLevel,
        S3Bucket: props.resourceS3Bucket.bucketName,
        UserPoolId: props.userPoolId,
        WebClientId: props.webClientId
      }
    });
  }

  /**
   * Sets up Greengrass v2 resources.
   * @param props Custom resource setup Greengrass v2 props
   */
  public setupGreengrassV2(props: CustomResourceSetupGreengrassV2Props): void {
    props.greengrassV2ResourceBucket.grantPut(this.customResourceFunction);
    props.greengrassV2ResourceBucket.grantRead(this.customResourceFunction);
    // PassRole is needed so the handler can attach the credentials role to
    // the IoT role alias it manages below.
    const greengrassV2CustomResourcePolicy = new Policy(this, 'GreengrassV2CustomResourcePolicy', {
      statements: [
        new PolicyStatement({
          effect: Effect.ALLOW,
          actions: ['iam:PassRole'],
          resources: [props.iotCredentialsRoleArn]
        })
      ]
    });
    this.customResourceFunctionRole.attachInlinePolicy(greengrassV2CustomResourcePolicy);
    new CustomResource(this, 'ManageIoTRoleAlias', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'ManageIoTRoleAlias',
        RoleAliasName: props.iotRoleAliasName,
        RoleArn: props.iotCredentialsRoleArn
      }
    });
    // Stage the Greengrass component artifacts in the solution's own bucket.
    new CustomResource(this, 'CopyGreengrassComponentsArtifact', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'CopyGreengrassComponentsArtifact',
        Artifacts: {
          OpcDaConnectorArtifact: 'm2c2_opcda_connector.zip',
          PublisherArtifact: 'm2c2_publisher.zip'
        },
        DestinationBucket: props.greengrassV2ResourceBucket.bucketName,
        SourceBucket: this.sourceCodeBucket.bucketName,
        SourcePrefix: this.sourceCodePrefix
      }
    });
    // Generates the device installation scripts and, per the attributes read
    // below, an IoT certificate.
    const createGreengrassInstallationScripts = new CustomResource(this, 'CreateGreengrassInstallationScripts', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'CreateGreengrassInstallationScripts',
        CredentialProviderEndpoint: this.iotCredentialProviderEndpoint,
        DataAtsEndpoint: this.iotDataAtsEndpoint,
        DestinationBucket: props.greengrassV2ResourceBucket.bucketName,
        IoTRoleAlias: props.iotRoleAliasName
      }
    });
    this.iotCertificateArn = createGreengrassInstallationScripts.getAttString('CertificateArn');
    // Delete-time permissions, scoped to the certificate created above.
    const greengrassV2DeletePolicy = new Policy(this, 'GreengrassV2DeletePolicy', {
      statements: [
        new PolicyStatement({
          effect: Effect.ALLOW,
          resources: [this.iotCertificateArn],
          actions: ['iot:DetachThingPrincipal', 'iot:ListPrincipalThings']
        })
      ]
    });
    this.customResourceFunctionRole.attachInlinePolicy(greengrassV2DeletePolicy);
    const deleteIoTCertificate = new CustomResource(this, 'DeleteIoTCertificate', {
      serviceToken: this.customResourceFunction.functionArn,
      properties: {
        Resource: 'DeleteIoTCertificate',
        CertificateArn: this.iotCertificateArn,
        CertificateId: createGreengrassInstallationScripts.getAttString('CertificateId')
      }
    });
    // Ensure the delete policy still exists when this resource is torn down.
    deleteIoTCertificate.node.addDependency(greengrassV2DeletePolicy);
    new CfnPolicyPrincipalAttachment(this, 'PolicyPrincipalAttachment', {
      policyName: props.iotPolicyName,
      principal: this.iotCertificateArn
    });
    new CfnPolicyPrincipalAttachment(this, 'GreengrassPolicyPrincipalAttachment', {
      policyName: props.greengrassIoTPolicyName,
      principal: this.iotCertificateArn
    });
  }
}
|
<filename>core/src/main/java/io/machinecode/then/core/DeferredImpl.java
/*
* Copyright 2015 <NAME> and other contributors
* as indicated by the @authors tag. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.machinecode.then.core;
import io.machinecode.then.api.Deferred;
import io.machinecode.then.api.FailureException;
import io.machinecode.then.api.ListenerException;
import io.machinecode.then.api.OnCancel;
import io.machinecode.then.api.OnComplete;
import io.machinecode.then.api.OnProgress;
import io.machinecode.then.api.OnReject;
import io.machinecode.then.api.OnResolve;
import io.machinecode.then.api.Progress;
import io.machinecode.then.api.Promise;
import io.machinecode.then.api.Reject;
import io.machinecode.then.api.Resolve;
import org.jboss.logging.Logger;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
* <p>A thread safe {@link Deferred} implementation that silently drops multiple calls to terminal methods.</p>
*
* It will not report progress to a listener if the listener is added after the call to {@link #progress(Object)}
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @since 1.0
*/
public class DeferredImpl<T,F,P> implements Deferred<T,F,P> {
    private static final Logger log = Logger.getLogger(DeferredImpl.class);

    // Listener-event tags; the PENDING/RESOLVED/REJECTED/CANCELLED state
    // constants referenced below come from the Deferred/Promise API.
    protected static final byte ON_RESOLVE = 100;
    protected static final byte ON_REJECT = 101;
    protected static final byte ON_CANCEL = 102;
    protected static final byte ON_COMPLETE = 103;
    protected static final byte ON_PROGRESS = 104;
    protected static final byte ON_GET = 105;

    // volatile so the isDone()/isCancelled()/... predicates can read it
    // without taking the lock; all writes happen while holding `lock`.
    protected volatile byte state = PENDING;
    protected T value;   // set once, by setValue() on resolution
    protected F failure; // set once, by setFailure() on rejection
    protected final Object lock = new Object();

    // Listener registrations in registration order, tagged by event kind.
    private Event[] events;
    private int length = 0;

    // One registered listener: which event it listens for and the callback.
    private static class Event {
        public final byte event;
        public final Object value;
        private Event(final byte event, final Object value) {
            this.event = event;
            this.value = value;
        }
    }

    /**
     * Appends a listener registration, doubling the backing array when full.
     * Synchronizes internally, so callers may invoke it with or without
     * `lock` held (the monitor is reentrant).
     */
    protected void addEvent(final byte event, final Object that) {
        synchronized (lock) {
            if (length >= events.length) {
                // NOTE(review): a constructor hint of 0 would make the doubled
                // size 0 and overflow below — confirm callers never pass 0.
                final Event[] events = new Event[this.events.length * 2];
                System.arraycopy(this.events, 0, events, 0, length);
                this.events = events;
            }
            events[length++] = new Event(event, that);
        }
    }

    /**
     * View of the listeners registered for `event`.
     * EventIterable is declared elsewhere in this package.
     */
    protected <T> Iterable<T> getEvents(final byte event) {
        synchronized (lock) {
            return new EventIterable<>(event, DeferredImpl.this.events, DeferredImpl.this.length);
        }
    }

    public DeferredImpl() {
        this(2); // small default capacity; addEvent grows it on demand
    }

    public DeferredImpl(final int hint) {
        this.events = new Event[hint];
    }
    /**
     * Transitions to RESOLVED, storing the value. Callers in this class
     * invoke it while holding `lock`.
     *
     * @return true when the promise was already terminal (the call is then
     *         silently dropped, per the class contract), false otherwise.
     */
    protected boolean setValue(final T value) {
        switch (this.state) {
            case RESOLVED:
            case REJECTED:
            case CANCELLED:
                return true;
        }
        this.value = value;
        this.state = RESOLVED;
        return false;
    }

    /**
     * Transitions to REJECTED, storing the failure. Invoked under `lock`;
     * returns true if the promise was already terminal.
     */
    protected boolean setFailure(final F failure) {
        switch (this.state) {
            case RESOLVED:
            case REJECTED:
            case CANCELLED:
                return true;
        }
        this.failure = failure;
        this.state = REJECTED;
        return false;
    }

    /**
     * Transitions to CANCELLED. Invoked under `lock`; returns true if the
     * promise was already terminal.
     */
    protected boolean setCancelled() {
        switch (this.state) {
            case CANCELLED:
            case REJECTED:
            case RESOLVED:
                return true;
        }
        this.state = CANCELLED;
        return false;
    }

    @Override
    public Promise<T,F,P> promise() {
        return this; // this implementation is its own promise view
    }
    /**
     * Resolves with `value`. First terminal call wins: if already terminal
     * this is a no-op. The listener array is snapshotted under the lock and
     * callbacks run outside it; ON_RESOLVE listeners fire before ON_COMPLETE
     * listeners, get() waiters are then woken, and any ListenerException
     * accumulated by the _callOn* helpers (defined elsewhere in this class)
     * is rethrown last.
     */
    @Override
    public void resolve(final T value) throws ListenerException {
        log().tracef(getResolveLogMessage(), value);
        final Event[] events;
        final int length;
        final int state;
        synchronized (lock) {
            if (setValue(value)) {
                return; // already terminal — silently dropped
            }
            // snapshot so callbacks run without holding the lock
            events = this.events;
            length = this.length;
            state = this.state;
        }
        ListenerException exception = null;
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_RESOLVE) {
                continue;
            }
            @SuppressWarnings("unchecked")
            final OnResolve<T> on = ((OnResolve<T>)event.value);
            exception = _callOnResolve(exception, on);
        }
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_COMPLETE) {
                continue;
            }
            exception = _callOnComplete(exception, ((OnComplete) event.value), state);
        }
        synchronized (lock) {
            lock.notifyAll(); // wake threads blocked in get()
        }
        if (exception != null) {
            throw exception;
        }
    }

    /**
     * Rejects with `failure`. Mirrors resolve(): no-op if already terminal,
     * ON_REJECT listeners fire before ON_COMPLETE listeners, get() waiters
     * are woken, and an accumulated ListenerException is rethrown last.
     */
    @Override
    public void reject(final F failure) {
        log().tracef(getRejectLogMessage(), failure);
        final Event[] events;
        final int length;
        final int state;
        synchronized (lock) {
            if (setFailure(failure)) {
                return; // already terminal — silently dropped
            }
            events = this.events;
            length = this.length;
            state = this.state;
        }
        ListenerException exception = null;
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_REJECT) {
                continue;
            }
            @SuppressWarnings("unchecked")
            final OnReject<F> on = ((OnReject<F>)event.value);
            exception = _callOnReject(exception, on);
        }
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_COMPLETE) {
                continue;
            }
            exception = _callOnComplete(exception, (OnComplete) event.value, state);
        }
        synchronized (lock) {
            lock.notifyAll();
        }
        if (exception != null) {
            throw exception;
        }
    }
    /**
     * Notifies ON_PROGRESS listeners with `that`. Note there is no terminal
     * check and no state change here — progress can be reported at any time,
     * and (per the class javadoc) listeners added after this call do not see
     * earlier progress values.
     */
    @Override
    public void progress(final P that) {
        log().tracef(getProgressLogMessage(), that);
        final Event[] events;
        final int length;
        synchronized (lock) {
            events = this.events;
            length = this.length;
        }
        ListenerException exception = null;
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_PROGRESS) {
                continue;
            }
            @SuppressWarnings("unchecked")
            final OnProgress<P> on = ((OnProgress<P>)event.value);
            exception = _callOnProgress(exception, on, that);
        }
        if (exception != null) {
            throw exception;
        }
    }

    /**
     * Attempts cancellation. If already terminal, returns isCancelled()
     * without side effects. Otherwise fires ON_CANCEL listeners (passing
     * `interrupt` through), then ON_COMPLETE listeners, wakes get() waiters,
     * rethrows any accumulated ListenerException, and returns true.
     */
    @Override
    public boolean cancel(final boolean interrupt) throws ListenerException {
        log().tracef(getCancelLogMessage());
        final Event[] events;
        final int length;
        final int state;
        synchronized (lock) {
            if (setCancelled()) {
                return isCancelled(); // already terminal: report, don't re-fire
            }
            events = this.events;
            length = this.length;
            state = this.state;
        }
        ListenerException exception = null;
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_CANCEL) {
                continue;
            }
            exception = _callOnCancel(exception, (OnCancel) event.value, interrupt);
        }
        for (int i = 0; i < length; ++i) {
            final Event event = events[i];
            if (event.event != ON_COMPLETE) {
                continue;
            }
            exception = _callOnComplete(exception, (OnComplete) event.value, state);
        }
        synchronized (lock) {
            lock.notifyAll();
        }
        if (exception != null) {
            throw exception;
        }
        return true;
    }

    // The predicates below read the volatile `state` without locking.
    @Override
    public boolean isDone() {
        switch (this.state) {
            case RESOLVED:
            case REJECTED:
            case CANCELLED:
                return true;
            default:
                return false;
        }
    }

    @Override
    public boolean isCancelled() {
        return this.state == CANCELLED;
    }

    @Override
    public boolean isRejected() {
        return this.state == REJECTED;
    }

    @Override
    public boolean isResolved() {
        return this.state == RESOLVED;
    }
    /**
     * Registers a resolution listener. If already RESOLVED the listener is
     * both recorded and invoked immediately (note the deliberate switch
     * fall-through into PENDING); if REJECTED/CANCELLED it is ignored.
     */
    @Override
    public Deferred<T,F,P> onResolve(final OnResolve<? super T> then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onResolve"));
        }
        boolean run = false;
        synchronized (lock) {
            switch (this.state) {
                case REJECTED:
                case CANCELLED:
                    return this;
                case RESOLVED:
                    run = true;
                    // falls through: still recorded, then run below
                case PENDING:
                default:
                    addEvent(ON_RESOLVE, then);
            }
        }
        if (run) {
            then.resolve(this.value); // invoked outside the lock
        }
        return this;
    }

    /**
     * Registers a rejection listener; runs it immediately (outside the lock)
     * when already REJECTED, ignores it when RESOLVED/CANCELLED.
     */
    @Override
    public Deferred<T,F,P> onReject(final OnReject<? super F> then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onReject"));
        }
        boolean run = false;
        synchronized (lock) {
            switch (this.state) {
                case RESOLVED:
                case CANCELLED:
                    return this;
                case REJECTED:
                    run = true;
                    // falls through
                case PENDING:
                default:
                    addEvent(ON_REJECT, then);
            }
        }
        if (run) {
            then.reject(this.failure);
        }
        return this;
    }

    /**
     * Registers a cancellation listener; runs it immediately (with
     * interrupt=true) when already CANCELLED, ignores it when
     * RESOLVED/REJECTED.
     */
    @Override
    public Deferred<T,F,P> onCancel(final OnCancel then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onCancel"));
        }
        boolean run = false;
        synchronized (lock) {
            switch (this.state) {
                case RESOLVED:
                case REJECTED:
                    return this;
                case CANCELLED:
                    run = true;
                    // falls through
                case PENDING:
                default:
                    addEvent(ON_CANCEL, then);
            }
        }
        if (run) {
            then.cancel(true);
        }
        return this;
    }

    /**
     * Registers a completion listener. Unlike the handlers above it is kept
     * (and run immediately with the current state) in every terminal state.
     */
    @Override
    public Deferred<T,F,P> onComplete(final OnComplete then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onComplete"));
        }
        boolean run = false;
        synchronized (lock) {
            switch (this.state) {
                case REJECTED:
                case CANCELLED:
                case RESOLVED:
                    run = true;
                    // falls through
                case PENDING:
                default:
                    addEvent(ON_COMPLETE, then);
            }
        }
        if (run) {
            then.complete(this.state);
        }
        return this;
    }

    /**
     * Registers a progress listener. Always recorded; never run
     * retroactively (earlier progress values are not replayed).
     */
    @Override
    public Deferred<T, F, P> onProgress(final OnProgress<? super P> then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onProgress"));
        }
        synchronized (lock) {
            addEvent(ON_PROGRESS, then);
        }
        return this;
    }

    /**
     * Registers a future to be associated with get() calls; always recorded.
     * (ON_GET events are consumed elsewhere in this class, past this chunk.)
     */
    @Override
    public Deferred<T,F,P> onGet(final Future<?> then) {
        if (then == null) {
            throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "onGet"));
        }
        synchronized (lock) {
            addEvent(ON_GET, then);
        }
        return this;
    }
/**
 * Chains a resolution handler, producing a new promise. Rejection, progress,
 * cancellation and get-linkage are forwarded to the next deferred unchanged.
 *
 * Fix: validate the handler up front — the registration methods in this class
 * (onResolve/onReject/...) all reject null listeners with THEN-000400, but the
 * then(...) overloads previously deferred the failure to an NPE at resolution
 * time, far from the faulty call site.
 */
@Override
public <Tx> Promise<Tx,F,P> then(final Resolve<? super T,Tx,F,P> then) {
    if (then == null) {
        throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "then"));
    }
    final DeferredImpl<Tx,F,P> next = new DeferredImpl<>();
    final OnResolve<T> callback = new OnResolve<T>() {
        @Override
        public void resolve(final T that) {
            then.resolve(that, next);
        }
    };
    this.onResolve(callback)
        .onReject(next)
        .onProgress(next)
        .onCancel(next)
        .onGet(next);
    return next;
}
/**
 * Chains a handler that intercepts both resolution and rejection; progress,
 * cancellation and get-linkage are forwarded to the next deferred.
 */
@Override
public <Tx,Fx> Promise<Tx,Fx,P> then(final Reject<? super T,? super F,Tx,Fx,P> then) {
    if (then == null) {
        throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "then"));
    }
    final DeferredImpl<Tx,Fx,P> next = new DeferredImpl<>();
    final _OnReject<T,F> callback = new _OnReject<T,F>() {
        @Override
        public void resolve(final T that) {
            then.resolve(that, next);
        }
        @Override
        public void reject(final F fail) {
            then.reject(fail, next);
        }
    };
    this.onResolve(callback)
        .onReject(callback)
        .onProgress(next)
        .onCancel(next)
        .onGet(next);
    return next;
}
/**
 * Chains a handler that intercepts resolution, rejection and progress;
 * cancellation and get-linkage are forwarded to the next deferred.
 */
@Override
public <Tx,Fx,Px> Promise<Tx,Fx,Px> then(final Progress<? super T,? super F,? super P,Tx,Fx,Px> then) {
    if (then == null) {
        throw new IllegalArgumentException(Messages.format("THEN-000400.promise.argument.required", "then"));
    }
    final DeferredImpl<Tx,Fx,Px> next = new DeferredImpl<>();
    final _OnProgress<T,F,P> callback = new _OnProgress<T,F,P>() {
        @Override
        public void resolve(final T that) {
            then.resolve(that, next);
        }
        @Override
        public void reject(final F fail) {
            then.reject(fail, next);
        }
        @Override
        public void progress(final P that) {
            then.progress(that, next);
        }
    };
    this.onResolve(callback)
        .onReject(callback)
        .onProgress(callback)
        .onCancel(next)
        .onGet(next);
    return next;
}
@Override
public T get() throws InterruptedException, ExecutionException {
return _get();
}
/**
 * Blocks until the promise reaches a terminal state, then runs any registered
 * ON_GET futures and either returns the value (RESOLVED) or throws
 * CancellationException / ExecutionException wrapped with listener failures.
 */
protected T _get() throws InterruptedException, ExecutionException {
// Clear-and-check the interrupt flag up front so a pre-interrupted thread
// never enters the wait loop.
if (Thread.interrupted()) {
throw new InterruptedException(getInterruptedExceptionMessage());
}
final Event[] events;
final int length;
final int state;
synchronized (lock) {
// wait() may wake spuriously; the loop re-checks the state each pass.
loop: for (;;) {
switch (this.state) {
case CANCELLED:
case REJECTED:
case RESOLVED:
break loop;
}
lock.wait();
}
// Snapshot under the lock; event processing happens outside it below.
state = this.state;
events = this.events;
length = this.length;
}
try {
switch (state) {
case CANCELLED:
// _onGet awaits the ON_GET futures and folds their failures into the
// exception it returns, which is then thrown here.
throw _onGet(events, length, new CancellationException(Messages.format("THEN-000202.promise.cancelled")));
case REJECTED:
final String msg = Messages.format("THEN-000201.promise.rejected");
throw _onGet(events, length, new ExecutionException(msg, _getFailureCause(msg)));
case RESOLVED:
_onGet(events, length, null);
return value;
default:
throw new IllegalStateException(Messages.format("THEN-000200.promise.illegal.state", _stateToString(state)));
}
} catch (final InterruptedException | ExecutionException | RuntimeException e) {
throw e;
} catch (final Exception e) {
// Any other checked exception from listener futures is unexpected here.
throw new IllegalStateException(e);
}
}
@Override
public T get(final long timeout, final TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
return _get(timeout, unit);
}
/**
 * Timed variant of _get(): waits at most `timeout`, throwing TimeoutException
 * (from _tryTimeout) once the absolute deadline passes. Registered ON_GET
 * futures share the same deadline via _onTimedGet.
 */
protected T _get(final long timeout, final TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
if (Thread.interrupted()) {
throw new InterruptedException(getInterruptedExceptionMessage());
}
final Event[] events;
final int length;
// Absolute deadline in wall-clock milliseconds.
final long end = System.currentTimeMillis() + unit.toMillis(timeout);
final byte state;
synchronized (lock) {
loop: for (;;) {
switch (this.state) {
case CANCELLED:
case REJECTED:
case RESOLVED:
break loop;
}
// _tryTimeout returns the remaining millis or throws TimeoutException.
lock.wait(_tryTimeout(end));
}
state = this.state;
events = this.events;
length = this.length;
}
try {
switch (state) {
case CANCELLED:
throw _onTimedGet(events, length, end, new CancellationException(Messages.format("THEN-000202.promise.cancelled")));
case REJECTED:
final String msg = Messages.format("THEN-000201.promise.rejected");
throw _onTimedGet(events, length, end, new ExecutionException(msg, _getFailureCause(msg)));
case RESOLVED:
_onTimedGet(events, length, end, null);
return value;
default:
throw new IllegalStateException(Messages.format("THEN-000200.promise.illegal.state", _stateToString(state)));
}
} catch (final InterruptedException | ExecutionException | TimeoutException | RuntimeException e) {
throw e;
} catch (final Exception e) {
throw new IllegalStateException(e);
}
}
/*
 * _callOnX helpers: invoke a single listener, never letting its Throwable
 * escape. The first failure becomes a ListenerException; later failures are
 * attached as suppressed exceptions, so every listener still gets to run.
 * Each helper returns the (possibly newly created) accumulator.
 */
protected ListenerException _callOnResolve(ListenerException exception, final OnResolve<T> on) {
try {
on.resolve(this.value);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000300.promise.on.resolve.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Notifies a completion listener with the terminal state constant.
protected ListenerException _callOnComplete(ListenerException exception, final OnComplete on, final int state) {
try {
on.complete(state);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000303.promise.on.complete.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Notifies a rejection listener with the stored failure payload.
protected ListenerException _callOnReject(ListenerException exception, final OnReject<F> on) {
try {
on.reject(this.failure);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000301.promise.on.reject.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Notifies a cancellation listener, passing through the interrupt flag.
protected ListenerException _callOnCancel(ListenerException exception, final OnCancel on, final boolean interrupt) {
try {
on.cancel(interrupt);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000302.promise.on.cancel.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Notifies a progress listener with the given progress value.
protected ListenerException _callOnProgress(ListenerException exception, final OnProgress<P> on, final P that) {
try {
on.progress(that);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000304.promise.on.progress.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Awaits a linked Future (registered via onGet) without a deadline.
protected Exception _callOnGet(Exception exception, final Future<?> on) {
try {
on.get();
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000401.promise.get.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
// Awaits a linked Future against the shared absolute `end` deadline.
protected Exception _callOnTimedGet(Exception exception, final Future<?> on, final long end) {
try {
on.get(_tryTimeout(end), MILLISECONDS);
} catch (final Throwable e) {
if (exception == null) {
exception = new ListenerException(Messages.format("THEN-000401.promise.get.exception"), e);
} else {
exception.addSuppressed(e);
}
}
return exception;
}
/**
 * Awaits every registered ON_GET future, folding failures into `exception`
 * (see _callOnGet). Returns the accumulated exception, or the one passed in
 * (possibly null) when all futures succeed.
 */
protected Exception _onGet(final Event[] events, final int length, Exception exception) throws Exception {
for (int i = 0; i < length; ++i) {
final Event event = events[i];
if (event.event != ON_GET) {
continue;
}
@SuppressWarnings("unchecked")
final Future<?> on = ((Future<?>)event.value);
exception = _callOnGet(exception, on);
}
return exception;
}
/**
 * Timed variant: every future's get() shares the same absolute `end`
 * deadline, so total waiting never exceeds the caller's timeout.
 */
protected Exception _onTimedGet(final Event[] events, final int length, final long end, Exception exception) throws Exception {
for (int i = 0; i < length; ++i) {
final Event event = events[i];
if (event.event != ON_GET) {
continue;
}
@SuppressWarnings("unchecked")
final Future<?> on = ((Future<?>)event.value);
exception = _callOnTimedGet(exception, on, end);
}
return exception;
}
// Converts the failure payload into a Throwable cause: used as-is when it
// already is one, otherwise wrapped in a FailureException carrying the value.
protected Throwable _getFailureCause(final String msg) {
return failure instanceof Throwable
? (Throwable)failure
: new FailureException(msg, failure);
}
// Milliseconds remaining until the absolute deadline `end`; throws
// TimeoutException once the deadline has passed (never returns <= 0,
// which also keeps lock.wait(timeout) from waiting indefinitely on 0).
protected long _tryTimeout(final long end) throws TimeoutException {
final long timeout = end - System.currentTimeMillis();
if (timeout <= 0) {
throw new TimeoutException(getTimeoutExceptionMessage());
}
return timeout;
}
/*
 * Localized log/exception message accessors; protected so subclasses can
 * override the wording per promise type.
 */
protected String getResolveLogMessage() {
return Messages.get("THEN-000000.promise.resolve");
}
protected String getRejectLogMessage() {
return Messages.get("THEN-000001.promise.reject");
}
protected String getCancelLogMessage() {
return Messages.get("THEN-000002.promise.cancel");
}
protected String getProgressLogMessage() {
return Messages.get("THEN-000003.promise.progress");
}
protected String getTimeoutExceptionMessage() {
return Messages.get("THEN-000100.promise.timeout");
}
protected String getInterruptedExceptionMessage() {
// NOTE(review): uses Messages.format(...) with no arguments while the
// siblings use Messages.get(...) — presumably equivalent; confirm.
return Messages.format("THEN-000101.promise.interrupted");
}
// Logger used by this instance; overridable for subclass-specific categories.
protected Logger log() {
return log;
}
/**
 * Human-readable name for a state constant; any unrecognised value
 * reports as "UNKNOWN".
 */
protected String _stateToString(final int state) {
    if (state == PENDING) {
        return "PENDING";
    }
    if (state == RESOLVED) {
        return "RESOLVED";
    }
    if (state == REJECTED) {
        return "REJECTED";
    }
    if (state == CANCELLED) {
        return "CANCELLED";
    }
    return "UNKNOWN";
}
/**
 * Diagnostic representation, e.g. {@code DeferredImpl{state=2 (REJECTED)}} —
 * identical output to the previous StringBuilder-based implementation.
 */
@Override
public String toString() {
    return "DeferredImpl{"
            + "state=" + state
            + " (" + _stateToString(state) + ")"
            + '}';
}
// Composite listener types used internally by the then(...) overloads so one
// anonymous class can be registered for several event kinds at once.
private interface _OnReject<T,F> extends OnResolve<T>, OnReject<F> {}
private interface _OnProgress<T,F,P> extends OnResolve<T>, OnReject<F>, OnProgress<P> {}
/**
 * Read-only Iterable view over a snapshot of the events array, filtered down
 * to one event type.
 *
 * NOTE(review): next() only bounds-checks `i`; it relies on hasNext() having
 * advanced `i` to a matching event first. Calling next() repeatedly without
 * interleaved hasNext() calls can yield events of other types — confirm all
 * call sites use the standard hasNext()/next() alternation (for-each does).
 */
private static class EventIterable<T> implements Iterable<T> {
final Event[] events;
final int length;
final byte type;
private EventIterable(final byte type, final Event[] events, final int length) {
this.type = type;
this.events = events;
this.length = length;
}
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
int i = 0;
@Override
public boolean hasNext() {
// Advance i to the next event of the requested type (or to length).
for (; i < length; ++i) {
final Event event = events[i];
if (event.event == type) {
return true;
}
}
return false;
}
@Override
@SuppressWarnings("unchecked")
public T next() throws NoSuchElementException {
if (i >= length) {
throw new NoSuchElementException(Messages.get("THEN-000402.promise.interator"));
}
return (T)events[i++].value;
}
@Override
public void remove() {
// Snapshot is immutable from the iterator's point of view.
throw new UnsupportedOperationException(Messages.get("THEN-000403.promise.interator.remove"));
}
};
}
}
}
|
<reponame>xeqlol/musicql
import {
  GraphQLObjectType,
  GraphQLInt,
  GraphQLString,
  GraphQLList,
  GraphQLSchema,
  GraphQLFloat
} from 'graphql';

import Album from './album';

// GraphQL output type for a single artist record backed by an ORM instance.
const Artist = new GraphQLObjectType({
  name: 'Artist',
  description: 'Artist',
  // `fields` is a thunk so Album and Artist may reference each other circularly.
  fields: () => ({
    artistId: {
      type: GraphQLInt,
      resolve: (artist) => artist.artistId
    },
    name: {
      type: GraphQLString,
      resolve: (artist) => artist.name
    },
    albums: {
      type: new GraphQLList(Album),
      // Delegates to the ORM association accessor (returns a promise).
      resolve: (artist) => artist.getAlbums()
    }
  })
});

export default Artist;
|
# https://github.com/tadija/AEDotFiles
# my.sh
# Re-add every private key to the ssh agent (-K stores the passphrase in the
# macOS keychain). `fd -e pub` lists *.pub files; {.} strips the extension so
# ssh-add receives the matching private-key path.
alias ssh-reload="cd ~/.ssh && fd -e pub -x ssh-add -K {.} && cd -"
# Download, (re)install and launch the Radio.app menu-bar player.
# Hardened: each step is checked; curl -f makes an HTTP error fail the download
# instead of silently saving the error page as radio.zip (which the original
# would then try to unzip and "install").
function my-radio() {
echo "configuring radio..."
cd ~/Downloads || return 1
# -f: fail on HTTP errors; -sS: quiet but still print errors; -L: follow redirects.
curl -fsSL http://tadija.net/random/radio.zip -o radio.zip || { echo "download failed"; cd -; return 1; }
unzip -qq radio.zip || { echo "unzip failed"; rm -f radio.zip; cd -; return 1; }
yes | cp -rf Radio.app /Applications
rm radio.zip
rm -rf Radio.app
open -a /Applications/Radio.app
cd -
echo "ready to play! (check the menu bar)"
}
# NOTE(review): `la` and `rmd` appear to be aliases/functions defined elsewhere
# in these dotfiles — confirm they are loaded before running these standalone.
# Symlink this repository checkout as ~/.dotfiles.
function my-dotfiles() {
ln -s ~/Developer/GitHub/AEDotFiles ~/.dotfiles
la ~/.dotfiles
}
# Shortcut symlink to the iCloud Drive documents folder.
function my-cloud() {
ln -s ~/Library/Mobile\ Documents/com~apple~CloudDocs ~/Cloud
la ~/Cloud
}
# Restore SSH keys from the iCloud sync folder and lock down permissions.
function my-ssh() {
cp -rf ~/Cloud/Documents/Sync/.ssh ~/.ssh
cd ~/.ssh && la
# chmod 400 each private key
fd -e pub -x chmod 400 {.}
# ssh-add -K each private key manually
}
# Install the tmux plugin manager, then reload the running server or start one.
function my-tmux() {
# install tmux plugin manager
git clone https://github.com/tmux-plugins/tpm ~/.tmux/plugins/tpm
if tmux ls; then
# reload tmux
tmux source-file ~/.tmux.conf
echo "tmux reloaded"
else
# start new session
tmux new-session
fi
# prefix + I (load plugins)
# https://github.com/tmux-plugins/tmux-resurrect
# https://github.com/tmux-plugins/tmux-continuum
}
# Install the usual toolchain; setup-* helpers come from these dotfiles.
function my-plugins() {
setup-defaults
echo ""
setup-fzf
echo ""
setup-rbenv
echo ""
setup-fastlane
echo ""
gem install cocoapods
echo ""
}
# Replace ~/Library/Services with the synced copy from iCloud.
function my-services() {
sudo rm -rf ~/Library/Services
ln -s ~/Cloud/Documents/Sync/Services ~/Library/Services
la ~/Library/Services
}
# Point Sublime Text 3 user settings at the synced copy.
function my-sublime() {
rm -rf ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/User
ln -s ~/Cloud/Documents/Sync/Sublime/User ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/User
la ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/User
}
# Link Xcode user data (snippets, themes, key bindings, breakpoints) to iCloud.
function my-xcode() {
cd ~/Library/Developer/Xcode/UserData
rmd CodeSnippets
ln -s ~/Cloud/Documents/Sync/Xcode/CodeSnippets CodeSnippets
rmd FontAndColorThemes
ln -s ~/Cloud/Documents/Sync/Xcode/FontAndColorThemes FontAndColorThemes
rmd KeyBindings
ln -s ~/Cloud/Documents/Sync/Xcode/KeyBindings KeyBindings
rmd xcdebugger
ln -s ~/Cloud/Documents/Sync/Xcode/xcdebugger xcdebugger
la ~/Library/Developer/Xcode/UserData
cd -
# select command line tools version
# import accounts for code signing
}
# Link Sketch libraries/plugins/templates to the synced copies.
function my-sketch() {
cd ~/Library/Application\ Support/com.bohemiancoding.sketch3
rmd Libraries
ln -s ~/Cloud/Documents/Sync/Sketch/Libraries Libraries
rmd Plugins
ln -s ~/Cloud/Documents/Sync/Sketch/Plugins Plugins
rmd Templates
ln -s ~/Cloud/Documents/Sync/Sketch/Templates Templates
la ~/Library/Application\ Support/com.bohemiancoding.sketch3
}
# Link TeaCode expansions to the synced copy.
function my-teacode() {
rmd ~/Library/Application\ Support/com.apptorium.TeaCode-dm
ln -s ~/Cloud/Documents/Sync/TeaCode/com.apptorium.TeaCode-dm ~/Library/Application\ Support/com.apptorium.TeaCode-dm
la ~/Library/Application\ Support/com.apptorium.TeaCode-dm
}
# Install homebridge plus its web UI and register it as a system service.
function my-homebridge() {
sudo npm install -g --unsafe-perm homebridge homebridge-config-ui-x
sudo hb-service install
}
|
#!/usr/bin/env bash
# Travis CI hook: rebuild the GitBook documentation and publish it to the
# gh-pages branch. Skipped for pull requests and for non-master branches.
set -e
DOC_FOLDER="docs"
SITE_FOLDER="site"
MAIN_BRANCH="master"
# Token-authenticated push URL for the current repository.
UPSTREAM="https://$GITHUB_TOKEN@github.com/$TRAVIS_REPO_SLUG.git"
MESSAGE="Rebuild doc for revision $TRAVIS_COMMIT: $TRAVIS_COMMIT_MESSAGE"
AUTHOR="$USER <>"
if [ "$TRAVIS_PULL_REQUEST" != "false" ];then
echo "Documentation won't build on pull request"
exit 0
fi
if [ "$TRAVIS_BRANCH" != "$MAIN_BRANCH" ];then
echo "Documentation won't build: Not on branch $MAIN_BRANCH"
exit 0
fi
function setup() {
npm install -g gitbook-cli
}
# Build the book into $DOC_FOLDER/_book.
function buildDevDocs() {
pushd "$DOC_FOLDER"
gitbook install
gitbook build
popd
}
# Recreate $SITE_FOLDER from the freshly built book.
function merge() {
rm -Rf "$SITE_FOLDER"
mkdir "$SITE_FOLDER"
pushd "$SITE_FOLDER"
cp -a ../"$DOC_FOLDER"/_book/. .
popd
}
# Commit the site on top of the current gh-pages tree and push.
# `git reset upstream/gh-pages` keeps history; commit (and push) are skipped
# when the build produced no changes, since `git commit` then exits non-zero.
function publish() {
pushd "$SITE_FOLDER"
git init
git remote add upstream "$UPSTREAM"
git fetch --prune upstream
git reset upstream/gh-pages
git add --all .
if git commit --message "$MESSAGE" --author "$AUTHOR" ; then
git push --quiet upstream HEAD:gh-pages
fi
popd
}
function main() {
setup && buildDevDocs && merge && publish
}
main
|
// Re-export the already-registered "History" mongoose model.
// NOTE(review): assumes mongoose.model("History", schema) was registered
// elsewhere before this module loads — otherwise mongoose throws a
// MissingSchemaError here. Confirm module load order.
import mongoose from "mongoose";
export const HistoryModel = mongoose.model("History");
|
package chylex.hee.world.feature.stronghold.rooms.traps;
import java.util.List;
import java.util.Random;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.World;
import chylex.hee.entity.mob.EntityMobSilverfish;
import chylex.hee.entity.technical.EntityTechnicalTrigger;
import chylex.hee.entity.technical.EntityTechnicalTrigger.TriggerBase;
import chylex.hee.system.abstractions.Pos.PosMutable;
import chylex.hee.system.abstractions.entity.EntitySelector;
import chylex.hee.world.feature.stronghold.rooms.StrongholdRoom;
import chylex.hee.world.structure.StructureWorld;
import chylex.hee.world.structure.dungeon.StructureDungeonPieceInst;
import chylex.hee.world.util.Size;
/**
 * Stronghold trap room (13x7x13): stone-brick line clusters mirrored into all
 * four corners, plus a hidden technical trigger at the room's centre that
 * ambushes nearby players with silverfish.
 */
public class StrongholdRoomSilverfishTrap extends StrongholdRoom{
public StrongholdRoomSilverfishTrap(){
super(new Size(13, 7, 13));
}
@Override
public void generate(StructureDungeonPieceInst inst, StructureWorld world, Random rand, int x, int y, int z){
super.generate(inst, world, rand, x, y, z);
PosMutable mpos = new PosMutable();
// silverfish
// cornerX/cornerZ in {0,1} mirror the five vertical line placements into
// each of the four corners of the room.
for(int cornerX = 0; cornerX < 2; cornerX++){
for(int cornerZ = 0; cornerZ < 2; cornerZ++){
mpos.setX(x+3+6*cornerX).setZ(z+1+10*cornerZ);
placeLine(world, rand, placeStoneBrick, mpos.x, y+1, mpos.z, mpos.x, y+maxY-1, mpos.z);
mpos.setX(x+3+6*cornerX).setZ(z+2+8*cornerZ);
placeLine(world, rand, placeStoneBrick, mpos.x, y+2, mpos.z, mpos.x, y+maxY-1, mpos.z);
mpos.setX(x+3+6*cornerX).setZ(z+3+6*cornerZ);
placeLine(world, rand, placeStoneBrick, mpos.x, y+1, mpos.z, mpos.x, y+maxY-1, mpos.z);
mpos.setX(x+2+8*cornerX).setZ(z+3+6*cornerZ);
placeLine(world, rand, placeStoneBrick, mpos.x, y+2, mpos.z, mpos.x, y+maxY-1, mpos.z);
mpos.setX(x+1+10*cornerX).setZ(z+3+6*cornerZ);
placeLine(world, rand, placeStoneBrick, mpos.x, y+1, mpos.z, mpos.x, y+maxY-1, mpos.z);
}
}
// spawner
// Invisible technical entity at the room centre runs the trap logic below.
world.addEntity(new EntityTechnicalTrigger(null, x+maxX/2+0.5F, y+1, z+maxZ/2+0.5F, new TriggerSilverfish()));
}
public static class TriggerSilverfish extends TriggerBase{
// Ticks since the last proximity poll; polls every 10 updates while idle.
private int checkTimer = 0;
// Remaining silverfish to spawn; -1 means the trap is not yet armed.
private int spawnsLeft = -1;
@Override
protected void update(EntityTechnicalTrigger entity, World world, Random rand){
if (world.difficultySetting == EnumDifficulty.PEACEFUL)return;
if (spawnsLeft != -1){
// Armed: while players remain in range, each update has a 1-in-3 chance
// of spawning 1-2 silverfish at a random corner offset, each aggroed on
// a randomly chosen nearby player.
List<EntityPlayer> players = EntitySelector.players(world, entity.boundingBox.expand(7D, 4.5D, 7D).offset(0D, 2D, 0D));
if (players.isEmpty() || rand.nextInt(3) != 0)return;
for(int cycle = 0; cycle < 1+rand.nextInt(2); cycle++){
EntityMobSilverfish silverfish = new EntityMobSilverfish(world);
silverfish.setPositionAndRotation(entity.posX+4.5D*(rand.nextInt(2)*2-1), entity.posY+4D, entity.posZ+4.5D*(rand.nextInt(2)*2-1), rand.nextFloat()*360F-180F, 0F);
silverfish.setAttackTarget(players.get(rand.nextInt(players.size())));
silverfish.setCanSummonSilverfish(false);
silverfish.setCanHideInBlocks(false);
world.spawnEntityInWorld(silverfish);
if (--spawnsLeft == 0){
// Quota exhausted: remove the trigger entity permanently.
entity.setDead();
break;
}
}
}
else if (++checkTimer > 10){
checkTimer = 0;
// Arm the trap when a player comes within 5.5 blocks; the spawn quota
// scales with world difficulty plus a small random bonus.
if (world.getClosestPlayerToEntity(entity, 5.5D) != null)spawnsLeft = 6+world.difficultySetting.getDifficultyId()*2+rand.nextInt(4);
}
}
}
}
|
class BankAccount:
    """A minimal bank account tracking a balance and a transaction count.

    A withdrawal that exceeds the balance prints a warning and leaves the
    account untouched (no exception is raised).
    """

    def __init__(self, initial_balance=0):
        self.balance = initial_balance      # current funds
        self.total_transactions = 0         # completed deposits + withdrawals

    def deposit(self, amount):
        """Add ``amount`` to the balance and record the transaction."""
        self.balance = self.balance + amount
        self.total_transactions = self.total_transactions + 1

    def withdraw(self, amount):
        """Remove ``amount`` if covered; otherwise warn and change nothing."""
        if amount <= self.balance:
            self.balance = self.balance - amount
            self.total_transactions = self.total_transactions + 1
        else:
            print("Insufficient funds")

    def get_balance(self):
        """Return the current balance."""
        return self.balance

    def get_total_transactions(self):
        """Return the number of completed transactions."""
        return self.total_transactions
|
package cm.xxx.minos.leetcode;
/**
* 二叉树的最大深度
* Author: lishangmin
* Created: 2018-08-23 10:16
*/
public class Solution73 {
    /**
     * Maximum depth of a binary tree: the number of nodes on the longest
     * root-to-leaf path (0 for an empty tree). Recursive post-order walk.
     */
    public int maxDepth(TreeNode root) {
        if (root == null) {
            return 0;
        }
        final int leftDepth = maxDepth(root.left);
        final int rightDepth = maxDepth(root.right);
        return 1 + Math.max(leftDepth, rightDepth);
    }
}
|
$ twistd -ony AsyncBeatServer.py
|
#!/usr/bin/env bash
# CI cleanup: prune stale containers, build images and volumes left over from
# Vamp builds. Uses -u/-x/pipefail but deliberately no -e — individual cleanup
# failures are tolerated and the script always exits 0 at the end.
set -ux -o pipefail
# Absolute directory containing this script (resolved via a subshell cd).
function get-root-dir() {
local dir=$(dirname "${BASH_SOURCE[0]}")
(cd "${dir}" && pwd)
}
root="$(get-root-dir)"
# Optional per-machine overrides, then shared settings (PRUNE_DURATION, PACKER, ...).
test -f "${root}"/../local.sh && source "${root}"/../local.sh
source "${root}"/common.sh
docker container prune --force --filter "until=${PRUNE_DURATION}"
if [ "${VAMP_VERSION}" != "katana" ]; then
# Substitution turns "build-<N>-" in the version into a wildcard so images
# from any build number of this version are matched.
remote_images=$(docker image ls -f reference="magneticio/vamp*:${VAMP_VERSION/build-${BUILD_NUMBER:=}-/build-*-}*" --format '{{.Repository}}:{{.Tag}}')
local_images=$(docker image ls -f reference="vamp*:${VAMP_VERSION}*" --format '{{.Repository}}:{{.Tag}}')
# Unquoted expansion is intentional: the lists are whitespace-separated args.
test -n "${remote_images}" -o -n "${local_images}" && docker rmi -f ${remote_images} ${local_images}
fi
docker image prune --force --filter "until=${PRUNE_DURATION}"
# Packer build volumes for this job; kept when KEEP_PACKER=true.
volumes=$(docker volume ls -qf name="${PACKER/build-${BUILD_NUMBER:=}-/build-.*-}")
test ${KEEP_PACKER:-false} = "true" || docker volume rm ${volumes}
# Remove dangling volumes except packer-owned ones.
dangling_volumes=$(docker volume ls -f dangling=true -q | grep -vEe '^packer')
test -n "${dangling_volumes}" && docker volume rm ${dangling_volumes}
# Delete built jars from sbt's scala-2.* target dirs ($_ reuses the tested path).
test -d "${root}"/../target && find "${_}" -type d -name 'scala-2.*' | xargs -I {} find {} -maxdepth 1 -type f -name '*.jar' -print -delete
exit 0
|
// True when `s` begins with `prefix` (literal match, not a regex).
function startsWith(s, prefix) {
    return s.slice(0, prefix.length) === prefix;
}
exports.startsWith = startsWith;
// True when `s` ends with `suffix` (literal match). An empty suffix always
// matches, mirroring the indexOf-based behavior this replaces.
function endsWith(s, suffix) {
    if (suffix.length === 0) {
        return true;
    }
    return s.slice(-suffix.length) === suffix;
}
exports.endsWith = endsWith;
/**
 * True when `s` is null/undefined or has zero length.
 * Fix: the original returned `!s && s.length == 0`, which dereferences
 * `.length` on null/undefined — a TypeError in exactly the case the function's
 * name promises to handle. Short-circuiting with || avoids the dereference.
 */
function isNullOrEmpty(s) {
    return !s || s.length === 0;
}
exports.isNullOrEmpty = isNullOrEmpty;
// True when `s` is null/undefined, empty, or contains only whitespace.
function isNullOrWhitespace(s) {
    if (!s) {
        return true;
    }
    return s.trim().length === 0;
}
exports.isNullOrWhitespace = isNullOrWhitespace;
// Backslash-escape every regex metacharacter in `s` so the result can be
// embedded literally in a RegExp. "$&" re-inserts the whole matched character.
function escapeRegExp(s) {
    var metachars = /[.*+?^${}()|[\]\\]/g;
    return s.replace(metachars, "\\$&");
}
exports.escapeRegExp = escapeRegExp;
/**
 * Repeatedly strips one matching pair of surrounding single or double quotes
 * (e.g. '"abc"' -> abc). Mismatched pairs are left untouched.
 * Fix: call the module-local helpers directly instead of via `this.` — the
 * original relied on sloppy-mode `this` being the exports object, which breaks
 * under strict mode / ES modules (where `this` is undefined) and whenever the
 * function is detached from the exports object.
 */
function stripQuotes(s) {
    while ((startsWith(s, "'") && endsWith(s, "'"))
        || ((startsWith(s, "\"") && endsWith(s, "\"")))) {
        s = s.substr(1, s.length - 2);
    }
    return s;
}
exports.stripQuotes = stripQuotes;
/**
 * In-place Fisher–Yates shuffle. Mutates `array` and returns the same
 * reference; each permutation is equally likely (given Math.random).
 */
function shuffleArray(array) {
    var remaining = array.length;
    while (remaining > 0) {
        // Pick a random element from the unshuffled prefix...
        var pick = Math.floor(Math.random() * remaining);
        remaining -= 1;
        // ...and swap it into the last unshuffled slot.
        var held = array[remaining];
        array[remaining] = array[pick];
        array[pick] = held;
    }
    return array;
}
exports.shuffleArray = shuffleArray;
|
from setuptools import setup

# Packaging metadata for the EEGAnalysis sleep-analysis tool.
setup(
    name="EEGAnalysis",
    version="1.0",
    packages=["EEGAnalysis"],
    # Ship non-Python files declared via MANIFEST.in / package data.
    include_package_data=True,
    install_requires=["PySimpleGUI"],
    python_requires='>=3.6',
    entry_points={
        # Installs a `sleep_analysis` console command that runs
        # EEGAnalysis.__main__:main.
        "console_scripts": [
            "sleep_analysis=EEGAnalysis.__main__:main",
        ]
    }
)
|
import 'aurelia-bootstrapper';
import 'aurelia-loader-webpack';
require('../node_modules/bootstrap/dist/css/bootstrap.css');
require('../node_modules/font-awesome/css/font-awesome.css');
require('../styles/styles.css');
/**
 * Aurelia bootstrap entry point: applies the standard configuration plus
 * development logging, then starts the app and attaches the root component.
 *
 * Fix: return the start-up promise instead of leaving it floating, so the
 * bootstrapper (and tests) can await it and observe start-up failures rather
 * than getting an unhandled rejection. Callers that ignored the previous
 * undefined return are unaffected.
 */
export function configure(aurelia) {
  aurelia.use
    .standardConfiguration()
    .developmentLogging();

  return aurelia.start().then(() => aurelia.setRoot());
}
|
#!/bin/sh
# Bulk-remove S3 buckets whose names match a regex.
# Usage: script [--force] [bucket-regex]
# Without --force this is a dry run that only echoes the removal commands.
backup_regex=$2
if [ -z "$2" ]
then
backup_regex="jenkins-.*-backup.*"
fi
echo Querying S3 for buckets matching: ${backup_regex}
if [ "$1" != "--force" ]
then
echo ...Doing dry run. Run script with '--force' to actually remove these buckets
# Prefixing with `echo ` turns each removal below into a printout.
run_cmd="echo "
else
echo ...Forcing removal of matching S3 buckets.
run_cmd="aws s3 rb --force s3://"
fi
echo Running this command for each matching bucket: ${run_cmd}
echo
# NOTE(review): the regex is always read from $2 — passing it as the only
# argument ($1, without --force) silently falls back to the default pattern.
for backup in $( aws s3 ls | grep --only-matching ${backup_regex} ); do
${run_cmd}${backup}
done
|
<filename>test/4_api__company--find_company.js<gh_stars>0
const assert = require( 'assert' );
const request = require( 'supertest' );

describe( 'POST /companies', () => {
    it( 'Find a company from database by not giving full name', () => {
        return require( '../server/app' )
            .then( ( app ) => {
                return request( app )
                    .post( '/companies' )
                    .send( {
                        name: 'Bayer',
                        zip: '32736',
                    } )
                    .expect( 200 )
                    .then( ( res ) => {
                        // Fix: the original called assert(actual, expected) — the
                        // two-argument form of assert() only checks that `actual`
                        // is truthy and treats the second argument as the failure
                        // message, so the expected URL was never compared.
                        assert.strictEqual( res.body.data[ 0 ].website, 'http://pbs.org/fermentum/donec/ut.xml' );
                    } );
            } );
    } );

    it( 'Lookup for companies without zip or name trigger errors', ( done ) => {
        require( '../server/app' )
            .then( ( app ) => {
                request( app )
                    .post( '/companies' )
                    .send( {
                        zip: 'abcde',
                    } )
                    .expect( 500, { message: 'child "name" fails because ["name" is required]' }, done );
            } );
    } );
} );
|
#!/bin/bash
# Launch the MAG workflow via snakemake with SLURM/sbatch job submission.
# --dryrun --summary only reports per-file status; remove both flags to
# actually submit jobs. Cluster resources come from cluster.yaml via the
# {cluster.*} placeholders in the sbatch template below.
# NOTE(review): --profile points at a conda env path here — confirm it is a
# snakemake profile directory, since --cluster already supplies the sbatch
# submission command.
snakemake \
--dryrun --summary --jobs 100 --use-conda -p \
--configfile config.yaml --cluster-config cluster.yaml \
--profile /home/etucker5/miniconda3/envs/S-niv-MAGs \
--cluster "sbatch --parsable --qos=unlim --partition={cluster.queue} \
--job-name=etucker5.{rule}.{wildcards} --mem={cluster.mem}gb \
--time={cluster.time} --ntasks={cluster.threads} --nodes={cluster.nodes}"
|
-- Fetch every column of all books attributed to J.K. Rowling
-- (exact, case-sensitive match depending on the collation in use).
SELECT *
FROM books
WHERE author = 'J.K. Rowling';
|
<filename>ax-boot-admin/src/main/java/com/chequer/axboot/admin/domain/file/CommonFileService.java
package com.chequer.axboot.admin.domain.file;
import com.chequer.axboot.admin.domain.BaseService;
import com.chequer.axboot.core.code.AXBootTypes;
import com.chequer.axboot.core.code.Types;
import com.chequer.axboot.core.parameter.RequestParams;
import com.chequer.axboot.core.utils.EncodeUtils;
import com.querydsl.core.BooleanBuilder;
import lombok.extern.slf4j.Slf4j;
import net.coobird.thumbnailator.Thumbnails;
import net.coobird.thumbnailator.geometry.Positions;
import net.coobird.thumbnailator.name.Rename;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import javax.inject.Inject;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
@Service
@Slf4j
public class CommonFileService extends BaseService<CommonFile, Long> implements InitializingBean {
private CommonFileRepository commonFileRepository;
@Value("${axboot.upload.repository}")
public String uploadRepository;
@Inject
public CommonFileService(CommonFileRepository commonFileRepository) {
super(commonFileRepository);
this.commonFileRepository = commonFileRepository;
}
public void createBaseDirectory() {
try {
FileUtils.forceMkdir(new File(uploadRepository));
} catch (IOException e) {
}
}
public String getTempDir() {
return System.getProperty("java.io.tmpdir");
}
public File multiPartFileToFile(MultipartFile multipartFile) throws IOException {
String baseDir = getTempDir() + "/" + UUID.randomUUID().toString();
FileUtils.forceMkdir(new File(baseDir));
String tmpFileName = baseDir + "/" + FilenameUtils.getName(multipartFile.getOriginalFilename());
File file = new File(tmpFileName);
multipartFile.transferTo(file);
return file;
}
@Transactional
public CommonFile upload(MultipartFile multipartFile, String targetType, String targetId, int sort) throws IOException {
return upload(multiPartFileToFile(multipartFile), targetType, targetId, sort);
}
@Transactional
public CommonFile upload(File file, String targetType, String targetId, int sort) throws IOException {
UploadParameters uploadParameters = new UploadParameters();
uploadParameters.setFile(file);
uploadParameters.setTargetType(targetType);
uploadParameters.setTargetId(targetId);
uploadParameters.setSort(sort);
return upload(uploadParameters);
}
@Transactional
public CommonFile upload(UploadParameters uploadParameters) throws IOException {
File uploadFile = uploadParameters.getFile();
if (uploadFile == null && uploadParameters.getMultipartFile() != null) {
uploadFile = multiPartFileToFile(uploadParameters.getMultipartFile());
}
String targetType = uploadParameters.getTargetType();
String targetId = uploadParameters.getTargetId();
String desc = uploadParameters.getDesc();
boolean deleteIfExist = uploadParameters.isDeleteIfExist();
boolean thumbnail = uploadParameters.isThumbnail();
int sort = uploadParameters.getSort();
int thumbnailWidth = uploadParameters.getThumbnailWidth();
int thumbnailHeight = uploadParameters.getThumbnailHeight();
String fileName = FilenameUtils.getName(uploadFile.getName());
String extension = FilenameUtils.getExtension(fileName);
String fileType = getFileType(extension);
String baseName = UUID.randomUUID().toString();
String saveName = baseName + "." + extension;
String savePath = getSavePath(saveName);
File file = new File(savePath);
FileUtils.copyFile(uploadFile, file);
if (deleteIfExist) {
deleteByTargetTypeAndTargetId(targetType, targetId);
}
CommonFile commonFile = new CommonFile();
commonFile.setTargetType(targetType);
commonFile.setTargetId(targetId);
commonFile.setFileNm(fileName);
commonFile.setSaveNm(saveName);
commonFile.setSort(sort);
commonFile.setDesc(desc);
commonFile.setFileType(fileType);
commonFile.setExtension(FilenameUtils.getExtension(fileName).toUpperCase());
commonFile.setFileSize(file.length());
if (fileType.equals(Types.FileType.IMAGE) && thumbnail) {
try {
Thumbnails.of(file)
.crop(Positions.CENTER)
.size(thumbnailWidth, thumbnailHeight)
.toFiles(new File(getBasePath()), Rename.SUFFIX_HYPHEN_THUMBNAIL);
} catch (Exception e) {
}
}
FileUtils.deleteQuietly(uploadFile);
save(commonFile);
return commonFile;
}
private String getFileType(String extension) {
switch (extension.toUpperCase()) {
case Types.FileExtensions.PNG:
case Types.FileExtensions.JPG:
case Types.FileExtensions.JPEG:
case Types.FileExtensions.GIF:
case Types.FileExtensions.BMP:
case Types.FileExtensions.TIFF:
case Types.FileExtensions.TIF:
return Types.FileType.IMAGE;
case Types.FileExtensions.PDF:
return Types.FileType.PDF;
default:
return Types.FileType.ETC;
}
}
public ResponseEntity<byte[]> downloadById(Long id) throws IOException {
CommonFile commonFile = findOne(id);
return download(commonFile);
}
public ResponseEntity<byte[]> downloadByTargetTypeAndTargetId(String targetType, String targetId) throws IOException {
CommonFile commonFile = get(targetType, targetId);
return download(commonFile);
}
public ResponseEntity<byte[]> download(CommonFile commonFile) throws IOException {
if (commonFile == null)
return null;
byte[] bytes = FileUtils.readFileToByteArray(new File(getSavePath(commonFile.getSaveNm())));
String fileName = EncodeUtils.encodeDownloadFileName(commonFile.getFileNm());
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.setContentType(MediaType.APPLICATION_OCTET_STREAM);
httpHeaders.setContentLength(bytes.length);
httpHeaders.setContentDispositionFormData("attachment", fileName);
return new ResponseEntity<>(bytes, httpHeaders, HttpStatus.OK);
}
public void preview(HttpServletResponse response, Long id, String type) throws IOException {
CommonFile commonFile = findOne(id);
if (commonFile == null)
return;
MediaType mediaType = null;
String imagePath = "";
switch (commonFile.getExtension()) {
case Types.FileExtensions.JPEG:
case Types.FileExtensions.JPG:
mediaType = MediaType.IMAGE_JPEG;
break;
case Types.FileExtensions.PNG:
mediaType = MediaType.IMAGE_PNG;
break;
case Types.FileExtensions.GIF:
mediaType = MediaType.IMAGE_GIF;
break;
default:
}
switch (type) {
case Types.ImagePreviewType.ORIGIN:
imagePath = getSavePath(commonFile.getSaveNm());
break;
case Types.ImagePreviewType.THUMBNAIL:
imagePath = getSavePath(commonFile.getThumbnailFileName());
break;
}
if (mediaType != null) {
byte[] bytes = FileUtils.readFileToByteArray(new File(imagePath));
response.setContentType(mediaType.toString());
response.setContentLength(bytes.length);
IOUtils.copy(FileUtils.openInputStream(new File(imagePath)), response.getOutputStream());
}
}
public void preview(HttpServletResponse response, Long id) throws IOException {
preview(response, id, Types.ImagePreviewType.ORIGIN);
}
public void thumbnail(HttpServletResponse response, Long id) throws IOException {
preview(response, id, Types.ImagePreviewType.THUMBNAIL);
}
public String getBasePath() {
return uploadRepository;
}
public String getSavePath(String saveName) {
return getBasePath() + "/" + saveName;
}
public byte[] getFileBytes(String saveName) throws IOException {
return FileUtils.readFileToByteArray(new File(getSavePath(saveName)));
}
public Page<CommonFile> getList(RequestParams<CommonFile> requestParams) {
String targetType = requestParams.getString("targetType", "");
String targetId = requestParams.getString("targetId", "");
String delYn = requestParams.getString("delYn", "");
String targetIds = requestParams.getString("targetIds", "");
requestParams.addSort("sort", Sort.Direction.ASC);
requestParams.addSort("id", Sort.Direction.DESC);
Pageable pageable = requestParams.getPageable();
BooleanBuilder builder = new BooleanBuilder();
if (isNotEmpty(targetType)) {
builder.and(qCommonFile.targetType.eq(targetType));
}
if (isNotEmpty(targetId)) {
builder.and(qCommonFile.targetId.eq(targetId));
}
if (isNotEmpty(delYn)) {
AXBootTypes.Deleted deleted = AXBootTypes.Deleted.get(delYn);
builder.and(qCommonFile.delYn.eq(deleted));
}
if (isNotEmpty(targetIds)) {
Set<String> _ids = Arrays.stream(targetIds.split(",")).collect(Collectors.toSet());
builder.and(qCommonFile.targetId.in(_ids));
}
return findAll(builder, pageable);
}
public CommonFile get(RequestParams<CommonFile> requestParams) {
List<CommonFile> commonFiles = getList(requestParams).getContent();
return isEmpty(commonFiles) ? null : commonFiles.get(0);
}
/** Convenience lookup of the first file attached to (targetType, targetId). */
public CommonFile get(String targetType, String targetId) {
    RequestParams<CommonFile> requestParams = new RequestParams<>(CommonFile.class);
    requestParams.put("targetType", targetType);
    requestParams.put("targetId", targetId);
    return get(requestParams);
}
/** Ensures the upload base directory exists once this bean is initialized. */
@Override
public void afterPropertiesSet() throws Exception {
    createBaseDirectory();
}
/** Hard-deletes the file row with the given id (bypasses the delYn soft-delete flag). */
@Transactional
public void deleteFile(Long id) {
    delete(qCommonFile).where(qCommonFile.id.eq(id)).execute();
}
/** Hard-deletes all file rows matching the target type and any of the given ids. */
@Transactional
public void deleteByTargetTypeAndTargetIds(String targetType, Set<String> targetIds) {
    delete(qCommonFile).where(qCommonFile.targetType.eq(targetType).and(qCommonFile.targetId.in(targetIds))).execute();
}
/**
 * Hard-deletes every file row matching the given target type and id.
 *
 * BUG FIX: this method was declared private. Spring's proxy-based AOP does
 * not apply {@code @Transactional} advice to private methods (and self-calls
 * through {@code this} bypass the proxy entirely), so the annotation was
 * silently ignored. Widening to public is backward compatible and makes the
 * transactional boundary effective for external callers.
 */
@Transactional
public void deleteByTargetTypeAndTargetId(String targetType, String targetId) {
    delete(qCommonFile).where(qCommonFile.targetType.eq(targetType).and(qCommonFile.targetId.eq(targetId))).execute();
}
/**
 * Applies client-side file-list edits in one pass: rows flagged as deleted
 * are removed, all others have their target type/id re-pointed.
 */
@Transactional
public void updateOrDelete(List<CommonFile> commonFileList) {
    for (CommonFile file : commonFileList) {
        if (file.isDeleted()) {
            deleteFile(file.getId());
        } else {
            update(qCommonFile).set(qCommonFile.targetType, file.getTargetType()).set(qCommonFile.targetId, file.getTargetId()).where(qCommonFile.id.eq(file.getId())).execute();
        }
    }
}
}
|
<reponame>zanachka/scrapydd<gh_stars>1-10
from sqlalchemy import *
from migrate import *
meta = MetaData()
def upgrade(migrate_engine):
    """Add a nullable ``log`` TEXT column to the ``webhook_jobs`` table."""
    meta.bind = migrate_engine
    webhook_jobs = Table('webhook_jobs', meta, autoload=True)
    webhook_jobs_log = Column('log', Text)
    webhook_jobs_log.create(webhook_jobs)
def downgrade(migrate_engine):
    """Reverse of :func:`upgrade`: drop the ``log`` column again."""
    meta.bind = migrate_engine
    webhook_jobs = Table('webhook_jobs', meta, autoload=True)
    webhook_jobs.c['log'].drop()
|
package com.breakersoft.plow.scheduler.dao;
import com.breakersoft.plow.rnd.thrift.RunningTask;
/**
 * DAO for persisting runtime statistics reported for a running task.
 */
public interface StatsDao {
    /** Updates proc-level runtime stats from the given task report. Return semantics are implementation-defined — TODO confirm (presumably whether a row was updated). */
    boolean updateProcRuntimeStats(RunningTask task);

    /** Updates task-level runtime stats from the given task report. Return semantics are implementation-defined — TODO confirm. */
    boolean updateTaskRuntimeStats(RunningTask task);
}
|
<reponame>Soreine/hyper-gwent<filename>website/RandomCard.js
// @flow
/* @jsx h */
/* global document */
import {
// eslint-disable-next-line
h,
render,
Component
} from 'preact';
import type { Card } from '../core/types';
const CHANGE_CARD_DELAY = 3000; // ms
function getRandomCard(cards: { [CardID]: Card }): Card {
const ids = Object.keys(cards);
const randomIndex = Math.floor(Math.random() * ids.length);
const randomId = ids[randomIndex];
return cards[randomId];
}
class RandomCard extends Component<
{
cards: {
[CardID]: Card
}
},
{
card: Card
}
> {
interval: IntervalID;
constructor(props) {
super();
const { cards } = props;
this.state = {
card: getRandomCard(cards)
};
}
componentDidMount() {
this.interval = setInterval(this.changeCard, CHANGE_CARD_DELAY);
}
componentWillUnmount() {
clearInterval(this.interval);
}
changeCard = () => {
const { cards } = this.props;
this.setState({
card: getRandomCard(cards)
});
};
render() {
const { card } = this.state;
return (
<div>
{card.name}
<br />
</div>
);
}
}
function start(cards: { [CardID]: Card }) {
const target = document.getElementById('random-card');
if (target) {
render(<RandomCard cards={cards} />, target);
}
}
export { start };
|
import numpy as np
import pandas as pd
from tqdm.notebook import tqdm
def get_adj_matrix(data_df, path):
    """Assemble per-sample adjacency tensors for RNA graph models.

    Concatenates three signals along the last axis:
      1. base-pairing probability matrices loaded from ``{path}/bpps/{id}.npy``
      2. a 0/1 pairing matrix derived from the dot-bracket ``structure`` column
      3. inverse-distance matrices ``1 / (|i - j| + 1) ** p`` for p in {1, 2, 4}

    Returns a float32 array of shape (n_samples, L, L, 5).
    Assumes every sample has the same length L as the loaded bpps matrices
    (axis 1 of ``As``) — TODO confirm for variable-length data.
    """
    As = []
    for id in tqdm(data_df["id"]):
        a = np.load(f"{path}/bpps/{id}.npy")
        As.append(a)
    As = np.array(As)
    ## get adjacent matrix from structure sequence
    sequence_structure_adj = []
    for i in tqdm(range(len(data_df))):
        seq_length = data_df["seq_length"].iloc[i]
        structure = data_df["structure"].iloc[i]
        sequence = data_df["sequence"].iloc[i]
        cue = []
        # One pairing matrix per base-pair kind; they are summed below, so
        # the split by pair type is currently informational only.
        a_structures = {
            ("A", "U"): np.zeros([seq_length, seq_length]),
            ("C", "G"): np.zeros([seq_length, seq_length]),
            ("U", "G"): np.zeros([seq_length, seq_length]),
            ("U", "A"): np.zeros([seq_length, seq_length]),
            ("G", "C"): np.zeros([seq_length, seq_length]),
            ("G", "U"): np.zeros([seq_length, seq_length]),
        }
        a_structure = np.zeros([seq_length, seq_length])
        # Match parentheses in the dot-bracket string with a stack: each ')'
        # closes the most recent '(' and marks the pair symmetrically.
        # NOTE(review): this inner loop reuses the outer loop variable name
        # ``i``; harmless because the outer ``for`` reassigns it each
        # iteration, but worth renaming for readability.
        for i in range(seq_length):
            if structure[i] == "(":
                cue.append(i)
            elif structure[i] == ")":
                start = cue.pop()
                a_structures[(sequence[start], sequence[i])][start, i] = 1
                a_structures[(sequence[i], sequence[start])][i, start] = 1
        a_strc = np.stack([a for a in a_structures.values()], axis=2)
        a_strc = np.sum(a_strc, axis=2, keepdims=True)
        sequence_structure_adj.append(a_strc)
    sequence_structure_adj = np.array(sequence_structure_adj)
    print(sequence_structure_adj.shape)
    ## adjacent matrix based on distance on the sequence
    ## D[i, j] = 1 / (abs(i - j) + 1) ** pow, pow = 1, 2, 4
    idx = np.arange(As.shape[1])
    distance_matrix = []
    for i in range(len(idx)):
        distance_matrix.append(np.abs(idx[i] - idx))
    distance_matrix = np.array(distance_matrix) + 1
    distance_matrix = 1 / distance_matrix
    distance_matrix = distance_matrix[None, :, :]
    distance_matrix = np.repeat(distance_matrix, len(As), axis=0)
    Dss = []
    for i in [1, 2, 4]:
        Dss.append(distance_matrix ** i)
    distance_matrix = np.stack(Dss, axis=3)
    print(distance_matrix.shape)
    adjacency_matrix = np.concatenate(
        [As[:, :, :, None], sequence_structure_adj, distance_matrix], axis=3
    ).astype(np.float32)
    return adjacency_matrix
def _one_hot_map(vocab):
    """Return {symbol: one-hot list} for the given ordered vocabulary."""
    mapping = {}
    for i, s in enumerate(vocab):
        mapping[s] = [0] * len(vocab)
        mapping[s][i] = 1
    return mapping


def _encode_column(series, vocab):
    """One-hot encode each character of every string in ``series``.

    Returns an int array of shape (n_samples, seq_len, len(vocab));
    assumes all strings share the same length.
    """
    mapping = _one_hot_map(vocab)
    return np.stack(series.apply(lambda x: list(map(lambda y: mapping[y], list(x)))))


def get_node_features(train):
    """Build per-nucleotide node features from ``train``.

    Features, concatenated along the last axis:
      * one-hot of the base (A, G, C, U)                      -> 4 dims
      * one-hot of the predicted loop type (S,M,I,B,H,E,X)    -> 7 dims
      * one-hot of each observed (base, loop-type) combination -> variable dims

    The duplicated hand-rolled one-hot encoding of the original was factored
    into ``_encode_column``; behavior and output are unchanged.

    Returns a float32 array of shape (n_samples, seq_len, n_features).
    """
    X_node = _encode_column(train["sequence"], ["A", "G", "C", "U"])
    X_loop = _encode_column(
        train["predicted_loop_type"], ["S", "M", "I", "B", "H", "E", "X"]
    )
    X_node = np.concatenate([X_node, X_loop], axis=2)
    ## interaction: encode each position's feature bits as a single integer,
    ## then one-hot over the distinct combinations actually observed.
    a = np.sum(X_node * (2 ** np.arange(X_node.shape[2])[None, None, :]), axis=2)
    vocab = sorted(set(a.flatten()))
    print(vocab)
    ohes = []
    for v in vocab:
        ohes.append(a == v)
    ohes = np.stack(ohes, axis=2)
    X_node = np.concatenate([X_node, ohes], axis=2).astype(np.float32)
    print(X_node.shape)
    return X_node
def augmentation(df, aug_df):
    """Append augmented (re-predicted structure) rows for ids present in ``df``.

    ``aug_df`` rows matching ``df``'s ids inherit the original rows' extra
    columns via a merge on (id, sequence); the originals are stamped with
    ``log_gamma = 100`` and ``score = 1.0`` and the combined frame returned.

    BUG FIX: ``DataFrame.append`` was deprecated in pandas 1.4 and removed in
    pandas 2.0; replaced with the equivalent ``pd.concat``.
    """
    target_df = df.copy()
    new_df = aug_df[aug_df["id"].isin(target_df["id"])]
    # Drop the columns the augmentation replaces before merging the rest in.
    del target_df["structure"]
    del target_df["predicted_loop_type"]
    new_df = new_df.merge(target_df, on=["id", "sequence"], how="left")
    df["cnt"] = df["id"].map(new_df[["id", "cnt"]].set_index("id").to_dict()["cnt"])
    df["log_gamma"] = 100
    df["score"] = 1.0
    df = pd.concat([df, new_df[df.columns]])
    return df
|
package com.donfyy.viewexample.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.graphics.Rect;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.View;
import com.donfyy.util.Utils;
/**
 * Demo view exercising Android's text-drawing APIs: drawPosText,
 * drawTextOnPath, rotated text, shadow layers, horizontal text scaling,
 * and vertical centering via getTextBounds vs. FontMetrics.
 * All coordinates are demo values, mostly scaled by screen density.
 */
public class TextDrawView extends View {
    private final int mRadius = Utils.dp2px(100);
    private final int mCy = Utils.dp2px(550);
    // Screen density used to convert dp-style constants to pixels by hand.
    private float mDensity;
    private Paint mPaint;
    // NOTE(review): this field is shadowed by a local `text` in onDraw and
    // appears unused; kept as-is since this is a documentation-only pass.
    private String text = "澳大利亚曾质疑过日本科研捕鲸的真实性。2010年,澳大利亚政府曾向海牙国际法院提起诉讼,控告日本在南冰洋的“科研”捕鲸活动实则是商业捕鲸。2014年,国际法院对此作出终审裁决,认定日本“出于科研目的”的捕鲸理由不成立,其捕鲸行为违背了《国际捕鲸管制公约》。日本表示尊重国际法院的裁决,并有所收敛了一段时间,但捕鲸活动仍未终止。2018年9月,在IWC的巴西峰会上,日本重提恢复商业捕鲸的诉求,但又一次遭到委员会的否决。这被视为日本最终退出该组织的直接原因被“科研”捕杀的鲸鱼,是如何被送上餐桌的?以科研名义被捕杀的鲸鱼,最后被输送到日本国内,满足人们的口腹之欲。负责执行这一系列动作的是一个名为日本鲸类研究所的机构,其上属机构是日本水产厅。日本鲸类研究所对鲸鱼肉有一个有趣的称呼:科研调查的副产物。他们称,根据《国际捕鲸规则公约》第8条的规定,调查后的鲸鱼体应被尽可能充分地利用。因而在鲸鱼被捕捞到渔船上并完成了对其体型、皮脂、胃内容物等款项的检测后,鲸体即会被拆解,用于鲸肉消费品的生产。当渔船抵达日本后,一块块的鲸肉会被分送给日本各级消费市场,或是以远低于市场价的价格出售给各地政府、供应于日本小学生的午餐中。";
    float[] curWidth = new float[1];
    private int mScreenHeight;

    public TextDrawView(Context context) {
        super(context);
        init(context);
    }

    public TextDrawView(Context context, AttributeSet set) {
        super(context, set);
        init(context);
    }

    private void init(Context context){
        // NOTE(review): the freshly constructed DisplayMetrics is immediately
        // replaced by the system's instance; the first allocation is unused.
        DisplayMetrics displayMetrics = new DisplayMetrics();
        displayMetrics = context.getResources().getDisplayMetrics();
        mDensity = displayMetrics.density;
        mPaint = new Paint();
        mScreenHeight = Utils.getScreenHeight(context);
    }

    @Override
    protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        // UNSPECIFIED happens e.g. inside a ScrollView; request twice the
        // screen height so all the demo content below is reachable.
        if(heightMode == MeasureSpec.UNSPECIFIED){
            setMeasuredDimension(widthSize,mScreenHeight *2);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        mPaint.setColor(Color.RED); // brush color
        mPaint.setStrokeWidth(5);// stroke width
        mPaint.setAntiAlias(true);
        // Anti-aliasing smooths glyph edges at the cost of drawing speed.
        mPaint.setTextSize(80);// text size
        mPaint.setStyle(Style.FILL);// fill style for glyphs
        // One (x, y) pair per glyph for drawPosText below.
        float[] pos = new float[] { 80, 100, 80, 200, 80, 300, 80, 400,
                25 * mDensity, 30 * mDensity,
                25 * mDensity, 60 * mDensity,
                25 * mDensity, 90 * mDensity,
                25 * mDensity, 120 * mDensity,};
        canvas.drawPosText("画图示例", pos, mPaint);// drawPosText has two overloads (deprecated API, used here for demo)

        // Text along a straight vertical path.
        Path lineTextPath = new Path();
        lineTextPath.moveTo(65 * mDensity, 5 * mDensity);
        lineTextPath.lineTo(65 * mDensity, 200 * mDensity);
        canvas.drawPath(lineTextPath, mPaint);
        canvas.drawTextOnPath("画图示例string1", lineTextPath, 0, 11, mPaint);

        // Vertical text via canvas transform: translate then rotate 90°.
        canvas.save();
        canvas.translate(100 * mDensity, 5 * mDensity);
        canvas.rotate(90);
        canvas.drawText("画图示例string2", 0, 11, 0, 0, mPaint);
        canvas.restore();

        canvas.save();
        mPaint.setShadowLayer(10, 15, 15, Color.GREEN);// shadow layer
        canvas.drawText("画图示例string3", 0, 11, 140 * mDensity, 35 *mDensity, mPaint);// shadow applies to text
        canvas.drawCircle(200 * mDensity, 150 * mDensity, 40 * mDensity, mPaint);// but (in this mode) not to shapes
        canvas.restore();

        // Horizontal scaling: draw the same glyph at increasing scaleX.
        for (int i = 0; i < 6; i++) {
            mPaint.setTextScaleX(0.4f + 0.3f * i);
            canvas.drawText("画", 0, 1,
                    5* mDensity + 50 * mDensity * i, 250 * mDensity, mPaint);
        }

        // Text along an arbitrary (cubic Bezier) path.
        Path bSplinePath = new Path();
        bSplinePath.moveTo(5 * mDensity, 320 * mDensity);
        bSplinePath.cubicTo(80 * mDensity, 260 * mDensity,
                200 * mDensity, 480 * mDensity,
                350 * mDensity,350 * mDensity);
        mPaint.setStyle(Style.STROKE);
        // Draw the path itself first...
        canvas.drawPath(bSplinePath, mPaint);
        // ...then lay the text out along it.
        String text = "风萧萧兮易水寒,壮士一去兮不复返";
        mPaint.setColor(Color.GRAY);
        mPaint.setTextScaleX(1.0f);
        mPaint.setTextSize(20 * mDensity);
        canvas.drawTextOnPath(text, bSplinePath, 0, 15, mPaint);
        mPaint.reset();
        canvas.drawLine(0,Utils.dp2px(420),getMeasuredWidth(),Utils.dp2px(420),mPaint);

        // Text measurement demo:
        // draw a reference circle...
        mPaint.setStyle(Style.STROKE);
        mPaint.setColor(Color.GRAY);
        mPaint.setStrokeWidth(Utils.dp2px(15));
        canvas.drawCircle(Utils.dp2px(110),mCy,mRadius,mPaint);
        mPaint.setStyle(Style.STROKE);
        mPaint.setColor(Color.GRAY);
        mPaint.setStrokeWidth(Utils.dp2px(15));
        canvas.drawCircle(Utils.dp2px(320),mCy,mRadius,mPaint);
        // ...and an arc over it.
        mPaint.setColor(Color.GREEN);
        mPaint.setStrokeCap(Paint.Cap.ROUND);
        RectF rectArc = new RectF(Utils.dp2px(10),mCy - mRadius,Utils.dp2px(210),mCy + mRadius);
        canvas.drawArc(rectArc,-90,225,false,mPaint);
        Paint paintLine = new Paint();// counter-example: allocating a Paint in onDraw causes GC churn
        paintLine.setStyle(Style.STROKE);
        canvas.drawLine(0,mCy,getWidth(),mCy,paintLine);
        canvas.drawLine(Utils.dp2px(110),mCy - mRadius,Utils.dp2px(110),mCy + mRadius,paintLine);

        // Vertically centered text.
        mPaint.setStyle(Style.FILL);
        mPaint.setTextAlign(Paint.Align.CENTER);
        mPaint.setTextSize(Utils.dp2px(50));
        // 1. Center using getTextBounds (tight bounds of the actual glyphs).
        Rect rect = new Rect();
        mPaint.getTextBounds("fgab",0,4,rect);
        float offsety = (rect.top + rect.bottom)/2;
        canvas.drawText("fgab",Utils.dp2px(110),mCy - offsety,mPaint);
        Rect rect1 = new Rect();
        // mPaint.getTextBounds("aaaa",0,4,rect1);
        // 2. Center using FontMetrics (font-wide ascent/descent).
        Paint.FontMetrics fontMetrics = new Paint.FontMetrics();
        mPaint.getFontMetrics(fontMetrics);
        float offsety2 = (fontMetrics.ascent + fontMetrics.descent)/2;
        float offsety1 = (rect1.top + rect1.bottom)/2;
        canvas.drawText("aaaa",Utils.dp2px(320),mCy - offsety2,mPaint);
        mPaint.reset();
        canvas.drawLine(0,Utils.dp2px(680),getMeasuredWidth(),Utils.dp2px(680),mPaint);

        // Text drawing, part 2: left-aligned text flush to x = 0 via bounds.left.
        mPaint.setStyle(Style.FILL);
        mPaint.setTextAlign(Paint.Align.LEFT);
        mPaint.setTextSize(Utils.dp2px(150));
        Rect rect3 = new Rect();
        mPaint.getTextBounds("aaaa",0,4,rect3);
        canvas.drawText("aaaa",0 - rect3.left,Utils.dp2px(800),mPaint);
        mPaint.setTextSize(Utils.dp2px(15));
        canvas.drawText("aaaa",0,Utils.dp2px(800) + mPaint.getFontSpacing(),mPaint);
    }
}
|
#! /bin/sh
# Installer for the "ssrserver" koolshare soft-center module.
export KSROOT=/koolshare
# Shared helper functions for koolshare scripts.
source $KSROOT/scripts/base.sh
# Import all existing ssrserver_* dbus values into this shell's environment.
eval `dbus export ssrserver_`
mkdir -p $KSROOT/init.d
cd /tmp
# Copy the unpacked module payload into the koolshare tree.
cp -rf /tmp/ssrserver/scripts/* $KSROOT/scripts/
cp -rf /tmp/ssrserver/init.d/* $KSROOT/init.d/
cp -rf /tmp/ssrserver/webs/* $KSROOT/webs/
cp /tmp/ssrserver/uninstall.sh $KSROOT/scripts/uninstall_ssrserver.sh
chmod +x $KSROOT/scripts/ssrserver_*
chmod +x $KSROOT/init.d/S99ssrserver.sh
# Register the module with the soft-center UI via dbus
# (description value is user-facing text and intentionally left as-is).
dbus set softcenter_module_ssrserver_description=科学上网服务器
dbus set softcenter_module_ssrserver_install=1
dbus set softcenter_module_ssrserver_name=ssrserver
dbus set softcenter_module_ssrserver_title="SSR Server"
dbus set softcenter_module_ssrserver_version=0.1
sleep 1
# Clean up the unpacked installer payload.
rm -rf /tmp/ssrserver >/dev/null 2>&1
|
#!/bin/bash
# Harden system library directories: strip group/other write permission from
# both directories and regular files so unprivileged users cannot tamper with
# shared libraries.
DIRS="/lib /lib64 /usr/lib /usr/lib64"
for dirPath in $DIRS; do
    find "$dirPath" -type d -exec chmod go-w '{}' \;
    # BUG FIX: files previously used "go+w", which GRANTED group/other write
    # access to every library file -- the opposite of the hardening intent.
    find "$dirPath" -type f -exec chmod go-w '{}' \;
done
|
<filename>src/main/java/xyz/brassgoggledcoders/opentransport/api/blockwrappers/IGuiInterface.java
package xyz.brassgoggledcoders.opentransport.api.blockwrappers;
import net.minecraft.client.gui.Gui;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Container;
import xyz.brassgoggledcoders.opentransport.api.entities.IHolderEntity;
/**
 * Supplies the client GUI and the server-side Container for a wrapped block
 * hosted on a holder entity (presumably a transport entity carrying the
 * block — TODO confirm against IHolderEntity usage).
 */
public interface IGuiInterface {
    /** Client-side GUI for the wrapped block. */
    Gui getGUI(EntityPlayer entityPlayer, IHolderEntity holderEntity, IBlockWrapper blockWrapper);

    /** Server-side container backing the GUI. */
    Container getContainer(EntityPlayer entityPlayer, IHolderEntity holderEntity, IBlockWrapper blockWrapper);
}
|
import SwiftUI
import Combine
/// A minimal GitHub repository record decoded from the REST API.
///
/// BUG FIX: `Decodable` conformance added. `JSONDecoder().decode([Repository].self, ...)`
/// (used by `GithubViewModel.fetchRepositories`) requires the element type to
/// conform to `Decodable`; without it the decode call does not compile.
/// The memberwise initializer is still synthesized, so existing callers are
/// unaffected.
struct Repository: Decodable {
    let name: String
    let url: String
}
/// View model that loads the public GitHub repository listing over HTTP.
/// Note: the decode step requires `Repository` to conform to `Decodable`.
class GithubViewModel: ObservableObject {
    /// Repositories published to the UI; updated on the main queue.
    @Published var repositories: [Repository] = []
    /// Keeps the fetch subscription alive for the lifetime of the view model.
    private var cancellables = Set<AnyCancellable>()

    /// Fetches the repository list; errors are logged and leave the current
    /// `repositories` value untouched.
    func fetchRepositories() {
        guard let url = URL(string: "https://api.github.com/repositories") else {
            return
        }
        URLSession.shared.dataTaskPublisher(for: url)
            .map { $0.data }
            .decode(type: [Repository].self, decoder: JSONDecoder())
            .receive(on: DispatchQueue.main)
            .sink { completion in
                if case .failure(let error) = completion {
                    print("Error fetching repositories: \(error)")
                }
            } receiveValue: { [weak self] repositories in
                self?.repositories = repositories
            }
            .store(in: &cancellables)
    }
}
|
#!/bin/bash
# coreos-osx-install.command
#
# Installs the CoreOS-OSX helper files into the user's home directory.
# "$1" is the app's Resources folder.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source "${DIR}"/functions.sh
# Create in "coreos-osx" all required folders and files at the user's home
# folder where all the data will be stored.
# FIX: use -p so re-running the installer is idempotent (plain mkdir errors
# out when the folders already exist).
mkdir -p ~/coreos-osx
mkdir -p ~/coreos-osx/tmp
mkdir -p ~/coreos-osx/logs
mkdir -p ~/coreos-osx/bin
mkdir -p ~/coreos-osx/cloud-init
mkdir -p ~/coreos-osx/settings
mkdir -p ~/coreos-osx/docker_images
mkdir -p ~/coreos-osx/rkt_images
# cd to App's Resources folder
cd "$1"
# copy files to ~/coreos-osx/bin
cp -f "$1"/bin/* ~/coreos-osx/bin
chmod 755 ~/coreos-osx/bin/*
# FIX: ensure ~/bin exists before dropping the corevm helper there
# (previously this cp failed on systems without ~/bin).
mkdir -p ~/bin
cp -f "$1"/bin/corevm ~/bin
# copy user-data
cp -f "$1"/cloud-init/* ~/coreos-osx/cloud-init
# copy settings
cp -f "$1"/settings/* ~/coreos-osx/settings
# check if iTerm.app exists; install the bundled copy when it is missing
App="/Applications/iTerm.app"
if [ ! -d "$App" ]
then
    unzip "$1"/files/iTerm2.zip -d /Applications/
fi
# check corectld server
check_corectld_server
# initial init
open -a iTerm.app "$1"/first-init.command
|
<gh_stars>1-10
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
#include <dlfcn.h>
#include <stdio.h>
#include "opensslshim.h"
// Define pointers to all the used OpenSSL functions
#define PER_FUNCTION_BLOCK(fn, isRequired) decltype(fn) fn##_ptr;
FOR_ALL_OPENSSL_FUNCTIONS
#undef PER_FUNCTION_BLOCK
// x.x.x, considering the max number of decimal digits for each component
static const int MaxVersionStringLength = 32;
#define SONAME_BASE "libssl.so."
static void* libssl = nullptr;
// Attempts to locate a usable libssl, honoring an explicit version override
// before probing a list of well-known sonames. Returns true when a library
// handle was obtained.
bool OpenLibrary()
{
    // An override of the version may be specified via the
    // CLR_OPENSSL_VERSION_OVERRIDE env variable; the value is expected to be
    // the version numbers, like 1.0.0, 1.0.2 etc.
    const char* versionOverride = getenv("CLR_OPENSSL_VERSION_OVERRIDE");
    if ((versionOverride != nullptr) && strnlen(versionOverride, MaxVersionStringLength + 1) <= MaxVersionStringLength)
    {
        char soName[sizeof(SONAME_BASE) + MaxVersionStringLength] = SONAME_BASE;
        strcat(soName, versionOverride);
        libssl = dlopen(soName, RTLD_LAZY);
    }

    // Probe known sonames in priority order:
    //  - 1.0.2 first: Debian 9 dropped SSLv3 support and bumped the soname;
    //    trying it first makes it less probable that some other dependency
    //    ends up loading a conflicting version of libssl.
    //  - 1.0.0: the default versioned soname described in the OpenSSL docs.
    //  - 10: the naming used by Fedora-derived distros for version 1.0.0.
    static const char* const knownSoNames[] = {
        "libssl.so.1.0.2",
        "libssl.so.1.0.0",
        "libssl.so.10",
    };
    for (size_t i = 0; libssl == nullptr && i < sizeof(knownSoNames) / sizeof(knownSoNames[0]); ++i)
    {
        libssl = dlopen(knownSoNames[i], RTLD_LAZY);
    }

    return libssl != nullptr;
}
// Runs automatically at library load: opens libssl and resolves every shim
// symbol up front, aborting the process when libssl or any required symbol
// is missing so later calls never hit a null function pointer.
__attribute__((constructor))
void InitializeOpenSSLShim()
{
    if (!OpenLibrary())
    {
        fprintf(stderr, "No usable version of the libssl was found\n");
        abort();
    }

    // Get pointers to all the OpenSSL functions that are needed
    // (previous comment said "ICU" -- a copy/paste leftover).
#define PER_FUNCTION_BLOCK(fn, isRequired) \
    fn##_ptr = reinterpret_cast<decltype(fn)>(dlsym(libssl, #fn)); \
    if ((fn##_ptr) == NULL && isRequired) { fprintf(stderr, "Cannot get required symbol " #fn " from libssl\n"); abort(); }
FOR_ALL_OPENSSL_FUNCTIONS
#undef PER_FUNCTION_BLOCK
}
// Runs automatically at library unload: releases the libssl handle if one
// was opened.
__attribute__((destructor))
void ShutdownOpenSSLShim()
{
    if (libssl != nullptr)
    {
        dlclose(libssl);
    }
}
|
package com.yhy.alang.promise;
import android.os.Handler;
import android.os.Looper;
/**
* author : 颜洪毅
* e-mail : <EMAIL>
* time : 2019-03-23 14:07
* version: 1.0.0
* desc : Promise链式回调
*/
/**
 * Minimal Promise-style chained-callback helper: an {@link Executor} produces
 * either a value or an error, and the registered callbacks are delivered on
 * the Android main (UI) thread.
 *
 * BUG FIX: {@link #execute()} previously dereferenced {@code mThen} /
 * {@code mCaught} unconditionally, throwing a NullPointerException on the
 * main thread whenever {@code then()} or {@code caught()} had not been
 * registered. Both callbacks are now optional.
 *
 * @param <T> type of the success value
 * @param <E> type of the error value
 */
public class Promise<T, E> {
    private Executor<T, E> mExecutor;
    private Then<T> mThen;
    private Caught<E> mCaught;
    private Handler mHandler;

    public Promise(Executor<T, E> executor) {
        mExecutor = executor;
        // All callbacks are posted to the main looper.
        mHandler = new Handler(Looper.getMainLooper());
    }

    public static <T, E> Promise<T, E> get(Executor<T, E> executor) {
        return new Promise<>(executor);
    }

    /**
     * Registers the success callback.
     *
     * @param then callback invoked with the result on the main thread
     * @return this Promise, for chaining
     */
    public Promise<T, E> then(Then<T> then) {
        mThen = then;
        return this;
    }

    /**
     * Registers the error callback.
     *
     * @param caught callback invoked with the error on the main thread
     * @return this Promise, for chaining
     */
    public Promise<T, E> caught(Caught<E> caught) {
        mCaught = caught;
        return this;
    }

    /**
     * Runs the executor; its outcome is forwarded to whichever callbacks
     * have been registered (unregistered callbacks are simply skipped).
     */
    public void execute() {
        mExecutor.execute(data -> {
            mHandler.post(() -> {
                if (mThen != null) {
                    mThen.then(data);
                }
            });
        }, error -> {
            mHandler.post(() -> {
                if (mCaught != null) {
                    mCaught.caught(error);
                }
            });
        });
    }
}
|
# tested on Ubuntu 20 with ESA SNAP 8.0 which demands java 8
# configure snappy by installing java 8 stuff, to build:
# * jpy (python/java bridge)
# finally then showing snappy where python is!
# Unpack the jpy source distribution next to this script.
tar xvf jpy-0.9.0.tar.gz
# Java 8 runtime/JDK, pip and maven are required to build jpy.
sudo apt install openjdk-8-jre openjdk-8-jdk python3-pip maven
# might need to insert these in .bashrc
export JDK_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
export JAVA_HOME=$JDK_HOME
# Build the jpy wheel and drop it where snappy-conf expects it.
cd jpy-0.9.0
python3 setup.py --maven bdist_wheel
cp -v dist/*.whl "/home/$USER/.snap/snap-python/snappy"
# configure snappy
/usr/local/snap/bin/snappy-conf /usr/bin/python3
# note, the above command doesn't complete for some reason..
# might have to execute this separately, don't know why
cp -rv "/home/$USER/.snap/snap-python/snappy/" "/home/$USER/.local/lib/python3.8/site-packages/"
|
/// A heterogeneous buffer slot that can hold an Int, a String, an untyped
/// array, or nothing at all.
enum BufferElement {
    case integer(Int)
    case string(String)
    case array([Any])
    case none
}
extension BufferElement {
    /// Unwraps the associated value, or returns nil for `.none`.
    /// The result is type-erased to `Any?`, so callers must downcast.
    func extractData() -> Any? {
        switch self {
        case .integer(let data):
            return data
        case .string(let data):
            return data
        case .array(let data):
            return data
        case .none:
            return nil
        }
    }
}
// Demo: one buffer of each case.
let buffer1 = BufferElement.integer(42)
let buffer2 = BufferElement.string("Hello, World!")
let buffer3 = BufferElement.array([1, 2, 3, 4, 5])
let buffer4 = BufferElement.none
// extractData() returns Any?, so print shows the Optional wrapper
// (the previous comments claimed the unwrapped values).
print(buffer1.extractData()) // Output: Optional(42)
print(buffer2.extractData()) // Output: Optional("Hello, World!")
print(buffer3.extractData()) // Output: Optional([1, 2, 3, 4, 5])
print(buffer4.extractData()) // Output: nil
|
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- charset declared early so the parser never has to re-decode -->
  <meta charset="utf-8">
  <title>My Timeline</title>
  <style>
    body {
      /* generic fallback added in case Arial is unavailable */
      font-family: Arial, sans-serif;
    }
    .events {
      display: flex;
      flex-direction: column;
      padding: 0.5rem;
    }
    .year {
      font-style: italic;
      font-weight: bold;
    }
    .event {
      margin-top: 0.5rem;
    }
  </style>
</head>
<body>
  <h1>My Timeline</h1>
  <div class="events">
    <p class="year">2011</p>
    <p class="event">Graduated from high school.</p>
    <p class="year">2012</p>
    <p class="event">Started college.</p>
    <p class="year">2020</p>
    <p class="event">Graduated from college.</p>
  </div>
</body>
</html>
|
// Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#include "sys.h"
#include "tensor_desc.h"
#include "error.h"
#include "gpu/mali/tensor_computing_mali.h"
#include "gpu/mali/fp16/matmul_mali_fp16.h"
#include "gpu/mali/cl/kernel_option/gemm_tn_opt.h"
// Produces the candidate GEMM algorithm plus the (h, c, k) tile-size triples
// to try for this matmul. The transpose/descriptor parameters are currently
// unused — presumably kept for interface symmetry; TODO confirm.
inline void matmul_produce_algos_paras(bool transposeA,
    TensorDesc matrixADesc,
    bool transposeB,
    TensorDesc matrixBDesc,
    std::vector<ConvolutionForwardAlgorithm> *matmulAlgorithms,
    std::vector<U32> *vecH,
    std::vector<U32> *vecC,
    std::vector<U32> *vecK)
{
    if (matmulAlgorithms) {
        (*matmulAlgorithms).push_back(CONVOLUTION_ALGORITHM_GEMM);
    }
    // Qualcomm devices prefer 3D image memory for the inputs.
    bool useImg = check_qualcomm_device();
    GCLMemType mt = (useImg) ? GCL_MEM_IMG_3D : GCL_MEM_BUF;
    CHECK_STATUS(get_gemm_tn_cal_scheme(vecH, vecC, vecK, mt, mt, GCL_MEM_BUF));
}
// Validates the matmul arguments: non-null pointers, matching reduce-axis
// lengths after applying the transpose flags, an output shape consistent
// with A x B, and an NCHW-format output buffer.
inline EE matmul_checkpara_mali(GCLHandle_t handle,
    TensorDesc matrixADesc,
    bool transposeA,
    const GCLMem_t matrixA,
    TensorDesc matrixBDesc,
    bool transposeB,
    const GCLMem_t matrixB,
    TensorDesc matrixCDesc,
    GCLMem_t matrixC)
{
    if (nullptr == handle || nullptr == matrixA || nullptr == matrixB || nullptr == matrixC) {
        return NULL_POINTER;
    }
    U32 ah, aw;
    U32 bh, bw;
    U32 ch, cw;
    tensorSelectGet(matrixADesc, NULL, NULL, NULL, NULL, &ah, &aw);
    tensorSelectGet(matrixBDesc, NULL, NULL, NULL, NULL, &bh, &bw);
    tensorSelectGet(matrixCDesc, NULL, NULL, NULL, NULL, &ch, &cw);
    // m x ra (A, after transpose) must multiply rb x n (B, after transpose).
    U32 m, n, ra, rb;
    if (!transposeA) {
        m = ah;
        ra = aw;
    } else {
        m = aw;
        ra = ah;
    }
    if (!transposeB) {
        n = bw;
        rb = bh;
    } else {
        n = bh;
        rb = bw;
    }
    if (ra != rb) {
        CHECK_STATUS(NOT_MATCH);
    }
    if (n != cw || m != ch) {
        CHECK_STATUS(NOT_MATCH);
    }
    if (matrixC->desc.memFormat != DF_NCHW) {
        return NOT_SUPPORTED;
    }
    return SUCCESS;
}
/**
 * Computes the output tensor descriptor for matmul and registers the padding
 * (tile alignment) required on the innermost dimension of each input so the
 * candidate GEMM tiling schemes can run without bounds checks.
 *
 * Only code change vs. the original: the unused local `cmt`
 * (outputCMem->gclMemType()) was removed; it was never read and triggered an
 * unused-variable warning.
 */
EE matmul_padding_input_mali(TensorDesc matrixADesc,
    bool transposeA,
    TensorDesc matrixBDesc,
    bool transposeB,
    TensorDesc *matrixCDesc,
    OclMemory *inputAMem,
    OclMemory *inputBMem,
    OclMemory *outputCMem)
{
    if (matrixCDesc == nullptr || inputAMem == nullptr || inputBMem == nullptr ||
        outputCMem == nullptr) {
        CHECK_STATUS(NULL_POINTER);
    }
    U32 aDims = matrixADesc.nDims;
    U32 bDims = matrixBDesc.nDims;
    DataType adt = matrixADesc.dt;
    DataType bdt = matrixBDesc.dt;
    // Mixed-precision matmul is not supported.
    if (adt != bdt) {
        CHECK_STATUS(NOT_MATCH);
    }
    U32 aw, ah;
    U32 bw, bh;
    U32 ch, cw;
    aw = matrixADesc.dims[0];
    ah = (aDims > 1) ? matrixADesc.dims[1] : 1;
    bw = matrixBDesc.dims[0];
    bh = (bDims > 1) ? matrixBDesc.dims[1] : 1;
    bool needReshapeA, needReshapeB;
    get_reshaped_desc(
        matrixADesc, matrixBDesc, transposeA, transposeB, &needReshapeA, &needReshapeB, NULL, NULL);
    GCLMemType amt = inputAMem->gclMemType();
    GCLMemType bmt = inputBMem->gclMemType();
    bool needProcessA = need_process_matmul_input(matrixADesc, amt, needReshapeA, transposeA, true);
    bool needProcessB = need_process_matmul_input(matrixBDesc, bmt, needReshapeB, transposeB, false);
    std::vector<ConvolutionForwardAlgorithm> matmulAlgorithms;
    std::vector<U32> vecH;
    std::vector<U32> vecC;
    std::vector<U32> vecK;
    // Tiling candidates are only needed when at least one input is consumed
    // directly (i.e. not pre-processed into an intermediate layout).
    if (!needProcessA || !needProcessB) {
        matmul_produce_algos_paras(transposeA, matrixADesc, transposeB, matrixBDesc,
            &matmulAlgorithms, &vecH, &vecC, &vecK);
    }
    U32 aw_align = aw;
    U32 bw_align = bw;
    U32 ar, br;
    if (transposeA) {
        if (!needProcessA) {
            // Align A's innermost dim up to each candidate K tile; keep the max.
            for (auto item_k : vecK) {
                U32 i = ALIGN(aw, item_k);
                aw_align = (aw_align < i) ? i : aw_align;
            }
        }
        ch = aw;
        ar = ah;  //reduce axis len for matrix A
    } else {
        ch = ah;
        ar = aw;
    }
    if (!transposeB) {
        if (!needProcessB) {
            // Align B's innermost dim up to each candidate H tile; keep the max.
            for (auto item_h : vecH) {
                U32 i = ALIGN(bw, item_h);
                bw_align = (bw_align < i) ? i : bw_align;
            }
        }
        cw = bw;
        br = bh;  //reduce axis len for matrix B
    } else {
        cw = bh;
        br = bw;
    }
    if (ar != br) {
        CHECK_STATUS(NOT_MATCH);
    }
    U32 cDims = (aDims > bDims) ? aDims : bDims;
    if (cDims < 2) {
        CHECK_STATUS(NOT_MATCH);
    }
    DataFormat cdf = getTensorDefaultDataFormat(cDims);
    TensorDesc cDesc;
    cDesc.dt = adt;
    cDesc.df = cdf;
    cDesc.nDims = cDims;
    cDesc.dims[0] = cw;
    cDesc.dims[1] = ch;
    // Broadcast batch dimensions: each higher dim takes the larger of A/B.
    for (U32 i = 2; i < cDims; i++) {
        U32 av = (i < aDims) ? matrixADesc.dims[i] : 1;
        U32 bv = (i < bDims) ? matrixBDesc.dims[i] : 1;
        cDesc.dims[i] = (av > bv) ? av : bv;
    }
    (*matrixCDesc) = cDesc;
    // Record the right-side padding implied by the chosen alignments.
    U32 pr = aw_align - aw;
    inputAMem->padding(0, pr, 0, 0);
    pr = bw_align - bw;
    inputBMem->padding(0, pr, 0, 0);
    return SUCCESS;
}
/*
 * Auto-tunes the matmul GEMM configuration: benchmarks every candidate
 * (h, c, k) tiling scheme on the device with queue profiling enabled and
 * stores the fastest one in forwardRunInfo. A no-op once an algorithm has
 * already been selected.
 */
EE matmul_infer_forward_algorithm_mali(GCLHandle_t handle,
    TensorDesc matrixADesc,
    bool transposeA,
    TensorDesc matrixBDesc,
    bool transposeB,
    TensorDesc matrixCDesc,
    GCLMemDesc gclmemMatrixADesc,
    GCLMemDesc gclmemMatrixBDesc,
    GCLMemDesc gclmemMatrixCDesc,
    ForwardRunInfoMali_t forwardRunInfo)
{
    if (forwardRunInfo == nullptr) {
        CHECK_STATUS(NULL_POINTER);
    }
    ConvolutionForwardAlgorithm algorithm = (ConvolutionForwardAlgorithm)(forwardRunInfo->algorithm);
    // Already tuned: nothing to do.
    if (algorithm != CONVOLUTION_ALGORITHM_NULL) {
        return SUCCESS;
    }
    std::vector<ConvolutionForwardAlgorithm> matmulAlgorithms;
    std::vector<U32> vecH;
    std::vector<U32> vecC;
    std::vector<U32> vecK;
    matmul_produce_algos_paras(
        transposeA, matrixADesc, transposeB, matrixBDesc, &matmulAlgorithms, &vecH, &vecC, &vecK);
    CHECK_STATUS(gcl_clean_kernelVec(handle));
    CHECK_STATUS(gcl_enable_queue_profiling(handle));
    GCLMem_t matrixA = gcl_create_gclmem();
    GCLMem_t matrixB = gcl_create_gclmem();
    GCLMem_t matrixC = gcl_create_gclmem();
    GCLMem_t tmpbuf = gcl_create_gclmem();
    GCLMem_t tmpImgA = gcl_create_gclmem();
    GCLMem_t tmpImgB = gcl_create_gclmem();
    std::vector<ForwardRunInfoMali> runInfos;
    U32 stride[3] = {0};
    U32 offset[3] = {0};
    // bytes[0] = scratch buffer size; [1..3]/[4..6] = optional 3D image
    // dimensions for the pre-processed A/B inputs.
    U32 bytes[7] = {0};
    U32 maxBytes[7] = {0};
    ForwardRunInfoMali runInfo;
    runInfo.algorithm = matmulAlgorithms[0];
    // Collect every feasible candidate and the max scratch sizes they need.
    for (U32 i = 0; i < vecH.size(); i++) {
        runInfo.best_h[0] = vecH[i];
        runInfo.best_c[0] = vecC[i];
        runInfo.best_k[0] = vecK[i];
        if (matmul_infer_forward_tmp_bytes_mali(matrixADesc, transposeA, matrixBDesc, transposeB,
                matrixCDesc, gclmemMatrixADesc, gclmemMatrixBDesc, gclmemMatrixCDesc, bytes,
                &runInfo) != SUCCESS) {
            continue;
        }
        // NOTE(review): this inner `i` shadows the loop variable above;
        // legal, but worth renaming for readability.
        for (U32 i = 0; i < 7; i++) {
            maxBytes[i] = (maxBytes[i] < bytes[i]) ? bytes[i] : maxBytes[i];
        }
        runInfos.push_back(runInfo);
    }
    U32 algosNum = runInfos.size();
    if (algosNum == 0) {
        CHECK_STATUS(NOT_SUPPORTED);
    }
    gclmemMatrixCDesc.need_pad = false;
    matrixA->desc = gclmemMatrixADesc;
    matrixB->desc = gclmemMatrixBDesc;
    matrixC->desc = gclmemMatrixCDesc;
    gcl_create_memory(handle, matrixA);
    gcl_create_memory(handle, matrixB);
    gcl_create_memory(handle, matrixC);
    std::vector<GCLMem_t> tmp(3, NULL);
    maxBytes[0] += 1;
    tmpbuf->desc.byteSize = maxBytes[0];
    gcl_create_memory(handle, tmpbuf);
    tmp[0] = tmpbuf;
    // Optional 3D-image scratch for pre-processed A.
    if (maxBytes[1] > 0 && maxBytes[2] > 0 && maxBytes[3] > 0) {
        tmpImgA->desc.memType = GCL_MEM_IMG_3D;
        tmpImgA->desc.stride[0] = maxBytes[1];
        tmpImgA->desc.stride[1] = maxBytes[2];
        tmpImgA->desc.stride[2] = maxBytes[3];
        gcl_create_memory(handle, tmpImgA);
        tmp[1] = tmpImgA;
    }
    // Optional 3D-image scratch for pre-processed B.
    if (maxBytes[4] > 0 && maxBytes[5] > 0 && maxBytes[6] > 0) {
        tmpImgB->desc.memType = GCL_MEM_IMG_3D;
        tmpImgB->desc.stride[0] = maxBytes[4];
        tmpImgB->desc.stride[1] = maxBytes[5];
        tmpImgB->desc.stride[2] = maxBytes[6];
        gcl_create_memory(handle, tmpImgB);
        tmp[2] = tmpImgB;
    }
    // Benchmark each candidate and keep the fastest.
    double minTime = DBL_MAX;
    ForwardRunInfoMali bestRunInfo;
    for (U32 i = 0; i < algosNum; i++) {
        if (matmul_mali(handle, matrixADesc, transposeA, matrixA, matrixBDesc, transposeB, matrixB,
                matrixADesc, NULL, tmp, matrixCDesc, matrixC, &runInfos[i]) == SUCCESS) {
            U32 kernelVecNum = handle->kernelVec->size();
            gcl_run_kernelVec_timing(handle, kernelVecNum - 1, kernelVecNum);
            if (minTime > handle->t_execute) {
                minTime = handle->t_execute;
                bestRunInfo = runInfos[i];
            }
        }
    }
    if (minTime == DBL_MAX) {
        CHECK_STATUS(NOT_SUPPORTED);
    }
    *forwardRunInfo = bestRunInfo;
    CHECK_STATUS(gcl_finish(handle));
    gcl_destroy_gclmem(matrixA);
    gcl_destroy_gclmem(matrixB);
    gcl_destroy_gclmem(matrixC);
    gcl_destroy_gclmem(tmpbuf);
    gcl_destroy_gclmem(tmpImgA);
    gcl_destroy_gclmem(tmpImgB);
    runInfos.clear();
    CHECK_STATUS(gcl_clean_kernelVec(handle));
    CHECK_STATUS(gcl_clean_programMap(handle));
    CHECK_STATUS(gcl_off_queue_profiling(handle));
    return SUCCESS;
}
// Computes the scratch-memory requirements for a matmul with the given
// tiling configuration. Only fp16 is implemented on Mali; every other data
// type (including DT_I8) is rejected with NOT_SUPPORTED.
EE matmul_infer_forward_tmp_bytes_mali(TensorDesc matrixADesc,
    bool transposeA,
    TensorDesc matrixBDesc,
    bool transposeB,
    TensorDesc matrixCDesc,
    GCLMemDesc gclmemMatrixADesc,
    GCLMemDesc gclmemMatrixBDesc,
    GCLMemDesc gclmemMatrixCDesc,
    U32 *bytes,
    ForwardRunInfoMali_t forwardRunInfo)
{
    if (matrixADesc.dt == DT_F16) {
        return matmul_infer_forward_tmp_bytes_mali_fp16(matrixADesc, transposeA, matrixBDesc,
            transposeB, matrixCDesc, gclmemMatrixADesc, gclmemMatrixBDesc, gclmemMatrixCDesc,
            bytes, forwardRunInfo);
    }
    return NOT_SUPPORTED;
}
/**
 * Dispatches matmul to the data-type-specific Mali implementation after
 * validating the arguments.
 *
 * BUG FIX: the result of matmul_checkpara_mali was previously assigned to
 * `ret` and then immediately overwritten by the switch below, so validation
 * failures (null pointers, mismatched shapes, wrong output format) were
 * silently ignored and execution proceeded anyway. Failures now return early.
 */
EE matmul_mali(GCLHandle_t handle,
    TensorDesc matrixADesc,
    bool transposeA,
    GCLMem_t matrixA,
    TensorDesc matrixBDesc,
    bool transposeB,
    GCLMem_t matrixB,
    TensorDesc biasDesc,
    GCLMem_t bias,
    std::vector<GCLMem_t> tmp,
    TensorDesc matrixCDesc,
    GCLMem_t matrixC,
    ForwardRunInfoMali_t forwardRunInfo)
{
    EE ret = matmul_checkpara_mali(handle, matrixADesc, transposeA, matrixA, matrixBDesc,
        transposeB, matrixB, matrixCDesc, matrixC);
    if (ret != SUCCESS) {
        return ret;
    }
    switch (matrixADesc.dt) {
        case DT_F16: {
            ret = matmul_mali_fp16(handle, matrixADesc, transposeA, matrixA, matrixBDesc,
                transposeB, matrixB, biasDesc, bias, tmp, matrixCDesc, matrixC, forwardRunInfo);
            break;
        }
        case DT_I8: {
            ret = NOT_SUPPORTED;
            break;
        }
        default:
            ret = NOT_SUPPORTED;
            break;
    }
    return ret;
}
|
<filename>lib/bulk_cancel_terms/end_coverage_csv_listener.rb
module BulkCancelTerms
  # Records the outcome of each row in a bulk end-coverage CSV batch.
  #
  # One CsvTransmission is created per batch (file) at construction time;
  # one CsvTransaction is created per processed row, capturing the raw row
  # data, its policy, and any accumulated errors. Row-level event callbacks
  # are forwarded to a per-row listener supplied via #set_current_row.
  class EndCoverageCsvListener
    def initialize(file_name, batch_id, submitted_at, submitted_by)
      @current_data = {}
      @current_row = 0
      @current_listener = nil
      @file_name = file_name
      @batch_id = batch_id
      @submitted_at = submitted_at
      @submitted_by = submitted_by
      @transmission = create_csv_transmission
    end

    # Points this listener at row +idx+ before the row is processed.
    def set_current_row(idx, data, req, listener)
      @current_row = idx
      @current_data = data
      @policy = get_policy(req)
      @current_listener = listener
    end

    # --- row-level events: delegated to the per-row listener ---

    def no_subscriber_id(details = {})
      @current_listener.no_subscriber_id(details)
    end

    def no_such_policy(details = {})
      @current_listener.no_such_policy(details)
    end

    def policy_inactive(details = {})
      @current_listener.policy_inactive(details)
    end

    def end_date_invalid(details = {})
      @current_listener.end_date_invalid(details)
    end

    # Terminal failure: delegate, then persist the row with the listener's
    # accumulated errors.
    def fail(details = {})
      @current_listener.fail(details)
      create_csv_transaction(@current_listener.errors)
    end

    # Terminal success: delegate, then persist the row with no errors.
    def success(details = {})
      @current_listener.success(details)
      create_csv_transaction
    end

    # Persists one processed row; the raw row data is stored as JSON.
    def create_csv_transaction(errors = [])
      Protocols::Csv::CsvTransaction.create!({
        :body => FileString.new("#{@current_row}.json",JSON.dump(@current_data)),
        :submitted_at => @submitted_at,
        :error_list => errors,
        :batch_index => @current_row,
        :policy => @policy,
        :csv_transmission => @transmission
      })
    end

    # Persists the batch-level record all row transactions attach to.
    def create_csv_transmission
      Protocols::Csv::CsvTransmission.create!({
        :batch_id => @batch_id,
        :file_name => @file_name,
        :submitted_by => @submitted_by
      })
    end

    def get_policy(req)
      Policy.where(:id => req[:policy_id]).first
    end
  end
end
|
<reponame>ykatieli/ducky-ml<filename>ducky/app/screens/HabitTracker.js<gh_stars>1-10
// HabitTracker.js
import React, { Component } from 'react';
import { View, Text, Button } from 'react-native';
import { LinearGradient } from 'expo-linear-gradient';
import { AntDesign, Feather, MaterialCommunityIcons, FontAwesome } from '@expo/vector-icons';
import styles from '../Styles';
import Dock from '../components/Dock';
/**
 * Habit Tracker screen: a gradient background with a title header and the
 * shared bottom Dock. Stateless; the navigation prop is forwarded to Dock
 * so the dock buttons can switch screens.
 */
export class HabitTracker extends Component {
  render() {
    return (
      <LinearGradient colors={['#aec9eb', 'rgba(174, 201, 235, 0.75)']} style={styles.container}>
        {/* Habit Tracker */}
        <View style={styles.header}>
          <Text style={styles.white_40}>Habit Tracker</Text>
        </View>
        {/* Dock */}
        <Dock navigation={this.props.navigation}/>
      </LinearGradient>
    )
  }
}
export default HabitTracker;
|
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const { resolve } = require('path');
const root = (src) => resolve(process.cwd(), src);
// Options shared by every postcss-loader instance in this file:
// inline source maps plus smart-import/precss/autoprefixer processing.
const postCSSOptions = {
  sourceMap: 'inline',
  plugins: [
    require('postcss-smart-import')({ /* ...options */ }),
    require('precss')({ /* ...options */ }),
    require('autoprefixer')({
      // NOTE(review): `browsers` is the legacy autoprefixer option name;
      // recent versions expect `overrideBrowserslist` — confirm the
      // installed autoprefixer version before renaming.
      browsers : [
        'last 3 version',
        'ie >= 10',
      ],
    }),
  ],
};
// Options shared by every sass-loader instance: resolve imports from
// node_modules and src, and expose NODE_ENV to Sass as the $env variable.
const sassOptions = {
  includePaths : [
    root('node_modules'),
    root('src'),
  ],
  sourceMap: true,
  data: `$env: ${process.env.NODE_ENV};`,
};
/**
 * Options for style-loader. In development we use a singleton <style> tag
 * and source maps; in production both are disabled.
 * SERVER is accepted for signature parity with the other loader factories
 * but does not affect the result.
 */
function styleLoader(SERVER, PRODUCTION) {
  const devMode = !PRODUCTION;
  return {
    singleton: devMode,
    sourceMap: devMode,
    convertToAbsoluteUrls: true,
  };
}
/**
 * Loader chain for plain .css files.
 * Production/server builds extract CSS to a file (with minimization);
 * client dev builds inject it inline via style-loader.
 */
function cssLoader(SERVER, PRODUCTION) {
  const extract = PRODUCTION || SERVER;
  const styleLoaderEntry = { loader: 'style-loader', options: styleLoader(SERVER, PRODUCTION) };
  const pipeline = [
    {
      loader: 'css-loader',
      options: {
        minimize: extract, // minimize only when extracting (prod/server)
        sourceMap: true,
        modules: true,
        camelCase: true,
        importLoaders: 1,
        localIdentName: 'purify_[name]_[local]_[hash:base64:5]',
      },
    },
    { loader: 'postcss-loader', options: postCSSOptions },
  ];
  if (extract) {
    return ExtractTextPlugin.extract({
      publicPath: '/',
      fallback: styleLoaderEntry,
      use: pipeline,
    });
  }
  return [styleLoaderEntry, ...pipeline];
}
/**
 * Loader chain for .styl files (Stylus with nib).
 * Production/server builds extract CSS to a file (minimized/compressed);
 * client dev builds inject it inline via style-loader.
 */
function stylusLoader(SERVER, PRODUCTION) {
  const extract = PRODUCTION || SERVER;
  const styleLoaderEntry = { loader: 'style-loader', options: styleLoader(SERVER, PRODUCTION) };
  const pipeline = [
    {
      loader: 'css-loader',
      options: {
        minimize: extract, // minimize only when extracting (prod/server)
        sourceMap: true,
        modules: true,
        camelCase: true,
        importLoaders: 2,
        localIdentName: 'purify_[name]_[local]_[hash:base64:5]',
      },
    },
    { loader: 'postcss-loader', options: postCSSOptions },
    {
      loader: 'stylus-loader',
      options: {
        inline: true,
        compress: extract, // compress output only when extracting
        preferPathResolver: 'webpack',
        use: [require('nib')()],
        import: ['~nib/lib/nib/index.styl'],
        include: [
          root('src/client/assets'),
          root('build/src/client/assets'),
        ],
      },
    },
  ];
  if (extract) {
    return ExtractTextPlugin.extract({
      publicPath: '/',
      fallback: styleLoaderEntry,
      use: pipeline,
    });
  }
  return [styleLoaderEntry, ...pipeline];
}
/**
 * Loader chain for .scss/.sass files.
 * Production/server builds extract CSS to a file (minimized);
 * client dev builds inject it inline via style-loader.
 */
function sassLoader(SERVER, PRODUCTION) {
  const extract = PRODUCTION || SERVER;
  const styleLoaderEntry = { loader: 'style-loader', options: styleLoader(SERVER, PRODUCTION) };
  const pipeline = [
    {
      loader: 'css-loader',
      options: {
        minimize: extract, // minimize only when extracting (prod/server)
        sourceMap: true,
        modules: true,
        camelCase: true,
        importLoaders: 2,
        localIdentName: 'purify_[name]_[local]_[hash:base64:5]',
      },
    },
    { loader: 'postcss-loader', options: postCSSOptions },
    {
      loader: 'sass-loader',
      options: sassOptions,
    },
  ];
  if (extract) {
    return ExtractTextPlugin.extract({
      publicPath: '/',
      fallback: styleLoaderEntry,
      use: pipeline,
    });
  }
  return [styleLoaderEntry, ...pipeline];
}
/**
 * Loader chain for .less files.
 * Production/server builds extract CSS to a file (minimized);
 * client dev builds inject it inline via style-loader.
 */
function lessLoader(SERVER, PRODUCTION) {
  const extract = PRODUCTION || SERVER;
  const styleLoaderEntry = { loader: 'style-loader', options: styleLoader(SERVER, PRODUCTION) };
  const pipeline = [
    {
      loader: 'css-loader',
      options: {
        minimize: extract, // minimize only when extracting (prod/server)
        sourceMap: true,
        modules: true,
        camelCase: true,
        importLoaders: 2,
        localIdentName: 'purify_[name]_[local]_[hash:base64:5]',
      },
    },
    { loader: 'postcss-loader', options: postCSSOptions },
    {
      loader: 'less-loader',
      options: {
        sourceMap: true,
      },
    },
  ];
  if (extract) {
    return ExtractTextPlugin.extract({
      publicPath: '/',
      fallback: styleLoaderEntry,
      use: pipeline,
    });
  }
  return [styleLoaderEntry, ...pipeline];
}
/**
 * Babel loader configuration.
 * Server builds honour .babelrc and disable compaction; client builds
 * ignore .babelrc and add the react-hot-loader plugin.
 * PRODUCTION is accepted for signature parity but is unused here.
 */
function babelLoader(SERVER, PRODUCTION) {
  const presets = [
    ['env', { 'modules': false, 'targets': { 'uglify': false } }],
    'react',
    'flow',
    'stage-2',
  ];
  const plugins = [
    ['transform-runtime', {
      helpers: true,
      polyfill: true,
      regenerator: true,
      moduleName: 'babel-runtime',
    }],
    ['transform-regenerator', {
      asyncGenerators: true,
      generators: true,
      async: true,
    }],
    'dynamic-import-webpack',
    'transform-class-properties',
  ];
  const options = { presets, plugins };
  if (SERVER) {
    options.babelrc = true;
    options.compact = false;
  } else {
    plugins.push('react-hot-loader/babel');
    options.babelrc = false;
  }
  // More info at: https://github.com/babel/babel-loader#options
  return [{ loader: 'babel-loader?cacheDirectory=true', options }];
}
/**
 * Loader entry for font assets (eot/ttf/woff/woff2).
 *
 * @param {boolean} SERVER     true when building the server bundle.
 * @param {boolean} PRODUCTION accepted for signature parity; unused here.
 * @param {string}  LOADER     'file-loader' to always emit files, otherwise
 *                             an inlining loader such as 'url-loader'.
 * @returns {Array<{loader: string, options: Object}>} single-entry config.
 */
function fontLoader(SERVER, PRODUCTION, LOADER) {
  let array = [];
  if (LOADER === "file-loader") {
    array = [{
      loader: LOADER,
      options: {
        name : '[name].[hash].[ext]',
        outputPath : SERVER ? '../../build/public/fonts/' : 'fonts/',
        publicPath : '/',
      },
    }];
  } else {
    array = [{
      loader : LOADER,
      options : {
        limit : 1048576, // 1 MiB — larger assets fall through to file emission
        // BUG FIX: `+` previously bound tighter than the ternary, so the
        // server build got a bare directory with no filename template.
        name : (SERVER ? '../../build/public/fonts/' : 'fonts/') + '[name].[hash:8].[ext]',
      },
    }];
  }
  return array;
}
/**
 * Loader entry for raster image assets (jpg/png/gif/ico).
 *
 * @param {boolean} SERVER     true when building the server bundle.
 * @param {boolean} PRODUCTION accepted for signature parity; unused here.
 * @param {string}  LOADER     'file-loader' to always emit files, otherwise
 *                             an inlining loader such as 'url-loader'.
 * @returns {Array<{loader: string, options: Object}>} single-entry config.
 */
function imgLoader(SERVER, PRODUCTION, LOADER) {
  let array = [];
  if (LOADER === "file-loader") {
    array = [{
      loader : LOADER,
      options : {
        name : '[name].[hash].[ext]',
        outputPath : SERVER ? '../../build/public/img/' : 'img/',
        publicPath : '/',
      },
    }];
  } else {
    array = [{
      loader : LOADER,
      options : {
        limit : 1048576, // 1 MiB — larger assets fall through to file emission
        // BUG FIX: `+` previously bound tighter than the ternary, so the
        // server build got a bare directory with no filename template.
        name : (SERVER ? '../../build/public/img/' : 'img/') + '[name].[hash:8].[ext]',
      },
    }];
  }
  return array;
}
/**
 * Loader entry for SVG assets.
 *
 * @param {boolean} SERVER     true when building the server bundle.
 * @param {boolean} PRODUCTION accepted for signature parity; unused here.
 * @param {string}  LOADER     'file-loader' to always emit files, otherwise
 *                             an inlining loader such as 'url-loader'.
 * @returns {Array<{loader: string, options: Object}>} single-entry config.
 */
function svgLoader(SERVER, PRODUCTION, LOADER) {
  let array = [];
  if (LOADER === "file-loader") {
    array = [{
      loader: LOADER,
      options: {
        name : '[name].[hash].[ext]',
        outputPath : SERVER ? '../../build/public/svg/' : 'svg/',
        publicPath : '/',
      },
    }];
  } else {
    array = [{
      loader: LOADER,
      options: {
        limit : 1048576, // 1 MiB — larger assets fall through to file emission
        // BUG FIX: `+` previously bound tighter than the ternary, so the
        // server build got a bare directory with no filename template.
        name : (SERVER ? '../../build/public/svg/' : 'svg/') + '[name].[hash:8].[ext]',
      },
    }];
  }
  return array;
}
/**
 * Pug template loader; pretty-prints output markup outside production.
 * More info about the loader at: https://github.com/willyelm/pug-html-loader
 * Find more options at: https://pugjs.org/api/reference.html
 */
function pugLoader(PRODUCTION) {
  return [{
    loader: 'pug-loader',
    options: { pretty: !PRODUCTION },
  }];
}
/** HTML loader; minifies markup only for production builds. */
function htmlLoader(PRODUCTION) {
  return [{
    loader: 'html-loader',
    options: { minimize: PRODUCTION },
  }];
}
/**
 * Loader entry for audio/video assets (mp4/webm/wav/mp3/…).
 *
 * @param {boolean} SERVER     true when building the server bundle.
 * @param {boolean} PRODUCTION accepted for signature parity; unused here.
 * @param {string}  LOADER     'file-loader' to always emit files, otherwise
 *                             an inlining loader such as 'url-loader'.
 * @returns {Array<{loader: string, options: Object}>} single-entry config.
 */
function audioAndVideoLoader(SERVER, PRODUCTION, LOADER) {
  let array = [];
  if (LOADER === "file-loader") {
    array = [{
      loader: LOADER,
      options: {
        name : '[name].[hash].[ext]',
        outputPath : SERVER ? '../../build/public/media/' : 'media/',
        publicPath : '/',
      },
    }];
  } else {
    array = [{
      loader: LOADER,
      options: {
        limit : 1048576, // 1 MiB — larger assets fall through to file emission
        // BUG FIX: `+` previously bound tighter than the ternary, so the
        // server build got a bare directory with no filename template.
        name : (SERVER ? '../../build/public/media/' : 'media/') + '[name].[hash:8].[ext]',
      },
    }];
  }
  return array;
}
/**
 * Assembles the webpack `module.rules` array for one build variant.
 *
 * @param {boolean} SERVER     true when building the server bundle.
 * @param {boolean} PRODUCTION true for production builds.
 * @returns {Array<Object>} one rule per asset type, built from the loader
 *                          factory functions above.
 */
module.exports = function(SERVER, PRODUCTION) {
    const loaders = [
        { // BABEL:
            test: /\.(js|jsx)$/,
            exclude: /node_modules/,
            include: [ root('src') ],
            use: babelLoader(SERVER, PRODUCTION),
        },
        { // CSS:
            test: /\.css$/,
            use: cssLoader(SERVER, PRODUCTION),
        },
        { // STYLUS:
            test: /\.styl$/,
            use: stylusLoader(SERVER, PRODUCTION),
        },
        { // SASS:
            test: /\.(scss|sass)$/,
            use: sassLoader(SERVER, PRODUCTION),
        },
        { // LESS:
            test: /\.less$/,
            use: lessLoader(SERVER, PRODUCTION),
        },
        { // FONTS:
            test:/\.(eot|ttf|woff|woff2)(\?v=\d+\.\d+\.\d+)?$/,
            use: fontLoader(SERVER, PRODUCTION, 'file-loader'),
        },
        { // IMAGES:
            test: /\.(jpe?g|jpg|png|gif|ico)$/i,
            use: imgLoader(SERVER, PRODUCTION, 'url-loader'),
        },
        { // SVG:
            test: /\.svg$/i,
            use: svgLoader(SERVER, PRODUCTION, 'file-loader'),
        },
        { // AUDIO & VIDEO:
            test: /\.(mp4|webm|wav|mp3|m4a|aac|oga)(\?.*)?$/,
            use: audioAndVideoLoader(SERVER, PRODUCTION, 'file-loader'),
        },
        { // PUG:
            test: /\.pug/,
            use: pugLoader(PRODUCTION),
        },
        { // HTML:
            test: /\.html$/,
            use: htmlLoader(PRODUCTION),
        },
        { // JSON:
            test: /\.json$/,
            loader: 'json-loader',
        },
        { // EJS:
            test: /\.ejs$/,
            loader: 'ejs-loader',
        },
    ];
    return loaders;
};
|
<gh_stars>1-10
import React from "react";
import { Row, Col } from "react-bootstrap";
import Image from "./Image";
import profilePic from "../public/images/ty-mick-full.jpg";
/**
 * Greeting banner: circular profile photo next to a "Hi! I'm Ty." heading.
 * Stacks vertically on xs screens and sits side-by-side from sm upward.
 *
 * @param h1 - when true, the greeting text renders as an <h1> (for page
 *             headers); otherwise it renders as a plain <div>.
 */
export default function Greeting({ h1 }: { h1?: boolean }) {
  // Choose the greeting's tag name dynamically so markup stays semantic.
  const Hi = h1 ? "h1" : "div";

  return (
    <Row className="align-items-center mb-3 mb-sm-5">
      <Col
        xs={12}
        sm="auto"
        className="mr-md-4 mb-2 mb-sm-0 text-center text-sm-left"
      >
        <Image
          src={profilePic}
          alt="Ty hiking in Joshua Tree National Park"
          width="100%"
          maxWidth="200px"
          roundedCircle
          priority
        />
      </Col>
      <Col xs={12} sm className="text-center text-sm-left">
        <Hi className="greeting">Hi! I’m Ty.</Hi>
      </Col>
    </Row>
  );
}
|
#!/bin/bash
# Copyright 2012-2013 Karel Vesely, Daniel Povey
# Apache 2.0

# Kaldi-style DNN decoding script: forwards features through a neural net
# (nnet-forward-parallel) and decodes the posteriors with latgen-faster-mapped,
# producing lattices in <decode-dir>, then optionally scores them.

# Begin configuration section.
nnet= # non-default location of DNN (optional)
feature_transform= # non-default location of feature_transform (optional)
model= # non-default location of transition model (optional)
class_frame_counts= # non-default location of PDF counts (optional)
srcdir= # non-default location of DNN-dir (decouples model dir from decode dir)
stage=0 # stage=1 skips lattice generation
nj=4
cmd=run.pl
acwt=0.1 # note: only really affects pruning (scoring is on lattices). acoustic-scale
beam=13.0
lattice_beam=8.0
min_active=1000
max_active=7000 # limit of active tokens
max_mem=50000000 # approx. limit to memory consumption during minimization in bytes
nnet_forward_opts="--no-softmax=true --prior-scale=1.0 --num-stream=4 --batch-size=20 --num-threads=1 "
skip_scoring=false
scoring_opts="--min-lmwt 4 --max-lmwt 15"
skip_opts=
online=false
num_threads=10 # if >1, will use latgen-faster-parallel
#parallel_opts="-pe smp 5" #"-pe smp $((num_threads+1))" # use 2 CPUs (1 DNN-forward, 1 decoder)
use_gpu="yes" # yes|no|optionaly
# End configuration section.
echo "$0 $@" # Print the command line for logging
[ -f ./path.sh ] && . ./path.sh; # source the path.
. parse_options.sh || exit 1;
if [ $# != 3 ]; then
  echo "Usage: $0 [options] <graph-dir> <data-dir> <decode-dir>"
  echo "... where <decode-dir> is assumed to be a sub-directory of the directory"
  echo " where the DNN and transition model is."
  echo "e.g.: $0 exp/dnn1/graph_tgpr data/test exp/dnn1/decode_tgpr"
  echo ""
  echo "This script works on plain or modified features (CMN,delta+delta-delta),"
  echo "which are then sent through feature-transform. It works out what type"
  echo "of features you used from content of srcdir."
  echo ""
  echo "main options (for others, see top of script file)"
  echo " --config <config-file> # config containing options"
  echo " --nj <nj> # number of parallel jobs"
  echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
  echo ""
  echo " --nnet <nnet> # non-default location of DNN (opt.)"
  echo " --srcdir <dir> # non-default dir with DNN/models, can be different"
  echo " # from parent dir of <decode-dir>' (opt.)"
  echo ""
  echo " --acwt <float> # select acoustic scale for decoding"
  echo " --scoring-opts <opts> # options forwarded to local/score.sh"
  echo " --num-threads <N> # N>1: run multi-threaded decoder"
  exit 1;
fi
graphdir=$1
data=$2
dir=$3
[ -z $srcdir ] && srcdir=`dirname $dir`; # Default model directory one level up from decoding directory.
sdata=$data/split$nj;
mkdir -p $dir/log
# Re-split the data directory only when the existing split is stale.
[[ -d $sdata && $data/feats.scp -ot $sdata ]] || split_data.sh $data $nj || exit 1;
echo $nj > $dir/num_jobs
# Select default locations to model files (if not already set externally)
[ -z "$nnet" ] && nnet=$srcdir/final.nnet
[ -z "$model" ] && model=$srcdir/final.mdl
[ -z "$feature_transform" ] && feature_transform=$srcdir/final.feature_transform
#
[ -z "$class_frame_counts" -a -f $srcdir/prior_counts ] && class_frame_counts=$srcdir/prior_counts # priority,
[ -z "$class_frame_counts" ] && class_frame_counts=$srcdir/ali_train_pdf.counts
# Check that files exist
for f in $sdata/1/feats.scp $nnet $model $feature_transform $class_frame_counts $graphdir/HCLG.fst; do
  [ ! -f $f ] && echo "$0: missing file $f" && exit 1;
done
# Possibly use multi-threaded decoder
thread_string=
[ $num_threads -gt 1 ] && thread_string="-parallel --num-threads=$num_threads"
# PREPARE FEATURE EXTRACTION PIPELINE
# import config,
cmvn_opts=
delta_opts=
D=$srcdir
[ -e $D/norm_vars ] && cmvn_opts="--norm-means=true --norm-vars=$(cat $D/norm_vars)" # Bwd-compatibility,
[ -e $D/cmvn_opts ] && cmvn_opts=$(cat $D/cmvn_opts)
[ -e $D/delta_order ] && delta_opts="--delta-order=$(cat $D/delta_order)" # Bwd-compatibility,
[ -e $D/delta_opts ] && delta_opts=$(cat $D/delta_opts)
[ -e $D/skip_opts ] && skip_opts=$(cat $D/skip_opts)
[ -e $D/online ] && online=$(cat $D/online)
#
# Create the feature stream,
feats="ark,s,cs:copy-feats scp:$sdata/JOB/feats.scp ark:- |"
# apply-cmvn (optional),
[ ! -z "$cmvn_opts" -a ! -f $sdata/1/cmvn.scp ] && echo "$0: Missing $sdata/1/cmvn.scp" && exit 1
[ ! -z "$cmvn_opts" -a "$online" == "false" ] && feats="$feats apply-cmvn $cmvn_opts --utt2spk=ark:$sdata/JOB/utt2spk scp:$sdata/JOB/cmvn.scp ark:- ark:- |"
[ ! -z "$cmvn_opts" -a "$online" == "true" ] && feats="$feats apply-cmvn-sliding $cmvn_opts ark:- ark:- |"
# add-deltas (optional),
[ ! -z "$delta_opts" ] && feats="$feats add-deltas $delta_opts ark:- ark:- |"
# skip-frames (optional),
[ ! -z "$skip_opts" ] && nnet_forward_opts="$nnet_forward_opts $skip_opts"
#
# Run the decoding in the queue,
if [ $stage -le 0 ]; then
  $cmd $parallel_opts JOB=1:$nj $dir/log/decode.JOB.log \
    nnet-forward-parallel $nnet_forward_opts --feature-transform=$feature_transform --class-frame-counts=$class_frame_counts --use-gpu=$use_gpu $nnet "$feats" ark:- \| \
    latgen-faster-mapped$thread_string --min-active=$min_active --max-active=$max_active --max-mem=$max_mem --beam=$beam \
    --lattice-beam=$lattice_beam --acoustic-scale=$acwt --allow-partial=true --word-symbol-table=$graphdir/words.txt \
    $model $graphdir/HCLG.fst ark:- "ark:|gzip -c > $dir/lat.JOB.gz" || exit 1;
fi
# Run the scoring
if ! $skip_scoring ; then
  [ ! -x local/score.sh ] && \
    echo "Not scoring because local/score.sh does not exist or not executable." && exit 1;
  local/score.sh $scoring_opts --cmd "$cmd" $data $graphdir $dir || exit 1;
fi
exit 0;
|
#!/bin/bash
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.icon images/devutils.png
# @raycast.title Cron Job Parser
# @raycast.mode silent
# @raycast.packageName DevUtils.app
# Documentation:
# @raycast.description Parse the cron job expression in clipboard (if it’s a valid cron expression)
# @raycast.author DevUtils.app
# @raycast.authorURL https://devutils.app

# Hand the clipboard contents to DevUtils' cron parser via its URL scheme.
open devutils://cronparser?clipboard
|
package wayang
import (
"context"
"fmt"
"log"
"os"
"github.com/go-rod/rod"
"github.com/go-rod/rod/lib/cdp"
"github.com/go-rod/rod/lib/defaults"
"github.com/go-rod/rod/lib/launcher"
"github.com/ysmood/kit"
)
// NewRemoteRunner builds a Runner on top of an existing CDP client: it
// connects a rod browser over the client, opens a blank page, and wires up
// a cancellable context plus a stdout logger.
func NewRemoteRunner(client *cdp.Client) *Runner {
	ctx, cancel := context.WithCancel(context.Background())
	browser := rod.New().Context(ctx, cancel).Client(client).Connect()
	page := browser.Page("")
	logger := log.New(os.Stdout, "", log.LstdFlags)
	return &Runner{
		B:         browser,
		P:         page,
		ENV:       map[string]interface{}{},
		Context:   ctx,
		Canceller: cancel,
		Logger:    logger,
		program:   Program{},
	}
}
// NewRunner builds a Runner honouring rod's `defaults` flags: it attaches
// to a remote browser when defaults.Remote is set (falling back to the
// standard local devtools endpoint), otherwise it launches a local browser.
func NewRunner() *Runner {
	u := defaults.URL
	if defaults.Remote {
		if u == "" {
			u = "ws://127.0.0.1:9222"
		}
		return NewRemoteRunner(launcher.NewRemote(u).Client())
	}
	if u == "" {
		var err error
		u, err = launcher.New().LaunchE()
		kit.E(err) // kit.E panics on a failed launch
	}
	return NewRemoteRunner(cdp.New(u))
}
// RunProgram executes every step of program in order and returns the value
// of the last executed action. Execution stops at the first step whose
// result is a RuntimeError, which is returned to the caller.
func (parent *Runner) RunProgram(program Program) (interface{}, *RuntimeError) {
	parent.program = program
	var res interface{}
	for i, action := range parent.program.Steps {
		// Tag errors with the index of the failing step, e.g. "root[3]".
		source := fmt.Sprintf("root[%d]", i)
		res = parent.runAction(action, source)
		if err, ok := res.(RuntimeError); ok {
			return nil, &err
		}
	}
	return res, nil
}
// RunProgram runs program on a freshly created Runner.
// NOTE(review): the Runner (and the browser it launches) is never closed
// here — possible resource leak; confirm whether callers reuse the browser.
func RunProgram(program Program) (interface{}, *RuntimeError) {
	return NewRunner().RunProgram(program)
}

// RunActions runs a list of actions as an anonymous program on a new Runner.
func RunActions(actions []Action) (interface{}, *RuntimeError) {
	return RunProgram(Program{
		Steps: actions,
	})
}

// RunActions runs a list of actions as an anonymous program on this Runner.
func (parent *Runner) RunActions(actions []Action) (interface{}, *RuntimeError) {
	return parent.RunProgram(Program{
		Steps: actions,
	})
}

// RunAction runs a single action as a one-step program on a new Runner.
func RunAction(action Action) (interface{}, *RuntimeError) {
	return RunProgram(Program{
		Steps: []Action{action},
	})
}

// RunAction runs a single action as a one-step program on this Runner.
func (parent *Runner) RunAction(action Action) (interface{}, *RuntimeError) {
	return parent.RunProgram(Program{
		Steps: []Action{action},
	})
}
// Close shuts down the browser and cancels the Runner's context.
func (parent *Runner) Close() {
	parent.B.Close()
	parent.Canceller()
}

// Info logs a message at info level in logfmt-like form.
func (parent *Runner) Info(message interface{}) {
	parent.Logger.Printf(`level=info msg=%v`, message)
}

// Error logs a message at error level.
// NOTE(review): uses %s while Info uses %v — for non-string messages the
// two methods format differently; confirm whether they should match.
func (parent *Runner) Error(message interface{}) {
	parent.Logger.Printf(`level=error msg=%s`, message)
}
// Action returns the action that produced this error.
func (re *RuntimeError) Action() Action {
	return re.action
}

// Source returns the program location tag of the failing step (e.g. "root[3]").
func (re *RuntimeError) Source() string {
	return re.source
}

// ErrorRaw returns the underlying error value without formatting.
func (re *RuntimeError) ErrorRaw() interface{} {
	return re.err
}

// Error formats the underlying error, satisfying the error interface.
func (re *RuntimeError) Error() string {
	return fmt.Sprintln(re.err)
}

// Dump renders the whole RuntimeError via kit.Sdump for debugging.
func (re *RuntimeError) Dump() string {
	return kit.Sdump(re)
}

// Log writes the underlying error to the owning Runner's logger.
func (re *RuntimeError) Log() {
	msg := kit.Sdump(re.err)
	re.parent.Logger.Printf(`level="error" msg="%s"`, msg)
}

// Print writes the underlying error to stdout.
func (re *RuntimeError) Print() {
	fmt.Println(kit.Sdump(re.err))
}

// Stack returns the captured stack trace as a string.
func (re *RuntimeError) Stack() string {
	return string(re.stack)
}

// LogStack writes the captured stack trace to the owning Runner's logger.
func (re *RuntimeError) LogStack() {
	re.parent.Logger.Printf(`level="error" msg="%s"`, string(re.stack))
}

// PrintStack writes the captured stack trace to stdout.
func (re *RuntimeError) PrintStack() {
	fmt.Println(string(re.stack))
}
|
#!/bin/bash
# PBS batch job: trains/evaluates a Siamese network on a GPU node.
# Copies code+data to node-local scratch, runs the tester, then copies
# results (graphs, model saves) back to permanent storage.
#PBS -q gpu
#PBS -l select=1:ncpus=1:mem=150gb:ngpus=1:scratch_local=80gb
#PBS -l walltime=14:00:00
DATADIR=/storage/brno6/home/apprehension
cd $DATADIR
# Load the software stack required by the training script.
module add python-3.6.2-gcc
module add python36-modules-gcc
module add tensorflow-1.13.1-gpu-python3
module add opencv-3.4.5-py36
module add cuda-10.0
module add cudnn-7.4.2-cuda10
# Stage inputs into node-local scratch for fast I/O.
cp -R $DATADIR/siamese_network_tester.py $DATADIR/Models $DATADIR/Data $DATADIR/DataHuge $SCRATCHDIR
cd $SCRATCHDIR
mkdir -p Graphs/{Losses,Accuracies,SiameseScores}
mkdir -p Model_Saves/{Detailed,Weights}
python siamese_network_tester.py -e 40 -m SiameseNetLF -t SE -f extended
# Copy all outputs back before the scratch area is reclaimed.
cp -vr $SCRATCHDIR/Graphs/Accuracies/* $DATADIR/Graphs/Accuracies/
cp -vr $SCRATCHDIR/Graphs/Losses/* $DATADIR/Graphs/Losses/
cp -vr $SCRATCHDIR/Graphs/SiameseScores/* $DATADIR/Graphs/SiameseScores/
cp -vr $SCRATCHDIR/Model_Saves/Detailed/* $DATADIR/Model_Saves/Detailed/
cp -vr $SCRATCHDIR/Model_Saves/Weights/* $DATADIR/Model_Saves/Weights/
clean_scratch
|
#!/bin/bash
# Bridges a local FIFO to a remote Triton inference server through a
# Singularity container, recording the container PID for later cleanup.
# Usage: $0 <pipe-name> <pid-file-name>  (both created under /dev/shm)
if [[ $# -ne 2 ]]
then
    echo "please specify a pipe name and a filename to store the container PID"
    # exit codes must be 0-255; use 1 instead of the non-portable -1
    exit 1
fi
#wait for server
fifo_location="/dev/shm/${1}"
server=dockerbuild.cern.ch
port=8001
containerpid_location="/dev/shm/${2}"
#wait for server to be available
check=`nc -vz ${server} 8001 2>&1 | grep "Connected to"`
while [[ ! $check ]]
do
    sleep 1
    check=`nc -vz ${server} 8001 2>&1 | grep "Connected to"`
done
#clean pipes
sys_rm=`which rm`
# Remove the FIFOs and the PID file; runs on startup and on every exit path.
function finish {
    $sys_rm -f $fifo_location "${fifo_location}_pred"
    $sys_rm -f $containerpid_location
}
finish
# NOTE: SIGKILL cannot be trapped, so it was removed from the trap list.
trap finish EXIT SIGHUP SIGTERM
echo "server found... connecting to triton"
sing=`which singularity`
unset PATH
cd
$sing run \
 -B/eos/home-j/jkiesele/singularity/triton/oc_client:/oc_client \
 /eos/home-j/jkiesele/singularity/triton/tritonserver_20.08-py3-clientsdk.sif \
 python /oc_client/triton_forward_client.py -u $server:$port -f $fifo_location -m hgcal_oc_reco &
echo $! > $containerpid_location
wait
finish
|
int num_jewels_in_stones(char *j, char *s);
|
#!/bin/sh
# Generates Tizen-specific keysym #defines and appends them to the
# xkbcommon keysym header, assigning sequential codes above BASE_KEYSYM
# to every key from the platform key-layout file that is not already
# defined in the header.
if [ "$TZ_SYS_RO_SHARE" = "" ]; then
	TZ_SYS_RO_SHARE="/usr/share"
fi
KEYMAP_FILE_PATH="${TZ_SYS_RO_SHARE}/X11/xkb/tizen_key_layout.txt"
BASE_KEYSYM="0x10090000"
TARGET_HEADER_FILE="./xkbcommon/tizen_keymap.h"
TEMP_TEXT_FILE="./temp_file.txt"
NEW_DEFINE_SYM_FILE="./new_define_sym.txt"
KEYMAP_HEADER_FILE="./xkbcommon/xkbcommon-keysyms.h"
cout=1
# Convert the hexadecimal base keysym to decimal for arithmetic below.
BASE_KEYSYM_DEC=`python -c "print int('$BASE_KEYSYM', 16)"`
if [ -e ${KEYMAP_FILE_PATH} ]
then
	echo "${TIZEN_PROFILE} have a key layout file: ${KEYMAP_FILE_PATH}"
else
	echo "${TIZEN_PROFILE} doesn't have a key layout file: ${KEYMAP_FILE_PATH}"
	exit
fi
echo "Generate a tizen keymap header file"
touch $NEW_DEFINE_SYM_FILE
# Pass 1: collect every key from the layout file whose keysym is neither
# already defined in the header nor already queued for definition.
while read KEYNAME KEYCODE
do
	KEYSYM="XKB_KEY_${KEYNAME}"
	grep -rn "${KEYSYM}" $KEYMAP_HEADER_FILE > $TEMP_TEXT_FILE
	FINDED_DEFINE=`cat temp_file.txt | awk '{print $2}'`
	BOOL_FOUND_SYM=false
	for SEARCH_SYM in ${FINDED_DEFINE}
	do
		if [ "$SEARCH_SYM" = "$KEYSYM" ]
		then
			BOOL_FOUND_SYM=true
			break
		fi
	done
	BOOL_DUPLICATED_SYM=false
	if [ "$BOOL_FOUND_SYM" = false ]
	then
		while read KEYSYM_NEW
		do
			if [ "$KEYSYM_NEW" = "$KEYSYM" ]
			then
				BOOL_DUPLICATED_SYM=true
				break
			fi
		done < ${NEW_DEFINE_SYM_FILE}
		if [ "$BOOL_DUPLICATED_SYM" = false ]
		then
			echo "${KEYSYM}" >> $NEW_DEFINE_SYM_FILE
		fi
	fi
done < ${KEYMAP_FILE_PATH}
# Reopen the header's include guard so new defines land inside it.
sed -i '$s/#endif//g' ${KEYMAP_HEADER_FILE}
echo "/**************************************************************" >> ${KEYMAP_HEADER_FILE}
echo " * These keys defined for tizen platform." >> ${KEYMAP_HEADER_FILE}
echo " * Key symbols are defined by keymap builder." >> ${KEYMAP_HEADER_FILE}
echo " */" >> ${KEYMAP_HEADER_FILE}
# Pass 2: emit one #define per collected keysym with sequential codes.
while read KEYNAME
do
	KEYSYM_DEC=$(echo $BASE_KEYSYM_DEC $cout | awk '{print $1 + $2}')
	KEYSYM=$(printf "%x" $KEYSYM_DEC)
	echo -en "#define ${KEYNAME}\t\t0x$KEYSYM\n" >> ${KEYMAP_HEADER_FILE}
	cout=$(echo $cout 1 | awk '{print $1 + $2}')
done < ${NEW_DEFINE_SYM_FILE}
echo "" >> ${KEYMAP_HEADER_FILE}
echo "" >> ${KEYMAP_HEADER_FILE}
# Close the include guard we reopened above.
echo "#endif" >> ${KEYMAP_HEADER_FILE}
rm $NEW_DEFINE_SYM_FILE
rm $TEMP_TEXT_FILE
|
//chrome.exe -enable-file-cookies
function createCookie(name, value, exdays)
{
    // Creates or updates a cookie. A null/undefined `exdays` produces a
    // session cookie; otherwise the cookie expires `exdays` days from now.
    var exdate = new Date();
    exdate.setDate(exdate.getDate() + exdays);
    var c_value = escape(value) +
    ((exdays == null) ? "" : "; expires=" + exdate.toUTCString());
    document.cookie = name + "=" + c_value;
}
function readCookie(name) {
    // Looks up a cookie by name in document.cookie; returns its decoded
    // value, or null when no cookie with that name exists.
    var entries = document.cookie.split(";");
    for (var idx = 0; idx < entries.length; idx++)
    {
        var entry = entries[idx];
        var sep = entry.indexOf("=");
        var key = entry.substr(0, sep).replace(/^\s+|\s+$/g, "");
        if (key == name)
        {
            return unescape(entry.substr(sep + 1));
        }
    }
    return null;
}
function initNotSetCookieZero(name) {
    // Returns the cookie's value, or "0" when the cookie is unset or empty.
    // FIX: reads the cookie once instead of up to three times as before.
    var value = readCookie(name);
    if (value === "" || value === null) {
        return "0";
    }
    return value;
}
|
#!/bin/sh
# CI smoke test: deploys Polyaxon to a local Kubernetes cluster via Helm,
# waits for the API to respond, then drives the CLI end-to-end
# (login, project creation, and starting an experiment).
set -eux
IP=$(ifconfig eth0 | grep 'inet addr' | cut -d: -f2 | awk '{print $1}')
TEST_NAMESPACE=polyaxon
TEST_URL=http://$IP:31811
kubectl create namespace $TEST_NAMESPACE
helm install --name polyaxon-test --namespace $TEST_NAMESPACE polyaxon/polyaxon -f ./ci/test-config.yml
echo "waiting for servers to become responsive"
# Poll the versions endpoint until the API answers; dump pod state while waiting.
until curl --fail -s $TEST_URL/api/v1/versions/cli/; do
    kubectl --namespace=$TEST_NAMESPACE get pod
    sleep 10
done
echo "getting polyaxon version"
curl -s $TEST_URL/api/v1/versions/cli/ | grep version
# cd to the example directory
cd ..
# Each CLI step greps for the expected success marker so set -e fails the build.
echo "configure cli"
polyaxon config set --host=$IP --http_port=31811 | grep updated
echo "cli login"
polyaxon login -u travis -p travis | grep success
echo "cli create project"
polyaxon project create --name=travis-test --description='Travis testing' | grep success
echo "cli init project"
polyaxon project -p travis-test git --url=https://github.com/polyaxon/polyaxon-quick-start | grep success
echo "cli start experiment"
polyaxon run -p travis-test -f ./polyaxon-quick-start/polyaxonfile.yml | grep created
|
<gh_stars>0
/*
* Copyright (c) 2015, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.core.format.read.bytes;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.NodeChild;
import com.oracle.truffle.api.dsl.NodeChildren;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.object.DynamicObject;
import org.jcodings.specific.ASCIIEncoding;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.core.Layouts;
import org.jruby.truffle.core.format.FormatNode;
import org.jruby.truffle.core.format.read.SourceNode;
import org.jruby.truffle.core.rope.AsciiOnlyLeafRope;
import java.util.Arrays;
@NodeChildren({
        @NodeChild(value = "source", type = SourceNode.class),
})
/**
 * Reads a binary string from the unpack source buffer.
 *
 * The flag combination selects how many bytes are consumed and how the
 * result is trimmed (presumably mirroring Ruby's String#unpack 'a'/'A'/'Z'
 * directives — confirm against the node factory that builds these).
 */
public abstract class ReadBinaryStringNode extends FormatNode {
    final boolean readToEnd;          // consume the rest of the buffer
    final boolean readToNull;         // stop at (and consume) the first NUL byte
    final int count;                  // fixed byte count when not reading to end
    final boolean trimTrailingSpaces; // strip trailing ' ' from the result
    final boolean trimTrailingNulls;  // strip trailing NUL bytes from the result
    final boolean trimToFirstNull;    // cut the result at its first NUL byte
    public ReadBinaryStringNode(RubyContext context, boolean readToEnd, boolean readToNull, int count,
                                boolean trimTrailingSpaces, boolean trimTrailingNulls, boolean trimToFirstNull) {
        super(context);
        this.readToEnd = readToEnd;
        this.readToNull = readToNull;
        this.count = count;
        this.trimTrailingSpaces = trimTrailingSpaces;
        this.trimTrailingNulls = trimTrailingNulls;
        this.trimToFirstNull = trimToFirstNull;
    }
    @Specialization(guards = "isNull(source)")
    public void read(VirtualFrame frame, Object source) {
        CompilerDirectives.transferToInterpreter();
        // Advance will handle the error
        advanceSourcePosition(frame, count);
        throw new IllegalStateException();
    }
    @Specialization
    public DynamicObject read(VirtualFrame frame, byte[] source) {
        final int start = getSourcePosition(frame);
        // `length` is the number of bytes consumed from the source;
        // `usedLength` (below) is how many of them end up in the result.
        int length;
        if (readToEnd) {
            // Consume to the end of the buffer, or through the first NUL
            // when readToNull is also set.
            length = 0;
            while (start + length < getSourceLength(frame)
                    && (!readToNull || (start + length < getSourceLength(frame) && source[start + length] != 0))) {
                length++;
            }
            if (start + length < getSourceLength(frame) && source[start + length] == 0) {
                length++;
            }
        } else if (readToNull) {
            // Consume up to `count` bytes, stopping after a NUL if one appears.
            length = 0;
            while (start + length < getSourceLength(frame)
                    && length < count
                    && (!readToNull || (start + length < getSourceLength(frame) && source[start + length] != 0))) {
                length++;
            }
            if (start + length < getSourceLength(frame) && source[start + length] == 0) {
                length++;
            }
        } else {
            // Fixed-width read, clamped to the remaining buffer.
            length = count;
            if (start + length >= getSourceLength(frame)) {
                length = getSourceLength(frame) - start;
            }
        }
        int usedLength = length;
        // Strip trailing padding (spaces and/or NULs) as configured.
        while (usedLength > 0 && ((trimTrailingSpaces && source[start + usedLength - 1] == ' ')
                || (trimTrailingNulls && source[start + usedLength - 1] == 0))) {
            usedLength--;
        }
        if (trimToFirstNull) {
            final int firstNull = indexOfFirstNull(source, start, usedLength);
            if (firstNull != -1 && trimTrailingNulls) {
                usedLength = firstNull;
            }
        }
        setSourcePosition(frame, start + length);
        return Layouts.STRING.createString(getContext().getCoreLibrary().getStringFactory(),
                new AsciiOnlyLeafRope(Arrays.copyOfRange(source, start, start + usedLength), ASCIIEncoding.INSTANCE));
    }
    // Returns the offset of the first NUL within [start, start+length), or -1.
    private int indexOfFirstNull(byte[] bytes, int start, int length) {
        for (int n = 0; n < length; n++) {
            if (bytes[start + n] == 0) {
                return n;
            }
        }
        return -1;
    }
}
|
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.testing;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import com.google.cloud.dataflow.sdk.runners.worker.logging.DataflowWorkerLoggingFormatter;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests for {@link RestoreDataflowLoggingFormatter}: verifies that the rule
 * clears the formatter's static job/worker IDs between test methods.
 */
@RunWith(JUnit4.class)
public class RestoreDataflowLoggingFormatterTest {
    @Rule public TestRule restoreDataflowLoggingFormatter = new RestoreDataflowLoggingFormatter();
    /*
     * Since these tests can run out of order, both test A and B verify that they
     * could insert their property and that the other does not exist.
     */
    @Test
    public void testLoggingParamsClearedA() {
        DataflowWorkerLoggingFormatter.setJobId("job");
        assertNotNull(DataflowWorkerLoggingFormatter.getJobId());
        assertNull(DataflowWorkerLoggingFormatter.getWorkerId());
    }
    @Test
    public void testLoggingParamsClearedB() {
        DataflowWorkerLoggingFormatter.setWorkerId("worker");
        assertNotNull(DataflowWorkerLoggingFormatter.getWorkerId());
        assertNull(DataflowWorkerLoggingFormatter.getJobId());
    }
}
|
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
// ------------------------------------------------------------
package kubernetes
import (
"errors"
kubeclient "github.com/dapr/components-contrib/authentication/kubernetes"
"github.com/dapr/components-contrib/secretstores"
"github.com/dapr/dapr/pkg/logger"
meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// kubernetesSecretStore fetches secrets from the Kubernetes API server.
type kubernetesSecretStore struct {
	kubeClient kubernetes.Interface
	logger     logger.Logger
}
// NewKubernetesSecretStore returns a new Kubernetes secret store.
// The client is not created here; callers must invoke Init first.
func NewKubernetesSecretStore(logger logger.Logger) secretstores.SecretStore {
	return &kubernetesSecretStore{logger: logger}
}
// Init creates a Kubernetes client.
// The metadata argument is currently unused; the client is configured
// entirely from the ambient cluster credentials via kubeclient.
func (k *kubernetesSecretStore) Init(metadata secretstores.Metadata) error {
	client, err := kubeclient.GetKubeClient()
	if err != nil {
		return err
	}
	k.kubeClient = client
	return nil
}
// GetSecret retrieves a secret using a key and returns a map of decrypted string/string values
func (k *kubernetesSecretStore) GetSecret(req secretstores.GetSecretRequest) (secretstores.GetSecretResponse, error) {
	resp := secretstores.GetSecretResponse{Data: map[string]string{}}

	namespace, err := k.getNamespaceFromMetadata(req.Metadata)
	if err != nil {
		return resp, err
	}

	secret, err := k.kubeClient.CoreV1().Secrets(namespace).Get(req.Name, meta_v1.GetOptions{})
	if err != nil {
		return resp, err
	}

	// Secret values are []byte; expose them to callers as plain strings.
	// (Loop variables renamed to avoid shadowing the receiver `k`.)
	for name, value := range secret.Data {
		resp.Data[name] = string(value)
	}
	return resp, nil
}
// getNamespaceFromMetadata extracts the non-empty "namespace" entry from the
// request metadata, or reports an error when it is absent or blank.
func (k *kubernetesSecretStore) getNamespaceFromMetadata(metadata map[string]string) (string, error) {
	ns := metadata["namespace"]
	if ns == "" {
		return "", errors.New("namespace is missing on metadata")
	}
	return ns, nil
}
|
#!/bin/bash
# file: /usr/local/cmassoc/bin/update-init.sh
# Published 2005 by Charles Maier Associates Limited for internal use;
# ====================================================================
# Copy the system init scripts into each build tree's etc/init.d folder.
# --------------------------------------------------------------------
for folder in /home/build/{linux,cmassoc-sysvinit-[0-9][.][0-9][.][0-9]}/etc/init.d; do
	echo "${folder}"
	# Quote the destination so unusual paths cannot be word-split.
	cp /etc/init.d/* "${folder}"
done
# ====================================================================
#
# --------------------------------------------------------------------
exit 0
|
// Barrel module: re-exports the screen components so consumers can import
// them from this single index path.
import YoutubeScreen from './YoutubeScreen';
import News from './News';
export {
  YoutubeScreen,
  News,
}
|
<gh_stars>0
/* Precomputed dSFMT parameter set for period 2^132049 - 1.
 * These constants (pickup position, shift, masks, fix-up and parity-check
 * values) are generated for the dSFMT generator and must not be edited by
 * hand; the DSFMT_IDSTR below encodes the parameter set. */
#ifndef DSFMT_PARAMS132049_H
#define DSFMT_PARAMS132049_H
/* #define DSFMT_N 1269 */
/* #define DSFMT_MAXDEGREE 132104 */
#define DSFMT_POS1 371
#define DSFMT_SL1 23
#define DSFMT_MSK1 UINT64_C(0x000fb9f4eff4bf77)
#define DSFMT_MSK2 UINT64_C(0x000fffffbfefff37)
#define DSFMT_MSK32_1 0x000fb9f4U
#define DSFMT_MSK32_2 0xeff4bf77U
#define DSFMT_MSK32_3 0x000fffffU
#define DSFMT_MSK32_4 0xbfefff37U
#define DSFMT_FIX1 UINT64_C(0x4ce24c0e4e234f3b)
#define DSFMT_FIX2 UINT64_C(0x62612409b5665c2d)
#define DSFMT_PCV1 UINT64_C(0x181232889145d000)
#define DSFMT_PCV2 UINT64_C(0x0000000000000001)
#define DSFMT_IDSTR "dSFMT2-132049:371-23:fb9f4eff4bf77-fffffbfefff37"
/* PARAMETERS FOR ALTIVEC */
#if defined(__APPLE__) /* For OSX */
#define ALTI_SL1 (vector unsigned int)(7, 7, 7, 7)
#define ALTI_SL1_PERM \
(vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1)
#define ALTI_SL1_MSK \
(vector unsigned int)(0xffffffffU,0xff800000U,0xffffffffU,0xff800000U)
#define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \
DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4)
#else /* For OTHER OSs(Linux?) */
#define ALTI_SL1 {7, 7, 7, 7}
#define ALTI_SL1_PERM \
{2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1}
#define ALTI_SL1_MSK \
{0xffffffffU,0xff800000U,0xffffffffU,0xff800000U}
#define ALTI_MSK \
{DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4}
#endif
#endif /* DSFMT_PARAMS132049_H */
|
import re
def fix_spelling(paragraph):
    """Replace every lowercase vowel in each word with the literal text "[aeiou]".

    NOTE(review): despite the name, this does not correct spelling — the
    replacement argument to re.sub is a plain string, so each vowel becomes
    the bracketed text itself. Behavior intentionally preserved here.
    """
    corrected = [re.sub(r'[aeiou]', r'[aeiou]', word) for word in paragraph.split()]
    return " ".join(corrected)
print(fix_spelling("My favarite flowers are rose and tulples"))
|
public class EntityRoleParser {
    /**
     * Gets (as xml) the "EntityRole" element of the given XML object and
     * parses its text content into the EntityRole enumeration.
     *
     * @param target the XML object to query for an "EntityRole" child element
     * @return the parsed enumeration value, or null when the element is absent
     */
    public com.microsoft.schemas.xrm._2011.contracts.EntityRole.Enum parseEntityRoleElement(org.apache.xmlbeans.XmlObject target) {
        // selectPath returns an array of matches; the original declared a
        // scalar XmlObject and called .length()/[0] on it, which does not compile.
        org.apache.xmlbeans.XmlObject[] entityRoleElements = target.selectPath("EntityRole");
        if (entityRoleElements != null && entityRoleElements.length > 0) {
            // The element's text is exposed through the SimpleValue interface.
            String entityRoleValue = ((org.apache.xmlbeans.SimpleValue) entityRoleElements[0]).getStringValue();
            return com.microsoft.schemas.xrm._2011.contracts.EntityRole.Enum.forString(entityRoleValue);
        }
        // No "EntityRole" element found.
        return null;
    }
}
|
class LidarProcessor:
    """Orients an object toward the direction of the closest lidar return.

    Attributes:
        lidar_list: ordered sequence of range readings across the scan.
        object: target whose transform is mutated; must expose
            ``transform.translation`` and ``transform.rotation`` fields.
    """

    def __init__(self, lidar_list, object):
        # `object` shadows the builtin; name kept for interface compatibility.
        self.lidar_list = lidar_list
        self.object = object

    def process_lidar_data(self):
        """Zero translation y/z and rotation x/y, then point rotation z at the
        minimum lidar reading; returns the computed angle in degrees.

        Index-to-angle mapping assumes a 62.5-degree scan over 2*638 samples
        centred on 0 (so indices map to [-31.25, +31.25] degrees) —
        TODO confirm against the sensor configuration.
        """
        self.object.transform.translation.y = 0.0
        self.object.transform.translation.z = 0.0
        self.object.transform.rotation.x = 0.0
        self.object.transform.rotation.y = 0.0
        # BUG FIX: the original expression was missing a closing parenthesis
        # (SyntaxError); split into steps for clarity.
        min_index = self.lidar_list.index(min(self.lidar_list))
        angle = int(-round(min_index * 62.5 / (2 * 638) - 31.25))
        self.object.transform.rotation.z = angle
        return angle
|
package analyzer_test
import (
"os"
"path/filepath"
"testing"
"github.com/egtann/exhaustivestruct/pkg/analyzer"
"golang.org/x/tools/go/analysis/analysistest"
)
// TestAll runs the exhaustivestruct analyzer against the shared testdata tree
// (two directories above the working directory) for package "s".
func TestAll(t *testing.T) {
	wd, err := os.Getwd()
	if err != nil {
		t.Fatalf("Failed to get wd: %s", err)
	}
	testdata := filepath.Join(filepath.Dir(filepath.Dir(wd)), "testdata")
	// Restrict the analyzer to these struct name patterns before running.
	analyzer.StructPatternList = "*.Test,*.Test2,*.Embedded,*.External"
	analysistest.Run(t, testdata, analyzer.Analyzer, "s")
}
|
/**
 * Prints the first {@code count} Fibonacci numbers, space-separated,
 * starting with "0 1".
 */
public static void main(String args[]) {
    // BUG FIX: the original also declared `i` here, which collides with the
    // `int i` in the for-loop header — a duplicate-variable compile error.
    int n1 = 0, n2 = 1, n3, count = 10;
    System.out.print(n1 + " " + n2);
    for (int i = 2; i < count; ++i) {
        n3 = n1 + n2;
        System.out.print(" " + n3);
        n1 = n2;
        n2 = n3;
    }
}
|
// Doxygen-generated navigation metadata for armnn::NeonStackWorkload:
// [ display name, target page anchor, children ] triples. Do not edit by hand.
var classarmnn_1_1_neon_stack_workload =
[
    [ "NeonStackWorkload", "classarmnn_1_1_neon_stack_workload.xhtml#aeb65cb0556b7a21b06f8bc9f025be5c5", null ],
    [ "Execute", "classarmnn_1_1_neon_stack_workload.xhtml#ae071e8822437c78baea75c3aef3a263a", null ]
];
|
<gh_stars>0
package thelm.rslargepatterns;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.Mod.EventHandler;
import net.minecraftforge.fml.common.Mod.Instance;
import net.minecraftforge.fml.common.ModMetadata;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import thelm.rslargepatterns.item.ItemLargePattern;
import thelm.rslargepatterns.proxy.CommonProxy;
@Mod(
    modid = RSLargePatterns.MOD_ID,
    name = RSLargePatterns.NAME,
    version = RSLargePatterns.VERSION,
    dependencies = RSLargePatterns.DEPENDENCIES
)
/**
 * Main mod entry point for "Refined Storage Large Patterns".
 * Forge discovers this class through the {@code @Mod} annotation and calls the
 * {@code @EventHandler} methods during the pre-init and init lifecycle phases.
 */
public class RSLargePatterns {
    public static final String MOD_ID = "rslargepatterns";
    public static final String NAME = "Refined Storage Large Patterns";
    public static final String VERSION = "1.12.2-1.0.0.0";
    public static final String DEPENDENCIES = "";
    public static final String GUI_FACTORY = "thelm.rslargepatterns.client.gui.GuiRSLargePatternsConfigFactory";
    // Creative tab shown in-game; the icon is client-only.
    public static final CreativeTabs CREATIVE_TAB = new CreativeTabs("rslargepatterns") {
        @SideOnly(Side.CLIENT)
        @Override
        public ItemStack createIcon() {
            return new ItemStack(ItemLargePattern.INSTANCE);
        }
    };
    @Instance
    public static RSLargePatterns instance;
    @SidedProxy(clientSide = "thelm.rslargepatterns.proxy.ClientProxy", serverSide = "thelm.rslargepatterns.proxy.CommonProxy", modId = RSLargePatterns.MOD_ID)
    public static CommonProxy proxy;
    public static ModMetadata metadata;
    @EventHandler
    public void firstMovement(FMLPreInitializationEvent event) {
        metadata = event.getModMetadata();
        metadata.autogenerated = false;
        metadata.version = VERSION;
        metadata.authorList.add("TheLMiffy1111");
        // FIX: grammar in the user-visible mod description ("An" -> "A").
        metadata.description = "A Refined Storage addon that adds patterns with more than 9 inputs.";
        proxy.register(event);
    }
    @EventHandler
    public void secondMovement(FMLInitializationEvent event) {
        proxy.register(event);
    }
}
|
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package header
import (
"fmt"
"sort"
"strings"
)
var (
// headerAcceptEncoding is the header key for the Accept-Encoding header.
headerAcceptEncoding = "Accept-Encoding"
// DefaultAcceptEncoding is an Accept-Encoding header value with a
// single content coding of "*".
DefaultAcceptEncoding = AcceptEncoding([]ContentCodingRange{defaultContentCodingRange})
// EmptyAcceptEncoding is an empty Accept-Encoding header.
EmptyAcceptEncoding = AcceptEncoding([]ContentCodingRange{})
)
// AcceptEncoding represents the Accept-Encoding header.
//
// The "Accept-Encoding" header field can be used by user agents to
// indicate what response content-codings (Section 3.1.2.1) are
// acceptable in the response.
type AcceptEncoding []ContentCodingRange
// NewAcceptEncoding constructs an Accept-Encoding header from the provided
// content coding strings. An empty input yields EmptyAcceptEncoding; any
// unparsable coding aborts construction with the parse error.
func NewAcceptEncoding(acceptEncoding []string) (AcceptEncoding, error) {
	if len(acceptEncoding) == 0 {
		return EmptyAcceptEncoding, nil
	}
	ranges := make([]ContentCodingRange, 0, len(acceptEncoding))
	for _, raw := range acceptEncoding {
		parsed, err := NewContentCodingRange(raw)
		if err != nil {
			return EmptyAcceptEncoding, err
		}
		ranges = append(ranges, parsed)
	}
	return AcceptEncoding(ranges), nil
}
// CodingRanges provides the content codings sorted on preference from highest
// preference to lowest.
//
// NOTE(review): the sort happens in place, so calling this mutates the
// receiver's element order; sort.Slice is also not stable, so codings with
// equal quality values may appear in any relative order.
func (e AcceptEncoding) CodingRanges() []ContentCodingRange {
	sort.Slice(e, func(first, second int) bool {
		f := e[first]
		s := e[second]
		return f.QualityValue().GreaterThan(s.QualityValue())
	})
	return e
}
// IsEmpty indicates if the Accept-Encoding header is empty.
func (e AcceptEncoding) IsEmpty() bool {
	// EmptyAcceptEncoding has length zero, so a direct length check suffices.
	return len(e) == 0
}
// String provides a textual representation of the Accept-Encoding header,
// e.g. "Accept-Encoding: gzip,identity".
func (e AcceptEncoding) String() string {
	codings := make([]string, len(e))
	for i, c := range e {
		codings[i] = c.String()
	}
	return fmt.Sprintf("%s: %s", headerAcceptEncoding, strings.Join(codings, ","))
}
|
<reponame>PiotrSikora/test-infra
// Copyright 2018 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sisyphus
import (
"context"
"encoding/json"
"io/ioutil"
"log"
"os"
"path/filepath"
"reflect"
"strconv"
"testing"
"time"
)
const (
prowProjectMock = "prow-project-mock"
prowZoneMock = "us-west1-a"
gubernatorURLMock = "https://k8s-gubernator.appspot.com/build/mock"
gcsBucketMock = "gcs-bucket-mock"
testDataDir = "test_data"
expectedFlakeStatsJSON = "expectedFlakeStats.json"
)
var (
protectedJobsMock = []string{"job-1" /*, "job-2", "job-3"*/}
)
// ProwAccessorMock replays canned Prow results loaded from test_data JSON files.
type ProwAccessorMock struct {
	lastestRunNos map[string]int // jobName -> lastestRun
	maxRunNos     map[string]int // jobName -> maxRunNo
	gubernatorURL string
	prowResults   map[string]map[int]ProwResult // jobName -> runNo -> ProwResult
	// cancelSisyphusd stops the daemon once every mocked run has been consumed.
	cancelSisyphusd context.CancelFunc
}
// NewProwAccessorMock loads the per-job JSON fixtures from test_data and
// returns a mock whose run cursor starts at each job's lowest run number.
func NewProwAccessorMock(gubernatorURL string) *ProwAccessorMock {
	mock := &ProwAccessorMock{
		lastestRunNos:   make(map[string]int),
		maxRunNos:       make(map[string]int),
		gubernatorURL:   gubernatorURL,
		prowResults:     make(map[string]map[int]ProwResult),
		cancelSisyphusd: func() {},
	}
	for _, job := range protectedJobsMock {
		dataFile := filepath.Join(testDataDir, job+".json")
		raw, err := ioutil.ReadFile(dataFile)
		if err != nil {
			log.Fatalf("Error reading %s:%v", dataFile, err)
		}
		// intermediate step since json does not support integer as key
		prowResultsForOneJobTmp := map[string]ProwResult{}
		if err = json.Unmarshal([]byte(raw), &prowResultsForOneJobTmp); err != nil {
			log.Fatalf("Failed to unmarshal test data for %s: %v", dataFile, err)
		}
		// Re-key by integer run number while tracking the min/max runs so
		// GetLatestRun can start at the first fixture and stop at the last.
		prowResultsForOneJob := map[int]ProwResult{}
		minRunNo := 999999
		maxRunNo := 0
		for k, v := range prowResultsForOneJobTmp {
			i, err := strconv.Atoi(k)
			if err != nil {
				log.Fatalf("Error converting %s to int :%v", k, err)
			}
			if i > maxRunNo {
				maxRunNo = i
			}
			if i < minRunNo {
				minRunNo = i
			}
			prowResultsForOneJob[i] = v
		}
		mock.prowResults[job] = prowResultsForOneJob
		mock.lastestRunNos[job] = minRunNo
		mock.maxRunNos[job] = maxRunNo
	}
	return mock
}
// GetLatestRun returns the next run number for jobName, advancing an internal
// cursor on each call. Once the cursor passes the job's maximum fixture it
// keeps returning that maximum, and when every job has been fully consumed it
// cancels the daemon so the test terminates.
func (p *ProwAccessorMock) GetLatestRun(jobName string) (int, error) {
	ret := p.lastestRunNos[jobName]
	p.lastestRunNos[jobName]++
	if ret > p.maxRunNos[jobName] {
		allRunsFinished := true
		for j, lastestRun := range p.lastestRunNos {
			if lastestRun <= p.maxRunNos[j] { // equal sign to ensure the last run is seen before termination
				allRunsFinished = false
			}
		}
		if allRunsFinished {
			p.cancelSisyphusd()
		}
		return p.maxRunNos[jobName], nil
	}
	return ret, nil
}
// GetResult returns the canned mock result recorded for the given job/run.
func (p *ProwAccessorMock) GetResult(jobName string, runNo int) (*Result, error) {
	prowResult := p.prowResults[jobName][runNo]
	result := &Result{
		Passed: prowResult.Passed,
		SHA:    prowResult.Metadata.RepoCommit,
	}
	return result, nil
}
// GetDetailsURL is a stub; the mock has no per-run detail pages.
func (p *ProwAccessorMock) GetDetailsURL(jobName string, runNo int) string {
	return ""
}
// Rerun is a stub; triggering reruns is a no-op in the mock.
func (p *ProwAccessorMock) Rerun(jobName string, runNo int) error {
	return nil
}
// StorageMock verifies that stored flake stats match the expected fixtures.
type StorageMock struct {
	t             *testing.T
	expectedStats []FlakeStat // loaded from expectedFlakeStats.json
}
// NewStorageMock loads the expected flake stats fixture and returns a mock
// storage that asserts against it on every Store call.
func NewStorageMock(t *testing.T) *StorageMock {
	dataFile := filepath.Join(testDataDir, expectedFlakeStatsJSON)
	raw, err := ioutil.ReadFile(dataFile)
	if err != nil {
		log.Fatalf("Error reading %s:%v", dataFile, err)
	}
	var expectedStats []FlakeStat
	if err = json.Unmarshal([]byte(raw), &expectedStats); err != nil {
		log.Fatalf("Failed to unmarshal test data for %s: %v", dataFile, err)
	}
	return &StorageMock{
		t:             t,
		expectedStats: expectedStats,
	}
}
// Store compares newFlakeStat against the fixture entry matching jobName/sha
// and fails the test on any mismatch or if no matching fixture entry exists.
func (s *StorageMock) Store(jobName, sha string, newFlakeStat FlakeStat) error {
	for _, expectedStat := range s.expectedStats {
		if expectedStat.TestName == jobName && expectedStat.SHA == sha {
			if !reflect.DeepEqual(expectedStat, newFlakeStat) {
				s.t.Errorf("Expecting %v but got %v", expectedStat, newFlakeStat)
			}
			return nil
		}
	}
	s.t.Errorf("No matching expectedStat found given jobName = %s, sha = %s", jobName, sha)
	return nil
}
// fakeClient is a no-op storage client used to satisfy the daemon's client interface.
type fakeClient struct{}
// Read always succeeds with the fixed payload "data".
func (f fakeClient) Read(obj string) (string, error) {
	return "data", nil
}
// Write discards the payload and always succeeds.
func (f fakeClient) Write(obj, txt string) error {
	return nil
}
// Exists always reports the object as absent.
func (f fakeClient) Exists(obj string) (bool, error) {
	return false, nil
}
// TestDaemonConfig checks that constructing a daemon fills config defaults
// (PollGapDuration) while preserving the caller-supplied settings.
func TestDaemonConfig(t *testing.T) {
	catchFlakesByRun := true
	cfg := &Config{
		CatchFlakesByRun: catchFlakesByRun,
		NumRerun:         3,
	}
	cfgExpected := &Config{
		CatchFlakesByRun: catchFlakesByRun,
		PollGapDuration:  DefaultPollGapDuration,
		NumRerun:         3,
	}
	presubmitJobs := []string{}
	sisyphusd := NewDaemonUsingProw(
		protectedJobsMock,
		presubmitJobs,
		prowProjectMock, prowZoneMock, gubernatorURLMock,
		gcsBucketMock,
		fakeClient{},
		NewStorageMock(t), cfg)
	if !reflect.DeepEqual(sisyphusd.GetConfig(), cfgExpected) {
		t.Error("setting catchFlakesByRun failed")
	}
}
// TestProwResultsMock verifies the mock accessor's fixture data and the
// advancing/clamping behavior of GetLatestRun.
func TestProwResultsMock(t *testing.T) {
	job := "job-1"
	prowAccessorMock := NewProwAccessorMock(gubernatorURLMock)
	res, err := prowAccessorMock.GetResult(job, 10)
	if err != nil {
		t.Errorf("GetProwResult failed: %v", err)
	}
	if res.SHA != "sha-1" {
		t.Error("RepoCommit unmatched with data in file")
	}
	expectedBase := 10
	// FIX: corrected the "frist" typo in the failure message.
	if runNo, _ := prowAccessorMock.GetLatestRun(job); runNo != expectedBase {
		t.Errorf("Expecting first call to GetLatestRun to return %d but got %d", expectedBase, runNo)
	}
	if runNo, _ := prowAccessorMock.GetLatestRun(job); runNo != expectedBase+1 {
		t.Errorf("Expecting second call to GetLatestRun to return %d but got %d", expectedBase+1, runNo)
	}
	// Exhaust the fixtures; further calls must clamp at the maximum run.
	for i := 0; i < 100; i++ {
		prowAccessorMock.GetLatestRun(job)
	}
	expectedMax := 15
	if runNo, _ := prowAccessorMock.GetLatestRun(job); runNo != expectedMax {
		t.Errorf("Expecting call to GetLatestRun to not exceed %d but got %d", expectedMax, runNo)
	}
}
// TestRerunLogics runs the daemon against the mock accessor; the mock cancels
// the daemon's context once every fixture run has been consumed, so Start
// returning at all is the success condition (Store assertions run meanwhile).
func TestRerunLogics(t *testing.T) {
	sisyphusd := newDaemon(
		protectedJobsMock,
		&Config{
			CatchFlakesByRun: true,
			PollGapDuration:  100 * time.Millisecond,
		},
		NewStorageMock(t))
	prowAccessorMock := NewProwAccessorMock(gubernatorURLMock)
	ctx, cancelFn := context.WithCancel(context.Background())
	// Let the mock terminate the daemon when its data is exhausted.
	prowAccessorMock.cancelSisyphusd = cancelFn
	sisyphusd.ci = prowAccessorMock
	sisyphusd.Start(ctx)
}
// TestMain enables timestamp+file:line log prefixes before running the suite.
func TestMain(m *testing.M) {
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	os.Exit(m.Run())
}
|
#ifdef __TIZENRT__
#include <tinyara/config.h>
#include <tinyara/gpio.h>
#endif
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include "ocf_mylight.h"
/* One OCF binary-switch resource mapped to a physical GPIO line. */
struct light_resource {
	OCResourceHandle handle;
	bool value;  /* current on/off state */
	char *uri;   /* resource URI, e.g. "/a/light/0" */
	int gpio;    /* GPIO port number driving this light */
};
/* Static table of the lights exposed by this device. */
static struct light_resource _light[] = {
	{
		.handle = NULL,
		.value = false,
		.uri = "/a/light/0",
		.gpio = 45
	}, {
		.handle = NULL,
		.value = false,
		.uri = "/a/light/1",
		.gpio = 49
	}
};
/* Drive GPIO `port` to `value` (0 or 1).
 * On the ARTIK053 board this writes through the /dev/gpioN character device;
 * on other boards it only logs the request.
 * NOTE(review): ioctl() is called but <sys/ioctl.h> is not among the visible
 * includes — presumably provided by a platform header; confirm. */
static void gpio_write(int port, int value)
{
#ifdef CONFIG_ARCH_BOARD_ARTIK053
	char tmp[20];
	int fd;
	snprintf(tmp, 16, "/dev/gpio%d", port);
	fd = open(tmp, O_RDWR);
	if (fd < 0)
	{
		DBG("open(%s) failed.", tmp);
		return;
	}
	MSG("GPIO: '%s' value '%d'", tmp, value);
	ioctl(fd, GPIOIOC_SET_DIRECTION, GPIO_DIRECTION_OUT);
	/* Write the single digit plus its NUL terminator (2 bytes). */
	snprintf(tmp, 2, "%1d", value);
	write(fd, tmp, 2);
	close(fd);
#else
	MSG("GPIO: '%d' value '%d'", port, value);
#endif
}
/* Return the index into _light[] owning `handle`, or -1 when unknown. */
static int find_light(OCResourceHandle handle)
{
	const unsigned int count = sizeof(_light) / sizeof(struct light_resource);
	unsigned int idx;

	for (idx = 0; idx < count; idx++) {
		if (_light[idx].handle == handle)
			return idx;
	}

	DBG("Can't find light");
	return -1;
}
/* GET handler: responds with the binary-switch representation of the light. */
static OCEntityHandlerResult on_get(OCEntityHandlerFlag flag _UNUSED_,
		OCEntityHandlerRequest *req, void *user_data _UNUSED_)
{
	OCEntityHandlerResponse resp;
	OCRepPayload *payload = NULL;
	int id;
	MSG("received GET request");
	id = find_light(req->resource);
	if (id < 0)
		return OC_EH_ERROR;
	/* Build the oic.r.switch.binary representation for this light. */
	payload = OCRepPayloadCreate();
	OCRepPayloadAddResourceType(payload, "oic.r.switch.binary");
	OCRepPayloadAddInterface(payload, "oic.if.baseline");
	OCRepPayloadAddInterface(payload, "oic.if.a");
	OCRepPayloadSetUri(payload, _light[id].uri);
	OCRepPayloadSetPropBool(payload, "value", _light[id].value);
	memset(&resp, 0, sizeof(OCEntityHandlerResponse));
	resp.requestHandle = req->requestHandle;
	resp.resourceHandle = req->resource;
	resp.ehResult = OC_EH_OK;
	resp.payload = (OCPayload*) payload;
	ocf_mylight_verbose_response(&resp);
	if (OCDoResponse(&resp) != OC_STACK_OK) {
		DBG("Error sending response");
		OCRepPayloadDestroy(payload);
		return OC_EH_ERROR;
	}
	/* Release our payload reference in both the success and error paths. */
	OCRepPayloadDestroy(payload);
	return OC_EH_OK;
}
/* PUT/POST handler: applies the requested "value" (if present) and replies
 * with the light's resulting state. */
static OCEntityHandlerResult on_put_post(OCEntityHandlerFlag flag _UNUSED_,
		OCEntityHandlerRequest *req, void *user_data _UNUSED_)
{
	OCEntityHandlerResponse resp;
	OCRepPayload *payload = NULL;
	int id;
	bool value;
	if (req->method == OC_REST_PUT)
		MSG("received PUT request");
	else if (req->method == OC_REST_POST)
		MSG("received POST request");
	id = find_light(req->resource);
	if (id < 0)
		return OC_EH_ERROR;
	/* Apply the requested switch state; this also drives the GPIO and
	 * notifies observers (see ocf_mylight_light_set_status). */
	payload = (OCRepPayload *) req->payload;
	if (OCRepPayloadGetPropBool(payload, "value", &value))
		ocf_mylight_light_set_status(id, value);
	/* Reply with the (possibly updated) current representation. */
	payload = OCRepPayloadCreate();
	OCRepPayloadAddResourceType(payload, "oic.r.switch.binary");
	OCRepPayloadAddInterface(payload, "oic.if.baseline");
	OCRepPayloadAddInterface(payload, "oic.if.a");
	OCRepPayloadSetUri(payload, _light[id].uri);
	OCRepPayloadSetPropBool(payload, "value", _light[id].value);
	memset(&resp, 0, sizeof(OCEntityHandlerResponse));
	resp.requestHandle = req->requestHandle;
	resp.resourceHandle = req->resource;
	resp.ehResult = OC_EH_OK;
	resp.payload = (OCPayload*) payload;
	ocf_mylight_verbose_response(&resp);
	if (OCDoResponse(&resp) != OC_STACK_OK) {
		DBG("Sending response failed.");
		OCRepPayloadDestroy(payload);
		return OC_EH_ERROR;
	}
	OCRepPayloadDestroy(payload);
	return OC_EH_OK;
}
/* DELETE handler: lights are fixed hardware resources and cannot be removed. */
static OCEntityHandlerResult on_del(OCEntityHandlerFlag flag _UNUSED_,
		OCEntityHandlerRequest *req _UNUSED_, void *user_data _UNUSED_)
{
	DBG("Forbidden");
	return OC_EH_FORBIDDEN;
}
/* Observe-registration handler: records the observer id for this light so
 * later state changes can be pushed to it. */
static OCEntityHandlerResult on_register_observe(
		OCEntityHandlerFlag flag _UNUSED_,
		OCEntityHandlerRequest *req, void *user_data _UNUSED_)
{
	int id;
	DBG("Registration request with observation Id %d", req->obsInfo.obsId);
	id = find_light(req->resource);
	if (id < 0)
		return OC_EH_ERROR;
	ocf_mylight_notify_add(id, req->obsInfo.obsId);
	return OC_EH_OK;
}
/* Observe-deregistration handler: forgets the observer id for this light. */
static OCEntityHandlerResult on_deregister_observe(
		OCEntityHandlerFlag flag _UNUSED_,
		OCEntityHandlerRequest *req, void *user_data _UNUSED_)
{
	int id;
	DBG("De-registration request for observation Id %d",
			req->obsInfo.obsId);
	id = find_light(req->resource);
	if (id < 0)
		return OC_EH_ERROR;
	ocf_mylight_notify_del(id, req->obsInfo.obsId);
	return OC_EH_OK;
}
/* Entity-handler dispatch table shared by every light resource. */
static struct ocf_ops light_ops = {
	.get = on_get,
	.put = on_put_post,
	.post = on_put_post,
	.del = on_del,
	.register_observe = on_register_observe,
	.deregister_observe = on_deregister_observe
};
/* Copy the OCF resource handle for light `id` into `handle`.
 * Returns 0 on success, -1 on an out-of-range id or NULL output pointer. */
int ocf_mylight_light_get_handle(unsigned int id, OCResourceHandle *handle)
{
	const unsigned int count = sizeof(_light) / sizeof(struct light_resource);

	if (handle == NULL || id >= count)
		return -1;

	*handle = _light[id].handle;
	return 0;
}
/* Return a borrowed pointer to light `id`'s URI, or NULL for a bad id.
 * The caller must not free the returned string. */
const char *ocf_mylight_light_peek_uri(unsigned int id)
{
	const unsigned int count = sizeof(_light) / sizeof(struct light_resource);

	return (id < count) ? _light[id].uri : NULL;
}
/* Set light `id` on/off. Returns 0 on success (including the no-change case),
 * -1 on an out-of-range id. On an actual change this drives the GPIO and
 * emits a notification to registered observers. */
int ocf_mylight_light_set_status(unsigned int id, bool status)
{
	if (id >= sizeof(_light) / sizeof(struct light_resource))
		return -1;
	/* Skip GPIO writes and observer notifications when nothing changed. */
	if (_light[id].value == status)
		return 0;
	_light[id].value = status;
	DBG("Light%u value changed to %d", id, status);
	gpio_write(_light[id].gpio, _light[id].value);
	ocf_mylight_notify_emit(id);
	return 0;
}
/* Read light `id`'s current on/off state into `status`.
 * Returns 0 on success, -1 on an out-of-range id or NULL output pointer. */
int ocf_mylight_light_get_status(unsigned int id, bool *status)
{
	const unsigned int count = sizeof(_light) / sizeof(struct light_resource);

	if (status == NULL || id >= count)
		return -1;

	*status = _light[id].value;
	return 0;
}
/* Register every entry of _light[] with the OCF stack as a discoverable,
 * observable, secure binary-switch resource.
 * Returns 0 on success, -1 if any registration fails (earlier registrations
 * are not rolled back). */
int ocf_mylight_light_init()
{
	OCStackResult ret;
	unsigned int i;
	for (i = 0; i < sizeof(_light) / sizeof(struct light_resource); i++) {
		ret = OCCreateResource(&(_light[i].handle),
				"oic.r.switch.binary", "oic.if.a",
				_light[i].uri, ocf_mylight_handler, &light_ops,
				OC_DISCOVERABLE | OC_OBSERVABLE | OC_SECURE);
		if (ret != OC_STACK_OK) {
			DBG("OCCreateResource() failed. (ret=%d)", ret);
			return -1;
		}
		MSG("Light resource created. <id: %u>", i);
		MSG(" - resource: 0x%p", _light[i].handle);
		MSG(" - uri: '%s'", _light[i].uri);
	}
	return 0;
}
/* Counterpart of ocf_mylight_light_init(); currently nothing to release. */
void ocf_mylight_light_exit()
{
}
|
// Minutes per hour, used to express a Duration as a total minute count.
const minInHour = 60;

// Left-pad a value with a leading zero up to two characters.
function two(value) {
  return String(value).padStart(2, '0');
}

// A duration expressed in hours and minutes.
export class Duration {
  h;
  m;

  constructor(parts) {
    this.h = Number(parts.h);
    this.m = Number(parts.m);
  }

  // Format as "HHhMM", e.g. "01h05".
  toString() {
    return `${two(this.h)}h${two(this.m)}`;
  }

  // Numeric value is the total number of minutes, so durations compare naturally.
  valueOf() {
    return this.h * minInHour + this.m;
  }
}
// A travel offer between two cities, with an optional percentage discount.
export class Deal {
  departure;
  arrival;
  duration;
  cost;
  discount;
  reference;
  transport;

  // True when a non-zero percentage discount applies.
  get hasDiscount() {
    return this.discount > 0;
  }

  // Price after applying the percentage discount.
  get discountCost() {
    return this.cost * (1 - this.discount / 100);
  }

  constructor(raw) {
    const { transport, departure, arrival, duration, cost, discount, reference } = raw;
    this.transport = transport;
    this.departure = departure;
    this.arrival = arrival;
    this.cost = cost;
    this.discount = discount;
    this.reference = reference;
    // Normalize the plain {h, m} object into a Duration instance.
    this.duration = new Duration(duration);
  }
}
|
<reponame>ssangervasi/oily<filename>oily/trash/risky_moon.py<gh_stars>0
'''
https://projecteuler.net/problem=353
Put on hold because adding the sphere calculations is a bit too time consuming.
'''
import math
from decimal import Decimal
from typing import NamedTuple
# BUG FIX: the original read `Decimal(pi)` where `pi` was undefined
# (NameError); the float constant lives in the math module.
pi = Decimal(math.pi)
def solve():
    # Placeholder: the sphere-walk risk computation is unimplemented (the
    # module docstring notes this problem was put on hold), so the sum is 0.
    return Solution(sum_of_risks=0)
class Solution:
    """Final answer container for Project Euler problem 353."""

    def __init__(self, sum_of_risks: Decimal):
        # Total of the minimal journey risks.
        self.sum_of_risks = sum_of_risks

    def description(self) -> str:
        """Human-readable summary of the result."""
        return f'The sum of risks is: {self.sum_of_risks}'
# class Journey:
# def __init__(self):
# self.legs = []
# class Leg:
class Cartesian(NamedTuple):
    # A 3-D point in Cartesian coordinates (presumably on/inside the unit
    # sphere used by this problem — confirm once Sphere is implemented).
    x: Decimal
    y: Decimal
    z: Decimal
class Geographic(NamedTuple):
    # A point on a sphere's surface in latitude/longitude coordinates.
    lat: Decimal
    lon: Decimal
class Sphere:
    """Geometry helpers on a sphere of the given radius (all stubs for now)."""

    def __init__(self, radius: int = 1):
        # BUG FIX: the original assigned the literal 1, ignoring the parameter.
        self.radius = radius

    def geodesic_distance(self, start: Cartesian, end: Cartesian) -> Decimal:
        """Great-circle distance between two surface points (unimplemented)."""
        pass

    def cartesian_to_geographic(self, cartesian: Cartesian) -> Geographic:
        """Convert (x, y, z) to latitude/longitude (unimplemented).

        The original signature had a stray ':' before '->' (SyntaxError).
        """
        pass

    def geographic_to_cartesian(self, geographic: Geographic) -> Cartesian:
        """Convert latitude/longitude to (x, y, z) (unimplemented).

        The original signature had a stray ':' before '->' (SyntaxError).
        """
        pass
|
package com.cyosp.mpa.api.rest.homebank.v1dot2.model;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamAsAttribute;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import lombok.Getter;
import lombok.Setter;
import java.math.BigDecimal;
/**
* Created by CYOSP on 2017-07-23.
*/
@Getter
@Setter
// HomeBank 1.2 account entry; XStream attribute aliases mirror the names used
// in the HomeBank XML file format. Getters/setters come from Lombok.
public class Account {
    @XStreamAsAttribute
    @XStreamAlias("key")
    private Integer key;
    @XStreamAsAttribute
    @XStreamAlias("flags")
    private Integer flags;
    @XStreamAsAttribute
    @XStreamAlias("pos")
    private Integer pos;
    @XStreamAsAttribute
    @XStreamAlias("type")
    private Integer type;
    @XStreamAsAttribute
    @XStreamAlias("curr")
    private Integer curr;
    @XStreamAsAttribute
    @XStreamAlias("name")
    private String name;
    @XStreamAsAttribute
    @XStreamAlias("initial")
    private BigDecimal initial;
    @XStreamAsAttribute
    @XStreamAlias("minimum")
    private BigDecimal minimum;
    @XStreamAsAttribute
    @XStreamAlias("cheque1")
    private Long cheque1;
    @XStreamAsAttribute
    @XStreamAlias("cheque2")
    private Long cheque2;
    //----------------------------------
    // Derived, runtime-only fields below; never serialized to the XML file.
    @XStreamOmitField
    private BigDecimal balance;
    @XStreamOmitField
    private Options options;
    @XStreamOmitField
    private Currency currency;
}
|
#!/bin/bash
# Builds the Checker Framework without running tests: first builds its two
# sibling dependencies (annotation-tools and stubparser), then assembles the
# framework itself, either downloading a prebuilt annotated JDK (default) or
# building one locally when "buildjdk" is passed as $1.
echo Entering "$(cd "$(dirname "$0")" && pwd -P)/$(basename "$0")"
# Fail the whole script if any command fails
set -e
# Optional argument $1 is one of:
# downloadjdk, buildjdk
# If it is omitted, this script uses downloadjdk.
export BUILDJDK=$1
if [[ "${BUILDJDK}" == "" ]]; then
  export BUILDJDK=downloadjdk
fi
if [[ "${BUILDJDK}" != "buildjdk" && "${BUILDJDK}" != "downloadjdk" ]]; then
  echo "Bad argument '${BUILDJDK}'; should be omitted or one of: downloadjdk, buildjdk."
  exit 1
fi
export SHELLOPTS
# Locate JAVA_HOME portably (macOS vs. Linux) unless already set.
if [ "$(uname)" == "Darwin" ] ; then
  export JAVA_HOME=${JAVA_HOME:-$(/usr/libexec/java_home)}
else
  export JAVA_HOME=${JAVA_HOME:-$(dirname $(dirname $(readlink -f $(which javac))))}
fi
# Fetch (or refresh) the helper scripts used to clone related repositories.
git -C /tmp/plume-scripts pull > /dev/null 2>&1 \
  || git -C /tmp clone --depth 1 -q https://github.com/plume-lib/plume-scripts.git
# This does not work:
# AT=${AFU}/..
# because `git clone REPO ../annotation-tools/annotation-file-utilities/..`
# fails with
# fatal: could not create work tree dir '../annotation-tools/annotation-file-utilities/..': File exists
# fatal: destination path '../annotation-tools/annotation-file-utilities/..' already exists and is not an empty directory.
# even if the directory does not exist!
# The reason is that git creates each element of the path:
# .. , ../annotation-tools, ../annotation-tools/annotation-file-utilities
# (this is the problem), and../annotation-tools/annotation-file-utilities/.. .
AFU="${AFU:-../annotation-tools/annotation-file-utilities}"
AT=$(dirname "${AFU}")
## Build annotation-tools (Annotation File Utilities)
/tmp/plume-scripts/git-clone-related typetools annotation-tools ${AT}
if [ ! -d ../annotation-tools ] ; then
  ln -s ${AT} ../annotation-tools
fi
echo "Running: (cd ${AT} && ./.travis-build-without-test.sh)"
(cd ${AT} && ./.travis-build-without-test.sh)
echo "... done: (cd ${AT} && ./.travis-build-without-test.sh)"
## Build stubparser
/tmp/plume-scripts/git-clone-related typetools stubparser
echo "Running: (cd ../stubparser/ && ./.travis-build-without-test.sh)"
(cd ../stubparser/ && ./.travis-build-without-test.sh)
echo "... done: (cd ../stubparser/ && ./.travis-build-without-test.sh)"
## Compile
# Two options: rebuild the JDK or download a prebuilt JDK.
if [[ "${BUILDJDK}" == "downloadjdk" ]]; then
  echo "running \"./gradlew assemble\" for checker-framework"
  ./gradlew assemble printJdkJarManifest --console=plain --warning-mode=all -s --no-daemon
else
  echo "running \"./gradlew assemble -PuseLocalJdk\" for checker-framework"
  ./gradlew assemble -PuseLocalJdk --console=plain --warning-mode=all -s --no-daemon
fi
echo Exiting "$(cd "$(dirname "$0")" && pwd -P)/$(basename "$0")"
|
def getHeight(children):
    """Return the height of the tree rooted at children[0]; 0 for None.

    NOTE(review): the recursion is asymmetric — it recurses on
    ``root.leftChildren`` (apparently a node list) but on each *element* of
    ``root.rightChildren`` directly. Confirm both attributes have the same
    shape; otherwise one of the recursive calls receives the wrong type.
    """
    if children is None:
        return 0
    else:
        root = children[0]
        # Height = 1 + max height over the child subtrees.
        maxHeight = getHeight(root.leftChildren)
        for child in root.rightChildren:
            maxHeight = max(maxHeight, getHeight(child))
        return maxHeight + 1
|
button {
  font-family: 'Arial', sans-serif;
  /* FIX: "bold" is not a valid font-style value (normal/italic/oblique);
     bold weight belongs to font-weight, so the original declaration was
     silently dropped by browsers. */
  font-weight: bold;
  font-size: 20px;
}
|
# Package the built hadoop-yarn-project into a gzipped tarball.
# FIX: abort if the cd fails, instead of tarring whatever directory we are in.
cd "/Users/pengzhenzhen/Desktop/majorlu/github/Hadoop_code/hadoop-2.7.2-src/hadoop-yarn-project/target" || exit 1
tar cf - hadoop-yarn-project-2.7.2 | gzip > hadoop-yarn-project-2.7.2.tar.gz
|
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-2020 Datadog, Inc.
// +build secrets
package secrets
import (
"fmt"
"strings"
yaml "gopkg.in/yaml.v2"
"github.com/DataDog/datadog-agent/pkg/util/common"
"github.com/DataDog/datadog-agent/pkg/util/log"
)
// secretProvider holds the configuration and cache for one secret backend
// command (used by the multiple-secrets feature).
type secretProvider struct {
	secretCache map[string]string
	// list of handles and where they were found
	secretOrigin           map[string]common.StringSet
	secretBackendCommand   string
	secretBackendArguments []string
	secretBackendTimeout   int // seconds allowed for the backend command
	secretBackendKeyName   string //used for multiples secrets feature
	// SecretBackendOutputMaxSize defines max size of the JSON output from a secrets reader backend
	secretBackendOutputMaxSize int
}
var (
secretCache map[string]string
// list of handles and where they were found
secretOrigin map[string]common.StringSet
secretBackendCommand string
secretBackendArguments []string
secretBackendTimeout = 5
// SecretBackendOutputMaxSize defines max size of the JSON output from a secrets reader backend
SecretBackendOutputMaxSize = 1024 * 1024
secretProviders []secretProvider
tmpSecretBackendCommand string
tmpSecretBackendArguments []string
tmpSecretBackendTimeout int
tmpSecretBackendOutputMaxSize = SecretBackendOutputMaxSize
)
// init prepares the package-level cache and origin maps before any Decrypt call.
func init() {
	secretCache = make(map[string]string)
	secretOrigin = make(map[string]common.StringSet)
}
// Init initializes the command and other options of the secrets package. Since
// this package is used by the 'config' package to decrypt itself we can't
// directly use it.
func Init(command string, arguments []string, timeout int, maxSize int) {
	secretBackendCommand = command
	secretBackendArguments = arguments
	secretBackendTimeout = timeout
	SecretBackendOutputMaxSize = maxSize
}
// InitMultipleSecrets initializes the command and other options of the secrets
// package when using multiple secret backends. Since this package is used by
// the 'config' package to decrypt itself we can't directly use it.
func InitMultipleSecrets(command string, arguments []string, timeout int, maxSize int, keyName string) {
	secretProviders = append(secretProviders, secretProvider{
		secretBackendCommand:   command,
		secretBackendArguments: arguments,
		secretBackendTimeout:   timeout,
		// BUG FIX: the original stored the package-level default
		// SecretBackendOutputMaxSize here, silently ignoring the caller's
		// maxSize parameter.
		secretBackendOutputMaxSize: maxSize,
		secretBackendKeyName:       strings.Trim(keyName, " "),
	})
}
// walkerCallback receives each string found while walking a parsed YAML tree
// and returns the (possibly rewritten) value to store in its place.
type walkerCallback func(string) (string, error)
// walkSlice applies callback to every string reachable from the slice,
// recursing into nested maps and slices, rewriting elements in place.
func walkSlice(data []interface{}, callback walkerCallback) error {
	for idx, k := range data {
		switch v := k.(type) {
		case string:
			newValue, err := callback(v)
			if err != nil {
				return err
			}
			data[idx] = newValue
		case map[interface{}]interface{}:
			if err := walkHash(v, callback); err != nil {
				return err
			}
		case []interface{}:
			if err := walkSlice(v, callback); err != nil {
				return err
			}
		}
	}
	return nil
}
// walkHash applies callback to every string reachable from the map,
// recursing into nested maps and slices, rewriting values in place.
func walkHash(data map[interface{}]interface{}, callback walkerCallback) error {
	for k := range data {
		switch v := data[k].(type) {
		case string:
			newValue, err := callback(v)
			if err != nil {
				return err
			}
			data[k] = newValue
		case map[interface{}]interface{}:
			if err := walkHash(v, callback); err != nil {
				return err
			}
		case []interface{}:
			if err := walkSlice(v, callback); err != nil {
				return err
			}
		}
	}
	return nil
}
// walk will go through loaded yaml and call callback on every strings allowing
// the callback to overwrite the string value
func walk(data *interface{}, callback walkerCallback) error {
	switch v := (*data).(type) {
	case string:
		// Top-level scalar: replace it through the pointer.
		newValue, err := callback(v)
		if err != nil {
			return err
		}
		*data = newValue
	case map[interface{}]interface{}:
		return walkHash(v, callback)
	case []interface{}:
		return walkSlice(v, callback)
	}
	return nil
}
// isEnc reports whether str (after trimming spaces) is an "ENC[...]" secret
// handle and, if so, returns the handle between the brackets.
func isEnc(str string) (bool, string) {
	trimmed := strings.Trim(str, " ")
	if !strings.HasPrefix(trimmed, "ENC[") || !strings.HasSuffix(trimmed, "]") {
		return false, ""
	}
	return true, trimmed[len("ENC[") : len(trimmed)-1]
}
// isKey reports whether str (after trimming spaces) is a "<keyName>[...]"
// secret handle and, if so, returns everything between the first '[' and the
// trailing ']'.
func isKey(str string, keyName string) (bool, string) {
	trimmed := strings.Trim(str, " ")
	if !strings.HasPrefix(trimmed, keyName+"[") || !strings.HasSuffix(trimmed, "]") {
		return false, ""
	}
	return true, trimmed[strings.Index(trimmed, "[")+1 : len(trimmed)-1]
}
// secretFetcher is an indirection over fetchSecret so tests can stub out the
// external secret backend executable. (testing purpose)
var secretFetcher = fetchSecret
// Decrypt replaces all encrypted secrets in data by executing
// "secret_backend_command" once if all secrets aren't present in the cache.
// The input bytes are returned unchanged when no secret handle is found.
func Decrypt(data []byte, origin string) ([]byte, error) {
	var config interface{}
	// Nothing to do when there is no payload or no backend configured at all.
	if data == nil || (secretBackendCommand == "" && len(secretProviders) == 0) {
		return data, nil
	}
	// anySecret tracks whether any handle was resolved, so the YAML is only
	// re-marshalled when something actually changed.
	anySecret := false
	err := yaml.Unmarshal(data, &config)
	if err != nil {
		return nil, fmt.Errorf("could not Unmarshal config: %s", err)
	}
	// Pass 1: the single legacy backend, matching "ENC[...]" handles.
	if secretBackendCommand != "" {
		// First we collect all new handles in the config
		newHandles := []string{}
		haveSecret := false
		err = walk(&config, func(str string) (string, error) {
			if ok, handle := isEnc(str); ok {
				haveSecret = true
				// Check if we already know this secret
				if secret, ok := secretCache[handle]; ok {
					log.Debugf("Secret '%s' was retrieved from cache", handle)
					// keep track of place where a handle was found
					secretOrigin[handle].Add(origin)
					return secret, nil
				}
				newHandles = append(newHandles, handle)
			}
			return str, nil
		})
		if err != nil {
			return nil, err
		}
		// the configuration does not contain any secrets
		/*
			if !haveSecret {
				return data, nil
			}
		*/
		if haveSecret {
			anySecret = true
			// check if any new secrets need to be fetch
			if len(newHandles) != 0 {
				// The tmp* package variables pass the active backend settings
				// to the fetcher implementation.
				// NOTE(review): this relies on mutable package-level state and
				// looks unsafe for concurrent Decrypt calls — confirm callers
				// serialize access.
				tmpSecretBackendCommand = secretBackendCommand
				tmpSecretBackendArguments = secretBackendArguments
				tmpSecretBackendTimeout = secretBackendTimeout
				tmpSecretBackendOutputMaxSize = SecretBackendOutputMaxSize
				secrets, err := secretFetcher(newHandles, origin)
				if err != nil {
					return nil, err
				}
				// Replace all new encrypted secrets in the config
				err = walk(&config, func(str string) (string, error) {
					if ok, handle := isEnc(str); ok {
						if secret, ok := secrets[handle]; ok {
							log.Debugf("Secret '%s' was retrieved from executable", handle)
							return secret, nil
						}
						// This should never happen since fetchSecret will return an error
						// if not every handles have been fetched.
						return str, fmt.Errorf("unknown secret '%s'", handle)
					}
					return str, nil
				})
				if err != nil {
					return nil, err
				}
			}
		}
	}
	// Pass 2: additional providers, each matching "<keyName>[...]" handles.
	if len(secretProviders) > 0 {
		for x := 0; x < len(secretProviders); x++ {
			if secretProviders[x].secretBackendCommand == "" {
				continue
				//return data, nil
			}
			// First we collect all new handles in the config
			newHandles := []string{}
			haveSecret := false
			err = walk(&config, func(str string) (string, error) {
				if ok, handle := isKey(str, secretProviders[x].secretBackendKeyName); ok {
					haveSecret = true
					// Check if we already know this secret
					//if secret, ok := secretCache[handle]; ok {
					//	log.Debugf("Secret '%s' was retrieved from cache", handle)
					// keep track of place where a handle was found
					//	secretOrigin[handle].Add(origin)
					//	return secret, nil
					//}
					newHandles = append(newHandles, handle)
				}
				return str, nil
			})
			if err != nil {
				return nil, err
			}
			// the configuration does not contain any secrets
			if !haveSecret {
				continue
				//return data, nil
			}
			anySecret = true
			// check if any new secrets need to be fetch
			if len(newHandles) > 0 {
				// Point the fetcher at this provider's settings (see the
				// concurrency note in pass 1).
				tmpSecretBackendCommand = secretProviders[x].secretBackendCommand
				tmpSecretBackendArguments = secretProviders[x].secretBackendArguments
				tmpSecretBackendTimeout = secretProviders[x].secretBackendTimeout
				tmpSecretBackendOutputMaxSize = secretProviders[x].secretBackendOutputMaxSize
				secrets, err := secretFetcher(newHandles, origin)
				if err != nil {
					return nil, err
				}
				// Replace all new encrypted secrets in the config
				err = walk(&config, func(str string) (string, error) {
					if ok, handle := isKey(str, secretProviders[x].secretBackendKeyName); ok {
						if secret, ok := secrets[handle]; ok {
							log.Debugf("Secret '%s' was retrieved from executable", handle)
							return secret, nil
						}
						// This should never happen since fetchSecret will return an error
						// if not every handles have been fetched.
						return str, fmt.Errorf("unknown secret '%s'", handle)
					}
					return str, nil
				})
				if err != nil {
					return nil, err
				}
			}
		}
	}
	// Only re-serialize when at least one secret was substituted.
	if !anySecret {
		return data, nil
	} else {
		finalConfig, err := yaml.Marshal(config)
		if err != nil {
			return nil, fmt.Errorf("could not Marshal config after replacing encrypted secrets: %s", err)
		}
		return finalConfig, nil
	}
}
// GetDebugInfo exposes debug informations about secrets to be included in a
// flare: the configured backend executables, their rights, and which config
// origins referenced each secret handle.
func GetDebugInfo() (*SecretInfo, error) {
	if secretBackendCommand == "" && len(secretProviders) == 0 {
		return nil, fmt.Errorf("No secret_backend_command set: secrets feature is not enabled")
	}
	var allSecretBackendCommand []string
	for _, provider := range secretProviders {
		allSecretBackendCommand = append(allSecretBackendCommand, provider.secretBackendCommand)
	}
	if secretBackendCommand != "" {
		allSecretBackendCommand = append(allSecretBackendCommand, secretBackendCommand)
	}
	info := &SecretInfo{ExecutablePath: allSecretBackendCommand}
	info.populateRights()
	info.SecretsHandles = map[string][]string{}
	for handle, origins := range secretOrigin {
		info.SecretsHandles[handle] = origins.GetAll()
	}
	return info, nil
}
|
import util/command
namespace util/variable
declare __declaration_type ## for Variable::ExportDeclarationAndTypeToVariables (?)
# Variable::Exists — succeeds (exit 0) when a variable with the given name is
# declared in the current shell, fails otherwise.
Variable::Exists() {
	local name="$1"
	declare -p "$name" &> /dev/null
}
# Variable::GetAllStartingWith — prints the names of all declared variables
# beginning with the given prefix; succeeds even when nothing matches.
Variable::GetAllStartingWith() {
	local prefix="$1"
	compgen -A 'variable' "$prefix" || true
}
# Variable::GetDeclarationFlagFromType — maps a high-level type name to the
# flag passed to `declare` when creating a variable of that type. Maps and any
# defined class both use an associative array (A); unknown types fall back to
# the supplied default (or A).
Variable::GetDeclarationFlagFromType() {
	DEBUG subject="GetParamFromType" Log 'getting param from type' "$@"
	local requestedType="$1"
	local fallbackFlag="$2"
	if [[ "$requestedType" == "map" ]] || Function::Exists "class:${requestedType}"
	then
		echo A
		return
	fi
	case "$requestedType" in
		"reference")
			echo n
			;;
		"array")
			echo a
			;;
		"string" | "boolean")
			echo -
			;;
		"integer")
			echo i
			;;
		"integerArray")
			echo ai
			;;
		*)
			echo "${fallbackFlag:-A}"
			;;
	esac
}
# Variable::GetPrimitiveTypeFromDeclarationFlag — maps a `declare -p` flag
# string back to its primitive type name.
#
# Bug fix: bash `case` takes the FIRST matching pattern, so "ai"*/"Ai"* must
# be tested before "a"*/"A"* — previously the integerArray and integerMap
# branches were unreachable ("ai" matched "a"* and yielded "array").
Variable::GetPrimitiveTypeFromDeclarationFlag() {
	local typeInfo="$1"
	case "$typeInfo" in
		"n"*)
			echo reference
			;;
		"ai"*)
			echo integerArray
			;;
		"Ai"*)
			echo integerMap
			;;
		"a"*)
			echo array
			;;
		"A"*)
			echo map
			;;
		"i"*)
			echo integer
			;;
		*)
			echo string
			;;
	esac
}
# Variable::ExportDeclarationAndTypeToVariables — resolves the raw declaration
# (value text from `declare -p`) and type of $1, writing them into the caller's
# variables named "$2" and "${2}_type". References are followed when $3 is
# true (the default).
Variable::ExportDeclarationAndTypeToVariables() {
	local variableName="$1"
	local targetVariable="$2"
	local dereferrence="${3:-true}"
	# TODO: rename for a safer, less common variablename so parents can output to declaration
	local declaration
	# Patterns matching the different quoting styles `declare -p` emits across
	# bash versions (single-quoted, double-quoted, bash 4.4 raw form).
	local regexArray="declare -([a-zA-Z-]+) $variableName='(.*)'"
	local regex="declare -([a-zA-Z-]+) $variableName=\"(.*)\""
	local regexArrayBash4_4="declare -([a-zA-Z-]+) $variableName=(.*)"
	local definition=$(declare -p $variableName 2> /dev/null || true)
	# Escape-sequence fragments used to undo `declare -p` quoting below.
	local escaped="'\\\'"
	local escapedQuotes='\\"'
	local singleQuote='"'
	local doubleSlashes='\\\\'
	local singleSlash='\'
	[[ -z "$definition" ]] && e="Variable $variableName not defined" throw
	if [[ "$definition" =~ $regexArray ]]
	then
		declaration="${BASH_REMATCH[2]//$escaped/}"
	elif [[ "$definition" =~ $regex ]]
	then
		# Strip the quoting layer added by declare -p.
		declaration="${BASH_REMATCH[2]//$escaped/}" ## TODO: is this transformation needed?
		declaration="${declaration//$escapedQuotes/$singleQuote}"
		declaration="${declaration//$doubleSlashes/$singleSlash}"
	elif [[ "$definition" =~ $regexArrayBash4_4 ]]
	then
		declaration="${BASH_REMATCH[2]}"
	fi
	local variableType
	DEBUG Log "Variable Is $variableName = $definition ==== ${BASH_REMATCH[1]}"
	# BASH_REMATCH[1] still holds the declare flags from the match above.
	local primitiveType=${BASH_REMATCH[1]}
	local objectTypeIndirect="$variableName[__object_type]"
	# Associative arrays carrying an __object_type entry are class instances.
	if [[ "$primitiveType" =~ [A] && ! -z "${!objectTypeIndirect}" ]]
	then
		DEBUG Log "Object Type $variableName[__object_type] = ${!objectTypeIndirect}"
		variableType="${!objectTypeIndirect}"
	# elif [[ ! -z ${__primitive_extension_fingerprint__boolean+x} && "$primitiveType" == '-' && "${!variableName}" == "${__primitive_extension_fingerprint__boolean}"* ]]
	# then
	#	variableType="boolean"
	else
		variableType="$(Variable::GetPrimitiveTypeFromDeclarationFlag "$primitiveType")"
		DEBUG Log "Primitive Type $primitiveType Resolved ${variableType}"
	fi
	# Strings may actually be fingerprinted primitive extensions (e.g. boolean).
	if [[ "$variableType" == 'string' ]] && Function::Exists 'Type::GetPrimitiveExtensionFromVariable'
	then
		local extensionType=$(Type::GetPrimitiveExtensionFromVariable "${variableName}")
		if [[ ! -z "$extensionType" ]]
		then
			variableType="$extensionType"
		fi
	fi
	DEBUG Log "Variable $variableName is typeof $variableType"
	if [[ "$variableType" == 'reference' && "$dereferrence" == 'true' ]]
	then
		# For references the declaration text is the referenced variable name;
		# recurse to resolve the target.
		local dereferrencedVariableName="$declaration"
		Variable::ExportDeclarationAndTypeToVariables "$dereferrencedVariableName" "$targetVariable" "$dereferrence"
	else
		eval "$targetVariable=\"\$declaration\""
		eval "${targetVariable}_type=\$variableType"
	fi
}
# Variable::PrintDeclaration — prints the raw declaration (value text) of the
# named variable; references are dereferenced unless $2 is 'false'.
Variable::PrintDeclaration() {
	local name="${1}"
	local followReferences="${2:-true}"
	local __declaration
	local __declaration_type
	Variable::ExportDeclarationAndTypeToVariables "$name" __declaration "$followReferences"
	echo "$__declaration"
}

alias @get='Variable::PrintDeclaration'
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import hashlib
import base64
import logging
import traceback
import ujson as json
from django.http import HttpResponseForbidden, JsonResponse, HttpResponse
from django.views.decorators.http import require_GET, require_POST
from gcloud.tasktmpl3.utils import get_constant_values
from pipeline_web.drawing_new.constants import CANVAS_WIDTH, POSITION
from pipeline_web.drawing_new.drawing import draw_pipeline as draw_pipeline_tree
from gcloud import err_code
from gcloud.conf import settings
from gcloud.exceptions import FlowExportError
from gcloud.core.models import Project
from gcloud.utils.strings import check_and_rename_params, string_to_boolean
from gcloud.utils.dates import time_now_str
from gcloud.utils.decorators import request_validate
from gcloud.commons.template.utils import read_template_data_file
from gcloud.tasktmpl3.models import TaskTemplate
from gcloud.tasktmpl3.validators import (
FormValidator,
ExportValidator,
ImportValidator,
CheckBeforeImportValidator,
GetTemplateCountValidator,
DrawPipelineValidator,
AnalysisConstantsRefValidator,
)
from gcloud.tasktmpl3.utils import analysis_pipeline_constants_ref
from gcloud.contrib.analysis.analyse_items import task_template
from gcloud.iam_auth.intercept import iam_intercept
from gcloud.iam_auth.view_interceptors.template import (
FormInterceptor,
ExportInterceptor,
ImportInterceptor,
)
logger = logging.getLogger("root")
@require_GET
@request_validate(FormValidator)
@iam_intercept(FormInterceptor())
def form(request, project_id):
    """Return the input form, outputs and version of one task template."""
    template_id = request.GET["template_id"]
    version = request.GET.get("version")
    template = TaskTemplate.objects.get(pk=template_id, project_id=project_id, is_deleted=False)
    data = {
        "form": template.get_form(version),
        "outputs": template.get_outputs(version),
        "version": version or template.version,
    }
    return JsonResponse({"result": True, "data": data, "message": "", "code": err_code.SUCCESS.code})
@require_POST
@request_validate(ExportValidator)
@iam_intercept(ExportInterceptor())
def export_templates(request, project_id):
    """Export the requested templates as a digest-signed, base64 .dat download."""
    payload = json.loads(request.body)
    template_id_list = payload["template_id_list"]
    # wash: round-trip through JSON so only plain serializable types remain
    try:
        templates_data = json.loads(
            json.dumps(TaskTemplate.objects.export_templates(template_id_list, project_id), sort_keys=True)
        )
    except FlowExportError as e:
        return JsonResponse({"result": False, "message": str(e), "code": err_code.UNKNOWN_ERROR.code, "data": None})

    # The salted digest lets the import side detect tampered files.
    salted_bytes = (json.dumps(templates_data, sort_keys=True) + settings.TEMPLATE_DATA_SALT).encode("utf-8")
    digest = hashlib.md5(salted_bytes).hexdigest()
    file_data = base64.b64encode(
        json.dumps({"template_data": templates_data, "digest": digest}, sort_keys=True).encode("utf-8")
    )

    filename = "bk_sops_%s_%s.dat" % (project_id, time_now_str())
    response = HttpResponse()
    response["Content-Disposition"] = "attachment; filename=%s" % filename
    response["mimetype"] = "application/octet-stream"
    response["Content-Type"] = "application/octet-stream"
    response.write(file_data)
    return response
@require_POST
@request_validate(ImportValidator)
@iam_intercept(ImportInterceptor())
def import_templates(request, project_id):
    """Import templates from an uploaded data file into the given project."""
    data_file = request.FILES["data_file"]
    override = string_to_boolean(request.POST["override"])
    templates_data = read_template_data_file(data_file)["data"]["template_data"]
    # reset biz_cc_id select in templates
    project = Project.objects.get(id=project_id)
    _reset_biz_selector_value(templates_data, project.bk_biz_id)
    try:
        result = TaskTemplate.objects.import_templates(templates_data, override, project_id, request.user.username)
    except Exception:
        logger.error(traceback.format_exc())
        return JsonResponse(
            {
                "result": False,
                "message": "invalid flow data or error occur, please contact administrator",
                "code": err_code.UNKNOWN_ERROR.code,
                "data": None,
            }
        )
    return JsonResponse(result)
def _reset_biz_selector_value(templates_data, bk_biz_id):
for template in templates_data["pipeline_template_data"]["template"].values():
for act in [act for act in template["tree"]["activities"].values() if act["type"] == "ServiceActivity"]:
biz_cc_id_field = act["component"]["data"].get("biz_cc_id")
if biz_cc_id_field and (not biz_cc_id_field["hook"]):
biz_cc_id_field["value"] = bk_biz_id
for constant in template["tree"]["constants"].values():
if constant["source_tag"].endswith(".biz_cc_id") and constant["value"]:
constant["value"] = bk_biz_id
@require_POST
@request_validate(CheckBeforeImportValidator)
def check_before_import(request, project_id):
    """Run a pre-import conflict check on an uploaded template data file."""
    file_content = read_template_data_file(request.FILES["data_file"])
    check_info = TaskTemplate.objects.import_operation_check(file_content["data"]["template_data"], project_id)
    return JsonResponse({"result": True, "data": check_info, "code": err_code.SUCCESS.code, "message": ""})
def replace_all_templates_tree_node_id(request):
    """Clean up dirty data by replacing tree node ids across all templates.

    Superuser-only maintenance endpoint.
    """
    if not request.user.is_superuser:
        return HttpResponseForbidden()
    total, success = TaskTemplate.objects.replace_all_template_tree_node_id()
    stats = {"total": total, "success": success}
    return JsonResponse({"result": True, "data": stats, "code": err_code.SUCCESS.code, "message": ""})
@require_GET
@request_validate(GetTemplateCountValidator)
def get_template_count(request, project_id):
    """Count non-deleted templates in a project, grouped by the requested key."""
    group_by = request.GET.get("group_by", "category")
    result_dict = check_and_rename_params({}, group_by)
    filters = {"is_deleted": False, "project_id": project_id}
    success, content = task_template.dispatch(result_dict["group_by"], filters)
    if success:
        return JsonResponse({"result": True, "data": content, "code": err_code.SUCCESS.code, "message": ""})
    return JsonResponse({"result": False, "message": content, "code": err_code.UNKNOWN_ERROR.code, "data": None})
@require_POST
@request_validate(DrawPipelineValidator)
def draw_pipeline(request):
    """Auto-layout the submitted pipeline tree on the canvas."""
    params = json.loads(request.body)
    pipeline_tree = params["pipeline_tree"]
    kwargs = {"canvas_width": int(params.get("canvas_width", CANVAS_WIDTH))}
    # Forward any recognized position overrides supplied by the caller.
    for key in list(POSITION.keys()):
        if key in params:
            kwargs[key] = params[key]
    try:
        draw_pipeline_tree(pipeline_tree, **kwargs)
    except Exception as e:
        message = "draw pipeline_tree error: %s" % e
        logger.exception(e)
        return JsonResponse({"result": False, "message": message, "code": err_code.UNKNOWN_ERROR.code, "data": None})
    return JsonResponse(
        {"result": True, "data": {"pipeline_tree": pipeline_tree}, "code": err_code.SUCCESS.code, "message": ""}
    )
@require_GET
def get_templates_with_expired_subprocess(request, project_id):
    """List templates in the project whose subprocess references are expired."""
    expired = TaskTemplate.objects.get_templates_with_expired_subprocess(project_id)
    return JsonResponse({"result": True, "data": expired, "code": err_code.SUCCESS.code, "message": ""})
@require_POST
def get_constant_preview_result(request):
    """Preview the rendered values of the submitted constants."""
    params = json.loads(request.body)
    constants = params.get("constants", {})
    extra_data = params.get("extra_data", {})
    preview = get_constant_values(constants, extra_data)
    return JsonResponse({"result": True, "data": preview, "code": err_code.SUCCESS.code, "message": ""})
@require_POST
@request_validate(AnalysisConstantsRefValidator)
def analysis_constants_ref(request):
    """Analyse which constants the submitted pipeline tree references,
    splitting them into declared ("defined") and undeclared ("nodefined").

    Analysis failures are logged and yield an empty (but successful) result.
    """
    tree = json.loads(request.body)
    result = None
    try:
        result = analysis_pipeline_constants_ref(tree)
    except Exception:
        logger.exception("[analysis_constants_ref] error")

    data = {"defined": {}, "nodefined": {}}
    if result:
        defined_keys = tree.get("constants", {}).keys()
        for key, refs in result.items():
            bucket = "defined" if key in defined_keys else "nodefined"
            data[bucket][key] = refs
    return JsonResponse({"result": True, "data": data, "code": err_code.SUCCESS.code, "message": ""})
|
from naas.requests.subscribers import Subscriber
from naas.requests.subscriber_email_addresses import SubscriberEmailAddresses
from tests import BaseTestCase
class TestRequestsSubscriberEmailAddresses(BaseTestCase):
    """Integration tests for the SubscriberEmailAddresses request wrappers.

    Four tests previously duplicated the subscriber-creation and payload
    construction code; it is factored into the two private helpers below.
    """

    def _create_subscriber(self):
        """Create a fresh subscriber via the API and return its data payload."""
        response = Subscriber.create({'first_name': 'John', 'last_name': 'Doe'})
        return response.json()['data']

    def _email_address_attributes(self, subscriber_id):
        """Build a creation payload for the given subscriber id."""
        return {
            "subscriber_id": subscriber_id,
            "email_address": '<EMAIL>',
            "is_primary": True
        }

    def test_list(self):
        response = SubscriberEmailAddresses.list()
        self.assertEqual(response.status_code, 200)
        self.assertIsInstance(response.json(), dict)

    def test_create_no_params_unsuccessful(self):
        response = SubscriberEmailAddresses.create()
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.json()['data']['message'], 'Bad Request')
        self.assertEqual(
            response.json()['data']['errors'][0]['message'],
            'param is missing or the value is empty: subscriber_email_address'
        )

    def test_create_with_invalid_subscriber_unsuccessful(self):
        response = SubscriberEmailAddresses.create(
            self._email_address_attributes("invalid"))
        self.assertEqual(response.status_code, 409)
        self.assertEqual(response.json()['data']['message'], 'Conflict')
        self.assertEqual(
            response.json()['data']['errors'][0]['message'],
            'Subscriber must exist'
        )

    def test_create_with_params_successful(self):
        subscriber_created = self._create_subscriber()
        attrs = self._email_address_attributes(subscriber_created['id'])
        response = SubscriberEmailAddresses.create(attrs)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.json()['data']['email_address'],
                         attrs['email_address'])
        self.assertEqual(response.json()['data']['subscriber_id'],
                         subscriber_created['id'])

    def test_retrieve_invalid_id_unsuccessful(self):
        response = SubscriberEmailAddresses.retrieve('invalid_id')
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['data']['message'], 'Not Found')

    def test_retrieve_valid_id_successful(self):
        subscriber_created = self._create_subscriber()
        created = SubscriberEmailAddresses.create(
            self._email_address_attributes(subscriber_created['id']))
        response_retrieve = SubscriberEmailAddresses.retrieve(
            created.json()['data']['id'])
        self.assertEqual(response_retrieve.status_code, 200)
        self.assertEqual(response_retrieve.json()['data']['subscriber_id'],
                         subscriber_created['id'])

    def test_list_by_subscriber_id_invalid_id_unsuccessful(self):
        response = SubscriberEmailAddresses.list_by_subscriber_id('invalid_id')
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['data']['message'], 'Not Found')

    def test_list_by_subscriber_id_valid_id_successful(self):
        subscriber_created = self._create_subscriber()
        attrs = self._email_address_attributes(subscriber_created['id'])
        SubscriberEmailAddresses.create(attrs)
        response_list = SubscriberEmailAddresses.list_by_subscriber_id(
            subscriber_created['id'])
        self.assertEqual(response_list.status_code, 200)
        self.assertIsInstance(response_list.json()['data'], list)
        self.assertEqual(response_list.json()['data'][0]['email_address'],
                         attrs['email_address'])
|
<filename>apps/frontend/src/components/questionnaire/components/entry.tsx
/* eslint-disable react-hooks/exhaustive-deps */
import React, { FC, useContext, useEffect } from "react";
import { Element, Normaltekst, Undertekst, UndertekstBold } from "nav-frontend-typografi";
import { SelectionContext } from "../../../layouts/contexts/selection-context";
import { AlertStripeInfo } from "nav-frontend-alertstriper";
import { IQuestionnaireResponse_Answer, IReference } from "@ahryman40k/ts-fhir-types/lib/R4";
import { selectionsToReferences } from "../utils/selections-to-references";
// Props for the Entry questionnaire component.
interface IProps {
  // Receives the answers derived from the current attachment selections.
  onChange: (answer: IQuestionnaireResponse_Answer[]) => void;
  // Current answer values. NOTE(review): not read inside Entry itself —
  // confirm whether they are meant to pre-populate the view.
  values: IQuestionnaireResponse_Answer[];
  // When true, the "Vedlegg" label is suffixed with "*".
  required?: boolean;
}
/**
 * Renders the current attachment selections and reports them upwards as
 * QuestionnaireResponse answers whenever the selection set changes.
 */
export const Entry: FC<IProps> = ({ onChange, values, required = false }) => {
  const { selections } = useContext(SelectionContext);

  useEffect(() => {
    const references: IReference[] = selectionsToReferences(selections);
    // Build one answer per reference. The previous code called Array.map
    // purely for its side effect (pushing into a local array); map should
    // return the transformed values directly.
    const answers: IQuestionnaireResponse_Answer[] = references.map((ref) => ({
      valueReference: ref,
    }));
    onChange(answers);
  }, [selections]);

  if (selections) {
    return (
      <div>
        <Element>{`Vedlegg${required ? "*" : ""}:`}</Element>
        {selections.length > 0 ? (
          <>
            {selections.map((s, i) => {
              return (
                <div key={i}>
                  <Normaltekst>{s.condition.code?.text}</Normaltekst>
                  {s.resources.map((e, j) => {
                    return (
                      <Undertekst key={j}>{e.resourceType || ""}</Undertekst>
                    );
                  })}
                </div>
              );
            })}
          </>
        ) : (
          <Undertekst>Ingen vedlegg er valgt.</Undertekst>
        )}
      </div>
    );
  }
  return null;
};
|
def reverse_string(my_string):
    """Return a copy of my_string with its characters in reverse order.

    Uses slicing (O(n)); the previous loop appended one character at a time,
    which is O(n^2) due to repeated string concatenation.
    """
    return my_string[::-1]
|
<filename>src/Components/Guards/Auth.js
import React, { useEffect, useState } from "react";
import { useDispatch } from "react-redux";
import PropTypes from "prop-types";
import SplashScreen from "../Misc/SplashScreen";
import { setUserData } from "../../Redux/account";
import { useAuthState } from "react-firebase-hooks/auth";
import { getUserDb } from "../../Modules/userOperations";
import firebase from "../../Modules/firebaseApp";
function Auth({ children }) {
const dispatch = useDispatch();
const [isLoading, setLoading] = useState(true);
const [user, initialising] = useAuthState(firebase.auth());
useEffect(() => {
const initAuth = async () => {
if (!initialising && user) {
const userDb = await getUserDb(user.uid);
await dispatch(setUserData(userDb));
setLoading(false);
} else if (!initialising) {
setLoading(false);
}
};
initAuth();
}, [dispatch, user, initialising]);
if (isLoading) {
return <SplashScreen />;
}
return children;
}
// Runtime prop validation for the Auth guard component.
Auth.propTypes = {
  children: PropTypes.any
};

export default Auth;
|
<reponame>paurosello/errorges<filename>errorges/errorges/doctype/errors/errors_list.js
// List view settings for the "Errors" doctype. Intentionally empty: the
// default Frappe list view behaviour is used unchanged.
frappe.listview_settings['Errors'] = {
}
|
package org.museautomation.ui.valuesource;

/**
 * Callback interface for observers that want to be notified when the name of
 * a named value source is changed in an editor.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
public interface NameChangeListener
{
    /**
     * Invoked after the name has been changed.
     *
     * @param editor   the editor in which the change occurred
     * @param old_name the previous name
     * @param new_name the new name
     */
    void nameChanged(NamedValueSourceEditor editor, String old_name, String new_name);
}
|
-- Migration step: remove the fk_dependencies_version_id foreign-key
-- constraint from the dependencies table.
ALTER TABLE dependencies DROP CONSTRAINT fk_dependencies_version_id;
|
def navigate_maze(maze):
    """Return True if a path of open cells (value 0) connects the top-left
    corner to the bottom-right corner, moving one step up/down/left/right.

    Visited cells are marked with -1, so the input maze is mutated.
    """

    def is_valid_move(x, y):
        # In bounds and still an unvisited open cell.
        return 0 <= x < len(maze) and 0 <= y < len(maze[0]) and maze[x][y] == 0

    def dfs(x, y):
        # Bug fix: validity must be checked before the goal test. The previous
        # version returned True upon reaching the goal coordinates even when
        # the goal cell itself was a wall (or the start cell was a wall).
        if not is_valid_move(x, y):
            return False
        if x == len(maze) - 1 and y == len(maze[0]) - 1:
            return True
        maze[x][y] = -1  # mark as visited
        return dfs(x + 1, y) or dfs(x, y + 1) or dfs(x - 1, y) or dfs(x, y - 1)

    return dfs(0, 0)
|
<reponame>xiaoandev/LostAndFoundOnCampus
package com.example.lostandfoundoncampus.adapter;
import android.content.Context;
import android.text.style.AlignmentSpan;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.example.lostandfoundoncampus.R;
import com.example.lostandfoundoncampus.bean.Publish;
import com.example.lostandfoundoncampus.bean.advertisement;
import com.example.lostandfoundoncampus.view.MyGridView;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.display.RoundedBitmapDisplayer;
import java.util.List;
import androidx.recyclerview.widget.RecyclerView;
import cn.bmob.v3.BmobQuery;
import cn.bmob.v3.exception.BmobException;
import cn.bmob.v3.listener.FindListener;
import cn.smssdk.gui.DefaultContactViewItem;
/**
 * ListView adapter rendering "publish" posts (author avatar, name, message,
 * time, and an optional image grid) for the lost-and-found feed.
 */
public class ListViewAdapter extends BaseAdapter {
    private Context context;
    // Posts backing this adapter.
    private List<Publish> list;
    // NOTE(review): imageUrl is never assigned inside this class, so the
    // image-grid branch in getView() only runs if a collaborator sets this
    // package-private field directly — confirm the intended usage.
    List<String> imageUrl;
    GrideViewAdapter grideViewAdapter;
    // Universal Image Loader options used when displaying the author avatar.
    private DisplayImageOptions options = new DisplayImageOptions.Builder()
            .showStubImage(R.mipmap.ic_launcher) // placeholder shown while the image downloads
            .showImageForEmptyUri(R.mipmap.ic_launcher) // shown when the image URI is empty or broken
            .showImageOnFail(R.mipmap.ic_launcher) // shown when loading or decoding fails
            .cacheInMemory(true) // cache downloaded images in memory
            .cacheOnDisk(true) // cache downloaded images on the SD card
            .displayer(new RoundedBitmapDisplayer(20)) // render with rounded corners
            .build(); // build the configured DisplayImageOptions object
    ImageLoader imageLoader = ImageLoader.getInstance();

    /**
     * @param context host context used to inflate item views
     * @param list    posts to display
     */
    public ListViewAdapter(Context context, List<Publish> list) {
        this.context = context;
        this.list = list;
    }

    @Override
    public int getCount() {
        return list.size();
    }

    @Override
    public Object getItem(int position) {
        return list.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    /**
     * Builds or recycles the row view for one post and starts an asynchronous
     * Bmob query to resolve and display the author's avatar.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        final ViewHolder viewHolder;
        if (convertView == null) {
            convertView = View.inflate(context, R.layout.item_publish, null);
            viewHolder = new ViewHolder();
            viewHolder.user_icon = (ImageView) convertView.findViewById(R.id.user_icon);
            viewHolder.user_name = (TextView) convertView.findViewById(R.id.user_name);
            viewHolder.publish_message = (TextView) convertView.findViewById(R.id.publish_message);
            viewHolder.publish_time = (TextView) convertView.findViewById(R.id.publish_time);
            viewHolder.gridView = (MyGridView) convertView.findViewById(R.id.publish_picture);
            // Tag the GridView with its list position.
            viewHolder.gridView.setTag(position);
            convertView.setTag(viewHolder);
        } else {
            viewHolder = (ViewHolder) convertView.getTag();
        }
        Publish publish = list.get(position);
        viewHolder.user_name.setText(publish.getUserName()); // user name
        viewHolder.publish_message.setText(publish.getMessage()); // post content
        viewHolder.publish_time.setText(publish.getTime()); // publish time
        // Resolve and display the avatar of the post's author.
        // NOTE(review): the query is asynchronous and the row may be recycled
        // before done() runs, so a stale avatar can land on a reused row —
        // confirm whether a position check is needed in the callback.
        BmobQuery<advertisement> categoryBmobQuery = new BmobQuery<>();
        categoryBmobQuery.addWhereEqualTo("name", publish.getUserName()); // look up by user name
        categoryBmobQuery.findObjects(new FindListener<advertisement>() {
            @Override
            public void done(List<advertisement> object, BmobException e) {
                if (e == null) {
                    String userIcon = object.get(0).getUserIcon().getFileUrl();
                    Log.d("BMOB", userIcon);
                    imageLoader.displayImage(userIcon, viewHolder.user_icon, options);
                } else {
                    Log.d("BMOB", e.toString());
                }
            }
        });
        // Hide the GridView when there are no image resources.
        if (imageUrl == null || imageUrl.size() == 0) {
            viewHolder.gridView.setVisibility(View.GONE);
        } else {
            grideViewAdapter = new GrideViewAdapter(context, imageUrl);
            viewHolder.gridView.setAdapter(grideViewAdapter);
        }
        return convertView;
    }

    /** View cache for one list row. */
    public class ViewHolder {
        ImageView user_icon;
        TextView user_name;
        TextView publish_message;
        TextView publish_time;
        MyGridView gridView;
    }
}
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.dispatch
# Sent after a forum has been moved; receivers get the previous parent.
# NOTE(review): providing_args is deprecated since Django 3.0 and removed in
# Django 4.0 — confirm the targeted Django version before upgrading.
forum_moved = django.dispatch.Signal(providing_args=["previous_parent", ])
# Sent when a forum is viewed; receivers get the forum, user, request and
# response.
forum_viewed = django.dispatch.Signal(providing_args=["forum", "user", "request", "response", ])
|
#!/bin/bash
############# author: Victor FAVREAU #############
# Wraps the first column of each line of file "$1" in spaces, drops short
# words (kept lines have length > 4 INCLUDING the two added spaces, i.e. the
# word itself must have at least 3 characters), keeps the first 1000 results,
# and rewrites the input file in place.
#
# Bug fix: the previous version piped straight into "> $1", which truncates
# the input file while the first awk is still reading it (the shell opens the
# redirection before the pipeline runs). Write to a temporary file first,
# then move it over the original.
tmp="$(mktemp)"
awk '{print " "$1" "}' "$1" | awk 'length > 4' | head -1000 > "$tmp"
mv "$tmp" "$1"
|
# Copyright (c) 2017 The Bitcoin Core developers
# Copyright (c) 2017 The BitcoinSubsidium Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#Traffic-shaping setup: caps outbound BitcoinSubsidium P2P traffic (port 8767)
#to ${LIMIT} while leaving local-network traffic unrestricted.
#Must be run as root; uses tc (iproute2) plus iptables/ip6tables.
#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound BitcoinSubsidium protocol traffic to this rate
LIMIT="160kbit"
#defines the IPv4 address space for which you wish to disable rate limiting
LOCALNET_V4="192.168.0.0/16"
#defines the IPv6 address space for which you wish to disable rate limiting
LOCALNET_V6="fe80::/10"

#delete existing rules
tc qdisc del dev ${IF} root

#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10

#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}

#add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1

#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11
if [ ! -z "${LOCALNET_V6}" ] ; then
    # v6 cannot have the same priority value as v4
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 3 handle 1 fw classid 1:10
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 4 handle 2 fw classid 1:11
fi

#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

#limit outgoing traffic to and from port 8767. but not when dealing with a host on the local network
#   (defined by $LOCALNET_V4 and $LOCALNET_V6)
#   --set-mark marks packages matching these criteria with the number "2" (v4)
#   --set-mark marks packages matching these criteria with the number "4" (v6)
#   these packets are filtered by the tc filter with "handle 2"
#   this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 8767 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 8767 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
if [ ! -z "${LOCALNET_V6}" ] ; then
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --dport 8767 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --sport 8767 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
fi
|
/////////////////////////////////////////////////////////////
// ShareServiceImpl.java
// gooru-api
// Created by Gooru on 2014
// Copyright (c) 2014 Gooru. All rights reserved.
// http://www.goorulearning.org/
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/////////////////////////////////////////////////////////////
package org.ednovo.gooru.domain.service;
import static com.rosaloves.bitlyj.Bitly.shorten;
import java.util.HashMap;
import java.util.Map;
import org.ednovo.gooru.application.util.TaxonomyUtil;
import org.ednovo.gooru.core.api.model.Resource;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.application.util.BaseUtil;
import org.ednovo.gooru.core.constant.ConfigConstants;
import org.ednovo.gooru.core.constant.ConstantProperties;
import org.ednovo.gooru.core.constant.ParameterProperties;
import org.ednovo.gooru.core.exception.NotFoundException;
import org.ednovo.gooru.domain.service.redis.RedisService;
import org.ednovo.gooru.domain.service.setting.SettingService;
import org.ednovo.gooru.infrastructure.persistence.hibernate.UserRepository;
import org.ednovo.gooru.infrastructure.persistence.hibernate.resource.ResourceRepository;
import org.ednovo.gooru.json.serializer.util.JsonSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import com.rosaloves.bitlyj.Bitly;
import com.rosaloves.bitlyj.BitlyException;
import com.rosaloves.bitlyj.Url;
@Service
public class ShareServiceImpl extends BaseServiceImpl implements ShareService, ParameterProperties, ConstantProperties {

	@Autowired
	private ResourceRepository resourceRepository;

	@Autowired
	private UserRepository userRepository;

	@Autowired
	private SettingService settingService;

	@Autowired
	private RedisService redisService;

	/**
	 * Returns a JSON payload containing a bit.ly-shortened form of the given
	 * URL ({@code SHORTEN_URL}) together with the original URL ({@code RAW_URL}).
	 *
	 * Results are cached in Redis keyed by the (protocol-normalized) URL plus
	 * the Gooru organization UID; pass {@code clearCache = true} to bypass the
	 * cached value and re-shorten. If the bit.ly call fails, the full URL is
	 * used for both fields so callers always receive a usable link.
	 *
	 * @param fullUrl    the URL to shorten
	 * @param clearCache when {@code true}, skip the Redis lookup and refresh the entry
	 * @return JSON string with {@code SHORTEN_URL} and {@code RAW_URL} entries
	 */
	@Override
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED, rollbackFor = Exception.class)
	public String getShortenUrl(String fullUrl, boolean clearCache) {
		// Normalize the scheme (http vs https) so cache keys are stable across protocols.
		fullUrl = BaseUtil.changeHttpsProtocolByHeader(fullUrl);
		String cacheKey = fullUrl + HYPHEN + TaxonomyUtil.GOORU_ORG_UID;
		String responseData = null;
		if (!clearCache) {
			responseData = getRedisService().getValue(cacheKey);
		}
		if (responseData == null) {
			Map<String, String> shortenUrl = new HashMap<String, String>();
			try {
				// Bitly credentials are org-level configuration settings.
				Url bitly = Bitly.as(this.getSettingService().getConfigSetting(ConfigConstants.BITLY_USER_NAME, 0, TaxonomyUtil.GOORU_ORG_UID), this.getSettingService().getConfigSetting(ConfigConstants.BITLY_APIKEY, 0, TaxonomyUtil.GOORU_ORG_UID)).call(shorten(fullUrl));
				shortenUrl.put(SHORTEN_URL, bitly.getShortUrl());
				shortenUrl.put(RAW_URL, bitly.getLongUrl());
			} catch (BitlyException ex) {
				// Best-effort fallback: serve the original URL rather than failing the request.
				shortenUrl.put(SHORTEN_URL, fullUrl);
				shortenUrl.put(RAW_URL, fullUrl);
			}
			responseData = JsonSerializer.serializeToJson(shortenUrl, true);
			getRedisService().putValue(cacheKey, responseData, RedisService.DEFAULT_PROFILE_EXP);
		}
		return responseData;
	}

	public ResourceRepository getResourceRepository() {
		return resourceRepository;
	}

	public SettingService getSettingService() {
		return settingService;
	}

	public RedisService getRedisService() {
		return redisService;
	}

	public UserRepository getUserRepository() {
		return userRepository;
	}
}
|
#!/bin/bash
# Launch the application jar; this call blocks for the lifetime of the JVM.
java -jar /cinema/workspace/guns-cinema-0.0.1.jar

# If the JVM ever exits, idle forever so the enclosing process
# (e.g. a container) is kept alive instead of terminating.
while :
do
	sleep 5
done
|
# -*- coding: utf-8 -*-
class IdentityNotValid(Exception):
    """Signals that an identity failed validation (meaning inferred from the
    name — confirm against the call sites that raise it)."""
    pass
class RedirectStateInvalid(Exception):
    """Signals that a redirect ``state`` value did not match expectations
    (likely an OAuth-style CSRF state check — TODO confirm at call sites)."""
    pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.