text
stringlengths 1
1.05M
|
|---|
<reponame>blaiszik/globus_ko
// Supertest-based route smoke tests. Every case is currently disabled
// (commented out below); the requires are kept so the suite still loads.
var request = require('supertest');
var app = require('../app.js');
// describe('GET /', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/')
// .expect(200, done);
// });
// });
// describe('GET /signin', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/signin')
// .expect(200, done);
// });
// });
// describe('GET /signup', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/signup')
// .expect(200, done);
// });
// });
// describe('GET /api', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/api')
// .expect(200, done);
// });
// });
// describe('GET /contact', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/contact-us')
// .expect(200, done);
// });
// });
// describe('GET /pages/about-us', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/about-us')
// .expect(200, done);
// });
// });
// describe('GET /pages/faqs', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/faqs')
// .expect(200, done);
// });
// });
// describe('GET /pages/getting-started', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/getting-started')
// .expect(200, done);
// });
// });
// describe('GET /pages/how-it-works', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/how-it-works')
// .expect(200, done);
// });
// });
// describe('GET /pages/jobs', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/jobs')
// .expect(200, done);
// });
// });
// describe('GET /pages/our-team', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/our-team')
// .expect(200, done);
// });
// });
// describe('GET /pages/press', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/press')
// .expect(200, done);
// });
// });
// describe('GET /pages/privacy-policy', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/privacy-policy')
// .expect(200, done);
// });
// });
// describe('GET /pages/terms-of-use', function() {
// it('should return 200 OK', function(done) {
// request(app)
// .get('/pages/terms-of-use')
// .expect(200, done);
// });
// });
// describe('GET /random-url', function() {
// it('should return 404', function(done) {
// request(app)
// .get('/reset')
// .expect(404, done);
// });
// });
|
import { Page, ElementHandle, Browser, Viewport, launch } from 'puppeteer';
import { TaskEither, tryCatch } from 'fp-ts/lib/TaskEither';
import { ArticleContent, TagName } from './types';
import { articleContentHandler } from './browser';
/**
 * Boots a puppeteer browser instance, forwarding every argument
 * straight through to puppeteer's `launch`.
 */
export const _launchBrowser = async (...params: Parameters<typeof launch>): Promise<Browser> =>
  launch(...params);
/**
 * Obtains a usable tab from the browser: reuses the first already-open
 * page when one exists, otherwise opens a fresh one, then focuses it.
 */
export const _getPageFromBrowser = async (browser: Browser): Promise<Page> => {
  const [firstPage] = await browser.pages();
  const page = firstPage ?? (await browser.newPage());
  await page.bringToFront();
  return page;
};
/**
 * Navigates `page` to `url`, resolving once the DOM has been parsed
 * (`domcontentloaded`).
 *
 * The response status is deliberately not checked: the target site answers
 * 401 first during the auth handshake, so a status assertion here would
 * reject valid navigations. (The previously dead `response` local and its
 * eslint-disable have been removed.)
 */
export const _goToUrl = async (url: string, page: Page): Promise<Page> => {
  await page.goto(url, { waitUntil: 'domcontentloaded' });
  return page;
};
/**
 * Fills in and submits the Knowledge login form on `knowledgeLoginPage`.
 *
 * @param id       username typed into `input[name="username"]`
 * @param password password typed into `input[name="password"]`
 * @returns the same page, now logged in
 * @throws Error when the post-submit navigation does not answer 200
 */
export const _loginKnowledge = async (
  { id, password }: { id: string; password: string },
  knowledgeLoginPage: Page,
): Promise<Page> => {
  await knowledgeLoginPage.type('input[name="username"]', id);
  await knowledgeLoginPage.type('input[name="password"]', password);
  // Locate the submit button by XPath; fails after 3s if the form layout changed.
  const submitButton = await knowledgeLoginPage.waitForXPath('//*[@id="content_top"]/div/form/div[3]/div/button', {
    timeout: 3000,
  });
  // Start waiting for navigation BEFORE clicking, so the navigation triggered
  // by the click cannot be missed.
  const [response] = await Promise.all([
    // The promise resolves after navigation has finished
    knowledgeLoginPage.waitForNavigation(),
    submitButton?.click(),
  ]);
  if (response?.status() !== 200) {
    throw new Error('login failed');
  }
  return knowledgeLoginPage;
};
/**
 * Extracts the article body from `articlePage` as a flat list of
 * `{ tagName, innerText }` records, one per content child element.
 *
 * Runs the project-provided `articleContentHandler` inside the page context
 * (see ./browser) and walks the returned handle's properties.
 */
export const _extractArticleContentsFromPage = async (articlePage: Page): Promise<ArticleContent[]> => {
  const contentHandle = await articlePage.evaluateHandle(articleContentHandler);
  // Keep only properties that are real element handles (drop non-element values).
  const contentChildrenHandlers = Array.from(await contentHandle.getProperties())
    .map(([, property]) => property.asElement())
    .filter((elm): elm is ElementHandle<Element> => !!elm);
  // Evaluate each element in the page to pull out its tag and visible text.
  return await Promise.all(
    contentChildrenHandlers.map((handler) =>
      handler.evaluate((elm: HTMLElement) => ({
        tagName: elm.tagName.toLowerCase() as TagName,
        innerText: elm.innerText,
      })),
    ),
  );
};
/**
 * Applies `viewPort` to `page` when the browser runs with a visible window,
 * then returns the page unchanged.
 *
 * NOTE(review): the viewport is skipped in headless mode — confirm this is
 * intentional; headless pages also honour `setViewport`.
 *
 * (Explicit `Promise<Page>` return type added: exported functions should
 * annotate their return type; the inferred type is identical.)
 */
export const _setViewPortToPage = async (viewPort: Viewport, isHeadless: boolean, page: Page): Promise<Page> => {
  if (!isHeadless) {
    await page.setViewport(viewPort);
  }
  return page;
};
/** Shuts down the browser instance (and with it every open page). */
export const _closeBrowser = async (browser: Browser): Promise<void> => {
  await browser.close();
};
/**
 * Switches a draft-article page to its preview tab and waits until the
 * preview content has rendered.
 *
 * @returns the same page, now showing the preview
 * @throws Error when the preview content does not appear within 3s
 */
export const _showDraftPreview = async (draftArticlePage: Page): Promise<Page> => {
  // The preview tab is located by a fixed XPath into the page chrome.
  const previewTab = await draftArticlePage.waitForXPath('//*[@id="content_main"]/div[1]/div[2]/ul/li[2]/a', {
    timeout: 3000,
  });
  await previewTab?.click();
  // wait for loading has finished
  const contentElement = await draftArticlePage.waitForSelector('div#content', { timeout: 3000 });
  if (!contentElement) {
    throw new Error('show draft preview failed');
  }
  return draftArticlePage;
};
/**
 * Lifts an async function into fp-ts land: the returned function produces a
 * TaskEither that resolves Right with the result on success, or Left with the
 * thrown value (assumed to be an Error) on failure.
 */
export const toTaskEither = <P extends unknown[], R>(
  asyncFn: (...args: P) => Promise<R>,
): ((...args: P) => TaskEither<Error, R>) => {
  return (...args) => {
    const run = () => asyncFn(...args);
    const onError = (e: unknown) => e as Error;
    return tryCatch(run, onError);
  };
};
// TaskEither-wrapped variants of the raw async helpers above; these are the
// intended public API for fp-ts pipelines.
export const getPageFromBrowser = toTaskEither(_getPageFromBrowser);
export const goToUrl = toTaskEither(_goToUrl);
export const loginKnowledge = toTaskEither(_loginKnowledge);
export const extractArticleContentsFromPage = toTaskEither(_extractArticleContentsFromPage);
export const setViewPortToPage = toTaskEither(_setViewPortToPage);
export const launchBrowser = toTaskEither(_launchBrowser);
export const closeBrowser = toTaskEither(_closeBrowser);
export const showDraftPreview = toTaskEither(_showDraftPreview);
|
<!DOCTYPE html>
<html>
<head>
<title>3D Data Visualization</title>
</head>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r108/three.min.js"></script>
<script>
let scene, camera, renderer;
// Set the scene size.
const WIDTH = 400;
const HEIGHT = 300;
// Set some camera attributes
const VIEW_ANGLE = 45;
const ASPECT = WIDTH / HEIGHT;
const NEAR = 0.1;
const FAR = 10000;
// create a WebGL renderer, camera, and a scene
renderer = new THREE.WebGLRenderer();
// Set the background color
renderer.setClearColor(0xEEEEEE);
// Setup the camera
camera = new THREE.PerspectiveCamera( VIEW_ANGLE,
ASPECT,
NEAR,
FAR );
// Create the scene
scene = new THREE.Scene();
// Add the camera to the scene
scene.add(camera);
// Position the camera
camera.position.z = 300;
// Start the renderer
renderer.setSize(WIDTH, HEIGHT);
// Attach the renderer to the container
document.body.appendChild( renderer.domElement );
// Create a light
let light = new THREE.PointLight( 0xffffff );
// Set its position
light.position.x = 10;
light.position.y = 50;
light.position.z = 130;
// Add to the scene
scene.add( light );
// Create a 3D cube
let cubeGeometry = new THREE.BoxGeometry( 10, 10, 10);
let cubeMaterial = new THREE.MeshLambertMaterial( { color: 0xff0000 } );
let cube = new THREE.Mesh( cubeGeometry, cubeMaterial );
// Add the cube to the scene
scene.add( cube );
// Render the scene
renderer.render(scene, camera);
// Add rotation to cube
function updateCube() {
cube.rotation.y += 0.02;
}
// Animate the cube
function animate() {
requestAnimationFrame( animate );
updateCube();
renderer.render( scene, camera );
}
animate();
// Add mouse control
// NOTE(review): OrbitControls is not bundled in three.min.js; this line will
// throw unless examples/js/controls/OrbitControls.js is also loaded — confirm.
let control = new THREE.OrbitControls( camera, renderer.domElement );
</script>
</body>
</html>
|
# bash
# THIS SCRIPT IS USED FOR NETLIST MANIPULATION
#
# 20210826
#
# SUBSTITUTE KEYWORD
# Rename RR -> XRR and CC -> XCC; strip the $[...] wrapper from rnpoly,
# hrpolyu, rppolyu and mim_cap_6; drop the $ prefix from $W/ and $L;
# then delete resistor/capacitor values (decimal numbers followed by one of
# the unit characters in the trailing bracket expression).
# Usage pattern: sed -i -e 's/<pattern>/<replacement>/' <file>
#
sed -i -e 's/\RR/XRR/g ; s/\CC/XCC/g ; s/\$\[rnpoly\]/rnpoly/g ; s/\$\[hrpolyu\]/hrpolyu/g ; s/\$\[rppolyu\]/rppolyu/g ; s/\$\[mim_cap_6\]/mim_cap_6/g ; s/\$W/W/g ; s/\$L/L/g ; s/[0-9]\{1,\}\.[0-9]\{1,\}[\ \Kfp]//g' LY2632_New1.cdl
|
#!/bin/bash
# One-shot developer machine setup (uses MacPorts, so presumably macOS).
# Cloud SDK and App Engine
# NOTE(review): piping curl straight into bash/python executes remote
# scripts unverified — confirm this is acceptable for this environment.
curl https://sdk.cloud.google.com | bash
gcloud components install app-engine-python
# Node.js
sudo port install nodejs
# Gulp.js
npm install -g gulp
# Python related
curl https://bootstrap.pypa.io/get-pip.py | python
pip install virtualenv
# Git
sudo port install git-core
|
package org.firstinspires.ftc.teamcode.terminators;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.hardware.Robot;
/**
 * Used to tell if an OpMode is still running and acts as a terminator to stop all threads.
 * Easier than sending around {@link com.qualcomm.robotcore.eventloop.opmode.OpMode} code.
 */
public class Status {
    /**
     * Checks if the autonomous OpMode is requesting or has requested a stop.
     *
     * @return true when the linear OpMode requested a stop or the current
     *         thread was interrupted; false for non-linear OpModes
     */
    public static boolean isStopRequested() {
        // Only LinearOpMode exposes isStopRequested(); anything else is
        // treated as "not stopped".
        if (!(Robot.getOpMode() instanceof LinearOpMode)) {
            return false;
        }
        return ((LinearOpMode) Robot.getOpMode()).isStopRequested() || Thread.interrupted();
    }
}
|
from stream_manager import StreamManagerClient
from stream_manager import StreamManagerException
from stream_manager import MessageStreamDefinition
from stream_manager import StrategyOnFull
from stream_manager import Persistence
from stream_manager import ExportDefinition
from stream_manager import S3ExportTaskExecutorConfig
from stream_manager import S3ExportTaskDefinition
from stream_manager import Util
from stream_manager import ReadMessagesOptions
from stream_manager import StatusMessage
from stream_manager import Status
from stream_manager import ResourceNotFoundException
from stream_manager import StatusConfig
from stream_manager import StatusLevel
import asyncio
import sys
import os
stream_name = "pose-estimator-pi-output"
status_stream_name = "pose-estimator-pi-output_status_stream"
s3_bucket_name = os.environ["STREAM_MANAGER_S3_BUCKET_NAME"]
def init_gg_stream_manager():
    """Create a StreamManagerClient and (re)create the data and status streams.

    Pre-existing streams with the same names are deleted first so the
    configuration below always applies fresh.

    Returns:
        The connected ``StreamManagerClient``.
    """
    print("Initializing Stream manager.....", flush=True)
    s3_stream_client = StreamManagerClient()
    # Drop stale streams from a previous run; absence is fine.
    try:
        s3_stream_client.delete_message_stream(stream_name=stream_name)
    except ResourceNotFoundException:
        pass
    try:
        s3_stream_client.delete_message_stream(stream_name=status_stream_name)
    except ResourceNotFoundException:
        pass
    try:
        # Create the Status Stream.
        s3_stream_client.create_message_stream(
            MessageStreamDefinition(name=status_stream_name, strategy_on_full=StrategyOnFull.OverwriteOldestData, persistence=Persistence.Memory)
        )
    except StreamManagerException:
        # NOTE(review): all creation errors are swallowed, not just
        # "already exists" — confirm this best-effort behavior is intended.
        pass
    # Export definition routing the data stream to S3, with TRACE-level status
    # updates written to the status stream created above.
    my_s3_export_definition = ExportDefinition(
        s3_task_executor=[
            S3ExportTaskExecutorConfig(
                identifier="s3_task_exe_" + stream_name,
                status_config=StatusConfig(
                    status_level=StatusLevel.TRACE,  # Default is INFO level statuses.
                    # Status Stream should be created before specifying in S3 Export Config.
                    status_stream_name=status_stream_name,
                ),
            )
        ]
    )
    try:
        # Create the data stream that S3 export tasks are appended to.
        s3_stream_client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                max_size=268435456,  # Default is 256 MB.
                stream_segment_size=16777216,  # Default is 16 MB.
                time_to_live_millis=None,  # By default, no TTL is enabled.
                strategy_on_full=StrategyOnFull.OverwriteOldestData,  # Required.
                persistence=Persistence.File,  # Default is File.
                flush_on_write=False,  # Default is false.
                export_definition=my_s3_export_definition
            )
        )
    except StreamManagerException:
        pass
    return s3_stream_client
def send_to_gg_stream_manager(s3_stream_client: StreamManagerClient, file_url: str, s3_key_name: str):
    """Append an S3 export task for ``file_url`` and block until the status
    stream reports success, failure, or cancellation of the upload.

    Fix: the original logging used ``print("... {} ", value)`` — the ``{}``
    placeholder was never substituted (print just emitted the literal braces
    followed by the value). All such messages now use ``str.format``.

    Args:
        s3_stream_client: connected client from ``init_gg_stream_manager``.
        file_url: local file path (``file://`` prefix is added here).
        s3_key_name: destination key in the configured S3 bucket.
    """
    print("In Send to GG Stream Manager Function", flush=True)
    input_url = "file://" + file_url
    print("Input URL, bucket and key are :::: {} - {} - {}".format(input_url, s3_bucket_name, s3_key_name), flush=True)
    try:
        s3_export_task_definition = S3ExportTaskDefinition(input_url=input_url, bucket=s3_bucket_name, key=s3_key_name)
        print("Task definition created successfully....", flush=True)
        sequence_number = s3_stream_client.append_message(
            stream_name, Util.validate_and_serialize_to_json_bytes(s3_export_task_definition))
        print("Successfully appended to stream with sequence number {}".format(sequence_number), flush=True)
        # Poll the status stream until a terminal status for the upload arrives.
        is_upload_success = False
        while not is_upload_success:
            try:
                messages_list = s3_stream_client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1, read_timeout_millis=10000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(message.payload, StatusMessage)
                    if status_message.status == Status.Success:
                        print("Successfully uploaded file: {} to S3 bucket: {} and the location is: {}".format(input_url, s3_bucket_name, s3_key_name), flush=True)
                        is_upload_success = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        print("Unable to upload file:{} to S3 bucket:{}".format(input_url, s3_bucket_name), flush=True)
                        is_upload_success = True
            except StreamManagerException:
                # Transient read error while polling: log and keep waiting.
                print("Exception occurred while sending message to S3.. {}".format(sys.exc_info()[0]), flush=True)
    except asyncio.TimeoutError:
        print("Timed out while executing.. {}".format(sys.exc_info()[0]), flush=True)
    except Exception:
        # NOTE(review): broad catch keeps the caller alive on any failure;
        # the error is only logged — confirm this best-effort contract.
        print("Exception while running.. {}".format(sys.exc_info()[0]), flush=True)
|
# CocoaPods specification for ReactiveCocoa 2.5.2 (sourced from the iCHEF fork).
Pod::Spec.new do |s|
  s.name = "ReactiveCocoa"
  s.version = "2.5.2"
  s.summary = "Streams of values over time"
  s.description = <<-DESC
ReactiveCocoa (RAC) is an Objective-C framework inspired by Functional Reactive Programming. It provides APIs for composing and transforming streams of values.
  DESC
  s.homepage = "https://github.com/ReactiveCocoa/ReactiveCocoa"
  s.license = { :type => "MIT", :file => "LICENSE.md" }
  s.author = "ReactiveCocoa"
  # Minimum deployment targets per platform.
  s.osx.deployment_target = "10.9"
  s.ios.deployment_target = "8.0"
  s.tvos.deployment_target = "9.0"
  s.watchos.deployment_target = "2.0"
  # Source points at the iCHEF fork, pinned to this spec's version tag.
  s.source = { :git => "https://github.com/iCHEF/ReactiveCocoa.git", :tag => "#{s.version}" }
  s.source_files = "ReactiveCocoa/*.{h,m,d}",
  "ReactiveCocoa/extobjc/*.{h,m}"
  # This file must be compiled without ARC; it is built by the 'no-arc'
  # subspec below and excluded from the ARC-enabled file lists.
  non_arc_files = 'ReactiveCocoa/RACObjCRuntime.m'
  s.private_header_files = "**/*Private.h",
  "**/*EXTRuntimeExtensions.h",
  "**/RACEmpty*.h"
  # Per-platform exclusions: drop classes wrapping UI frameworks that do not
  # exist on the platform, plus the non-ARC file.
  s.ios.exclude_files = "ReactiveCocoa/**/*{AppKit,NSControl,NSText,NSTable}*", non_arc_files
  s.osx.exclude_files = "ReactiveCocoa/**/*{UIActionSheet,UIAlertView,UIBarButtonItem,"\
  "UIButton,UICollectionReusableView,UIControl,UIDatePicker,"\
  "UIGestureRecognizer,UIImagePicker,UIRefreshControl,"\
  "UISegmentedControl,UISlider,UIStepper,UISwitch,UITableViewCell,"\
  "UITableViewHeaderFooterView,UIText,MK}*", non_arc_files
  s.tvos.exclude_files = "ReactiveCocoa/**/*{AppKit,NSControl,NSText,NSTable,UIActionSheet,"\
  "UIAlertView,UIDatePicker,UIImagePicker,UIRefreshControl,UISlider,"\
  "UIStepper,UISwitch,MK}*", non_arc_files
  s.watchos.exclude_files = "ReactiveCocoa/**/*{UIActionSheet,UIAlertView,UIBarButtonItem,"\
  "UIButton,UICollectionReusableView,UIControl,UIDatePicker,"\
  "UIGestureRecognizer,UIImagePicker,UIRefreshControl,"\
  "UISegmentedControl,UISlider,UIStepper,UISwitch,UITableViewCell,"\
  "UITableViewHeaderFooterView,UIText,MK,AppKit,NSControl,NSText,"\
  "NSTable,NSURLConnection}*", non_arc_files
  s.frameworks = "Foundation"
  # Rename vendored EXT*/metamacros files with a RAC prefix and rewrite their
  # includes, so they do not collide with other pods vendoring libextobjc.
  s.prepare_command = <<-'CMD'.strip_heredoc
    find -E . -type f -not -name 'RAC*' -regex '.*(EXT.*|metamacros)\.[hm]$' \
      -execdir mv '{}' RAC'{}' \;
    find . -regex '.*\.[hm]' \
      -exec perl -pi \
      -e 's@"(?:(?!RAC)(EXT.*|metamacros))\.h"@"RAC\1.h"@' '{}' \;
    find . -regex '.*\.[hm]' \
      -exec perl -pi \
      -e 's@<ReactiveCocoa/(?:(?!RAC)(EXT.*))\.h>@<ReactiveCocoa/RAC\1.h>@' '{}' \;
  CMD
  # Compiles RACObjCRuntime.m with ARC disabled.
  s.subspec 'no-arc' do |sp|
    sp.source_files = non_arc_files
    sp.requires_arc = false
  end
end
|
package com.telenav.osv.recorder;
/**
 * Custom {@link Throwable} carrying the error messages used by the recorder's
 * OBD connection handling.
 * <p>
 * NOTE(review): extending {@link Throwable} directly (rather than
 * {@link Exception}) is unusual and bypasses checked-exception handling —
 * confirm callers rely on this before changing it.
 * Created by cameliao on 1/31/18.
 */
public class Errors extends Throwable {
    /** Message used when an OBD connect is attempted while already connected. */
    public static final String ERROR_OBD_ALREADY_CONNECTED = "OBD already connected.";
    /** Message used when an operation requires a registered OBD connection. */
    public static final String ERROR_NO_OBD_CONNECTION = "There is no OBD connection registered.";
    /**
     * @param message the detail message describing the error
     */
    public Errors(String message) {
        super(message);
    }
}
|
#!/bin/bash
# Configure and build with a mixed backend, MPI and GPU overlap enabled,
# double precision, and matrix size 7 (flags consumed by init.sh / CMake).
source ../scripts/init.sh -DBACKEND=mix -DSHOW_SUMMARY=on -DSHOW_SCHEDULE=on -DMICRO_BENCH=on -DUSE_DOUBLE=on -DDISABLE_ASSERT=off -DENABLE_OVERLAP=on -DMEASURE_STAGE=off -DEVALUATOR_PREPROCESS=on -DUSE_MPI=on -DMAT=7
# Run ./main under MPI: 2 ranks on host nico3, exporting GPUPerRank=2, wrapped
# by the env/GPU-binding helper scripts, on the 28-qubit QFT input circuit.
`which mpirun` -host nico3:2 -x GPUPerRank=2 ../scripts/env.sh ../scripts/gpu-bind.sh ./main ../tests/input/qft_28.qasm
|
<filename>src/main/java/com/alipay/api/response/AntMerchantExpandIndirectActivityQueryResponse.java
package com.alipay.api.response;
import java.util.List;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
import com.alipay.api.domain.ActivityMerchantOrder;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: ant.merchant.expand.indirect.activity.query response.
 *
 * @author auto create
 * @since 1.0, 2021-10-29 13:51:00
 */
public class AntMerchantExpandIndirectActivityQueryResponse extends AlipayResponse {
    private static final long serialVersionUID = 3868492661217533453L;
    /**
     * Result list returned for the multi-activity application.
     */
    @ApiListField("multi_result")
    @ApiField("activity_merchant_order")
    private List<ActivityMerchantOrder> multiResult;
    /**
     * The rate that actually takes effect once the rate application is
     * approved; the only possible values are 0 and 0.001. Populated only when
     * {@code status} is TRUE; otherwise no rate information is returned.
     */
    @ApiField("rate")
    private String rate;
    /**
     * Review result of the rate application: true (approved),
     * false (rejected), AUDITING (under review).
     */
    @ApiField("status")
    private String status;
    public void setMultiResult(List<ActivityMerchantOrder> multiResult) {
        this.multiResult = multiResult;
    }
    public List<ActivityMerchantOrder> getMultiResult( ) {
        return this.multiResult;
    }
    public void setRate(String rate) {
        this.rate = rate;
    }
    public String getRate( ) {
        return this.rate;
    }
    public void setStatus(String status) {
        this.status = status;
    }
    public String getStatus( ) {
        return this.status;
    }
}
|
<filename>src/main/java/de/unistuttgart/ims/coref/annotator/tools/Options.java
package de.unistuttgart.ims.coref.annotator.tools;
import java.io.File;
import com.lexicalscope.jewel.cli.Option;
/**
 * Command-line options (parsed by jewel-cli) for the annotator fixer tools.
 */
public interface Options {
    /** Input file to read. */
    @Option
    File getInput();
    /** Output file to write. */
    @Option
    File getOutput();
    /** Which fix routine to apply. */
    @Option
    Fixer.Fix getFix();
    /** Document language code; defaults to German ("de"). */
    @Option(defaultValue = "de")
    String getLanguage();
}
|
#!/bin/bash
# Emit a bracketed timestamp, e.g. [Jan 01 12:34:56], as a log-line prefix.
function ts {
  # Quote the command substitution: the original unquoted `echo [`date`]` was
  # subject to word splitting and glob expansion (a file matching the bracket
  # pattern in the CWD would corrupt the output). Backticks replaced by $(...).
  echo "[$(date '+%b %d %X')]"
}
echo "$(ts) Starting the server in 5 seconds..."
# Launch Xeoma as a service with logging, delayed by 5 seconds.
/root/bin/Xeoma/xeoma -- -service -log -startdelay 5
|
#!/usr/bin/env bash
# Copyright 2019 Jetstack Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Verify the example Terraform configuration: check formatting, then init and
# validate a local copy of the example that consumes the module from this
# repository instead of the Terraform Registry. (The previous header comment
# described a different script and has been corrected.)

set -o errexit
set -o nounset
set -o pipefail
set -o xtrace

REPO_ROOT=$(dirname "${BASH_SOURCE[0]}")/..

# Capture the output of terraform fmt so that we can trigger the script to
# fail if formatting changes were made. terraform fmt does not consider
# applying formatting changes to be failure, however we want the files to be
# correctly formatted in version control.
# (All $REPO_ROOT expansions below are now quoted so paths with spaces work.)
FMT=$(terraform fmt "$REPO_ROOT")
if [ "$FMT" != "" ]; then
  echo "$FMT"
  exit 1
fi

mkdir -p "$REPO_ROOT/verify-terraform"
pushd "$REPO_ROOT/verify-terraform"
cp ../example/main.tf main.tf
cp ../example/variables.tf variables.tf
cp ../example/terraform.tfvars.example terraform.tfvars
# Remove the requirement for a GCS backend so we can init and validate locally
perl -i -0pe 's/(\s*)backend "gcs" \{\n?\s*\n?\s*\}/\1# GCS bucket not used for testing/gms' main.tf
# Use the local version of the module, not the Terraform Registry version, and remove the version specification
perl -i -0pe 's/(\s*)source*\s*= "jetstack\/gke-cluster\/google"\n\s*version = "0.1.0-beta2"/\1source = "..\/"/gms' main.tf
terraform init
terraform validate

# TODO: Set up a GCP project and service account to run the following section
# in automated testing.
# To make Terraform plan and apply, the following env vars are required:
#   GOOGLE_APPLICATION_CREDENTIALS - path of a key.json for a service account
#   GCP_PROJECT_ID                 - ID of a GCP project to use
# Both are required, so the guard uses && (the original ||, combined with
# `set -o nounset`, let the section run and crash when only one was set).
if [ -n "${GCP_PROJECT_ID+x}" ] && [ -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]; then
  echo "$GCP_PROJECT_ID"
  echo "$GOOGLE_APPLICATION_CREDENTIALS"
  sed -i.bak "s|my-project|$GCP_PROJECT_ID|g" terraform.tfvars
  terraform plan
  terraform apply -auto-approve
  terraform destroy -auto-approve
else
  echo "Skipping Terraform plan and apply as GCP_PROJECT_ID and GOOGLE_APPLICATION_CREDENTIALS not set."
fi

popd > /dev/null
rm -rf "$REPO_ROOT/verify-terraform"
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2805-1 (CVE-2015-5307)
#
# Security announcement date: 2015-11-09 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:54 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# Upgrades every affected kernel flavour to the fixed version recommended by
# the security team (all flavours share the same target version).
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
VERSION="3.16.0-53.72~14.04.1"
for flavour in \
    powerpc64-emb \
    generic-lpae \
    powerpc64-smp \
    generic \
    powerpc-smp \
    powerpc-e500mc \
    lowlatency
do
    # Same command, package and order as the original per-line invocations.
    sudo apt-get install --only-upgrade "linux-image-3.16.0-53-${flavour}=${VERSION}" -y
done
|
import {Container} from '@material-ui/core';
import CssBaseline from '@material-ui/core/CssBaseline';
import React, {useCallback, useEffect, useState, ReactElement} from 'react';
import AlreadyCompletedDialog from './components/AlreadyCompletedDialog';
import GridQuestionnaire from './components/GridQuestionnaire';
import Maintenance from "./components/Maintenance";
import SignIn from './components/SignIn';
import SignInDob from './components/SignInDob';
import Snackbars from './components/Snackbars';
import {apiEndpoint, apiFetch} from './utils/api';
// Authenticated user as returned by GET /v1/user/ (first array element).
export type User = {
  id : string;
  name : string;
  // User category; also selects the questionnaire endpoint (/v1/<type>/...).
  type : string;
  schoolName : string;
  schoolId : string;
};
// Current questionnaire record; Q01–Q06 hold the answers.
export type Questionnaire = {
  Q01 : string;
  Q02 : string;
  Q03 : string;
  Q04 : string;
  Q05 : string;
  Q06 : number;
  isComplete : boolean;
  recid : string;
};
// Question definition as returned by GET /v1/user/questions.
export type Question = {
  id : string;
  number : number;
  text : string;
  subText : string;
  type : string;
  multipleAnswers : string[];
  // Optional validation bounds for numeric answers.
  maxAcceptable ?: number;
  maxValid ?: number;
  minAcceptable ?: number;
  minValid ?: number;
};
// Root component: handles sign-in (AD username/password or id + date of
// birth, selected by REACT_APP_AUTH_MODE), then loads the user, their
// question definitions and current questionnaire, and renders either the
// questionnaire grid, an "already completed" dialog, or a maintenance banner.
const App = () : ReactElement => {
  const [user, setUser] = useState<User | null>(null);
  const [questions, setQuestions] = useState<Question[] | null>(null);
  const [questionnaire, setQuestionnaire] = useState<Questionnaire | null>(null);
  const [authConfirmed, setAuthConfirmed] = useState(false);
  const [snackbarOpen, setSnackbarOpen] = useState(false);
  const [token, setToken] = useState<string | undefined>(undefined);
  const [snackbarMessage, setSnackbarMessage] = useState('');
  const [snackbarSeverity] = useState<"success" | "info" | "warning" | "error" | undefined>('error');
  // Maintenance banner text; seeded from the build-time env var and possibly
  // overwritten by a 503 response from the API.
  const [maintenance, setMaintenance] = useState<string | undefined>(process.env.REACT_APP_MAINTENANCE);
  // Fetches the signed-in user and the question definitions; any failure is
  // surfaced in the snackbar, and a 503 switches the app to maintenance mode.
  const getUser = useCallback(async () => {
    setUser(null);
    let url = new URL('/v1/user/', apiEndpoint);
    const response = await apiFetch(url.toString(), {}, token);
    const data = await response.json();
    url = new URL('/v1/user/questions', apiEndpoint);
    const questionsResponse = await apiFetch(url.toString(), {}, token);
    const questions = await questionsResponse.json();
    if (response.status !== 200 || data.length <= 0 || questionsResponse.status !== 200 || questions.length === 0) {
      if (response.status === 503) {
        setMaintenance(data.message);
        return;
      }
      setSnackbarMessage('There was an error retrieving the user.');
      setSnackbarOpen(true);
      return;
    }
    setQuestions(questions);
    // The user endpoint returns an array; the first entry is the current user.
    setUser(data[0]);
  }, [token]);
  // Username/password sign-in (AD mode). On success, stores the JWT, which
  // triggers getUser via the token effect below.
  // NOTE(review): this path uses '/V1/' while other endpoints use '/v1/' —
  // confirm the API treats them identically.
  const signIn = useCallback(async (username, password) => {
    setToken(undefined);
    const url = new URL('/V1/login/authenticate', apiEndpoint);
    const response = await apiFetch(url.href, {
      method: 'POST',
      body: JSON.stringify({
        "username": username,
        "password": password
      }),
    })
    if (response.status !== 200) {
      setSnackbarMessage(response.status === 401 ? 'Invalid id or date of birth.' : 'There was an error signing you in.');
      setSnackbarOpen(true);
      return;
    }
    const data = await response.json();
    setToken(data.jwt);
    setAuthConfirmed(true);
  }, []);
  // Id + date-of-birth sign-in (DOB mode); 503 responses switch the app to
  // maintenance mode, other failures surface in the snackbar.
  const signInDob = useCallback(async (id, dob) => {
    setToken(undefined);
    const url = new URL('/V1/login/authenticate', apiEndpoint);
    const response = await apiFetch(url.href, {
      method: 'POST',
      body: JSON.stringify({
        "id": id,
        "dob": dob
      }),
    })
    if (response.status !== 200) {
      if (response.status === 503) {
        try {
          const data = await response.json();
          setMaintenance(data.message);
          return;
        } catch (e) {
          console.log('error', e.message);
        }
      }
      setSnackbarMessage(response.status === 401 ? 'Invalid id or date of birth.' : 'There was an error signing you in.');
      setSnackbarOpen(true);
      return;
    }
    const data = await response.json();
    setToken(data.jwt);
    setAuthConfirmed(true);
  }, []);
  // Loads the current questionnaire for the given user type.
  // NOTE(review): success is checked against 201 (Created) rather than 200 —
  // confirm the API really answers 201 for getCurrentQuestionnaire.
  const getQuestionnaire = useCallback(async (type) => {
    const url = new URL('/v1/' + type + '/getCurrentQuestionnaire', apiEndpoint);
    const response = await apiFetch(url.href, {}, token)
    const data = await response.json();
    if (response.status !== 201) {
      setSnackbarMessage('There was an error retrieving the questionnaire.');
      setSnackbarOpen(true);
      return;
    }
    setQuestionnaire(data);
  }, [token]);
  // Clearing the user resets auth state and the loaded questionnaire.
  useEffect(() => {
    if (user === null) {
      setAuthConfirmed(false);
      setQuestionnaire(null);
    }
  }, [user]);
  // Once a token exists, fetch the user + questions.
  useEffect(() => {
    if (undefined !== token) {
      getUser();
    }
  }, [token, getUser]);
  // Once a user is loaded, fetch their questionnaire.
  useEffect(() => {
    if (user !== null) {
      getQuestionnaire(user.type);
    }
  }, [user, getQuestionnaire]);
  // Whether the sign-in form for the given auth mode should be shown.
  const displaySignIn = (method : string) : boolean => {
    return !maintenance && !authConfirmed && process.env.REACT_APP_AUTH_MODE === method;
  }
  return (
    <Container maxWidth="md">
      <CssBaseline/>
      {maintenance && <Maintenance message={maintenance}/>}
      {displaySignIn('AD') && <SignIn signIn={signIn}/>}
      {displaySignIn('DOB') && <SignInDob signIn={signInDob}/>}
      <Snackbars severity={snackbarSeverity} open={snackbarOpen} setOpen={setSnackbarOpen} message={snackbarMessage}/>
      {(authConfirmed && user !== null && questionnaire !== null && !questionnaire.isComplete && undefined !== token && questions) && <GridQuestionnaire
        userType={user.type}
        user={user}
        setUser={setUser}
        setSnackbarOpen={setSnackbarOpen}
        setSnackbarMessage={setSnackbarMessage}
        token={token}
        questions={questions}
      />}
      {(authConfirmed && user !== null && questionnaire !== null && questionnaire.isComplete) && <AlreadyCompletedDialog setUser={setUser}/>}
    </Container>
  );
}
export default App;
|
/***** BEGIN LICENSE BLOCK *****
* Version: EPL 1.0/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Eclipse Public
* License Version 1.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.eclipse.org/legal/epl-v10.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* Copyright (C) 2006 <NAME> <<EMAIL>>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the EPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the EPL, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package org.jruby.javasupport.proxy;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import org.jruby.Ruby;
import org.jruby.javasupport.JavaSupport;
import org.jruby.javasupport.JavaSupportImpl;
import org.jruby.util.cli.Options;
import org.jruby.util.log.Logger;
import org.jruby.util.log.LoggerFactory;
import static org.jruby.javasupport.JavaClass.EMPTY_CLASS_ARRAY;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Generates and defines concrete proxy classes -- via ASM bytecode generation --
 * that extend a given superclass and/or implement a set of interfaces while
 * routing selected overridable methods through a {@link JavaProxyInvocationHandler}.
 * Every generated class also implements InternalJavaProxy and exposes a static
 * __proxy_class field that its own &lt;clinit&gt; populates.
 */
public class JavaProxyClassFactory {

    private static final Logger LOG = LoggerFactory.getLogger("JavaProxyClassFactory");

    static final Type[] EMPTY_TYPE_ARRAY = new Type[0];

    static final Type JAVA_LANG_CLASS_TYPE = Type.getType(Class.class);

    // java.lang.Class.forName(String) -- invoked from the generated <clinit>
    private static final org.objectweb.asm.commons.Method forName = org.objectweb.asm.commons.Method
        .getMethod("java.lang.Class forName(java.lang.String)");

    // Instance field on generated proxies holding the Ruby-side handler.
    private static final String INVOCATION_HANDLER_FIELD_NAME = "__handler";

    // Static field on generated proxies holding their JavaProxyClass metadata.
    private static final String PROXY_CLASS_FIELD_NAME = "__proxy_class";

    private static final Type PROXY_METHOD_TYPE = Type.getType(JavaProxyMethod.class);

    private static final Type PROXY_CLASS_TYPE = Type.getType(JavaProxyClass.class);

    private static final Type INVOCATION_HANDLER_TYPE = Type.getType(JavaProxyInvocationHandler.class);

    // public Object invoke(Object receiver, JavaProxyMethod method, Object[] args)
    private static final org.objectweb.asm.commons.Method invoke = org.objectweb.asm.commons.Method
        .getMethod("java.lang.Object invoke(java.lang.Object, " + PROXY_METHOD_TYPE.getClassName() + ", java.lang.Object[])");

    private static final Type INTERNAL_PROXY_HELPER_TYPE = Type.getType(InternalJavaProxyHelper.class);

    // public static JavaProxyClass initProxyClass(Class)
    private static final org.objectweb.asm.commons.Method initProxyClass = org.objectweb.asm.commons.Method
        .getMethod(JavaProxyClass.class.getName() + " initProxyClass(java.lang.Class)");

    // public static JavaProxyMethod initProxyMethod(JavaProxyClass proxyClass, String name, String desc, boolean hasSuper)
    private static final org.objectweb.asm.commons.Method initProxyMethod = org.objectweb.asm.commons.Method
        .getMethod(PROXY_METHOD_TYPE.getClassName() + " initProxyMethod("
            + JavaProxyClass.class.getName() + ",java.lang.String,java.lang.String,boolean)");

    private static final Type JAVA_PROXY_TYPE = Type.getType(InternalJavaProxy.class);

    // Monotonic id appended to generated class names to keep them unique.
    private static final AtomicInteger counter = new AtomicInteger(0);

    private static int nextId() { return counter.incrementAndGet(); }

    /**
     * Creates the factory, honoring the Options.JI_PROXYCLASSFACTORY property:
     * when set, that class is reflectively instantiated and used if it is a
     * JavaProxyClassFactory; otherwise -- or on any reflection error, which is
     * logged -- the default factory is returned.
     */
    public static JavaProxyClassFactory createFactory() {
        final String factoryClassName = Options.JI_PROXYCLASSFACTORY.load();
        JavaProxyClassFactory factory = null;
        if ( factoryClassName != null ) {
            try {
                Class clazz = Class.forName(factoryClassName);
                Object instance = clazz.newInstance();
                if ( instance instanceof JavaProxyClassFactory ) {
                    factory = (JavaProxyClassFactory) instance;
                    LOG.info("Created proxy class factory: {}", factory);
                } else {
                    LOG.error("Invalid proxy class factory: {}", instance);
                }
            }
            catch (ClassNotFoundException e) {
                LOG.error("ClassNotFoundException creating proxy class factory: ", e);
            }
            catch (InstantiationException e) {
                LOG.error("InstantiationException creating proxy class factory: ", e);
            }
            catch (IllegalAccessException e) {
                LOG.error("IllegalAccessException creating proxy class factory: ", e);
            }
        }
        return factory != null ? factory : new JavaProxyClassFactory();
    }

    /**
     * Returns a proxy class extending superClass and implementing interfaces,
     * overriding only the methods whose names appear in names. Results are
     * cached per runtime under a key of (superClass, interfaces, names); a
     * fresh class is generated and defined only on a cache miss.
     *
     * @param runtime the Ruby runtime owning the proxy-class cache
     * @param loader class loader to define the class in (null: this class' loader)
     * @param targetClassName desired name, or null to derive one under org.jruby.proxy
     * @param superClass class to extend (null: Object)
     * @param interfaces interfaces to implement (null: none)
     * @param names method names eligible for proxying (null: empty set)
     * @throws InvocationTargetException declared for subclass implementations
     */
    public JavaProxyClass newProxyClass(final Ruby runtime, ClassLoader loader,
        String targetClassName, Class superClass, Class[] interfaces, Set<String> names)
        throws InvocationTargetException {
        if (loader == null) loader = JavaProxyClassFactory.class.getClassLoader();
        if (superClass == null) superClass = Object.class;
        if (interfaces == null) interfaces = EMPTY_CLASS_ARRAY;
        if (names == null) names = Collections.EMPTY_SET; // so we can assume names != null

        // TODO could we possibly avoid **names** gathering and keying ?!?
        // ... currently this causes to regenerate proxy classes when a Ruby method is added on the type
        JavaSupport.ProxyClassKey classKey = JavaSupport.ProxyClassKey.getInstance(superClass, interfaces, names);
        JavaProxyClass proxyClass = JavaSupportImpl.fetchJavaProxyClass(runtime, classKey);
        if (proxyClass == null) {
            if (targetClassName == null) {
                targetClassName = targetClassName(superClass);
            }
            validateArgs(runtime, targetClassName, superClass);

            Type selfType = Type.getType('L' + toInternalClassName(targetClassName) + ';');
            Map<MethodKey, MethodData> methods = collectMethods(superClass, interfaces, names);
            proxyClass = generate(loader, targetClassName, superClass, interfaces, methods, selfType);
            // saveJavaProxyClass may return a concurrently-saved instance; use its result
            proxyClass = JavaSupportImpl.saveJavaProxyClass(runtime, classKey, proxyClass);
        }
        return proxyClass;
    }

    /**
     * Emits the proxy class bytecode, defines it in the loader, and returns the
     * JavaProxyClass that the generated class' static initializer installed
     * into its __proxy_class field.
     */
    private JavaProxyClass generate(ClassLoader loader, String targetClassName,
        Class superClass, Class[] interfaces,
        Map<MethodKey, MethodData> methods, Type selfType) {
        ClassWriter cw = beginProxyClass(targetClassName, superClass, interfaces);

        GeneratorAdapter clazzInit = createClassInitializer(selfType, cw);

        generateConstructors(superClass, selfType, cw);
        generate___getProxyClass(selfType, cw);
        generate___getInvocationHandler(selfType, cw);
        generateProxyMethods(superClass, methods, selfType, cw, clazzInit);

        // finish class initializer
        clazzInit.returnValue();
        clazzInit.endMethod();

        // end class
        cw.visitEnd();

        Class clazz = invokeDefineClass(loader, selfType.getClassName(), cw.toByteArray());

        // trigger class initialization for the class
        try {
            Field proxy_class = clazz.getDeclaredField(PROXY_CLASS_FIELD_NAME);
            // proxy_class.setAccessible(true); // field is public
            return (JavaProxyClass) proxy_class.get(clazz);
        }
        catch (Exception ex) {
            // reading a public static field should never fail; treat as fatal
            InternalError ie = new InternalError();
            ie.initCause(ex);
            throw ie;
        }
    }

    private static String targetClassName(final Class<?> clazz) {
        // We always prepend an org.jruby.proxy package to the beginning
        // because java and javax packages are protected and signed
        // jars prevent us generating new classes with those package
        // names. See JRUBY-2439.
        final String fullName = clazz.getName();
        final int idx = fullName.lastIndexOf('.');
        String className = idx == -1 ? fullName : fullName.substring(idx + 1);
        return proxyPackageName(fullName)
            .append('.').append(className)
            .append("$Proxy").append(nextId()).toString();
    }

    // ClassLoader.defineClass(String, byte[], int, int, ProtectionDomain),
    // made accessible once up front so proxies can be defined in any loader.
    private static final Method defineClassMethod;

    static {
        defineClassMethod = AccessController.doPrivileged(new PrivilegedAction<Method>() {
            public Method run() {
                try {
                    final Class[] parameterTypes = { String.class,
                        byte[].class, int.class, int.class, ProtectionDomain.class
                    };
                    final Method method = ClassLoader.class.getDeclaredMethod("defineClass", parameterTypes);
                    method.setAccessible(true);
                    return method;
                }
                catch (Exception e) {
                    LOG.error("could not use ClassLoader.defineClass method", e);
                    return null; // should not happen!
                }
            }
        });
    }

    /**
     * Defines the generated bytecode as a class in the given loader; returns
     * null (after logging) if reflective definition fails.
     */
    protected Class invokeDefineClass(ClassLoader loader, String className, final byte[] data) {
        try {
            final Object[] parameters = { className, data, 0, data.length, JavaProxyClassFactory.class.getProtectionDomain() };
            return (Class) defineClassMethod.invoke(loader, parameters);
        }
        catch (IllegalArgumentException|IllegalAccessException e) {
            LOG.warn("defining class with name " + className + " failed", e);
            return null;
        }
        catch (InvocationTargetException e) {
            LOG.warn("defining class with name " + className + " failed", e.getTargetException());
            return null;
        }
    }

    /**
     * Starts the class file: visits the header (public final, extends
     * superClass, implements interfaces + InternalJavaProxy) and declares the
     * __handler instance field and the static __proxy_class field.
     */
    private static ClassWriter beginProxyClass(final String className,
        final Class superClass, final Class[] interfaces) {

        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);

        // start class
        cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL | Opcodes.ACC_SUPER,
            toInternalClassName(className), /*signature*/ null,
            toInternalClassName(superClass),
            interfaceNamesForProxyClass(interfaces));

        // private final JavaProxyInvocationHandler __handler;
        cw.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
            INVOCATION_HANDLER_FIELD_NAME,
            INVOCATION_HANDLER_TYPE.getDescriptor(), null, null
        ).visitEnd();

        // /* public */ static final JavaProxyClass __proxy_class;
        cw.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
            PROXY_CLASS_FIELD_NAME,
            PROXY_CLASS_TYPE.getDescriptor(), null, null
        ).visitEnd();

        return cw;
    }

    private static String[] interfaceNamesForProxyClass(final Class[] interfaces) {
        String[] interfaceNames = new String[interfaces.length + 1];

        for (int i = 0; i < interfaces.length; i++) {
            interfaceNames[i] = toInternalClassName(interfaces[i]);
        }
        // all proxies implement our InternalJavaProxy interface :
        interfaceNames[interfaces.length] = toInternalClassName(InternalJavaProxy.class);

        return interfaceNames;
    }

    private static void generateProxyMethods(Class superClass,
        Map<MethodKey, MethodData> methods, Type selfType, ClassVisitor cw,
        GeneratorAdapter clazzInit) {
        for (MethodData md: methods.values()) {
            // NOTE: superClassType is loop-invariant; recomputed per iteration
            Type superClassType = Type.getType(superClass);
            generateProxyMethod(selfType, superClassType, cw, clazzInit, md);
        }
    }

    /**
     * @see InternalJavaProxy
     */
    private static void generate___getInvocationHandler(Type selfType, ClassVisitor cw) {
        // public JavaProxyInvocationHandler ___getInvocationHandler() { return this.__handler; }
        // make getter for handler (due implements InternalJavaProxy)
        GeneratorAdapter gh = new GeneratorAdapter(Opcodes.ACC_PUBLIC,
            new org.objectweb.asm.commons.Method("___getInvocationHandler",
                INVOCATION_HANDLER_TYPE, EMPTY_TYPE_ARRAY), null,
            EMPTY_TYPE_ARRAY, cw);

        gh.loadThis();
        gh.getField(selfType, INVOCATION_HANDLER_FIELD_NAME, INVOCATION_HANDLER_TYPE);
        gh.returnValue();
        gh.endMethod();
    }

    /**
     * @see InternalJavaProxy
     */
    private static void generate___getProxyClass(Type selfType, ClassVisitor cw) {
        // public JavaProxyClass __getProxyClass() { return /* static */ __proxy_class; }
        // make getter for proxy class (due implements InternalJavaProxy)
        GeneratorAdapter gpc = new GeneratorAdapter(Opcodes.ACC_PUBLIC,
            new org.objectweb.asm.commons.Method("___getProxyClass",
                PROXY_CLASS_TYPE, EMPTY_TYPE_ARRAY), null,
            EMPTY_TYPE_ARRAY, cw);

        gpc.getStatic(selfType, PROXY_CLASS_FIELD_NAME, PROXY_CLASS_TYPE);
        gpc.returnValue();
        gpc.endMethod();
    }

    /**
     * Mirrors every non-private superclass constructor on the proxy, each with
     * an extra trailing JavaProxyInvocationHandler parameter.
     */
    private static void generateConstructors(Class superClass, Type selfType, ClassVisitor cw) {
        Constructor[] cons = superClass.getDeclaredConstructors();

        for (int i = 0; i < cons.length; i++) {
            // if the constructor is private, pretend it doesn't exist
            if (Modifier.isPrivate(cons[i].getModifiers())) continue;

            // otherwise, define everything and let some of them fail at invocation
            generateConstructor(selfType, cons[i], cw);
        }
    }

    /**
     * Opens the generated class' static initializer and emits the first
     * statement; proxy-method registration is appended by the caller before
     * the initializer is closed in generate().
     */
    private static GeneratorAdapter createClassInitializer(Type selfType, ClassVisitor cw) {
        GeneratorAdapter clazzInit = new GeneratorAdapter(Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC,
            new org.objectweb.asm.commons.Method("<clinit>", Type.VOID_TYPE, EMPTY_TYPE_ARRAY),
            null, EMPTY_TYPE_ARRAY, cw);

        clazzInit.visitLdcInsn(selfType.getClassName());
        clazzInit.invokeStatic(JAVA_LANG_CLASS_TYPE, forName);
        clazzInit.invokeStatic(INTERNAL_PROXY_HELPER_TYPE, initProxyClass);
        // keep a copy of the JavaProxyClass on the stack for the proxy-method
        // registrations appended later by generateProxyMethod
        clazzInit.dup();
        clazzInit.putStatic(selfType, PROXY_CLASS_FIELD_NAME, PROXY_CLASS_TYPE);
        // __proxy_class = InternalJavaProxyHelper.initProxyClass( Class.forName(className) );
        return clazzInit;
    }

    /**
     * Emits one proxy method: a static JavaProxyMethod field, its registration
     * in the class initializer, the public override that dispatches to the
     * invocation handler (falling back to super when the handler is not yet
     * set), and -- for implemented methods -- a __super$-prefixed companion
     * that calls straight through to the superclass implementation.
     */
    private static void generateProxyMethod(Type selfType, Type superType,
        ClassVisitor cw, GeneratorAdapter clazzInit, MethodData md) {
        if (!md.generateProxyMethod()) return;

        org.objectweb.asm.commons.Method m = md.getMethod();
        Type[] ex = toTypes(md.getExceptions());

        String field_name = "__mth$" + md.getName() + md.scrambledSignature();

        // private static JavaProxyMethod __mth$sort$java_util_Comparator;
        FieldVisitor fv = cw.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC,
            field_name, PROXY_METHOD_TYPE.getDescriptor(), null, null);
        fv.visitEnd();

        // static { ... } initializer block
        // (the JavaProxyClass left on the stack by createClassInitializer is dup'd
        //  so each registered method consumes one copy)
        clazzInit.dup();
        clazzInit.push(m.getName());
        clazzInit.push(m.getDescriptor());
        clazzInit.push(md.isImplemented());
        // JavaProxyMethod initProxyMethod(JavaProxyClass proxyClass, String name, String desc, boolean hasSuper)
        clazzInit.invokeStatic(INTERNAL_PROXY_HELPER_TYPE, initProxyMethod);
        clazzInit.putStatic(selfType, field_name, PROXY_METHOD_TYPE);

        org.objectweb.asm.commons.Method sm = new org.objectweb.asm.commons.Method(
            "__super$" + m.getName(), m.getReturnType(), m.getArgumentTypes()
        );

        //
        // construct the proxy method
        //
        GeneratorAdapter ga = new GeneratorAdapter(Opcodes.ACC_PUBLIC, m, null, ex, cw);

        ga.loadThis();
        ga.getField(selfType, INVOCATION_HANDLER_FIELD_NAME, INVOCATION_HANDLER_TYPE);

        // if the method is extending something, then we have to test if the handler is initialized...
        if (md.isImplemented()) {
            ga.dup();
            Label ok = ga.newLabel();
            ga.ifNonNull(ok);
            // handler still null (we are inside the super constructor): call super
            // NOTE: invokeConstructor emits INVOKESPECIAL, which is also the
            // correct opcode for a super.method(...) call
            ga.loadThis();
            ga.loadArgs();
            ga.invokeConstructor(superType, m);
            ga.returnValue();
            ga.mark(ok);
        }

        ga.loadThis();
        ga.getStatic(selfType, field_name, PROXY_METHOD_TYPE);

        if (m.getArgumentTypes().length == 0) {
            // load static empty array
            ga.getStatic(JAVA_PROXY_TYPE, "NO_ARGS", toType(Object[].class));
        } else {
            // box arguments
            ga.loadArgArray();
        }

        Label before = ga.mark();

        ga.invokeInterface(INVOCATION_HANDLER_TYPE, invoke);

        Label after = ga.mark();

        ga.unbox(m.getReturnType());
        ga.returnValue();

        // this is a simple rethrow handler
        // (declared exceptions, Error and RuntimeException pass through unchanged)
        Label rethrow = ga.mark();
        ga.visitInsn(Opcodes.ATHROW);

        for (int i = 0; i < ex.length; i++) {
            ga.visitTryCatchBlock(before, after, rethrow, ex[i].getInternalName());
        }

        ga.visitTryCatchBlock(before, after, rethrow, "java/lang/Error");
        ga.visitTryCatchBlock(before, after, rethrow, "java/lang/RuntimeException");

        // any other Throwable gets wrapped in UndeclaredThrowableException
        Type thr = toType(Throwable.class);
        Label handler = ga.mark();
        Type udt = toType(UndeclaredThrowableException.class);
        int loc = ga.newLocal(thr);
        ga.storeLocal(loc, thr);
        ga.newInstance(udt);
        ga.dup();
        ga.loadLocal(loc, thr);
        ga.invokeConstructor(udt, org.objectweb.asm.commons.Method.getMethod("void <init>(java.lang.Throwable)"));
        ga.throwException();

        ga.visitTryCatchBlock(before, after, handler, "java/lang/Throwable");

        ga.endMethod();

        //
        // construct the super-proxy method
        //
        if (md.isImplemented()) {
            GeneratorAdapter ga2 = new GeneratorAdapter(Opcodes.ACC_PUBLIC, sm, null, ex, cw);

            ga2.loadThis();
            ga2.loadArgs();
            ga2.invokeConstructor(superType, m);
            ga2.returnValue();
            ga2.endMethod();
        }
    }

    /**
     * Emits one proxy constructor: same parameters as the super constructor
     * plus a trailing JavaProxyInvocationHandler, which is stored into the
     * __handler field after the super(...) call.
     *
     * @return the generated constructor's parameter types
     */
    private static Class[] generateConstructor(Type selfType, Constructor constructor, ClassVisitor cw) {

        Class[] superConstructorParameterTypes = constructor.getParameterTypes();
        Class[] newConstructorParameterTypes = new Class[superConstructorParameterTypes.length + 1];
        System.arraycopy(superConstructorParameterTypes, 0,
            newConstructorParameterTypes, 0,
            superConstructorParameterTypes.length);
        newConstructorParameterTypes[superConstructorParameterTypes.length] = JavaProxyInvocationHandler.class;

        int access = Opcodes.ACC_PUBLIC;
        String name1 = "<init>";
        String signature = null;
        Class[] superConstructorExceptions = constructor.getExceptionTypes();

        boolean superConstructorVarArgs = constructor.isVarArgs();

        org.objectweb.asm.commons.Method super_m = new org.objectweb.asm.commons.Method(
            name1, Type.VOID_TYPE, toTypes(superConstructorParameterTypes));
        org.objectweb.asm.commons.Method m = new org.objectweb.asm.commons.Method(
            name1, Type.VOID_TYPE, toTypes(newConstructorParameterTypes));

        String[] exceptionNames = toInternalNames( superConstructorExceptions );
        MethodVisitor mv = cw.visitMethod(access, m.getName(), m.getDescriptor(), signature, exceptionNames);

        // marking with @SafeVarargs so that we can correctly detect proxied var-arg constructors :
        if ( superConstructorVarArgs ) mv.visitAnnotation(Type.getDescriptor(VarArgs.class), true);

        GeneratorAdapter ga = new GeneratorAdapter(access, m, mv);

        ga.loadThis();
        ga.loadArgs(0, superConstructorParameterTypes.length);
        ga.invokeConstructor(toType(constructor.getDeclaringClass()), super_m);

        ga.loadThis();
        ga.loadArg(superConstructorParameterTypes.length);
        ga.putField(selfType, INVOCATION_HANDLER_FIELD_NAME, INVOCATION_HANDLER_TYPE);

        // do a void return
        ga.returnValue();
        ga.endMethod();

        return newConstructorParameterTypes;
    }

    // true when the constructor is var-args natively or was generated from a
    // var-args super constructor (marked with our @VarArgs annotation)
    static boolean isVarArgs(final Constructor<?> ctor) {
        return ctor.isVarArgs() || ctor.getAnnotation(VarArgs.class) != null;
    }

    //static boolean isVarArgs(final Method method) {
    //    return method.isVarArgs() || method.getAnnotation(VarArgs.class) != null;
    //}

    private static String toInternalClassName(Class clazz) {
        return toInternalClassName(clazz.getName());
    }

    private static String toInternalClassName(String name) {
        return name.replace('.', '/');
    }

    private static Type toType(Class clazz) {
        return Type.getType(clazz);
    }

    private static Type[] toTypes(Class[] params) {
        Type[] types = new Type[params.length];
        for (int i = 0; i < types.length; i++) {
            types[i] = Type.getType(params[i]);
        }
        return types;
    }

    private static String[] toInternalNames(final Class[] params) {
        if (params == null) return null;
        String[] names = new String[params.length];
        for (int i = 0; i < names.length; ++i) {
            names[i] = Type.getType(params[i]).getInternalName();
        }
        return names;
    }

    /**
     * Gathers the candidate methods from the superclass hierarchy and all
     * (transitively reachable) interfaces, keyed by name + parameter types.
     */
    private static Map<MethodKey, MethodData> collectMethods(
        final Class superClass,
        final Class[] interfaces,
        final Set<String> names) {

        Map<MethodKey, MethodData> methods = new HashMap<>();

        HashSet<Class> allClasses = new HashSet<>();
        addClass(allClasses, methods, superClass, names);
        addInterfaces(allClasses, methods, interfaces, names);

        return methods;
    }

    /**
     * All declarations of one method signature across the hierarchy; the first
     * declaration encountered (superclass before interfaces) is treated as the
     * most specific one and drives naming/typing decisions.
     */
    static final class MethodData {

        final Set<Method> methods = new HashSet<Method>();

        final Method mostSpecificMethod;
        final Class[] mostSpecificParameterTypes;

        //private boolean hasPublicDecl = false;

        MethodData(final Method method) {
            this.mostSpecificMethod = method;
            this.mostSpecificParameterTypes = mostSpecificMethod.getParameterTypes();
            //hasPublicDecl = method.getDeclaringClass().isInterface() || Modifier.isPublic(method.getModifiers());
        }

        // Encodes parameter types into a field-name-safe suffix:
        // '.' -> '_', '[' -> '1', ';' -> '2'
        private StringBuilder scrambledSignature() {
            StringBuilder sb = new StringBuilder();
            for ( Class param : getParameterTypes() ) {
                sb.append('$');
                final char[] name = param.getName().toCharArray();
                for (int i = 0; i < name.length; i++) {
                    final char c;
                    switch ( c = name[i] ) {
                        case '.' : sb.append('_'); break;
                        case '[' : sb.append('1'); break;
                        case ';' : sb.append('2'); break;
                        default : sb.append(c);
                    }
                }
            }
            return sb;
        }

        public Class getDeclaringClass() {
            return mostSpecificMethod.getDeclaringClass();
        }

        private org.objectweb.asm.commons.Method getMethod() {
            return new org.objectweb.asm.commons.Method(getName(), Type
                .getType(getReturnType()), getType(getParameterTypes()));
        }

        private static Type[] getType(Class[] parameterTypes) {
            Type[] result = new Type[parameterTypes.length];
            for (int i = 0; i < parameterTypes.length; i++) {
                result[i] = Type.getType(parameterTypes[i]);
            }
            return result;
        }

        private String getName() {
            return mostSpecificMethod.getName();
        }

        private Class[] getParameterTypes() {
            return mostSpecificParameterTypes;
        }

        /**
         * Unions the exception types declared by all collected declarations,
         * dropping any type already covered by a broader one so the generated
         * throws clause stays minimal.
         */
        private Class[] getExceptions() {
            final IdentityHashMap<Class, ?> exceptions = new IdentityHashMap<>(8);

            for ( final Method method : this.methods ) {
                Class[] exTypes = method.getExceptionTypes();
                for (int i = 0; i < exTypes.length; i++) {
                    final Class<?> exType = exTypes[i];

                    if ( exceptions.containsKey(exType) ) continue;

                    boolean add = true;
                    Iterator<Class> it = exceptions.keySet().iterator();
                    while ( it.hasNext() ) {
                        final Class<?> curType = it.next();
                        if ( curType.isAssignableFrom(exType) ) {
                            // a broader exception is already present; skip this one
                            add = false;
                            break;
                        }
                        if ( exType.isAssignableFrom(curType) ) {
                            // this one is broader; replace the narrower entry
                            it.remove();
                            add = true;
                        }
                    }
                    if ( add ) exceptions.put(exType, null);
                }
            }
            return exceptions.isEmpty() ? EMPTY_CLASS_ARRAY : exceptions.keySet().toArray(new Class[ exceptions.size() ]);
        }

        private boolean generateProxyMethod() {
            return ! isFinal() && ! isPrivate();
        }

        private void add(Method method) {
            methods.add(method);
            //hasPublicDecl |= Modifier.isPublic(method.getModifiers());
        }

        Class getReturnType() {
            return mostSpecificMethod.getReturnType();
        }

        boolean isFinal() {
            if ( mostSpecificMethod.getDeclaringClass().isInterface() ) {
                return false;
            }
            return Modifier.isFinal( mostSpecificMethod.getModifiers() );
        }

        boolean isPrivate() {
            if ( mostSpecificMethod.getDeclaringClass().isInterface() ) {
                return false;
            }
            return Modifier.isPrivate( mostSpecificMethod.getModifiers() );
        }

        boolean isImplemented() {
            if ( mostSpecificMethod.getDeclaringClass().isInterface() ) {
                return false;
            }
            return ! Modifier.isAbstract( mostSpecificMethod.getModifiers() );
        }
    }

    /** Identity of a method: name + parameter types (return type excluded). */
    static final class MethodKey {

        private final String name;
        private final Class[] arguments;

        MethodKey(final Method method) {
            this.name = method.getName();
            this.arguments = method.getParameterTypes();
        }

        @Override
        public boolean equals(Object obj) {
            if ( obj instanceof MethodKey ) {
                MethodKey key = (MethodKey) obj;
                return name.equals(key.name) && Arrays.equals(arguments, key.arguments);
            }
            return false;
        }

        @Override
        public int hashCode() {
            // name-only hash: overloads collide but equals() disambiguates
            return name.hashCode();
        }

        @Override
        public String toString() {
            final StringBuilder str = new StringBuilder().append(name);
            str.append('(');
            final int last = arguments.length - 1;
            for ( int i=0; i<last; i++ ) {
                str.append(arguments[i].getName()).append(',');
            }
            if ( last >= 0 ) str.append(arguments[last].getName());
            str.append(')');
            return str.toString();
        }
    }

    private static void addInterfaces(
        final Set<Class> allClasses,
        final Map<MethodKey, MethodData> methods,
        final Class[] ifaces,
        final Set<String> names) {
        for ( int i = 0; i < ifaces.length; i++ ) {
            addInterface(allClasses, methods, ifaces[i], names);
        }
    }

    private static void addInterface(
        final Set<Class> allClasses,
        final Map<MethodKey, MethodData> methods,
        final Class iface,
        final Set<String> names) {
        // allClasses guards against visiting the same interface twice
        if ( allClasses.add(iface) ) {
            addMethods(methods, iface, names);
            addInterfaces(allClasses, methods, iface.getInterfaces(), names);
        }
    }

    private static void addMethods(
        final Map<MethodKey, MethodData> methods,
        final Class classOrIface,
        final Set<String> names) {
        final Method[] decMethods = classOrIface.getDeclaredMethods();
        for ( int i = 0; i < decMethods.length; i++ ) {
            final Method decMethod = decMethods[i];
            // only methods whose names were requested are proxy candidates
            if ( names.contains(decMethod.getName()) ) {
                addMethod(methods, decMethod);
            }
        }
    }

    private static void addMethod(final Map<MethodKey, MethodData> methods, final Method method) {
        final int mod = method.getModifiers();
        // static and private methods cannot be overridden -- skip them
        if ( Modifier.isStatic(mod) || Modifier.isPrivate(mod) ) {
            return;
        }
        MethodKey methodKey = new MethodKey(method);
        MethodData methodData = methods.get(methodKey);
        if (methodData == null) {
            methodData = new MethodData(method);
            methods.put(methodKey, methodData);
        }
        methodData.add(method);
    }

    private static void addClass(
        final Set<Class> allClasses,
        final Map<MethodKey, MethodData> methods,
        final Class clazz,
        final Set<String> names) {
        if ( allClasses.add(clazz) ) {
            addMethods(methods, clazz, names);
            Class superClass = clazz.getSuperclass();
            if ( superClass != null ) {
                addClass(allClasses, methods, superClass, names);
            }
            addInterfaces(allClasses, methods, clazz.getInterfaces(), names);
        }
    }

    /**
     * Rejects targets that cannot be proxied: final superclasses, superclasses
     * without an accessible constructor, java.* target packages and sealed
     * packages.
     */
    private static void validateArgs(Ruby runtime, String targetClassName, Class superClass) {
        if ( Modifier.isFinal(superClass.getModifiers()) ) {
            throw runtime.newTypeError("cannot extend final class " + superClass.getName());
        }
        if ( ! hasPublicOrProtectedConstructor(superClass) ) {
            throw runtime.newTypeError("class " + superClass.getName() + " doesn't have a public or protected constructor");
        }

        String targetPackage = packageName(targetClassName);

        String packagePath = targetPackage.replace('.', '/');
        // NOTE(review): startsWith("java") also matches e.g. "javafoo" -- confirm intended
        if (packagePath.startsWith("java")) {
            throw runtime.newTypeError("cannot add classes to package " + packagePath);
        }
        // NOTE(review): Package.getPackage expects a dot-separated name, but
        // packagePath is slash-separated here -- this lookup presumably always
        // returns null; verify against upstream intent
        final Package pkg = Package.getPackage(packagePath);
        if ( pkg != null && pkg.isSealed() ) {
            throw runtime.newTypeError("package " + pkg + " is sealed");
        }
    }

    private static boolean hasPublicOrProtectedConstructor(final Class clazz) {
        Constructor[] constructors = clazz.getDeclaredConstructors();
        for ( Constructor constructor : constructors ) {
            final int mod = constructor.getModifiers();
            if ( Modifier.isPublic(mod) || Modifier.isProtected(mod) ) {
                return true;
            }
        }
        return false;
    }

    /** Returns the package part of a fully-qualified class name ("" if none). */
    private static String packageName(String clazzName) {
        int idx = clazzName.lastIndexOf('.');
        if ( idx == -1 ) return "";
        return clazzName.substring(0, idx);
    }

    /** Prefixes org.jruby.proxy to the original class' package name. */
    private static StringBuilder proxyPackageName(final String className) {
        final String proxyPackagePrefix = "org.jruby.proxy";
        final StringBuilder str = new StringBuilder(proxyPackagePrefix.length() + className.length() + 8);
        final int idx = className.lastIndexOf('.');
        str.append(proxyPackagePrefix);
        return idx == -1 ? str : str.append('.').append(className.substring(0, idx));
    }

    /**
     * Variable arguments marker for generated constructor.
     * @note could have used @SafeVarargs but it's Java 7+
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
    public static @interface VarArgs {}
}
|
<filename>app/controllers/application_controller.rb<gh_stars>1-10
# Base controller providing shared authentication / authorization helpers.
class ApplicationController < ActionController::Base
  protect_from_forgery

  # NOTE(review): before_filter is the Rails <= 4 spelling (before_action in
  # Rails 5+) -- confirm the target Rails version.
  before_filter :set_go

  # Captures a validated ?go= redirect target into @go for later use.
  # Only paths accepted by Util.valid_path? are stored.
  def set_go
    if params[:go] and Util.valid_path?(params[:go]) then
      @go = params[:go]
    end
  end

  # Authorizes the request: first tries HTTP Basic credentials (storing the
  # user id in the session on success), then falls back to an existing
  # session whose user is active. Redirects and returns false otherwise.
  def authorize
    authenticate_with_http_basic do |username, password|
      user = User.authenticate(username, password)
      if user then
        session[:user_id] = user.id
        return true
      end
    end
    if session[:user_id] then
      user=User.find(session[:user_id])
      return true if user.is_active
    end
    # NOTE(review): redirect_to receives the literal string "main#index",
    # which is treated as a URL, not a controller#action -- presumably a
    # path helper (e.g. root_path) was intended; verify.
    redirect_to("main#index")
    return false
  end

  private

  # True when the session belongs to an existing user with the admin flag.
  def is_admin
    user_id=session[:user_id]
    user=nil
    if user_id
      user=User.find(user_id)
      return true if not user.nil? and user.is_admin
    end
    return false
  end

  # Halts the action with 401 unless the current user is an administrator.
  def require_admin
    if not is_admin
      render :text => "This action requires an Administrator.", :status => "401"
      return false
    end
  end
end
|
#!/bin/bash

# TMP: prefer the tmpfs mount for scratch files when it exists.
if [ -d '/tmpfs' ]; then TMP='/tmpfs'; else TMP='/tmp'; fi

# Reporting period in seconds (default 300).
if [ -z "${HAL_PERIOD:-}" ]; then HAL_PERIOD=300; fi

# Static configuration document every report starts from.
CONFIG='{"date":'$(date +%s)',"log_level":"'${LOG_LEVEL}'","debug":'${DEBUG}',"period":'${HAL_PERIOD}'}'
echo "${CONFIG}" > "${TMP}/${SERVICE_LABEL}.json"

while true; do
  DATE=$(date +%s)
  OUTPUT="${CONFIG}"
  # Merge each hardware listing (lshw.sh etc. emit JSON) into OUTPUT.
  for ls in lshw lsusb lscpu lspci lsblk; do
    OUT="$(${ls}.sh | jq '.'${ls}'?')"
    # ${DEBUG:-} must be quoted: unset DEBUG would otherwise reduce the test
    # to `[ == 'true' ]`, a runtime error, instead of evaluating to false.
    if [ "${DEBUG:-}" == 'true' ]; then echo "${ls} == ${OUT}" >&2; fi
    if [ -z "${OUT:-}" ]; then OUT=null; fi
    OUTPUT=$(echo "$OUTPUT" | jq '.'${ls}'='"${OUT}")
    if [ "${DEBUG:-}" == 'true' ]; then echo "OUTPUT == ${OUTPUT}" >&2; fi
  done
  # Publish atomically: write to a temp file, then rename into place.
  echo "${OUTPUT}" | jq '.date='$(date +%s) > "${TMP}/$$"
  mv -f "${TMP}/$$" "${TMP}/${SERVICE_LABEL}.json"
  # Sleep for the remainder of the period. WAIT replaces the original use of
  # SECONDS, which is a bash special variable that auto-increments every
  # second and so must not be used as a plain counter.
  WAIT=$((HAL_PERIOD - $(($(date +%s) - DATE))))
  if [ ${WAIT} -gt 0 ]; then
    sleep ${WAIT}
  fi
done
|
<gh_stars>1-10
import { registerTheme } from '../../theme';
import { DEFAULT_COLUMN_THEME } from '../column/theme';

// Stacked columns reuse the base column theme unchanged.
registerTheme('stackedColumn', DEFAULT_COLUMN_THEME);
//# sourceMappingURL=theme.js.map
|
<reponame>jimtje/cloudworker-proxy
/**
 * This is the entry point for running the proxy locally using the node-cloudworker lib.
 */
// eslint-disable-next-line
require('dotenv').config();
// eslint-disable-next-line
const ncw = require('node-cloudworker');

// Install the Cloudflare Worker global shims before requiring the handler,
// since the handler module may reference worker globals at load time.
ncw.applyShims();

const handler = require('./handler');

ncw.start(handler);
|
/* © 2017 NauStud.io
* @author Eric
*/
/**
 * Turns an arbitrary string into a URL-friendly slug: lower-case ASCII
 * letters and digits separated by single dashes. Vietnamese diacritics are
 * transliterated to their base letters first.
 */
export const slugify = st =>
	st
		.toLowerCase()
		// transliterate Vietnamese diacritics to plain ASCII vowels / 'd'
		.replace(/(à|á|ạ|ả|ã|â|ầ|ấ|ậ|ẩ|ẫ|ă|ằ|ắ|ặ|ẳ|ẵ)/g, 'a')
		.replace(/(è|é|ẹ|ẻ|ẽ|ê|ề|ế|ệ|ể|ễ)/g, 'e')
		.replace(/(ì|í|ị|ỉ|ĩ)/g, 'i')
		.replace(/(ò|ó|ọ|ỏ|õ|ô|ồ|ố|ộ|ổ|ỗ|ơ|ờ|ớ|ợ|ở|ỡ)/g, 'o')
		.replace(/(ù|ú|ụ|ủ|ũ|ư|ừ|ứ|ự|ử|ữ)/g, 'u')
		.replace(/(ỳ|ý|ỵ|ỷ|ỹ)/g, 'y')
		.replace(/(đ)/g, 'd')
		// drop anything that is not a digit, lower-case letter, dash, space or dot
		.replace(/([^0-9a-z-\s.])/g, '')
		// runs of whitespace / dots become a single dash
		.replace(/([\s.]+)/g, '-')
		// collapse consecutive dashes
		.replace(/-+/g, '-')
		// trim leading dashes
		.replace(/^-+/g, '')
		// trim trailing dashes
		.replace(/-+$/g, '');
|
# Shallow-clone the kaitaiStructCompile.py sources.
git clone --depth=1 https://gitlab.com/kaitaiStructCompile.py/kaitaiStructCompile.py.git
export KSCP=./kaitaiStructCompile.py
# Fetch the .ksy test fixtures into the package's tests directory.
git clone --depth=1 https://gitlab.com/kaitaiStructCompile.py/kaitaiStructCompile.tests.ksys $KSCP/tests/ksys
# Install (or upgrade) the freshly cloned package.
pip install --upgrade $KSCP
|
<reponame>TheButlah/Battlecode-2018<filename>src/main/java/com/lodborg/cache/LRUCache.java
/*
MIT License
Copyright (c) 2016 lodborg
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.lodborg.cache;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* The LRU cache consists of a doubly linked list and a hash map. Each node in
* the linked list holds a page in the cache (essentially, a key-value pair).
* Whenever a key is accessed, either by an update or a get operation, the
* corresponding node is bumped to the head of the list. This ensures that the
* nodes are ordered by their access time with the most recently nodes at the head
* of the list and the least recently used at the tail.
*
* List operations take constant time once the desired node is found. To avoid
* linearly searching the list for a given node, the implementation uses a HashMap
* indexed on the keys and mapping the keys to nodes in the linked list. Since
* we need to store the concrete nodes in the list as values in the map, using
* the LinkedList implementation from java.util is not an option. Instead,
* a custom private list class is provided.
*
* The iterator of the cache returns the Nodes of the linked list in the order
* they are stored, the most recent elements first. The iterator is fail-fast,
* meaning that it will throw a ConcurrentModificationException, if the cache
* gets modified by any means other than the iterator's remove() method.
* Iterating via the iterator doesn't cause the elements to be bumped to the
* head of the cache.
*
* @param <K> The generic type of the keys
* @param <V> The generic type of the values
*/
public class LRUCache<K, V> implements Iterable<LRUCache.Node<K, V>>{
/**
* A class representing a node in the doubly linked list. The node is the only
* place in the cache actually containing the real value mapped to a key.
*/
public static class Node<K, V>{
    private K key;
    private V value;
    // neighbors in the doubly linked recency list (null at the ends)
    private Node<K, V> prev, next;

    // only the cache itself creates nodes
    private Node(K key, V value){
        this.key = key;
        this.value = value;
    }

    public K getKey(){
        return key;
    }

    public V getValue(){
        return value;
    }
}
/**
* An implementation of a queue using a doubly linked list.
*/
private class LinkedList {
    // head = most recently used, last = least recently used
    private Node<K, V> head, last;

    /**
     * Detaches the last node from the tail of the list and returns it.
     * @return The last node from the tail of the list, after it was detached
     *         from the list. Returns null, if the list was empty.
     */
    private Node<K, V> poll(){
        if (head == null)
            return null;
        if (head == last){
            // single element: list becomes empty
            Node<K, V> node = last;
            head = last = null;
            return node;
        }
        Node<K, V> node = last;
        last = last.prev;
        last.next = null;
        node.prev = null;
        return node;
    }

    /**
     * Adds a node at the head of the list.
     * @param node The node that will be added.
     */
    private void offer(Node<K, V> node){
        if (head == null){
            head = last = node;
            return;
        }
        head.prev = node;
        node.next = head;
        head = node;
    }

    /**
     * Removes a node from the linked list. Assumes that the node is a part
     * of the current list, but doesn't check explicitly for that. The user
     * has to make sure that she is removing nodes from the correct list.
     * @param node A reference to the node to be removed.
     */
    private void remove(Node<K, V> node){
        if (node == last){
            poll();
        } else {
            if (head == node)
                head = node.next;
            else
                node.prev.next = node.next;
            // node != last here, so node.next is non-null
            node.next.prev = node.prev;
            node.prev = null;
            node.next = null;
        }
    }

    /**
     * Removes all elements from the list. Currently, it only dereferences the
     * head and tail pointers and leaves the cleanup to the garbage collector.
     * Make sure that you are not using the nodes somewhere else in the code or
     * they will not be garbage collected, possibly resulting in a memory leak.
     */
    private void clear(){
        // FIX: clear THIS list's own pointers. The original wrote
        // list.head = list.last = null, reaching through the outer cache's
        // 'list' field -- correct only while exactly one list instance exists.
        head = last = null;
    }
}
private LinkedList list;                  // recency-ordered pages; head = most recently used
private HashMap<K, Node<K, V>> map;       // key -> list node, for O(1) lookup
private int maxSize;                      // capacity; exceeding it evicts the LRU entry
private int size;                         // current number of cached pairs
private EvictionListener<K, V> listener;  // optional callback notified on capacity eviction
private int modCount;                     // structural-change counter (fail-fast iteration)
/**
 * Instantiates a new cache instance.
 * @param capacity The total amount of key-value pairs that can be stored in the
 *                 cache. If the cache overflows, the least recently used pair
 *                 will be removed.
 */
public LRUCache(int capacity){
    maxSize = capacity;
    list = new LinkedList();
    map = new HashMap<>();
}
/** Registers a callback invoked when an entry is evicted due to capacity overflow. */
public void setListener(EvictionListener<K, V> listener){
this.listener = listener;
}
/** Unregisters the eviction callback, if any. */
public void removeListener(){
this.listener = null;
}
/**
 * Looks up the value cached under the given key. A hit counts as a "use":
 * the corresponding node is moved to the head of the recency list, making
 * it the most recently used entry.
 *
 * @param key the key to look up
 * @return the cached value, or null when the key is not present
 */
public V get(K key){
    modCount++;
    final Node<K, V> hit = map.get(key);
    if (hit == null) {
        return null;
    }
    // Bump the entry to the most-recently-used position.
    list.remove(hit);
    list.offer(hit);
    return hit.value;
}
/**
 * Stores a value under the given key. An existing entry is updated in place;
 * inserting a new entry may push the least recently used pair out of the
 * cache once the capacity limit is reached. Either way the entry ends up at
 * the head of the recency list.
 *
 * @param key   the key to insert or update
 * @param value the value to associate with the key
 */
public void put(K key, V value){
    modCount++;
    Node<K, V> existing = map.get(key);
    if (existing != null) {
        // Update in place and bump to most-recently-used.
        existing.value = value;
        list.remove(existing);
        list.offer(existing);
        return;
    }
    Node<K, V> fresh = new Node<>(key, value);
    list.offer(fresh);
    map.put(key, fresh);
    if (size == maxSize) {
        // At capacity: the tail node is the least recently used — drop it.
        Node<K, V> evicted = list.poll();
        if (listener != null) {
            listener.onEvict(evicted.key, evicted.value);
        }
        map.remove(evicted.key);
    } else {
        size++;
    }
}
/**
 * Removes a key and its associated value from the cache. Does nothing when
 * the key is not present. The eviction listener is NOT invoked for explicit
 * removals.
 *
 * @param key The key to be removed.
 */
public void evict(K key){
    modCount++;
    final Node<K, V> victim = map.get(key);
    if (victim == null) {
        return;
    }
    list.remove(victim);
    map.remove(key);
    size--;
}
/**
 * @return The total amount of key-value pairs stored in the cache.
 */
public int size(){
return size;
}
/** Empties the cache. The eviction listener is NOT invoked for the dropped entries. */
public void evictAll(){
modCount++;
list.clear();
map.clear();
size = 0;
}
/**
 * Renders the cache contents as "(k1,k2,...)" in recency order, most
 * recently used key first. Returns "()" for an empty cache.
 */
@Override
public String toString() {
    if (list.head == null)
        return "()";
    StringBuilder builder = new StringBuilder();
    builder.append('(');
    builder.append(list.head.key);
    // FIX: use the parameterized Node<K, V> instead of the raw type 'Node',
    // which triggered an unchecked-types warning.
    Node<K, V> node = list.head.next;
    while (node != null){
        builder.append(',');
        builder.append(node.key);
        node = node.next;
    }
    builder.append(')');
    return builder.toString();
}
/**
 * The iterator will return the elements in the cache in the order they are stored,
 * most recent elements first. The iterator is fail-fast and will fail, if the
 * cache has been modified by any means other than the iterator's own remove() method.
 */
@Override
public Iterator<LRUCache.Node<K, V>> iterator() {
// The prev pointer of the head is not set, because this is only a temporary node.
// It must be garbage collected once the iterator moves to the next element, that's
// why there has to be no permanent references to it.
final Node<K, V> placeholder = new Node<>(null, null);
placeholder.next = list.head;
return new Iterator<LRUCache.Node<K, V>>() {
// Cursor: the node most recently returned by next(); starts at the placeholder.
Node<K, V> node = placeholder;
// Snapshot of the cache's modification counter, used for fail-fast detection.
int currentModCount = modCount;
@Override
public boolean hasNext() {
return node.next != null;
}
@Override
public Node<K, V> next() {
if (currentModCount != modCount)
throw new ConcurrentModificationException();
if (!hasNext())
throw new NoSuchElementException();
node = node.next;
return node;
}
@Override
public void remove() {
// A null key marks the temporary placeholder, which must not be evicted.
// NOTE(review): this assumes real entries never use a null key — confirm,
// since put() does not forbid them.
if (node.key != null ) {
// Bridge over the removed node with a fresh placeholder so iteration
// can continue from its successor.
Node<K, V> placeholder = new Node<>(null, null);
placeholder.next = node.next;
evict(node.key);
node = placeholder;
// evict() bumped modCount; advance our snapshot so the iterator's own
// removal does not trip the fail-fast check.
currentModCount++;
}
}
};
}
}
|
<filename>src/translations/pt_PT.js
'use strict';
// Portuguese (Portugal) translation strings for the payment UI.
// Placeholders in double braces (e.g. {{identifier}}) are substituted at
// runtime and must be preserved verbatim when editing these strings.
// The card-brand keys near the bottom map brand identifiers to display names.
module.exports = {
"payingWith" : "Pagamento com {{paymentSource}}",
"chooseAnotherWayToPay" : "Escolher outra forma de pagamento",
"chooseAWayToPay" : "Escolher forma de pagamento",
"otherWaysToPay" : "Outras formas de pagamento",
"edit" : "Editar",
"doneEditing" : "Concluído",
"editPaymentMethods" : "Editar meios de pagamento",
"CreditCardDeleteConfirmationMessage" : "Eliminar cartão {{secondaryIdentifier}} terminado em {{identifier}}?",
"PayPalAccountDeleteConfirmationMessage" : "Eliminar conta PayPal {{identifier}}?",
"VenmoAccountDeleteConfirmationMessage" : "Quer mesmo eliminar a conta Venmo com o nome de utilizador {{identifier}}?",
"genericDeleteConfirmationMessage" : "Quer mesmo eliminar este meio de pagamento?",
"deleteCancelButton" : "Cancelar",
"deleteConfirmationButton" : "Eliminar",
"fieldEmptyForCvv" : "Introduza um CVV.",
"fieldEmptyForExpirationDate" : "Introduza uma data de validade.",
"fieldEmptyForCardholderName" : "Introduza um nome do titular do cartão.",
"fieldTooLongForCardholderName" : "O Nome do titular do cartão deve ter menos de 256 carateres.",
"fieldEmptyForNumber" : "Introduza um número.",
"fieldEmptyForPostalCode" : "Introduza um código postal.",
"fieldInvalidForCardholderName" : "Este nome do titular do cartão não é válido.",
"fieldInvalidForCvv" : "Este código de segurança não é válido.",
"fieldInvalidForExpirationDate" : "Esta data de validade não é válida.",
"fieldInvalidForNumber" : "Este número de cartão não é válido.",
"fieldInvalidForPostalCode" : "Este código postal não é válido.",
"genericError" : "Tudo indica que houve um problema.",
"hostedFieldsTokenizationFailOnDuplicateError" : "Este cartão de crédito já existe como meio de pagamento guardado.",
"hostedFieldsFailedTokenizationError" : "Verifique as suas informações e tente novamente.",
"hostedFieldsFieldsInvalidError" : "Verifique as suas informações e tente novamente.",
"hostedFieldsTokenizationNetworkErrorError" : "Erro de rede. Tente novamente.",
"hostedFieldsTokenizationCvvVerificationFailedError" : "A verificação do cartão de crédito falhou. Verifique as suas informações e tente novamente.",
"paypalButtonMustBeUsed" : "Use o botão PayPal para continuar o seu pagamento.",
"paypalAccountTokenizationFailedError" : "Ocorreu um erro ao adicionar a conta PayPal. Tente novamente.",
"paypalFlowFailedError" : "Ocorreu um erro ao ligar ao PayPal. Tente novamente.",
"paypalTokenizationRequestActiveError" : "A autorização de pagamento com PayPal já está em curso.",
"venmoCanceledError" : "Ocorreu um erro. Certifique-se de que tem a versão mais recente da aplicação Venmo instalada no seu dispositivo e que o seu navegador suporta a mudança para Venmo.",
"vaultManagerPaymentMethodDeletionError" : "Não é possível eliminar o meio de pagamento; tente novamente.",
"venmoAppFailedError" : "A aplicação Venmo não foi encontrada no seu dispositivo.",
"unsupportedCardTypeError" : "Este tipo de cartão não é suportado. Tente com outro cartão.",
"applePayTokenizationError" : "Ocorreu um erro de rede ao processar o pagamento com Apple Pay. Tente novamente.",
"applePayActiveCardError" : "Adicione um cartão suportado à sua carteira Apple Pay.",
"cardholderNameLabel" : "Nome do titular do cartão",
"cardNumberLabel" : "Número do cartão",
"cvvLabel" : "CVV",
"cvvThreeDigitLabelSubheading" : "(3 dígitos)",
"cvvFourDigitLabelSubheading" : "(4 dígitos)",
"cardholderNamePlaceholder" : "Nome do titular do cartão",
"expirationDateLabel" : "Data de validade",
"expirationDateLabelSubheading" : "(MM/AA)",
"expirationDatePlaceholder" : "MM/AA",
"postalCodeLabel" : "Código postal",
"saveCardLabel" : "Guardar cartão",
"payWithCard" : "Pagar com cartão",
"endingIn" : "Termina em {{lastFourCardDigits}}",
"Apple Pay" : "Apple Pay",
"Venmo" : "Venmo",
"Card" : "Cartão",
"PayPal" : "PayPal",
"PayPal Credit" : "PayPal Credit",
"Google Pay" : "Google Pay",
"American Express" : "American Express",
"Discover" : "Discover",
"Diners Club" : "Diners Club",
"Elo" : "Elo",
"Hiper" : "Hiper",
"Hipercard" : "Hipercard",
"MasterCard" : "Mastercard",
"Visa" : "Visa",
"JCB" : "JCB",
"Maestro" : "Maestro",
"UnionPay" : "UnionPay"
}
;
|
<reponame>wiqun/route<filename>internal/crdt/crdtstate_test.go
package crdt
import (
"github.com/stretchr/testify/assert"
"testing"
"time"
)
// TestCrdtState_BatchAdd verifies that BatchAdd flags every topic as "add"
// for the given peer, both in the accumulated state and in the returned delta.
func TestCrdtState_BatchAdd(t *testing.T) {
peer1 := uint64(1)
peer2 := uint64(2)
topicList := []string{"topic1", "topic2"}
s := NewCrdtState()
// Each call returns a delta describing only that peer's changes.
r1 := s.BatchAdd(peer1, topicList)
r2 := s.BatchAdd(peer2, topicList)
// Accumulated state: both peers present, all topics flagged as added.
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsAdd())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsAdd())
// Deltas: each contains only its own peer's additions.
assert.Len(t, r1.Set[peer1].Map, 2)
assert.True(t, FlagValue(r1.Set[peer1].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(r1.Set[peer1].Map[topicList[1]]).IsAdd())
assert.Len(t, r2.Set[peer2].Map, 2)
assert.True(t, FlagValue(r2.Set[peer2].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(r2.Set[peer2].Map[topicList[1]]).IsAdd())
}
// TestCrdtState_BatchDel verifies that BatchDel flags every topic as "remove"
// for the given peer — even when the topics were never added first — both in
// the accumulated state and in the returned delta.
func TestCrdtState_BatchDel(t *testing.T) {
peer1 := uint64(1)
peer2 := uint64(2)
topicList := []string{"topic1", "topic2"}
s := NewCrdtState()
r1 := s.BatchDel(peer1, topicList)
r2 := s.BatchDel(peer2, topicList)
// Accumulated state: removals are recorded as tombstone-style flags.
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsRemove())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsRemove())
// Deltas: each contains only its own peer's removals.
assert.Len(t, r1.Set[peer1].Map, 2)
assert.True(t, FlagValue(r1.Set[peer1].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(r1.Set[peer1].Map[topicList[1]]).IsRemove())
assert.Len(t, r2.Set[peer2].Map, 2)
assert.True(t, FlagValue(r2.Set[peer2].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(r2.Set[peer2].Map[topicList[1]]).IsRemove())
}
// TestCrdtState_Merge verifies that merging another state (a) unions the peer
// sets, and (b) lets later removals overwrite earlier additions for the same
// peer/topic pairs.
func TestCrdtState_Merge(t *testing.T) {
s := NewCrdtState()
other := NewCrdtState()
peer1 := uint64(1)
peer2 := uint64(2)
topicList := []string{"topic1", "topic2"}
s.BatchAdd(peer1, topicList)
other.BatchAdd(peer2, topicList)
// Merge brings peer2's additions into s alongside peer1's.
s.Merge(other)
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsAdd())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsAdd())
// Now record removals in 'other' and merge again: the later removals must
// win over the previously merged additions.
other.BatchDel(peer1, topicList)
other.BatchDel(peer2, topicList)
s.Merge(other)
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsRemove())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsRemove())
}
// TestCrdtState_MergeDelta verifies that MergeDelta applies another state's
// changes, reports how many entries actually changed, and that re-applying
// the same source yields a zero count (idempotence). It also checks that the
// source state shrinks as its entries are consumed.
// NOTE(review): the uint64(300) and time.Hour arguments presumably bound the
// delta's size and age — confirm against MergeDelta's signature.
func TestCrdtState_MergeDelta(t *testing.T) {
s := NewCrdtState()
other1 := NewCrdtState()
peer1 := uint64(1)
peer2 := uint64(2)
topicList := []string{"topic1", "topic2"}
s.BatchAdd(peer1, topicList)
other1.BatchAdd(peer2, topicList)
// First merge: peer2's two additions are new, so count == 2.
count, delta := s.MergeDelta(other1, uint64(300), time.Hour)
assert.Nil(t, delta)
assert.EqualValues(t, count, 2)
assert.Len(t, other1.State.Set, 1)
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsAdd())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsAdd())
// Re-applying the same source changes nothing and empties the source.
count, delta = s.MergeDelta(other1, uint64(300), time.Hour)
assert.Nil(t, delta)
assert.EqualValues(t, count, 0)
assert.Len(t, other1.State.Set, 0)
// Removals for both peers: four changed entries on first application.
other2 := NewCrdtState()
other2.BatchDel(peer1, topicList)
other2.BatchDel(peer2, topicList)
count, delta = s.MergeDelta(other2, uint64(300), time.Hour)
assert.Nil(t, delta)
assert.EqualValues(t, count, 4)
assert.Len(t, other2.State.Set, 2)
assert.Len(t, s.State.Set, 2)
assert.Len(t, s.State.Set[peer1].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer1].Map[topicList[1]]).IsRemove())
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsRemove())
// Idempotence again for the removal delta.
count, delta = s.MergeDelta(other2, uint64(300), time.Hour)
assert.Nil(t, delta)
assert.EqualValues(t, count, 0)
assert.Len(t, other2.State.Set, 0)
}
// TestCrdtState_DelPeerAllTopic verifies that DelPeerAllTopic flips every one
// of a peer's topics to the removed state (entries remain as tombstones, not
// deleted outright) and returns the list of topics that were removed.
func TestCrdtState_DelPeerAllTopic(t *testing.T) {
peer2 := uint64(2)
topicList := []string{"topic1", "topic2"}
s := NewCrdtState()
s.BatchAdd(peer2, topicList)
assert.Len(t, s.State.Set, 1)
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsAdd())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsAdd())
r := s.DelPeerAllTopic(peer2)
// The peer entry survives with both topics marked removed.
assert.Len(t, s.State.Set, 1)
assert.Len(t, s.State.Set[peer2].Map, 2)
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[0]]).IsRemove())
assert.True(t, FlagValue(s.State.Set[peer2].Map[topicList[1]]).IsRemove())
assert.ElementsMatch(t, r, topicList)
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.ntcip2306v109.status;
/**
* The Interface NTCIP2306Status specifies the various error types reported by NTCIP 2306.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public interface NTCIP2306Status {
/**
 * Transport-layer error categories (socket and server level failures).
 * NONE indicates no transport error occurred.
 */
public enum TRANSPORTERRORTYPES {
/** No transport error. */
NONE,
/** Failure while establishing the socket connection. */
SOCKETCONNECTERROR,
/** Failure while receiving data on the socket. */
SOCKETRECEIVEERROR,
/** The remote server reported an internal error. */
INTERNALSERVERERROR,
/** Failure during the data transfer itself. */
DATATRANSFERERROR}
/**
 * Encoding/decoding error categories for the message payload.
 * NONE indicates the payload was processed without error.
 */
public enum ENCODINGERRORTYPES {
/** No encoding error. */
NONE,
/** Malformed or unparsable XML. */
XMLERROR,
/** Failure while compressing or decompressing GZIP content. */
GZIPERROR,
/** Malformed SOAP envelope or SOAP-level fault. */
SOAPERROR,
/** The payload failed XML schema validation. */
SCHEMAVALIDATIONERROR}
/**
 * Message-level error categories.
 * NONE indicates the message content was acceptable.
 */
public enum MESSAGEERRORTYPES {
/** No message error. */
NONE,
/** The message content was invalid. */
INVALIDMESSAGE}
}
|
package main.unused.fieldStaticAndOverridesStatic;
// Intentionally empty subclass: it exists so that static fields are inherited
// (not overridden) from FieldStaticAndOverridesStaticMultiS.
// NOTE(review): presumably a fixture for a static-field analysis test — the
// 'main.unused' package name suggests it is referenced only by tooling; confirm.
public class FieldStaticAndOverridesStaticMulti extends FieldStaticAndOverridesStaticMultiS {
}
|
<filename>app/controllers/artists_controller.rb
class ArtistsController < ApiController
  # GET /artists
  # Renders the full artist list as JSON.
  def index
    artists = Artist.all
    # BUG FIX: the original rendered @artists — an instance variable that was
    # never assigned (the result was stored in the *local* variable above), so
    # the endpoint always returned null. Render the local instead.
    render json: artists
  end

  # GET /artists/1
  # Renders a single artist as JSON. Artist.find raises
  # ActiveRecord::RecordNotFound for an unknown id.
  def show
    artist = Artist.find(params[:id])
    render json: artist
  end
end
|
'use strict';
/**
 * Module for displaying patient events in time line form.
 * Declares the AngularJS module and its dependencies: data services,
 * shared user filters, notification/generic UI widgets and infinite
 * scrolling for the timeline list.
 */
angular.module('chronos.timeline', [
'chronos.services',
'users.commons.filters',
'ui.notifications',
'ui.generic',
'infinite-scroll'
]);
|
import React, { Component } from 'react';
import { Provider, connect } from 'react-redux';
import ReactGA from 'react-ga';
import './App.css';
import Header from './Components/Header';
import Footer from './Components/Footer';
import About from './Components/About';
import Resume from './Components/Resume';
import Contact from './Components/Contact';
import Testimonials from './Components/Testimonials';
import Portfolio from './Components/Portfolio';
import store from './store';
import * as actions from './store/actions';
class App extends Component {
constructor(props){
super(props);
this.state = {
foo: 'bar',
resumeData: {}
};
ReactGA.initialize('UA-110570651-1');
ReactGA.pageview(window.location.pathname);
}
componentDidMount(){
this.props.getResumeData();
}
render() {
return (
<div className="App margin-top">
{this.props.resumeData &&
<div>
<Header data={this.props.resumeData.main}/>
<About data={this.props.resumeData.main}/>
<Resume data={this.props.resumeData.resume}/>
{/*<Portfolio data={this.props.resumeData.portfolio}/>*/}
{/*<Testimonials data={this.props.resumeData.testimonials}/>*/}
<Contact data={this.props.resumeData.main}/>
<Footer data={this.props.resumeData.main}/>
</div>
}
</div>
);
}
}
/** Maps the Redux store state to the props consumed by <App/>. */
const mapStateToProps = state => ({
  resumeData: state.profile.resumeData,
});
/** Exposes the profile-fetch action creator as the getResumeData prop. */
const mapDispatchToProps = dispatch => ({
  getResumeData: () => dispatch(actions.profile.getProfile()),
});
// Connected component: App wired to the profile slice of the Redux store.
export const Base= connect(mapStateToProps, mapDispatchToProps)(App);
// Default export wraps the connected component in the store Provider so the
// app can be mounted without any further setup by the caller.
export default () => {
return (
<Provider store={store}>
<Base />
</Provider>
);
};
|
<gh_stars>1-10
#!/usr/bin/env node
import * as program from 'commander';

// Version/description are read from the package manifest so the CLI always
// reports whatever was published.
const packageConfig = require('../../package.json');

// Top-level CLI definition. Each .command(name, description) entry delegates
// to a separate executable (commander's git-style subcommand convention),
// so this file only dispatches. Parsing runs immediately at import time.
program
.allowUnknownOption(false)
.version(packageConfig.version)
.description(packageConfig.description)
.command('init', 'initialize a lamington project')
.command('build [contract_path]', 'build all smart contracts')
.command('start', 'start the eos blockchain in docker')
.command('stop', 'stop the eos blockchain in docker')
.command('test', 'run your unit / integration tests')
.on('*', () => {
// Fallback for anything that is not a registered subcommand.
console.log('Unknown Command: ' + program.args.join(' '));
program.help();
})
.parse(process.argv);
|
from dialogflow_v2.SessionClient import SessionClient
from dialogflow_v2.Objects import QueryInput, TextInput
session_client = SessionClient.SessionClient() #create a session
def get_response(query):
    """Send ``query`` to Dialogflow and return the fulfillment text.

    All calls share the single module-level session above with a fixed
    session_id of "1", so they belong to one conversation context.
    Queries are sent with the en-US language code.
    """
    inputs = QueryInput(text=TextInput(text=query, language_code='en-US'))
    query_result = session_client.detect_intent(session_id="1", query_input=inputs)
    return query_result.query_result.fulfillment_text
|
#!/bin/bash
# LinuxGSM fix_terraria.sh function
# Author: Daniel Gibbs
# Website: https://linuxgsm.com
# Description: Resolves an issue with Terraria.

# NOTE(review): 'local' is only valid inside a function — this file is
# presumably sourced from within a LinuxGSM function rather than executed
# directly; confirm against the core loader.
local commandname="FIX"
local commandaction="Fix"
local function_selfname="$(basename "$(readlink -f "${BASH_SOURCE[0]}")")"

# Give the server a known terminal type so its console renders correctly.
export TERM=xterm
|
#!/bin/bash
# Fixture generator: runs a battery of common Unix commands and captures
# their raw output into .out files (presumably parser test fixtures —
# confirm against the consuming test suite).
# NOTE(review): several commands need sudo and network access, and the
# script mutates iptables rules and /tmp — run only on a disposable machine.

# --- basic system/network command captures ---
arp > arp.out
arp -v > arp-v.out
arp -a > arp-a.out
df > df.out
df -h > df-h.out
dig www.google.com AAAA > dig-aaaa.out
dig www.cnn.com www.google.com > dig.out
dig -x 1.1.1.1 > dig-x.out
env > env.out
free > free.out
free -h > free-h.out
history > history.out
ifconfig > ifconfig.out

# --- seed some iptables rules so the table listings below are non-trivial ---
sudo iptables -A INPUT -i lo -j ACCEPT
sudo iptables -A OUTPUT -o lo -j ACCEPT
sudo iptables -A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
sudo iptables -A OUTPUT -m conntrack --ctstate ESTABLISHED -j ACCEPT
sudo iptables -A INPUT -m conntrack --ctstate INVALID -j DROP
sudo iptables -A INPUT -i lo -s 15.15.15.51 -j DROP
sudo iptables -A INPUT -p tcp -s 15.15.15.0/24 --dport 22 -m conntrack --ctstate NEW,ESTABLISHED -j ACCEPT
sudo iptables -A OUTPUT -p tcp --sport 22 -m conntrack --ctstate ESTABLISHED -j ACCEPT
sudo iptables -L -t filter > iptables-filter.out
sudo iptables --line-numbers -L -t filter > iptables-filter-line-numbers.out
sudo iptables -L -t nat > iptables-nat.out
sudo iptables -L -t mangle > iptables-mangle.out
sudo iptables -L -t raw > iptables-raw.out
sudo iptables -nvL -t filter > iptables-filter-nv.out

# --- background jobs so 'jobs' has something to report ---
sleep 11 & sleep 12 & sleep 13 & sleep 14 &
jobs > jobs.out

# --- ls variants, including pathological filenames with embedded newlines ---
# NOTE(review): /tmp/lstest is assumed to already exist (no mkdir here).
ls / > ls.out
ls -al / > ls-al.out
ls -alh / > ls-alh.out
ls -R /usr > ls-R.out
ls -alR /usr > ls-alR.out
ls /usr/* > ls-glob.out
cd /tmp/lstest
touch 'a regular filename'
touch $'\nthis file starts with one newline'
touch $'\n\n\n\nthis file starts with four newlines'
touch $'this file has\na newline inside'
touch $'this file has\n\n\n\nfour contiguous newlines inside'
touch $'this file\nhas\nsix\n\nnewlines\n\nwithin'
touch $'\n\nthis file has\na combination\n\n\nof everything\n\n\n\n'
cd /tmp
ls -R > ~/utils/ls-R-newlines.out
ls -lR > ~/utils/ls-lR-newlines.out
cd lstest
ls > ~/utils/ls-newlines.out
ls -l > ~/utils/ls-l-newlines.out

# --- block devices, modules, open files, mounts ---
lsblk > lsblk.out
lsblk -o +KNAME,FSTYPE,LABEL,UUID,PARTLABEL,PARTUUID,RA,MODEL,SERIAL,STATE,OWNER,GROUP,MODE,ALIGNMENT,MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,WSAME,WWN,RAND,PKNAME,HCTL,TRAN,REV,VENDOR > lsblk-allcols.out
lsmod > lsmod.out
lsof > lsof.out
sudo lsof > lsof-sudo.out
mount > mount.out

# --- netstat captured while a git clone generates live connections ---
rm -rf /tmp/jc
git clone https://github.com/kellyjonbrazil/jc.git /tmp/jc & sleep 1; netstat > netstat.out
netstat -p > netstat-p.out
netstat -l > netstat-l.out
sudo netstat -lnp > netstat-sudo-lnp.out
sudo netstat -aeep > netstat-sudo-aeep.out

# --- processes, routing, misc system info ---
ps -ef > ps-ef.out
ps axu > ps-axu.out
route > route.out
route -vn > route-vn.out
uname -a > uname-a.out
uptime > uptime.out
w > w.out
cat /etc/hosts > hosts.out
cat /etc/fstab > fstab.out
systemctl -a > systemctl.out
systemctl -a list-unit-files > systemctl-luf.out
systemctl -a list-sockets > systemctl-ls.out
systemctl -a list-jobs > systemctl-jobs.out
du /usr > du.out
pip3 list > pip-list.out
pip3 show wheel pip jc > pip-show.out
blkid > blkid.out
blkid /dev/sda2 > blkid-sda2.out
sudo blkid -ip /dev/sda2 /dev/sda1 > blkid-ip-multi.out
sudo blkid -o udev -ip /dev/sr0 > blkid-ip-udev.out
sudo blkid -o udev -ip /dev/sda2 /dev/sda1 > blkid-ip-udev-multi.out
last > last.out
last -w | cat > last-w.out
sudo lastb > lastb.out
cat /etc/group > group.out
sudo cat /etc/gshadow > gshadow.out
# linux:
ping -4 www.cnn.com -c 20 -O > ping-hostname-O.out
ping -4 www.cnn.com -c 20 -O -p abcd > ping-hostname-O-p.out
ping -4 www.cnn.com -c 20 -O -D -p abcd -s 1400 > ping-hostname-O-D-p-s.out
ping 127.0.0.1 -c 20 -O > ping-ip-O.out
ping 127.0.0.1 -c 20 -O -D > ping-ip-O-D.out
ping6 2a04:4e42:600::323 -c 20 -O -p abcd > ping6-ip-O-p.out
ping6 2a04:4e42:600::323 -c 20 -O -D -p abcd > ping6-ip-O-D-p.out
ping6 www.cnn.com -c 20 -O -D -p abcd -s 1400 > ping6-hostname-O-D-p-s.out
ping6 www.cnn.com -c 20 -O -D -p abcd > ping6-hostname-O-D-p.out
ping6 www.cnn.com -c 20 -O -p abcd > ping6-hostname-O-p.out
# osx/bsd:
ping -c 3 -s 40 127.0.0.1 > ping-ip-s.out
ping -c 3 -s 40 localhost > ping-hostname-s.out
ping -c 3 -p ff 127.0.0.1 > ping-ip-p.out
ping -c 3 127.0.0.1 > ping-ip.out
ping -c 3 -p ff cnn.com > ping-hostname-p.out
ping -c 3 cnn.com > ping-hostname.out
ping6 -c 3 -s 40 localhost > ping6-hostname-s.out
ping6 -c 3 -s 40 ::1 > ping6-ip-s.out
ping6 -c 3 -p ff ::1 > ping6-ip-p.out
ping6 -c 3 ::1 > ping6-ip.out
ping6 -c 3 -p ff localhost > ping6-hostname-p.out
ping6 -c 3 localhost > ping6-hostname.out
|
import { mapGetters } from 'vuex';
/**
 * Badge counter component: mirrors the store's badgeNumber getter and caps
 * the displayed value at "99+".
 */
export default {
  name: 'badge',
  computed: {
    ...mapGetters({
      badge: 'badgeNumber'
    }),
    badgeNumber() {
      if (this.badge > 99) {
        return '99+';
      }
      return this.badge;
    }
  }
}
|
import cv2
import os
import sys
import dsl
import time
import logging
# Plugin Name and Plugin Category
def createPluginMetaData(plugin):
    """Build the standard metadata block and attach it to ``plugin``.

    Returns the PluginMetaData object so callers can inspect it.
    """
    meta = dsl.PluginMetaData("doPythonWork", "Basic")
    meta.setAuthor("<NAME>")
    meta.setDescription("Create a Python Work.")
    meta.setCopyright("Allen Institute for Brain Science, 2018")
    meta.setHint("Do Python Work")
    meta.setCategory("Misc Plugins")
    plugin.assignMetaData(meta)
    return meta
# Setup the plugins properties, used later on when execution occurs
def createPluginProperties(plugin):
    """Register the properties execute() reads: the log file name and the
    per-iteration sleep duration (defaults: "Log.txt", 0.01s)."""
    log_file_prop = dsl.stringProperty("Log.txt", "LogFile")
    sleep_prop = dsl.floatProperty(.01, "Sleep")
    plugin.addProperty(log_file_prop)
    plugin.addProperty(sleep_prop)
    return True
## The execute function is called from C/C++ with a PythonPlugin object
## as argument. The python code below assume that the plugins properties and metadata has been setup from
## within c/c++, using the above functions
def execute(thePlugin):
    """Run the plugin's busy-work loop.

    Reads the ``LogFile`` and ``Sleep`` properties, then loops 101 times
    doing a little numeric work per iteration, reporting progress through
    the plugin's progress event and honouring termination requests.

    Returns True on success, False when an exception was caught and logged.
    """
    try:
        logFormat = 'PYTHON_%(levelname)s: %(message)s'
        logFileName = thePlugin.getPropertyValueString("LogFile")
        logging.basicConfig(filename=logFileName, level=logging.DEBUG, format=logFormat)
        logging.debug('Entering Plugin Execute Method')
        sleepTime = thePlugin.getPropertyValueFloat("Sleep")
        progressData = dsl.intProperty(0, "Progress")
        loop = 0
        x = 10
        while loop < 101:
            loop = loop + 1
            x = x + 45.34
            sqrt(x)  # deliberate busy work; the result is intentionally unused
            if thePlugin.isBeingTerminated() == True:
                logging.debug('Forcing Plugin To Exit')
                break
            progressData.setValue(loop)
            # Call plugins callback
            if thePlugin.hasProgressEvent():
                thePlugin.workProgressEvent(thePlugin, progressData)
            time.sleep(sleepTime)
        logging.debug('Looped ' + str(loop) +' times')
        logging.debug('Exiting execute function in plugin: ' + thePlugin.getName())
    except Exception:
        # FIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt. sys.exc_info() is kept so the logged message is
        # byte-identical; a stray no-op print() was removed.
        e = sys.exc_info()
        logging.error("Plugin Exception: " + str(e))
        return False
    return True
def sqrt(y, n=10000):
    """Approximate the square root of ``y`` by Newton's method.

    ``n`` bounds the number of iterations; the loop also stops early once the
    estimate reaches a fixed point, which yields exactly the same value as
    running all ``n`` steps (a fixed point cannot change on further updates).

    FIX: the original raised ZeroDivisionError for y == 0; 0 is returned now.
    """
    if y == 0:
        return 0
    x = y / 2
    while n > 0:
        n -= 1
        nxt = (x + y / x) / 2
        if nxt == x:  # converged exactly in floating point — done
            break
        x = nxt
    return x
#Main function is used for testing..
def main():
    """Smoke-test the plugin outside the host application: build a bare
    plugin, attach metadata/properties, run execute(), then clean up."""
    pm = None
    try:
        pm = dsl.PythonPluginManager()
        plugin = pm.createBarePlugin("doPythonWork")
        print(type(plugin))
        createPluginMetaData(plugin)
        createPluginProperties(plugin)
        plugin.setPropertyValue("Sleep", 1.1)
        res = execute(plugin)
    except Exception:  # FIX: was a bare 'except:' (also caught SystemExit)
        e = sys.exc_info()
        print ("There was a problem: " + str(e))
    finally:
        # Its important to call this, in order to clean up memory properly.
        # FIX: guard against pm being unset when PythonPluginManager() itself
        # raised — previously a NameError here masked the original error.
        if pm is not None:
            pm.unloadAll()

if __name__ == '__main__':
    main()
|
from sympy import symbols, diag
def calculate_line_element(metric):
    """Build the line element ds^2 string for a diagonal 4D metric.

    Args:
        metric: sequence of four diagonal components (g_tt, g_xx, g_yy, g_zz);
            each is formatted directly into the returned expression string.

    Returns:
        str: "g_tt*dt**2 + g_xx*dx**2 + g_yy*dy**2 + g_zz*dz**2".
    """
    # FIX: the sympy symbol declarations in the original body were dead code —
    # the function only performs string formatting — so they were removed.
    g_tt, g_xx, g_yy, g_zz = metric
    return f"{g_tt}*dt**2 + {g_xx}*dx**2 + {g_yy}*dy**2 + {g_zz}*dz**2"
|
<reponame>StellarCrow/wfh-client<gh_stars>0
import {TestBed} from '@angular/core/testing';
import {HideContentService} from './hide-content.service';
// Minimal smoke test: verifies HideContentService can be constructed by
// Angular's TestBed injector with an empty testing module (i.e. the service
// is providedIn root or needs no extra providers).
describe('HideContentService', () => {
let service: HideContentService;
beforeEach(() => {
TestBed.configureTestingModule({});
service = TestBed.inject(HideContentService);
});
it('should be created', () => {
expect(service).toBeTruthy();
});
});
|
#ifndef IO_HPP
#define IO_HPP
#pragma once
#include <fstream>
#include <stdexcept>
#include <string>
#include <vector>

#include <fmt/ostream.h>

#include "util.hpp"
// Flat on-disk/in-memory representation of a collection of sets.
struct Dataset {
    unsigned int cardinality;          // number of sets (k)
    unsigned int universe;             // size of the element universe
    unsigned long totalElements;       // sum of all set sizes
    unsigned int* sizes = nullptr;     // per-set sizes, length == cardinality
    unsigned int* elements = nullptr;  // concatenated set contents, length == totalElements
    unsigned int* offsets = nullptr;   // optional per-set start offsets (may stay null)

    // NOTE(review): this type owns raw arrays but defines no copy control
    // (rule of three) — copying a Dataset would double-free; confirm callers
    // only ever pass it by pointer.
    ~Dataset() {
        delete[] sizes;
        delete[] elements;
        // FIX: 'offsets' was owned but never released (memory leak). The
        // pointers are nullptr-initialized above, so delete[] is safe even
        // for fields that were never assigned.
        delete[] offsets;
    }
};
/**
 * Reads a binary dataset file: a header (cardinality, universe,
 * totalElements) followed by `cardinality` set sizes and `totalElements`
 * element values.
 *
 * @param path path to the binary dataset file
 * @return a heap-allocated Dataset; the caller takes ownership
 * @throws std::runtime_error when the file cannot be opened
 */
Dataset* readDataset(std::string& path)
{
    std::ifstream infile;
    infile.open(path, std::ios::binary | std::ios::in);
    // FIX: the original kept reading from an unopened stream on a bad path,
    // silently producing a garbage Dataset.
    if (!infile.is_open()) {
        throw std::runtime_error("cannot open dataset file: " + path);
    }
    Dataset* d = new Dataset;
    infile.read((char*)&(d->cardinality), sizeof(d->cardinality));
    infile.read((char*)&(d->universe), sizeof(d->universe));
    infile.read((char*)&(d->totalElements), sizeof(d->totalElements));
    d->sizes = new unsigned int[d->cardinality];
    for (unsigned int i = 0; i < d->cardinality; ++i) {
        unsigned int tmp;
        infile.read((char*)&tmp, sizeof(tmp));
        d->sizes[i] = tmp;
    }
    d->elements = new unsigned int[d->totalElements];
    for (unsigned long i = 0; i < d->totalElements; ++i) {
        unsigned int tmp;
        infile.read((char*)&tmp, sizeof(tmp));
        d->elements[i] = tmp;
    }
    infile.close();
    return d;
}
/**
 * Converts the flat Dataset representation into a vector of per-set vectors.
 * The i-th inner vector holds the elements of set i, in file order.
 */
std::vector<std::vector<unsigned int>> datasetToCollection(Dataset* d) {
    std::vector<std::vector<unsigned int>> sets(d->cardinality);
    // Walk the concatenated element array, slicing out one set at a time.
    const unsigned int* cursor = d->elements;
    for (unsigned int i = 0; i < d->cardinality; ++i) {
        sets[i].assign(cursor, cursor + d->sizes[i]);
        cursor += d->sizes[i];
    }
    return sets;
}
/**
 * Serializes a collection of sets into the binary dataset format: a header
 * (k, universe, totalElements), then all set lengths, then the concatenated
 * set contents in the same order.
 */
void writeDataset(unsigned int k, unsigned int universe, unsigned long totalElements,
                  std::vector<std::vector<unsigned int>>& dataset, std::string& path)
{
    std::ofstream outfile(path, std::ios::binary | std::ios::out);
    outfile.write((char*)&k, sizeof(k));
    outfile.write((char*)&universe, sizeof(universe));
    outfile.write((char*)&totalElements, sizeof(totalElements));
    // First pass: the length of every set.
    for (const auto& set : dataset) {
        unsigned int length = set.size();
        outfile.write((char*)&length, sizeof(length));
    }
    // Second pass: the elements themselves, set by set.
    for (const auto& set : dataset) {
        for (unsigned int value : set) {
            outfile.write((char*)&value, sizeof(value));
        }
    }
    outfile.close();
}
/**
 * Writes one "(a,b): count" line for every unordered set pair, looking each
 * count up via triangular_index. Pair indices are 1-based in the output.
 */
void writeResult(unsigned int k, std::vector<unsigned int> counts, std::string& output) {
    std::ofstream file(output.c_str());
    for (unsigned int first = 0; first < k; ++first) {
        for (unsigned int second = first + 1; second < k; ++second) {
            fmt::print(file, "({},{}): {}\n", first + 1, second + 1,
                       counts[triangular_index(k, first, second)]);
        }
    }
    file.close();
}
/**
 * Writes pairwise counts for a list of tile-pair runs. Each run occupies its
 * own partition*partition slab of `counts`. For a self-join run (A.id == B.id)
 * with mm == false, counts are stored triangularly (only j > i); otherwise
 * quadratic (row-major) indexing is used.
 *
 * @tparam T  count element type
 * @tparam mm when true, always use quadratic indexing, even for self-joins
 */
template <typename T, bool mm = false>
void writeResult(std::vector<tile_pair>& runs, unsigned int partition,
std::vector<T>& counts, std::string& output) {
std::ofstream file;
file.open(output.c_str());
unsigned int iter = 0;
for (auto& run : runs) {
tile& A = run.first;
tile& B = run.second;
bool selfJoin = (A.id == B.id);
// Start of this run's slab inside the flat counts buffer.
unsigned int partitionOffset = iter * partition * partition;
for (unsigned int i = A.start; i < A.end; ++i) {
// For self-joins only the upper triangle (j > i) was computed/stored.
for (unsigned int j = (selfJoin ? i + 1 : B.start); j < B.end; ++j) {
unsigned long pairOffset;
if (selfJoin && !mm) {
// Local (tile-relative) indices are obtained by subtracting the
// tile's base offset (id * partition).
pairOffset = triangular_index(partition, i - A.id * partition, j - B.id * partition);
} else {
pairOffset = quadratic_index(partition, i - A.id * partition, j - B.id * partition);
}
// Output uses 1-based set indices.
fmt::print(file, "({},{}): {}\n", i + 1, j + 1,
(&counts[0] + partitionOffset)[pairOffset]);
}
}
iter++;
}
file.close();
}
#endif //IO_HPP
|
<filename>extension/popup.js
// Once the popup's DOM is ready, ask the content script of the active tab
// for its DOM info; the reply is rendered by setDOMInfo.
window.addEventListener('DOMContentLoaded', () => {
// ...query for the active tab...
chrome.tabs.query({
active: true,
currentWindow: true
}, tabs => {
// ...and send a request for the DOM info...
// setDOMInfo is declared with const further down; by the time this async
// callback fires the binding is initialized, so there is no TDZ issue.
chrome.tabs.sendMessage(
tabs[0].id,
{from: 'popup', status: 'ready'}, setDOMInfo);
});
});
/**
 * Renders the DOM info received from the content script: shows the status
 * text and toggles the "waiting" / "notSupported" panels via their
 * display styles.
 */
const setDOMInfo = info => {
  const { status, waiting, notSupported } = info;
  document.getElementById('pText').textContent = status;
  document.getElementById('waiting').style.display = waiting;
  document.getElementById('notSupported').style.display = notSupported;
};
|
/// Returns the maximum sum over all contiguous subarrays (Kadane's algorithm).
///
/// - Parameter nums: the input values.
/// - Returns: the largest contiguous-subarray sum, or 0 for an empty array.
///   (FIX: the original crashed on `nums[0]` when the array was empty.)
func maxSubarraySum(_ nums: [Int]) -> Int {
    guard let first = nums.first else { return 0 }
    var maxSum = first
    var currentSum = first
    for value in nums.dropFirst() {
        // Either extend the running subarray or restart it at `value`.
        currentSum = max(value, currentSum + value)
        maxSum = max(maxSum, currentSum)
    }
    return maxSum
}
|
#!/bin/bash
cat << PREAMBLE
/*
* RELIC is an Efficient LIbrary for Cryptography
* Copyright (C) 2007-2017 RELIC Authors
*
* This file is part of RELIC. RELIC is legal property of its developers,
* whose names are not listed here. Please refer to the COPYRIGHT file
* for contact information.
*
* RELIC is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* RELIC is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with RELIC. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @file
*
 * Symbol renaming to avoid clashes when simultaneously linking multiple builds.
*
* @ingroup core
*/
#ifndef RELIC_LABEL_H
#define RELIC_LABEL_H
#include <relic_conf.h>
#define PREFIX(F) _PREFIX(LABEL, F)
#define _PREFIX(A, B) __PREFIX(A, B)
#define __PREFIX(A, B) A ## _ ## B
/*============================================================================*/
/* Macro definitions */
/*============================================================================*/
#ifdef LABEL
PREAMBLE
# REDEF <module>
# Scans relic_<module>.h for function prototypes whose names start with
# "<module>_" and prints one "#undef NAME" per function, a blank line, then
# one "#define NAME \tPREFIX(NAME)" per function. The grep chain drops
# defines, typedefs, continuation lines and doc-comment lines; the seds strip
# qualifiers and reduce each prototype to its bare function name.
REDEF() {
    cat "relic_$1.h" | grep "$1_" | grep -v define | grep -v typedef | grep -v '\\' | grep '(' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_0-9]+ ([a-z,_,0-9]+)\(.*/\#undef \1/'
    echo
    cat "relic_$1.h" | grep "$1_" | grep -v define | grep -v typedef | grep -v '\\' | grep '(' | sed 's/\*//' | sed 's/const //' | sed -r 's/[a-z,_,0-9]+ ([a-z,_,0-9]+)\(.*/\#define \1 \tPREFIX\(\1\)/'
    echo
}
# REDEF2 <header-module> <function-prefix>
# Like REDEF, but the header name and the function-name prefix differ
# (e.g. fp2_* functions live in relic_fpx.h).
REDEF2() {
    cat "relic_$1.h" | grep "$2_" | grep -v define | grep -v typedef | grep -v '\\' | grep '(' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_0-9]+ ([a-z,_,0-9]+)\(.*/\#undef \1/'
    echo
    cat "relic_$1.h" | grep "$2_" | grep -v define | grep -v typedef | grep -v '\\' | grep '(' | sed 's/\*//' | sed 's/const //' | sed -r 's/[a-z,_,0-9]+ ([a-z,_,0-9]+)\(.*/\#define \1 \tPREFIX\(\1\)/'
    echo
}
# REDEF_LOW <module>
# Same renaming as REDEF, but for the low-level backend header
# low/relic_<module>_low.h (which also needs '}' lines filtered out).
REDEF_LOW() {
    cat "low/relic_$1_low.h" | grep "$1_" | grep -v define | grep -v typedef | grep -v '\\' | grep -v '\}' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_]+ ([a-z,_,0-9]+)\(.*/\#undef \1/'
    echo
    cat "low/relic_$1_low.h" | grep "$1_" | grep -v define | grep -v @version | grep -v typedef | grep -v '\\' | grep -v '\}' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_]+ ([a-z,_,0-9]+)\(.*/\#define \1 \tPREFIX\(\1\)/'
    echo
}
# REDEF2_LOW <header-module> <function-prefix>
# Low-level variant of REDEF2: header name and function prefix differ and the
# functions live under low/relic_<header-module>_low.h.
REDEF2_LOW() {
    cat "low/relic_$1_low.h" | grep "$2_" | grep -v define | grep -v typedef | grep -v '\\' | grep -v '\}' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_]+ ([a-z,_,0-9]+)\(.*/\#undef \1/'
    echo
    cat "low/relic_$1_low.h" | grep "$2_" | grep -v define | grep -v @version | grep -v typedef | grep -v '\\' | grep -v '\}' | grep -v '^ \*' | sed 's/const //' | sed 's/\*//' | sed -r 's/[a-z,_]+ ([a-z,_,0-9]+)\(.*/\#define \1 \tPREFIX\(\1\)/'
    echo
}
# Context globals shared by every module.
echo "#undef first_ctx"
echo "#define first_ctx PREFIX(first_ctx)"
echo "#undef core_ctx"
echo "#define core_ctx PREFIX(core_ctx)"
echo
# Core and utility modules.
REDEF core
REDEF arch
REDEF bench
REDEF err
REDEF rand
REDEF pool
REDEF test
REDEF trace
REDEF util
echo "#undef conf_print"
echo "#define conf_print PREFIX(conf_print)"
echo
# Digit-vector module: type plus high- and low-level functions.
echo "#undef dv_t"
echo "#define dv_t PREFIX(dv_t)"
echo
REDEF dv
REDEF_LOW dv
# Multiple-precision integer module.
echo "#undef bn_st"
echo "#undef bn_t"
echo "#define bn_st PREFIX(bn_st)"
echo "#define bn_t PREFIX(bn_t)"
echo
REDEF bn
REDEF_LOW bn
# Prime-field module.
echo "#undef fp_st"
echo "#undef fp_t"
echo "#define fp_st PREFIX(fp_st)"
echo "#define fp_t PREFIX(fp_t)"
echo
REDEF fp
REDEF_LOW fp
# Binary-field module: rename the fb types and functions.
# BUG FIX: this section previously re-emitted the fp_st/fp_t renames a second
# time (copy-paste from the fp section above), so fb_st/fb_t were never
# renamed and clashed when multiple builds were linked together.
echo "#undef fb_st"
echo "#undef fb_t"
echo "#define fb_st PREFIX(fb_st)"
echo "#define fb_t PREFIX(fb_t)"
echo
REDEF fb
REDEF_LOW fb
# Prime-curve module.
echo "#undef ep_st"
echo "#undef ep_t"
echo "#define ep_st PREFIX(ep_st)"
echo "#define ep_t PREFIX(ep_t)"
echo
REDEF ep
# Edwards-curve module.
echo "#undef ed_st"
echo "#undef ed_t"
echo "#define ed_st PREFIX(ed_st)"
echo "#define ed_t PREFIX(ed_t)"
echo
REDEF ed
# Binary-curve module.
echo "#undef eb_st"
echo "#undef eb_t"
echo "#define eb_st PREFIX(eb_st)"
echo "#define eb_t PREFIX(eb_t)"
echo
REDEF eb
# Curves over prime-field extensions (functions live in relic_epx.h).
echo "#undef ep2_st"
echo "#undef ep2_t"
echo "#define ep2_st PREFIX(ep2_st)"
echo "#define ep2_t PREFIX(ep2_t)"
echo
REDEF2 epx ep2
# Prime-field extension types; the fpX functions all live in relic_fpx.h.
# NOTE(review): only #undef is emitted for these fpX/dvX types (no matching
# #define) — presumably intentional upstream; confirm before relying on it.
echo "#undef fp2_st"
echo "#undef fp2_t"
echo "#undef dv2_t"
echo "#undef fp3_st"
echo "#undef fp3_t"
echo "#undef dv3_t"
echo "#undef fp6_st"
echo "#undef fp6_t"
echo "#undef dv6_t"
echo "#undef fp12_t"
echo "#undef fp18_t"
echo
REDEF2 fpx fp2
REDEF2_LOW fpx fp2
REDEF2 fpx fp3
REDEF2_LOW fpx fp3
REDEF2 fpx fp6
REDEF2 fpx fp12
REDEF2 fpx fp18
# Binary-field extensions and pairings.
REDEF2 fbx fb2
REDEF2 fbx fb4
REDEF pp
# Cryptographic-protocol types and functions.
echo "#undef rsa_t"
echo "#undef rabin_t"
echo "#undef bdpe_t"
echo "#undef sokaka_t"
echo "#define rsa_t PREFIX(rsa_t)"
echo "#define rabin_t PREFIX(rabin_t)"
echo "#define bdpe_t PREFIX(bdpe_t)"
echo "#define sokaka_t PREFIX(sokaka_t)"
echo
REDEF cp
# Close the conditional and the include guard opened in the preamble.
echo "#endif /* LABEL */"
echo
echo "#endif /* !RELIC_LABEL_H */"
|
<reponame>comptech-soft/CRV<filename>resources/js/Views/Sistem/Colors/columns.js<gh_stars>0
const Columns = require('./../../../Columns/Columns')
// Grid column definitions for the Colors admin view.
const columns = {
  // Row-number column, width 10.
  reccount: Columns.RecCount(10),
  // "Denumire" (name) column, sorted ascending by colors.color.
  color: ComptechApp.CreateColumn('color', {
    width: 80,
    caption: 'Denumire',
    orderby: ['colors.color'],
    direction: 'asc',
    source: 'color'
  }),
  // Per-row action buttons, width 10.
  actions: Columns.Actions(10),
};

module.exports = columns;
|
package com.ramusthastudio.mygcmnetworkmanager;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
/**
 * Entry screen: two buttons that start or cancel the periodic GCM Network
 * Manager task managed by {@link SchedulerTask}.
 */
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
    // UI controls that trigger scheduling actions.
    private Button mStartButton;
    private Button mCancelButton;
    // Wrapper around the periodic-task scheduling API.
    private SchedulerTask mSchedulerTask;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mStartButton = findViewById(R.id.btn_start);
        mCancelButton = findViewById(R.id.btn_cancel);
        mStartButton.setOnClickListener(this);
        mCancelButton.setOnClickListener(this);

        mSchedulerTask = new SchedulerTask(this);
    }

    @Override
    public void onClick(View v) {
        final int id = v.getId();
        if (id == R.id.btn_start) {
            startJob();
        } else if (id == R.id.btn_cancel) {
            cancelJob();
        }
    }

    /** Schedules the periodic task and confirms to the user. */
    private void startJob() {
        mSchedulerTask.createPeriodicTask();
        Toast.makeText(this, "Periodic Task Created", Toast.LENGTH_SHORT).show();
    }

    /** Cancels the periodic task and confirms to the user. */
    private void cancelJob() {
        mSchedulerTask.cancelPeriodicTask();
        Toast.makeText(this, "Periodic Task Cancelled", Toast.LENGTH_SHORT).show();
    }
}
|
<reponame>johanneswolfgruber/Learn-To-Listen<filename>app/src/main/java/com/johanneswolfgruber/learntolisten/MainMenuActivity.java
package com.johanneswolfgruber.learntolisten;
import android.app.DialogFragment;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
//import android.speech.tts.TextToSpeech;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.Switch;
import android.widget.TextView;
import java.util.Locale;
/**
 * Main menu: starts the game, the tutorial and the game-sounds screen,
 * shows/resets the persisted highscore, and toggles tutorial mode.
 */
public class MainMenuActivity extends AppCompatActivity implements CompoundButton.OnCheckedChangeListener{
    //private TextToSpeech mTTS;
    // Entry animation for the menu and a blink used when a new highscore is set.
    private Animation mAnimationBlendIn, mAnimationBlinking;
    // Displays the persisted highscore.
    private TextView mHigh;
    // Toggles between normal and tutorial mode.
    private Switch mSwitch;
    // Shared sound engine, exposed to other activities via getSounds().
    private static Sound sSound;
    // Playback volume for menu sounds.
    private final float VOL_GAME = 0.5f;
    // 0 = normal mode, 1 = tutorial mode; read by other activities via getModeID().
    private static int sModeID;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main_menu);
        //Initialize Buttons, TextToSpeech, Animation
        //set OnClickListeners for Buttons
        //mTTS = new TextToSpeech(this, this);
        mAnimationBlendIn = AnimationUtils.loadAnimation(this, R.anim.blend_in);
        mAnimationBlinking = AnimationUtils.loadAnimation(this, R.anim.blinking);
        mHigh = (TextView) findViewById(R.id.highscore_text_view);
        mSwitch = (Switch) findViewById(R.id.switch_tutorial_mode);
        if (mSwitch != null) {
            mSwitch.setOnCheckedChangeListener(this); // note this
        }
        sSound = new Sound();
        sSound.initSounds(getApplicationContext());
        Button mNewGameButton = (Button) findViewById(R.id.new_game_button);
        mNewGameButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                sSound.playSound(sSound.getSoundIDnewgame(), VOL_GAME);
                //start GameActivity.java
                // Started for result: the game reports its score via the resultCode.
                Intent newGameIntent = new Intent(MainMenuActivity.this,
                        GameActivity.class);
                startActivityForResult(newGameIntent, 1);
            }
        });
        Button mTutorialButton = (Button) findViewById(R.id.tutorial_button);
        mTutorialButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                sSound.playSound(sSound.getSoundIDbutton(), VOL_GAME);
                //start TutorialActivity.java
                Intent tutorialIntent = new Intent(MainMenuActivity.this,
                        TutorialActivity.class);
                startActivity(tutorialIntent);
            }
        });
        Button mGamesoundsButton = (Button) findViewById(R.id.gamesounds_button);
        mGamesoundsButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                sSound.playSound(sSound.getSoundIDbutton(), VOL_GAME);
                //start GamesoundsActivity.java
                Intent gamesoundsIntent = new Intent(MainMenuActivity.this,
                        GamesoundsActivity.class);
                startActivity(gamesoundsIntent);
            }
        });
        Button mResetHighscoreButton = (Button) findViewById(R.id.reset_highscore_button);
        mResetHighscoreButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                sSound.playSound(sSound.getSoundIDbutton(), VOL_GAME);
                // Ask for confirmation before wiping the highscore.
                DialogFragment mDialog = new ResetHighscore();
                mDialog.show(getFragmentManager(), "DialogFragment");
            }
        });
    }

    // Callback from the ResetHighscore dialog's positive button.
    public void onUserPositiveClick() {
        deleteHighscore();
    }

    public static Sound getSounds() {
        return sSound;
    }

    public static int getModeID() {
        return sModeID;
    }

    /*
    @Override
    public void onInit(int status) {
        mTTS.setLanguage(Locale.US);
    }
    */

    @Override
    public void onResume() {
        super.onResume();
        // Replay the blend-in animation and refresh the highscore display.
        View v = findViewById(R.id.root_constraint_layout_main_menu);
        v.startAnimation(mAnimationBlendIn);
        mHigh.setText(String.format(Locale.getDefault(), "%d", readHighscore()));
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Request code 1 == GameActivity; the score comes back as the resultCode.
        // Only normal mode (sModeID == 0) may update the highscore.
        if(requestCode == 1 && sModeID == 0) {
            if(resultCode > readHighscore()) {
                writeHighscore(resultCode);
                mHigh.startAnimation(mAnimationBlinking);
            }
        }
    }

    // Persists the highscore in app-private SharedPreferences.
    private void writeHighscore(int highscore) {
        SharedPreferences sharedPref = getSharedPreferences("com.johanneswolfgruber.learntolisten",
                Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPref.edit();
        editor.putInt(getString(R.string.highscore), highscore);
        editor.apply();
    }

    // Reads the persisted highscore; defaults to 0 when none is stored.
    private int readHighscore() {
        SharedPreferences sharedPref = getSharedPreferences("com.johanneswolfgruber.learntolisten",
                Context.MODE_PRIVATE);
        return sharedPref.getInt(getString(R.string.highscore), 0);
    }

    // Removes the stored highscore and refreshes the UI.
    private void deleteHighscore() {
        SharedPreferences sharedPref = getSharedPreferences("com.johanneswolfgruber.learntolisten",
                Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPref.edit();
        editor.remove(getString(R.string.highscore));
        editor.apply();
        onResume();
    }

    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        // Switch between tutorial mode (1) and normal mode (0).
        if(isChecked) {
            mSwitch.setText(R.string.tutorial_mode_text);
            sModeID = 1;
        } else {
            mSwitch.setText(R.string.normal_mode_text);
            sModeID = 0;
        }
    }
}
|
<filename>sqlalchemy_jsonapi/unittests/test_serializer_post_collection.py
"""Test for serializer's post_collection."""
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi import __version__
class PostCollection(testcases.SqlalchemyJsonapiTestCase):
    """Tests for serializer.post_collection."""

    def test_add_resource(self):
        """Create resource successfully."""
        payload = {
            'data': {
                'type': 'users',
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'users')
        user = self.session.query(models.User).get(
            response.data['data']['id'])
        # NOTE(review): the 'SET-ATTR:' prefix is presumably applied by a
        # setter on User.first in the models module — confirm there.
        self.assertEqual(user.first, 'SET-ATTR:Sally')
        self.assertEqual(user.last, 'Smith')
        self.assertEqual(user.username, 'SallySmith1')
        self.assertEqual(user.password, 'password')

    @testcases.fragile
    def test_add_resource_response(self):
        """Create resource returns data response and 201.

        This test is very fragile.
        """
        payload = {
            'data': {
                'type': 'users',
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password'
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'users')
        expected = {
            'data': {
                'attributes': {
                    'first': u'Sally',
                    'last': u'Smith',
                    'username': u'SallySmith1'
                },
                'id': 1,
                'relationships': {
                    'posts': {
                        'links': {
                            'related': '/users/1/posts',
                            'self': '/users/1/relationships/posts'
                        }
                    },
                    'logs': {
                        'links': {
                            'related': '/users/1/logs',
                            'self': '/users/1/relationships/logs'
                        }
                    },
                    'comments': {
                        'links': {
                            'related': '/users/1/comments',
                            'self': '/users/1/relationships/comments'
                        }
                    }
                },
                'type': 'users'
            },
            'included': [],
            'jsonapi': {
                'version': '1.0'
            },
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            }
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(201, response.status_code)

    def test_add_resource_with_relationship(self):
        """Create resource successfully with many-to-one relationship."""
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        self.session.commit()
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'data': {
                            'type': 'users',
                            'id': user.id
                        }
                    }
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'posts')
        blog_post = self.session.query(models.Post).get(
            response.data['data']['id'])
        self.assertEqual(blog_post.title, 'Some Title')
        self.assertEqual(blog_post.content, 'Some Content Inside')
        self.assertEqual(blog_post.author_id, user.id)
        self.assertEqual(blog_post.author, user)

    @testcases.fragile
    def test_add_resource_with_many_to_one_relationship_response(self):
        """Create resource successfully with many-to-one relationship returns 201."""
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        self.session.commit()
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'data': {
                            'type': 'users',
                            'id': user.id
                        }
                    }
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'posts')
        expected = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': u'Some Title',
                    'content': u'Some Content Inside'
                },
                'id': 1,
                'relationships': {
                    'author': {
                        'links': {
                            'related': '/posts/1/author',
                            'self': '/posts/1/relationships/author'
                        }
                    },
                    'comments': {
                        'links': {
                            'related': '/posts/1/comments',
                            'self': '/posts/1/relationships/comments'
                        }
                    }
                }
            },
            'included': [],
            'jsonapi': {
                'version': '1.0'
            },
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            }
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(response.status_code, 201)

    def test_add_resource_twice(self):
        """Creating same resource twice results in 409 conflict."""
        payload = {
            'data': {
                'type': 'users',
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                }
            }
        }
        models.serializer.post_collection(self.session, payload, 'users')
        with self.assertRaises(errors.ValidationError) as error:
            models.serializer.post_collection(
                self.session, payload, 'users')
        self.assertEqual(error.exception.status_code, 409)

    def test_add_resource_mismatched_endpoint(self):
        """Create resource with mismatched endpoint type returns 409.

        An InvalidTypeForEndpointError is raised.
        """
        payload = {
            'data': {
                'type': 'posts'
            }
        }
        with self.assertRaises(errors.InvalidTypeForEndpointError) as error:
            models.serializer.post_collection(self.session, payload, 'users')
        self.assertEqual(
            error.exception.detail, 'Expected users, got posts')
        self.assertEqual(error.exception.status_code, 409)

    def test_add_resource_with_missing_data(self):
        """Create resource with missing content data results in 400.

        A BadRequestError is raised.
        """
        payload = {}
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(self.session, payload, 'users')
        self.assertEqual(
            error.exception.detail, 'Request should contain data key')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_with_missing_type(self):
        """Create resource without type results in 409.

        A MissingTypeError is raised.
        """
        payload = {
            'data': {
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                }
            }
        }
        with self.assertRaises(errors.MissingTypeError) as error:
            models.serializer.post_collection(self.session, payload, 'users')
        self.assertEqual(
            error.exception.detail, 'Missing /data/type key in request body')
        self.assertEqual(error.exception.status_code, 409)

    def test_add_resource_with_unknown_field_name(self):
        """Create resource with unknown field results in 409.

        A ValidationError is raised.
        """
        payload = {
            'data': {
                'type': 'users',
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                    'unknown-attribute': 'test'
                }
            }
        }
        with self.assertRaises(errors.ValidationError) as error:
            models.serializer.post_collection(
                self.session, payload, 'users')
        self.assertEqual(error.exception.detail, 'Incompatible data type')
        self.assertEqual(error.exception.status_code, 409)

    def test_add_resource_access_denied(self):
        """Add a resource with access denied results in 403."""
        payload = {
            'data': {
                'type': 'logs'
            }
        }
        with self.assertRaises(errors.PermissionDeniedError) as error:
            models.serializer.post_collection(
                self.session, payload, 'logs')
        self.assertEqual(error.exception.detail, 'CREATE denied on logs.None')
        self.assertEqual(error.exception.status_code, 403)

    def test_add_resource_with_given_id(self):
        """Create resource successfully with specified id."""
        payload = {
            'data': {
                'type': 'users',
                'id': 3,
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'users')
        user = self.session.query(models.User).get(
            response.data['data']['id'])
        # NOTE(review): 'SET-ATTR:' prefix presumably added by a User.first
        # setter in the models module — confirm there.
        self.assertEqual(user.first, 'SET-ATTR:Sally')
        self.assertEqual(user.last, 'Smith')
        self.assertEqual(user.username, 'SallySmith1')
        self.assertEqual(user.password, 'password')

    def test_add_resource_with_invalid_one_to_many_relationships(self):
        """Create resource with invalid one-to-many relationship returns 400.

        In a one-to-many relationship, the data in the relationship must be
        of type array.
        A BadRequestError is raised.
        """
        payload = {
            'data': {
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                },
                'type': 'users',
                'relationships': {
                    'posts': {
                        'data': {
                            'type': 'posts',
                            'id': 1
                        }
                    }
                }
            }
        }
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'users')
        self.assertEqual(error.exception.detail, 'posts must be an array')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_with_no_data_in_many_to_one_relationship(self):
        """Create resource without data in many-to-one relationships returns 400.

        A BadRequestError is raised.
        """
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        self.session.commit()
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'test': {
                            'type': 'users',
                            'id': user.id
                        }
                    }
                }
            }
        }
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'posts')
        self.assertEqual(
            error.exception.detail, 'Missing data key in relationship author')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_when_data_in_many_to_one_relationship_not_dict(self):
        """Create resource with many-to-one relationship whose data is not a dict returns 400.

        A BadRequestError is raised.
        """
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'data': 'Test that not being a dictionary fails'
                    }
                }
            }
        }
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'posts')
        self.assertEqual(error.exception.detail, 'author must be a hash')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_with_invalid_many_to_one_relationship_data(self):
        """Create resource with invalid many-to-one relationship data returns 400.

        A BadRequestError is raised.
        """
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        self.session.commit()
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'data': {
                            'type': 'users',
                            'id': 1,
                            'name': 'Sally'
                        }
                    }
                }
            }
        }
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'posts')
        self.assertEqual(
            error.exception.detail, 'author must have type and id keys')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_with_missing_one_to_many_relationship_type(self):
        """Create resource with missing one-to-many relationship type returns 400.

        The relationship data must contain 'id' and 'type'.
        A BadRequestError is raised.
        """
        payload = {
            'data': {
                'attributes': {
                    'first': 'Sally',
                    'last': 'Smith',
                    'username': 'SallySmith1',
                    'password': 'password',
                },
                'type': 'users',
                'relationships': {
                    'posts': {
                        'data': [{
                            'type': 'posts',
                        }]
                    }
                }
            }
        }
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'users')
        self.assertEqual(
            error.exception.detail, 'posts must have type and id keys')
        self.assertEqual(error.exception.status_code, 400)

    def test_add_resource_with_invalid_json_payload(self):
        """Create resource with invalid json payload returns 400.

        A BadRequestError is raised.
        """
        # A set, not a dict: deliberately not a JSON-style hash.
        payload = {'foo'}
        with self.assertRaises(errors.BadRequestError) as error:
            models.serializer.post_collection(
                self.session, payload, 'users')
        self.assertEqual(
            error.exception.detail, 'Request body should be a JSON hash')
        self.assertEqual(error.exception.status_code, 400)

    @testcases.fragile
    def test_add_resource_with_a_null_relationship(self):
        """Create resource with a null relationship returns 201."""
        payload = {
            'data': {
                'type': 'posts',
                'attributes': {
                    'title': 'Some Title',
                    'content': 'Some Content Inside'
                },
                'relationships': {
                    'author': {
                        'data': None
                    }
                }
            }
        }
        response = models.serializer.post_collection(
            self.session, payload, 'posts')
        expected = {
            'data': {
                'type': 'posts',
                'relationships': {
                    'author': {
                        'links': {
                            'self': '/posts/1/relationships/author',
                            'related': '/posts/1/author'
                        }
                    },
                    'comments': {
                        'links': {
                            'self': '/posts/1/relationships/comments',
                            'related': '/posts/1/comments'
                        }
                    }
                },
                'id': 1,
                'attributes': {
                    'title': u'Some Title',
                    'content': u'Some Content Inside'
                }
            },
            'jsonapi': {
                'version': '1.0'
            },
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            },
            'included': []
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(201, response.status_code)
|
package org.paasta.container.platform.broker.serviceInstance;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.paasta.container.platform.common.api.common.Constants;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.when;
@RunWith(SpringRunner.class)
@TestPropertySource("classpath:application.yml")
/**
 * Unit test for ServiceInstanceService: the repository is mocked, so only the
 * service's list-assembly logic is exercised.
 */
public class ServiceInstanceServiceTest {
    // Fixture identifiers reused across tests.
    private static final String SERVICE_INSTANCE_ID ="9f102f4c-05bc-4bc7-8c8c-adfcfokbcb27e";
    private static final String SERVICE_ORG_ID ="1f102f4c-05bc-4bc7-8c8c-adfcfokbcb27e";
    private static final String NAMESPACE = "cp-namespace";
    private static final String USER_ID = "paasta";

    // Fixture objects built in setUp().
    private static List<ServiceInstance> serviceInstanceList = null;
    private static ServiceInstance serviceInstance = null;
    private static ServiceInstanceList finalServiceInstanceList = null;

    @Mock
    ServiceInstanceRepository serviceInstanceRepository;

    @InjectMocks
    ServiceInstanceService serviceInstanceService;

    @Before
    public void setUp() {
        // One fully-populated instance wrapped in a success-status list.
        serviceInstance = new ServiceInstance();
        serviceInstance.setUserId(USER_ID);
        serviceInstance.setServiceInstanceId(SERVICE_INSTANCE_ID);
        serviceInstance.setOrganizationGuid(SERVICE_ORG_ID);
        serviceInstance.setNamespace(NAMESPACE);
        serviceInstanceList = new ArrayList<>();
        serviceInstanceList.add(serviceInstance);
        finalServiceInstanceList = new ServiceInstanceList(Constants.RESULT_STATUS_SUCCESS, serviceInstanceList);
    }

    @Test
    public void getServiceInstanceList() {
        // Stub the repository lookup and verify the service returns a result.
        when(serviceInstanceRepository.findAllByServiceInstanceId(SERVICE_INSTANCE_ID)).thenReturn(serviceInstanceList);
        ServiceInstanceList result = serviceInstanceService.getServiceInstanceList(SERVICE_INSTANCE_ID);
        assertNotNull(result);
    }
}
|
import numpy as np
import scipy.misc
import skimage.measure
def count_objects(image):
    """Count connected regions in ``image`` via ``skimage.measure.label``.

    NOTE(review): the image is labeled as-is; on a raw grayscale image this
    counts connected runs of equal pixel values, not semantic "objects".
    If object counting is intended, threshold to a binary mask first —
    confirm with the caller.
    """
    _, num_objects = skimage.measure.label(image, return_num=True)
    return num_objects
# Load the sample grayscale image. scipy.misc.face() was deprecated in
# SciPy 1.10 and removed in 1.12; prefer scipy.datasets.face() when present
# and fall back to the legacy location on older SciPy versions.
try:
    from scipy import datasets as _scipy_datasets
    face = _scipy_datasets.face(gray=True)
except ImportError:
    face = scipy.misc.face(gray=True)

# Count the connected regions in the image and report the result.
num_distinct_objects = count_objects(face)
print("Number of distinct objects in the image:", num_distinct_objects)
|
<reponame>rishabhbatra10/india
""" This enables access to indian towns in respective cities """
# @coding: utf-8
# @author: <NAME>
# @email: <EMAIL>
# Write class Town and creating a Town object for every town in database
# python imports
# module imports
from . import cities
TOWNS = []
class Town:
    """A single Indian town, resolved against the city registry.

    The raw postal/geo fields are stored as given; ``city`` is resolved via
    ``cities.lookup`` and ``state`` is taken from the resolved city.
    """

    def __init__(self,
                 pin_code: int,
                 name: str,
                 city: str,
                 latitude: float,
                 longitude: float,
                 accuracy: float):
        # Raw fields, stored unchanged.
        self.pin_code = pin_code
        self.name = name
        self.latitude = latitude
        self.longitude = longitude
        self.accuracy = accuracy
        # Resolve the city name; the town's state comes from the city.
        self.city = cities.lookup(city)
        self.state = self.city.state

    def __repr__(self):
        return f'<Town: {self.name}>'

    def __str__(self):
        return self.name
|
#ifndef INCLUDED_MAP_EDITOR_MODE_CHANGED_EVENT_H
#define INCLUDED_MAP_EDITOR_MODE_CHANGED_EVENT_H
#include "platform/event.h"

#include <string>
#include <utility>
namespace map {
// Fired when the map editor switches modes; carries both the new and the
// previous mode name.
struct EditorModeChangedEvent : public platform::Event
{
	std::string mMode;      // mode just activated
	std::string mPrevMode;  // mode that was active before the switch

	// Takes both strings by value and moves them into place, so callers can
	// pass temporaries/rvalues without paying for an extra copy (the original
	// copied the by-value parameters into the members).
	EditorModeChangedEvent( std::string mode, std::string prevMode )
		: mMode( std::move( mode ) )
		, mPrevMode( std::move( prevMode ) )
	{}
};
} // namespace map
#endif//INCLUDED_MAP_EDITOR_MODE_CHANGED_EVENT_H
//command: "classgenerator.exe" -g "event" -n "map" -c "editor_mode_changed_event" -m "std::string-mode"
|
# Install all workspace dependencies (sourced so exported env vars persist).
. ./install-all.sh
# Type-check, lint, then run the test suite; stop at the first failure.
npm run flow && npm run eslint && npm run test
|
<reponame>chrisprice/honesty-store-1
import apifetch from './apirequest';
import history from '../history';
// Action types for the stock-count update lifecycle.
export const UPDATE_STOCK_COUNT_REQUEST = 'UPDATE_STOCK_COUNT_REQUEST';
export const UPDATE_STOCK_COUNT_SUCCESS = 'UPDATE_STOCK_COUNT_SUCCESS';
export const UPDATE_STOCK_COUNT_FAILURE = 'UPDATE_STOCK_COUNT_FAILURE';

// Action creators — plain objects describing each phase of the update.
const updateStockCountRequest = () => ({
  type: UPDATE_STOCK_COUNT_REQUEST
});

const updateStockCountSuccess = (response, itemId) => ({
  type: UPDATE_STOCK_COUNT_SUCCESS,
  itemId,
  response
});

const updateStockCountFailure = error => ({
  type: UPDATE_STOCK_COUNT_FAILURE,
  error
});
// Thunk: POSTs the new stock count for an item, dispatching request/success/
// failure actions and navigating to the success page on completion.
export const performUpdateStockCount = ({ itemId, storeCode, count }) => async (
  dispatch,
  getState
) => {
  dispatch(updateStockCountRequest());
  const request = {
    url: `/api/v1/store/${storeCode}/item/${itemId}/count`,
    getToken: () => getState().accessToken,
    body: { count },
    method: 'POST'
  };
  try {
    const response = await apifetch(request, dispatch, getState);
    dispatch(updateStockCountSuccess(response, itemId));
    history.push(`/item/${itemId}/update-stock-count/success`);
  } catch (e) {
    dispatch(updateStockCountFailure(e));
  }
};
|
#!/bin/bash
# Lay out a graph with BatchTree: convert DOT -> MTX, run the layout engine,
# then write the computed positions back into a copy of the DOT file.
# Usage: <script> <input.dot> <output.dot>
set -ev
IN_DOT=$1
OUT_DOT=$2
TEMP=${OUT_DOT}__temp
# Robustness fix: quote all path expansions so file names containing spaces
# or glob characters do not break word splitting. The network.mtx*.txt glob
# is deliberately left unquoted so it still expands.
mkdir -p "$TEMP"
python3 src/batchtree_layout/dot2mtx.py "$IN_DOT" "$TEMP/network"
libs/BatchTree/bin/BatchTree -input "$TEMP/network.mtx" -label "$TEMP/network.labels" -output "$TEMP/" -algo 2
python3 src/batchtree_layout/add_pos_to_dot.py "$IN_DOT" "$TEMP"/network.mtx*.txt "$OUT_DOT"
rm -r "$TEMP"
# Remove BatchTree leavings
rm -f Results.txt
|
/**
 * A string-keyed dictionary. Note this shadows the built-in `Map` class in
 * importing modules; new code could use `Record<string, T>` instead, but the
 * alias is kept for existing callers.
 */
export interface Map<T> {
    [s: string]: T;
}
/**
 * Deep equality over primitives and (possibly nested) arrays.
 * Non-array objects and mixed kinds (array vs primitive) compare as unequal.
 */
export function equal(x: any[] | number | string | boolean | null, y: any[] | number | string | boolean | null): boolean {
    // Identical primitives (or the same array reference) are trivially equal.
    if (x === y) return true;
    // One nullish side can never match (both-null already hit the case above).
    if (x == null || y == null) return false;
    // Arrays compare element-wise, recursing for nested arrays.
    if (Array.isArray(x) && Array.isArray(y)) {
        return x.length === y.length && x.every((item, i) => equal(item, y[i]));
    }
    return false;
}
/** Picks a uniformly random element of `l` (undefined if `l` is empty). */
export function chooseRandom<T>(l: T[]): T {
    const index = Math.floor(Math.random() * l.length);
    return l[index];
}
/**
 * Returns the first truthy value produced by `f` over `l`, or undefined.
 * NOTE(review): falsy-but-valid keys (0, '') are skipped — this matches the
 * original behavior; confirm that is intentional for callers with such keys.
 */
export function findKey<T, U>(l: T[], f: (t: T) => U): U | undefined {
    for (let i = 0; i < l.length; i++) {
        const candidate = f(l[i]);
        if (candidate) {
            return candidate;
        }
    }
    return undefined;
}
/** Maps each element to an array and flattens the results one level. */
export function concatMap<T,U>(l: T[], f: (t: T) => U[]): U[] {
    // flatMap performs the same map-then-flatten(1) as the original loops;
    // the wrapper lambda keeps f's single-argument contract.
    return l.flatMap(item => f(item));
}
/** Cartesian product of `ts` and `us`, in ts-major order. */
export function cross<T,U>(ts:T[], us: U[]): [T, U][] {
    return ts.flatMap(t => us.map((u): [T, U] => [t, u]));
}
/**
 * Combines `ts` and `us` pairwise with `f`; the result has `ts.length`
 * entries (if `us` is shorter, `f` receives undefined — as in the original).
 */
export function zipWith<T,U,V>(ts: T[], us: U[], f: (t: T, u: U) => V): V[] {
    const out: V[] = [];
    for (let i = 0; i < ts.length; i++) {
        out.push(f(ts[i], us[i]));
    }
    return out;
}
|
<filename>src/parsers/gltf2/Pose.ts
/** A per-joint transform override: optional rotation, position and scale. */
class PoseJoint {
    //#region MAIN
    constructor(
        public index: number,
        public rot?: number[],
        public pos?: number[],
        public scl?: number[],
    ) {}
    //#endregion
}

/** A named collection of joint overrides parsed from a glTF pose. */
class Pose {
    name: string = '';
    joints: Array<PoseJoint> = [];

    constructor(name?: string) {
        if (name) this.name = name;
    }

    /** Appends an override for the joint at the given index. */
    add(idx: number, rot?: number[], pos?: number[], scl?: number[]): void {
        this.joints.push(new PoseJoint(idx, rot, pos, scl));
    }
}

export { Pose, PoseJoint };
|
package com.nextbreakpoint;
import java.io.IOException;
import java.util.function.Function;
/**
 * Demonstrates the Try monad: mapping, filtering, failure handling,
 * exception mapping and fallback values.
 */
public class TryMain {
    public static void main(String[] args) {
        // Success path: map the value and print it.
        Try.of(() -> doSomething()).map(x -> x.toLowerCase()).ifPresent(System.out::println);
        // Filter keeps the value only when the predicate holds.
        Try.of(() -> doSomething()).filter(v -> "X".equals(v)).ifPresent(System.out::println);
        // Failure path — presumably resolves to the Exception overload of
        // handleException; depends on ifFailure's declared consumer type.
        Try.of(() -> alwaysFail()).ifFailure(TryMain::handleException);
        // After mapper(), failures are IOExceptions, so the IOException
        // overload is presumably selected here instead — confirm against Try's API.
        Try.of(() -> alwaysFail()).mapper(mapper()).ifFailure(TryMain::handleException);
        // or() supplies a fallback value when the primary computation fails.
        Try.of(() -> alwaysFail()).or(() -> "Y").ifPresent(System.out::println);
    }

    // Wraps any non-IOException failure into an IOException("IO Error", cause).
    private static Function<Exception, IOException> mapper() {
        return e -> (e instanceof IOException) ? (IOException)e : new IOException("IO Error", e);
    }

    // Handler for generic failures.
    private static void handleException(Exception e) {
        System.out.println("Exception: " + e.getMessage());
    }

    // Handler for IO failures.
    private static void handleException(IOException e) {
        System.out.println("IOException: " + e.getMessage());
    }

    // Always succeeds with "X".
    public static String doSomething() throws Exception {
        return "X";
    }

    // Always throws, to exercise the failure paths.
    public static String alwaysFail() throws Exception {
        throw new Exception("Error");
    }
}
|
def all_subsets(s):
    """Return a list of every subset of the sequence ``s``.

    Produces ``2**len(s)`` subsets (including the empty subset and the full
    sequence), each as a list, ordered by the counter ``i``: bit ``j`` of the
    zero-padded binary mask of ``i`` selects ``s[j]``.
    """
    subsets = []
    for i in range(2 ** len(s)):
        # Bug fix: bin(i)[2:] is not zero-padded, so bitmask[j] raised
        # IndexError whenever i's binary form was shorter than len(s)
        # (i.e. for every input with more than one element). zfill pads
        # the mask to exactly len(s) digits.
        bitmask = bin(i)[2:].zfill(len(s))
        subset = []
        for j in range(len(s)):
            if bitmask[j] == "1":
                subset.append(s[j])
        subsets.append(subset)
    return subsets
|
<gh_stars>0
/*
Navicat MySQL Data Transfer
Source Server : localhost
Source Server Version : 50553
Source Host : localhost:3306
Source Database : 91yxq_manage
Target Server Type : MYSQL
Target Server Version : 50553
File Encoding : 65001
Date: 2017-12-29 10:28:55
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for 91yxq_ad
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_ad`;
-- Ad placement slots: one row per advertising position (name, preview image,
-- banner image, click counter, display flag). Seeded by the INSERTs below.
CREATE TABLE `91yxq_ad` (
  `id` int(10) NOT NULL AUTO_INCREMENT COMMENT '广告位ID',
  `game_id` int(10) NOT NULL DEFAULT '0' COMMENT '游戏ID',
  `ad_name` varchar(100) NOT NULL DEFAULT '' COMMENT '广告位名称',
  `position_pic` varchar(200) NOT NULL DEFAULT '' COMMENT '位置预览图片',
  `ad_pic` varchar(200) NOT NULL DEFAULT '' COMMENT '广告位图片',
  -- NOTE(review): this column is clearly a click counter, but its COMMENT
  -- says '工会ID' (guild ID) — looks like a copy-paste error in the metadata;
  -- verify and correct the COMMENT in a follow-up migration.
  `click_count` int(10) NOT NULL DEFAULT '0' COMMENT '工会ID',
  `created_at` int(10) NOT NULL DEFAULT '0' COMMENT '创建时间',
  `updated_at` int(10) NOT NULL DEFAULT '0' COMMENT '修改时间',
  `state` tinyint(1) NOT NULL DEFAULT '1' COMMENT '1代表显示,0代表不显示',
  PRIMARY KEY (`id`),
  UNIQUE KEY `ad_name` (`ad_name`)
) ENGINE=InnoDB AUTO_INCREMENT=29 DEFAULT CHARSET=utf8 COMMENT='广告位列表';
-- ----------------------------
-- Records of 91yxq_ad
-- ----------------------------
INSERT INTO `91yxq_ad` VALUES ('13', '1', '搜索 右侧广告 150*280', 'static/backend/img/ad/20170414134504_643.gif', 'static/backend/img/ad/20170414134504_778.gif', '16', '1491555378', '1492148704', '1');
INSERT INTO `91yxq_ad` VALUES ('14', '1', '论坛/群组 帖间通栏广告 800*60 ', 'static/backend/img/ad/20170407165939_516.gif', 'static/backend/img/ad/20170407174645_104.gif', '4', '1491555579', '1491558405', '1');
INSERT INTO `91yxq_ad` VALUES ('15', '1', '论坛/群组 帖子列表帖位广告 960*60 ', 'static/backend/img/ad/20170407170033_776.gif', 'static/backend/img/ad/20170407174706_589.gif', '1', '1491555633', '1491558426', '1');
INSERT INTO `91yxq_ad` VALUES ('16', '1', '全局 右下角广告 180× 180 ', 'static/backend/img/ad/20170407170156_560.gif', 'static/backend/img/ad/20170407174729_528.gif', '343', '1491555716', '1491558449', '1');
INSERT INTO `91yxq_ad` VALUES ('17', '1', '门户/论坛/群组/空间 格子广告 390 × 120 ', 'static/backend/img/ad/20170407170245_928.gif', 'static/backend/img/ad/20170407174753_752.gif', '1', '1491555765', '1491558473', '1');
INSERT INTO `91yxq_ad` VALUES ('18', '1', '全局 漂浮广告 180 ×180 ', 'static/backend/img/ad/20170407170318_568.gif', 'static/backend/img/ad/20170407174811_441.gif', '2', '1491555798', '1491558491', '1');
INSERT INTO `91yxq_ad` VALUES ('19', '1', '论坛 分类间广告 960 × 130 ', 'static/backend/img/ad/20170407170357_449.gif', 'static/backend/img/ad/20170407174831_868.gif', '0', '1491555837', '1491558511', '1');
INSERT INTO `91yxq_ad` VALUES ('20', '1', '全局 页尾通栏广告 1190*70 ', 'static/backend/img/ad/20170407171051_910.gif', 'static/backend/img/ad/20170407174847_769.gif', '0', '1491556251', '1491558527', '1');
INSERT INTO `91yxq_ad` VALUES ('21', '1', '全局 对联广告 150*300 ', 'static/backend/img/ad/20170407171110_934.gif', 'static/backend/img/ad/20170407174909_662.gif', '0', '1491556270', '1491558549', '1');
INSERT INTO `91yxq_ad` VALUES ('22', '1', '全局 页头通栏广告 960 × 130 ', 'static/backend/img/ad/20170407171130_766.gif', 'static/backend/img/ad/20170407174924_942.gif', '3', '1491556290', '1491558564', '1');
INSERT INTO `91yxq_ad` VALUES ('23', '1', '全局 页头二级导航栏广告 960 × 60 ', 'static/backend/img/ad/20170407171151_767.gif', 'static/backend/img/ad/20170407174945_747.gif', '0', '1491556311', '1491558585', '1');
INSERT INTO `91yxq_ad` VALUES ('24', '1', '论坛/群组 帖内广告 200 × 300 ', 'static/backend/img/ad/20170407171221_205.gif', 'static/backend/img/ad/20170407175000_138.gif', '0', '1491556341', '1491558600', '1');
INSERT INTO `91yxq_ad` VALUES ('25', '1', '空间 日志广告 250×300 ', 'static/backend/img/ad/20170407171249_310.gif', 'static/backend/img/ad/20170407175018_631.gif', '0', '1491556369', '1491558618', '1');
INSERT INTO `91yxq_ad` VALUES ('26', '1', '空间 动态广告 960*240 ', 'static/backend/img/ad/20170407172049_162.gif', 'static/backend/img/ad/20170407175055_653.gif', '0', '1491556849', '1491558655', '1');
INSERT INTO `91yxq_ad` VALUES ('27', '1', '门户 文章列表广告 960*240 ', 'static/backend/img/ad/20170407172112_163.gif', 'static/backend/img/ad/20170407175107_179.gif', '0', '1491556872', '1491558667', '1');
INSERT INTO `91yxq_ad` VALUES ('28', '1', '门户 文章广告 250×300 ', 'static/backend/img/ad/20170407172138_907.gif', 'static/backend/img/ad/20170407175117_238.gif', '0', '1491556898', '1491558677', '1');
-- ----------------------------
-- Table structure for 91yxq_admin_logger
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_admin_logger`;
-- Back-office audit log: one row per admin action (login/logout/CRUD per the
-- records below), recording who (user_id), what (catalog, resources,
-- module/controller/action), where (url, ip) and when (create_time, unix
-- seconds).
CREATE TABLE `91yxq_admin_logger` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `user_id` int(11) NOT NULL,
  `catalog` varchar(50) NOT NULL,
  `resources` varchar(128) NOT NULL,
  `module_id` varchar(128) NOT NULL,
  `controller_id` varchar(128) NOT NULL,
  `action_id` varchar(128) NOT NULL,
  `url` varchar(128) NOT NULL,
  `intro` varchar(256) NOT NULL,
  `ip` varchar(50) NOT NULL,
  `create_time` int(11) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=36 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_admin_logger
-- ----------------------------
INSERT INTO `91yxq_admin_logger` VALUES ('1', '1', 'delete', 'user_user_del', 'user', 'user', 'del', '/admin.php?r=user%2Fuser%2Findex', '删除用户 - service(id:3)', '127.0.0.1', '1491801831');
INSERT INTO `91yxq_admin_logger` VALUES ('2', '1', 'create', 'user_user_create', 'user', 'user', 'create', '/admin.php?r=user%2Fuser%2Findex', '创建后台用户 - service', '127.0.0.1', '1491801851');
INSERT INTO `91yxq_admin_logger` VALUES ('3', '1', 'update', 'user_user_pass-word-reset', 'user', 'user', 'pass-word-reset', '/admin.php?r=user%2Fuser%2Findex', '重置用户密码 - service(id:4)', '127.0.0.1', '1491801982');
INSERT INTO `91yxq_admin_logger` VALUES ('4', '1', 'delete', 'user_user_del', 'user', 'user', 'del', '/admin.php?r=user%2Fuser%2Findex', '删除用户 - service(id:4)', '127.0.0.1', '1491802980');
INSERT INTO `91yxq_admin_logger` VALUES ('5', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1491808409');
INSERT INTO `91yxq_admin_logger` VALUES ('6', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492507168');
INSERT INTO `91yxq_admin_logger` VALUES ('7', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492572000');
INSERT INTO `91yxq_admin_logger` VALUES ('8', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492572360');
INSERT INTO `91yxq_admin_logger` VALUES ('9', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492579549');
INSERT INTO `91yxq_admin_logger` VALUES ('10', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492583750');
INSERT INTO `91yxq_admin_logger` VALUES ('11', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492583942');
INSERT INTO `91yxq_admin_logger` VALUES ('12', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492583953');
INSERT INTO `91yxq_admin_logger` VALUES ('13', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492583982');
INSERT INTO `91yxq_admin_logger` VALUES ('14', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492583988');
INSERT INTO `91yxq_admin_logger` VALUES ('15', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492584679');
INSERT INTO `91yxq_admin_logger` VALUES ('16', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1492584841');
INSERT INTO `91yxq_admin_logger` VALUES ('17', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1492584912');
INSERT INTO `91yxq_admin_logger` VALUES ('18', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1493100091');
INSERT INTO `91yxq_admin_logger` VALUES ('19', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1493100110');
INSERT INTO `91yxq_admin_logger` VALUES ('20', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1502331965');
INSERT INTO `91yxq_admin_logger` VALUES ('21', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1503277986');
INSERT INTO `91yxq_admin_logger` VALUES ('22', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1503278013');
INSERT INTO `91yxq_admin_logger` VALUES ('23', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1503278117');
INSERT INTO `91yxq_admin_logger` VALUES ('24', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1503278121');
INSERT INTO `91yxq_admin_logger` VALUES ('25', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1511316027');
INSERT INTO `91yxq_admin_logger` VALUES ('26', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1514512538');
INSERT INTO `91yxq_admin_logger` VALUES ('27', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1514512770');
INSERT INTO `91yxq_admin_logger` VALUES ('28', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1514513130');
INSERT INTO `91yxq_admin_logger` VALUES ('29', '1', 'update', 'user_role_update', 'user', 'role', 'update', '/index.php?r=user%2Frole%2Findex', '角色管理-修改 - 普通管理员(name:manager)', '127.0.0.1', '1514513205');
INSERT INTO `91yxq_admin_logger` VALUES ('30', '1', 'update', 'user_user_pass-word-reset', 'user', 'user', 'pass-word-reset', '/index.php?r=user%2Fuser%2Findex', '重置用户密码 - manager(id:2)', '127.0.0.1', '1514513241');
INSERT INTO `91yxq_admin_logger` VALUES ('31', '1', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:admin(id:1)', '127.0.0.1', '1514513253');
INSERT INTO `91yxq_admin_logger` VALUES ('32', '2', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:manager(id:2)', '127.0.0.1', '1514513279');
INSERT INTO `91yxq_admin_logger` VALUES ('33', '2', 'logout', 'app-backend_site_logout', 'app-backend', 'site', 'logout', '', '成功登出:manager(id:2)', '127.0.0.1', '1514513346');
INSERT INTO `91yxq_admin_logger` VALUES ('34', '1', 'login', 'app-backend_site_login', 'app-backend', 'site', 'login', '', '成功登录:admin(id:1)', '127.0.0.1', '1514513353');
INSERT INTO `91yxq_admin_logger` VALUES ('35', '1', 'update', 'user_user_update', 'user', 'user', 'update', '/index.php?r=user%2Fuser%2Findex', '修改用户信息 - test11(id:2)', '127.0.0.1', '1514514306');
-- ----------------------------
-- Table structure for 91yxq_auth_assignment
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_auth_assignment`;
-- RBAC assignment: maps an auth item (role) name to a user id.
-- NOTE(review): user_id has no FK to `91yxq_user` (only item_name is
-- constrained) — presumably intentional, as user_id is varchar here while
-- the user table's id is int; confirm.
CREATE TABLE `91yxq_auth_assignment` (
  `item_name` varchar(64) NOT NULL,
  `user_id` varchar(64) NOT NULL,
  `created_at` int(11) DEFAULT NULL,
  PRIMARY KEY (`item_name`,`user_id`),
  CONSTRAINT `91yxq_auth_assignment_ibfk_1` FOREIGN KEY (`item_name`) REFERENCES `91yxq_auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_auth_assignment
-- ----------------------------
INSERT INTO `91yxq_auth_assignment` VALUES ('admin', '1', '1491800027');
INSERT INTO `91yxq_auth_assignment` VALUES ('manager', '2', '1514513234');
INSERT INTO `91yxq_auth_assignment` VALUES ('manager', '3', '1491801778');
INSERT INTO `91yxq_auth_assignment` VALUES ('manager', '4', '1491801966');
-- ----------------------------
-- Table structure for 91yxq_auth_item
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_auth_item`;
-- RBAC auth items. Judging by the seeded records below: type 1 rows are
-- roles (admin/manager/service), type 2 rows are fine-grained permissions
-- named module_controller_action. rule_name optionally links to
-- `91yxq_auth_rule` (none seeded).
CREATE TABLE `91yxq_auth_item` (
  `name` varchar(64) NOT NULL,
  `type` int(11) NOT NULL,
  `description` text,
  `rule_name` varchar(64) DEFAULT NULL,
  `data` text,
  `created_at` int(11) DEFAULT NULL,
  `updated_at` int(11) DEFAULT NULL,
  PRIMARY KEY (`name`),
  KEY `rule_name` (`rule_name`),
  KEY `type` (`type`),
  CONSTRAINT `91yxq_auth_item_ibfk_1` FOREIGN KEY (`rule_name`) REFERENCES `91yxq_auth_rule` (`name`) ON DELETE SET NULL ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_auth_item
-- ----------------------------
INSERT INTO `91yxq_auth_item` VALUES ('admin', '1', '超级管理员', null, null, '1491798921', '1491798921');
INSERT INTO `91yxq_auth_item` VALUES ('ad_ad_create', '2', '广告管理_创建', null, null, '1491800580', '1491800580');
INSERT INTO `91yxq_auth_item` VALUES ('ad_ad_delete', '2', '广告管理_删除', null, null, '1491800625', '1491800625');
INSERT INTO `91yxq_auth_item` VALUES ('ad_ad_index', '2', '广告管理_列表', null, null, '1491800550', '1491800550');
INSERT INTO `91yxq_auth_item` VALUES ('ad_ad_update', '2', '广告管理_修改', null, null, '1491800599', '1491800599');
INSERT INTO `91yxq_auth_item` VALUES ('ad_ad_view', '2', '广告管理_查看', null, null, '1491800650', '1491800650');
INSERT INTO `91yxq_auth_item` VALUES ('manager', '1', '普通管理员', null, null, '1491799307', '1514513205');
INSERT INTO `91yxq_auth_item` VALUES ('service', '1', '客服', null, null, '1491801875', '1491801875');
INSERT INTO `91yxq_auth_item` VALUES ('user_resources_create', '2', '资源管理_创建', null, null, '1491800354', '1491800354');
INSERT INTO `91yxq_auth_item` VALUES ('user_resources_delete', '2', '资源管理_删除', null, null, '1491800401', '1491800401');
INSERT INTO `91yxq_auth_item` VALUES ('user_resources_index', '2', '资源管理_列表', null, null, '1491800331', '1491800331');
INSERT INTO `91yxq_auth_item` VALUES ('user_resources_update', '2', '资源管理_修改', null, null, '1491800378', '1491800378');
INSERT INTO `91yxq_auth_item` VALUES ('user_resources_view', '2', '资源管理_查看', null, null, '1491800747', '1491800747');
INSERT INTO `91yxq_auth_item` VALUES ('user_role_create', '2', '角色管理_创建', null, null, '1491800455', '1491800455');
INSERT INTO `91yxq_auth_item` VALUES ('user_role_delete', '2', '角色管理_删除', null, null, '1491800501', '1491800501');
INSERT INTO `91yxq_auth_item` VALUES ('user_role_index', '2', '角色管理_列表', null, null, '1491800434', '1491800434');
INSERT INTO `91yxq_auth_item` VALUES ('user_role_update', '2', '角色管理_修改', null, null, '1491800484', '1491800484');
INSERT INTO `91yxq_auth_item` VALUES ('user_user_create', '2', '用户管理_创建', null, null, '1491800220', '1491800244');
INSERT INTO `91yxq_auth_item` VALUES ('user_user_del', '2', '用户管理_删除', null, null, '1491800277', '1491800277');
INSERT INTO `91yxq_auth_item` VALUES ('user_user_index', '2', '用户管理_列表', null, null, '1491800180', '1491800237');
INSERT INTO `91yxq_auth_item` VALUES ('user_user_update', '2', '用户管理_修改', null, null, '1491800260', '1491800260');
-- ----------------------------
-- Table structure for 91yxq_auth_item_child
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_auth_item_child`;
-- RBAC item hierarchy: parent/child pairs of auth item names (per the
-- records below, e.g. role 'manager' -> its list permissions). Both sides
-- cascade with `91yxq_auth_item`.
CREATE TABLE `91yxq_auth_item_child` (
  `parent` varchar(64) NOT NULL,
  `child` varchar(64) NOT NULL,
  PRIMARY KEY (`parent`,`child`),
  KEY `child` (`child`),
  CONSTRAINT `91yxq_auth_item_child_ibfk_1` FOREIGN KEY (`parent`) REFERENCES `91yxq_auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE,
  CONSTRAINT `91yxq_auth_item_child_ibfk_2` FOREIGN KEY (`child`) REFERENCES `91yxq_auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_auth_item_child
-- ----------------------------
INSERT INTO `91yxq_auth_item_child` VALUES ('manager', 'ad_ad_index');
INSERT INTO `91yxq_auth_item_child` VALUES ('manager', 'user_resources_index');
INSERT INTO `91yxq_auth_item_child` VALUES ('manager', 'user_role_index');
-- ----------------------------
-- Table structure for 91yxq_auth_rule
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_auth_rule`;
-- RBAC rule definitions referenced by `91yxq_auth_item`.rule_name.
-- No rules are seeded in this dump.
CREATE TABLE `91yxq_auth_rule` (
  `name` varchar(64) NOT NULL,
  `data` text,
  `created_at` int(11) DEFAULT NULL,
  `updated_at` int(11) DEFAULT NULL,
  PRIMARY KEY (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_auth_rule
-- ----------------------------
-- ----------------------------
-- Table structure for 91yxq_user
-- ----------------------------
DROP TABLE IF EXISTS `91yxq_user`;
-- Back-office user accounts: credentials (auth_key, password_hash, optional
-- reset token) plus role/status/group flags and unix timestamps.
CREATE TABLE `91yxq_user` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `username` varchar(255) NOT NULL,
  `nickname` varchar(255) NOT NULL,
  `auth_key` varchar(32) NOT NULL,
  `password_hash` varchar(255) NOT NULL,
  `password_reset_token` varchar(255) DEFAULT NULL,
  `email` varchar(255) NOT NULL,
  `role` smallint(6) NOT NULL DEFAULT '10',
  `status` smallint(6) NOT NULL DEFAULT '10',
  `group_type` tinyint(2) NOT NULL DEFAULT '4',
  `created_at` int(11) NOT NULL,
  `updated_at` int(11) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of 91yxq_user
-- ----------------------------
INSERT INTO `91yxq_user` VALUES ('1', 'admin', 'admin', 'fv31ShgcQ996U3RTWE8YYO7bF7EGXGoq', '965eb72c92a549dd', null, '', '10', '10', '4', '1491800027', '1491800027');
INSERT INTO `91yxq_user` VALUES ('2', 'test11', '普通管理员', 'ySzxFhyeRhWu02gZNKn39rCb3jQPFJmX', '965eb72c92a549dd', null, '', '10', '10', '4', '1491800694', '1514514306');
|
<reponame>seawindnick/javaFamily
package com.java.study.algorithm.zuo.dadvanced.advanced_class_03;
/**
* 波那契系列问题的递归和动态规划
* 【题目】
* 给定整数N,返回斐波那契数列的第N项。
* 【补充题目1】
* 给定整数N,代表台阶数,一次可以跨2个或者1个台阶,返回有多少种走法。
* 【举例】
* N=3,可以三次都跨1个台阶;也
* 可以先跨2个台阶,再跨1个台阶;
* 还可以先跨1个台阶,再跨2个台阶。所以有 三种走法,返回3。
* 【补充题目2】
* 假设农场中成熟的母牛每年只会生1头小母牛,并且永远不会死。
* 第一年农场有1只成熟的母牛,从第二年开始, 母牛开始生小母牛。
* 每只小母牛3年之后成熟又可以生小母牛。给定整数N,求出N年后牛的数量。
* <p>
* 【补充题目3】
*/
public class Code_06_FibonacciProblem {
    /**
     * Iterative O(n) computation with O(1) extra space.
     *
     * NOTE(review): with base cases 1 (n==1) and 2 (n==2) this produces
     * 1, 2, 3, 5, 8, ... — the staircase-count sequence of supplementary
     * problem 1 (ways to climb N steps taking 1 or 2 at a time), which is
     * the standard Fibonacci sequence shifted by one position. If the
     * literal "Nth Fibonacci number" of the main problem is intended, the
     * n == 2 case should return 1 — confirm which problem this method is
     * meant to solve before renaming or changing it.
     *
     * @throws IllegalArgumentException for n <= 0
     */
    public static int Fibonacci(int n) {
        if (n <= 0) {
            throw new IllegalArgumentException("不支持");
        }
        if (n == 1) {
            return 1;
        }
        if (n == 2) {
            return 2;
        }
        // Rolling pair of previous terms; sum becomes the current term.
        int pre1 = 1;
        int pre2 = 2;
        int sum = 0;
        for (int i = 3; i <= n; i++) {
            sum = pre1 + pre2;
            pre1 = pre2;
            pre2 = sum;
        }
        return sum;
    }
    public static void main(String[] args) {
        // Prints 8 with the current base cases (1,2,3,5,8).
        System.out.println(Fibonacci(5));
    }
}
|
#!/bin/bash
# Enable serial console: point the kernel at ttyS0 and configure GRUB's
# serial terminal, then regenerate the GRUB config.
sudo sed -i 's/^GRUB_CMDLINE_LINUX=.*$/GRUB_CMDLINE_LINUX="console=ttyS0,115200n8 console=tty1"/g' /etc/default/grub
# Fix: the original used `sudo echo ... >> /etc/default/grub`, but the
# redirection is performed by the *unprivileged* shell, so the append fails
# with "Permission denied". `sudo tee -a` performs the append as root.
if sudo grep -q 'GRUB_SERIAL_COMMAND' /etc/default/grub; then
    sudo sed -i 's/^GRUB_SERIAL_COMMAND=.*$/GRUB_SERIAL_COMMAND="serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1"/g' /etc/default/grub
else
    echo 'GRUB_SERIAL_COMMAND="serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1"' | sudo tee -a /etc/default/grub > /dev/null
fi
sudo update-grub
|
<gh_stars>1-10
import { Post } from './Post';
import { Comment } from './Comment';
import sequelize from './sequelize';
// Wire up the one-to-many relation between the models: a Post has many
// Comments, and each Comment belongs to (holds a reference back to) its Post.
Post.hasMany(Comment);
Comment.belongsTo(Post);

// Re-export the configured models and the shared sequelize instance.
export {
  Post,
  Comment,
  sequelize,
}
|
import React, { useState } from 'react';
const App = () => {
const [word, setWord] = useState('');
const options = ['apple', 'banana', 'mango', 'orange'];
const handleChange = (e) => {
const value = e.target.value;
setWord(value);
}
const filteredOptions = options.filter(option => option.includes(word));
return (
<div>
<input type="text" onChange={handleChange} value={word}/>
{filteredOptions.map(option => (
<div key={option}>{option}</div>
))}
</div>
);
};
export default App;
|
const router = require("express").Router()
// NOTE(review): the variable is named adminController but it is loaded from
// pageController — presumably page-listing handlers; confirm the naming.
const adminController = require("../../controllers/pageController")
// 📄 "/api/page" +
// GET /api/page/:sol/:page -> delegates to the controller's findAll handler.
router.route("/:sol/:page")
  .get(adminController.findAll)
module.exports = router
|
#!/bin/bash
# Install this package plus its runtime and development requirements into the
# active Python environment.
# This needs to work for vagrant, Travis builds, and Docker builds,
# in a python virtualenv. In the virtual machine provisioning,
# we're passing the directory this should be run from. In travis-ci,
# it's run from the root of the repository.
if [ "$#" -eq 1 ]; then
    # Fix: quote "$1" so paths containing spaces work, and abort if the cd
    # fails so pip doesn't install from the wrong directory.
    cd "$1" || exit 1
fi
# Install the requirements for this package as well as this module.
pip install -r requirements/python
pip install .
# Install the requirements for this package in development
pip install -r requirements/python-dev
|
import { InputField } from '@segment/actions-core'
/**
 * The common fields defined by Amplitude's events api
 * @see {@link https://developers.amplitude.com/docs/http-api-v2#keys-for-the-event-argument}
 */
export const eventSchema: Record<string, InputField> = {
  user_id: {
    label: 'User ID',
    type: 'string',
    allowNull: true,
    description:
      'A readable ID specified by you. Must have a minimum length of 5 characters. Required unless device ID is present. **Note:** If you send a request with a user ID that is not in the Amplitude system yet, then the user tied to that ID will not be marked new until their first event.',
    default: {
      '@path': '$.userId'
    }
  },
  device_id: {
    label: 'Device ID',
    type: 'string',
    description:
      'A device-specific identifier, such as the Identifier for Vendor on iOS. Required unless user ID is present. If a device ID is not sent with the event, it will be set to a hashed version of the user ID.',
    default: {
      '@if': {
        exists: { '@path': '$.context.device.id' },
        then: { '@path': '$.context.device.id' },
        else: { '@path': '$.anonymousId' }
      }
    }
  },
  event_type: {
    label: 'Event Type',
    type: 'string',
    description: 'A unique identifier for your event.',
    required: true,
    default: {
      '@path': '$.event'
    }
  },
  session_id: {
    label: 'Session ID',
    type: 'datetime',
    // Fix: description said "a particular system" — the field associates
    // events with a session.
    description:
      'The start time of the session, necessary if you want to associate events with a particular session. To use automatic Amplitude session tracking in browsers, enable Analytics 2.0 on your connected source.',
    default: {
      '@path': '$.integrations.Amplitude.session_id'
    }
  },
  time: {
    label: 'Timestamp',
    type: 'datetime',
    description:
      'The timestamp of the event. If time is not sent with the event, it will be set to the request upload time.',
    default: {
      '@path': '$.timestamp'
    }
  },
  event_properties: {
    label: 'Event Properties',
    type: 'object',
    description:
      'An object of key-value pairs that represent additional data to be sent along with the event. You can store property values in an array, but note that Amplitude only supports one-dimensional arrays. Date values are transformed into string values. Object depth may not exceed 40 layers.',
    default: {
      '@path': '$.properties'
    }
  },
  user_properties: {
    label: 'User Properties',
    type: 'object',
    description:
      'An object of key-value pairs that represent additional data tied to the user. You can store property values in an array, but note that Amplitude only supports one-dimensional arrays. Date values are transformed into string values. Object depth may not exceed 40 layers.',
    default: {
      '@path': '$.traits'
    }
  },
  groups: {
    label: 'Groups',
    type: 'object',
    description:
      'Groups of users for the event as an event-level group. You can only track up to 5 groups. **Note:** This Amplitude feature is only available to Enterprise customers who have purchased the Accounts add-on.'
  },
  app_version: {
    label: 'App Version',
    type: 'string',
    description: 'The current version of your application.',
    default: {
      '@path': '$.context.app.version'
    }
  },
  platform: {
    label: 'Platform',
    type: 'string',
    description: 'Platform of the device.',
    default: {
      '@path': '$.context.device.type'
    }
  },
  os_name: {
    label: 'OS Name',
    type: 'string',
    description: 'The name of the mobile operating system or browser that the user is using.',
    default: {
      '@path': '$.context.os.name'
    }
  },
  os_version: {
    label: 'OS Version',
    type: 'string',
    description: 'The version of the mobile operating system or browser the user is using.',
    default: {
      '@path': '$.context.os.version'
    }
  },
  device_brand: {
    label: 'Device Brand',
    type: 'string',
    description: 'The device brand that the user is using.',
    default: {
      '@path': '$.context.device.brand'
    }
  },
  device_manufacturer: {
    label: 'Device Manufacturer',
    type: 'string',
    description: 'The device manufacturer that the user is using.',
    default: {
      '@path': '$.context.device.manufacturer'
    }
  },
  device_model: {
    label: 'Device Model',
    type: 'string',
    description: 'The device model that the user is using.',
    default: {
      '@path': '$.context.device.model'
    }
  },
  carrier: {
    label: 'Carrier',
    type: 'string',
    description: 'The carrier that the user is using.',
    default: {
      '@path': '$.context.network.carrier'
    }
  },
  country: {
    label: 'Country',
    type: 'string',
    description: 'The current country of the user.',
    default: {
      '@path': '$.context.location.country'
    }
  },
  region: {
    label: 'Region',
    type: 'string',
    description: 'The current region of the user.',
    default: {
      '@path': '$.context.location.region'
    }
  },
  city: {
    label: 'City',
    type: 'string',
    description: 'The current city of the user.',
    default: {
      '@path': '$.context.location.city'
    }
  },
  dma: {
    label: 'Designated Market Area',
    type: 'string',
    description: 'The current Designated Market Area of the user.'
  },
  language: {
    label: 'Language',
    type: 'string',
    description: 'The language set by the user.',
    default: {
      '@path': '$.context.locale'
    }
  },
  price: {
    label: 'Price',
    type: 'number',
    description:
      'The price of the item purchased. Required for revenue data if the revenue field is not sent. You can use negative values to indicate refunds.',
    default: {
      '@path': '$.properties.price'
    }
  },
  quantity: {
    label: 'Quantity',
    type: 'integer',
    description: 'The quantity of the item purchased. Defaults to 1 if not specified.',
    default: {
      '@path': '$.properties.quantity'
    }
  },
  revenue: {
    label: 'Revenue',
    type: 'number',
    description:
      'Revenue = price * quantity. If you send all 3 fields of price, quantity, and revenue, then (price * quantity) will be used as the revenue value. You can use negative values to indicate refunds. **Note:** You will need to explicitly set this if you are using the Amplitude in cloud-mode.',
    default: {
      '@path': '$.properties.revenue'
    }
  },
  productId: {
    label: 'Product ID',
    type: 'string',
    description: 'An identifier for the item purchased. You must send a price and quantity or revenue with this field.',
    default: {
      '@path': '$.properties.productId'
    }
  },
  revenueType: {
    label: 'Revenue Type',
    type: 'string',
    description:
      'The type of revenue for the item purchased. You must send a price and quantity or revenue with this field.',
    default: {
      '@path': '$.properties.revenueType'
    }
  },
  location_lat: {
    label: 'Latitude',
    type: 'number',
    description: 'The current Latitude of the user.',
    default: {
      '@path': '$.context.location.latitude'
    }
  },
  location_lng: {
    // Fix: user-facing label typo 'Longtitude' -> 'Longitude'.
    label: 'Longitude',
    type: 'number',
    description: 'The current Longitude of the user.',
    default: {
      '@path': '$.context.location.longitude'
    }
  },
  ip: {
    label: 'IP Address',
    type: 'string',
    description:
      'The IP address of the user. Use "$remote" to use the IP address on the upload request. Amplitude will use the IP address to reverse lookup a user\'s location (city, country, region, and DMA). Amplitude has the ability to drop the location and IP address from events once it reaches our servers. You can submit a request to Amplitude\'s platform specialist team here to configure this for you.',
    default: {
      '@path': '$.context.ip'
    }
  },
  idfa: {
    label: 'Identifier For Advertiser (IDFA)',
    type: 'string',
    description: 'Identifier for Advertiser. _(iOS)_',
    default: {
      '@if': {
        exists: { '@path': '$.context.device.advertisingId' },
        then: { '@path': '$.context.device.advertisingId' },
        else: { '@path': '$.context.device.idfa' }
      }
    }
  },
  idfv: {
    label: 'Identifier For Vendor (IDFV)',
    type: 'string',
    description: 'Identifier for Vendor. _(iOS)_',
    default: {
      '@path': '$.context.device.id'
    }
  },
  adid: {
    label: 'Google Play Services Advertising ID',
    type: 'string',
    description: 'Google Play Services advertising ID. _(Android)_',
    default: {
      '@if': {
        exists: { '@path': '$.context.device.advertisingId' },
        then: { '@path': '$.context.device.advertisingId' },
        else: { '@path': '$.context.device.idfa' }
      }
    }
  },
  android_id: {
    label: 'Android ID',
    type: 'string',
    description: 'Android ID (not the advertising ID). _(Android)_'
  },
  event_id: {
    label: 'Event ID',
    type: 'integer',
    description:
      'An incrementing counter to distinguish events with the same user ID and timestamp from each other. Amplitude recommends you send an event ID, increasing over time, especially if you expect events to occur simultanenously.'
  },
  insert_id: {
    label: 'Insert ID',
    type: 'string',
    description:
      'Amplitude will deduplicate subsequent events sent with this ID we have already seen before within the past 7 days. Amplitude recommends generating a UUID or using some combination of device ID, user ID, event type, event ID, and time.'
  }
}
|
#!/bin/bash
# Query sinya.com.tw's DIY part picker for the sub-category id given as $1
# and print "<sub_id> -> <count>" where count is the number of product
# titles returned.
# Pipeline: POST sub_id -> mark each <i> tag with ### -> extract the
# "title=...###" spans -> strip remaining HTML tags and the ### marker ->
# squeeze/trim whitespace -> count lines -> prefix the result with "$1 -> ".
curl --data "sub_id=$1" https://www.sinya.com.tw/diy/show_option/ |& sed 's/<i>/###/g'|grep -o 'title=[^#]*###'|sed 's/<[^>]*>/ /g;s/^[^>]*>//;s/###//;s/[ ]\+/ /g;s/^ //'|wc -l|sed "s/^/$1 -> /"
# Earlier variant kept for reference: scrape the category <OPTION> list
# from the main DIY page.
#curl -vs https://www.sinya.com.tw/diy/|& sed '/OPTION/!d;s/[^>]*>//;/OPTION value/d;/OPTGROUP/d;'
|
<reponame>nanagirl0720/pinggu_yj<gh_stars>1-10
package com.ruoyi.project.system.spdeptXm.controller;
import java.io.InputStream;
import java.util.List;
import javax.servlet.http.HttpSession;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import com.ruoyi.common.utils.poi.ExcelUtil;
import com.ruoyi.framework.aspectj.lang.annotation.Log;
import com.ruoyi.framework.aspectj.lang.enums.BusinessType;
import com.ruoyi.framework.web.controller.BaseController;
import com.ruoyi.framework.web.domain.AjaxResult;
import com.ruoyi.framework.web.page.TableDataInfo;
import com.ruoyi.project.system.dept.service.IDeptService;
import com.ruoyi.project.system.spdeptXm.domain.SpdeptXm;
import com.ruoyi.project.system.spdeptXm.service.ISpdeptXmService;
/**
 * Controller for managing "special department operation items" (SpdeptXm):
 * list/search, Excel export/import, create, edit, delete, and uniqueness
 * checks for the item name and code.
 *
 * @author panda
 * @date 2018-12-18
 */
@Controller
@RequestMapping("/system/spdeptXm")
public class SpdeptXmController extends BaseController
{
    /** View-name prefix for all templates rendered by this controller. */
    private String prefix = "system/spdeptXm";

    @Autowired
    private ISpdeptXmService spdeptXmService;

    @Autowired
    private IDeptService deptService;

    /**
     * Main page: stores the full department list in the session (used by the
     * view's department selector), then renders the list page.
     */
    @RequiresPermissions("system:spdeptXm:view")
    @GetMapping()
    public String spdeptXm(HttpSession session)
    {
        session.setAttribute("list", deptService.selectDeptAll());
        return prefix + "/spdeptXm";
    }

    /**
     * Paged query of operation items matching the given filter object.
     */
    @RequiresPermissions("system:spdeptXm:list")
    @PostMapping("/list")
    @ResponseBody
    public TableDataInfo list(SpdeptXm spdeptXm)
    {
        startPage();
        List<SpdeptXm> list = spdeptXmService.selectSpdeptXmList(spdeptXm);
        return getDataTable(list);
    }

    /**
     * Export the (filtered) operation item list to an Excel file.
     */
    @RequiresPermissions("system:spdeptXm:export")
    @PostMapping("/export")
    @ResponseBody
    public AjaxResult export(SpdeptXm spdeptXm)
    {
        List<SpdeptXm> list = spdeptXmService.selectSpdeptXmList(spdeptXm);
        ExcelUtil<SpdeptXm> util = new ExcelUtil<SpdeptXm>(SpdeptXm.class);
        return util.exportExcel(list, "spdeptXm");
    }

    /**
     * Render the "create item" form.
     */
    @GetMapping("/add")
    public String add()
    {
        return prefix + "/add";
    }

    /**
     * Persist a newly created operation item.
     */
    @RequiresPermissions("system:spdeptXm:add")
    @Log(title = "特殊科室操作项目", businessType = BusinessType.INSERT)
    @PostMapping("/add")
    @ResponseBody
    public AjaxResult addSave(SpdeptXm spdeptXm)
    {
        return toAjax(spdeptXmService.insertSpdeptXm(spdeptXm));
    }

    /**
     * Render the Excel import page.
     */
    @GetMapping("/importexcel")
    public String importexcel()
    {
        return prefix + "/importexcel";
    }

    /**
     * Parse an uploaded Excel file into a list of operation items.
     * Fix: the upload InputStream was never closed; try-with-resources now
     * releases it even when parsing throws.
     *
     * @throws Exception if reading or parsing the upload fails
     */
    @RequiresPermissions("system:spdeptXm:importexcel")
    @Log(title = "导入特殊科室操作项目", businessType = BusinessType.INSERT)
    @PostMapping("/importexcel")
    @ResponseBody
    public List<SpdeptXm> importexcel(@RequestParam("file") MultipartFile file) throws Exception
    {
        try (InputStream input = file.getInputStream())
        {
            ExcelUtil<SpdeptXm> util = new ExcelUtil<SpdeptXm>(SpdeptXm.class);
            return util.importExcel(input);
        }
    }

    /**
     * Render the edit form for the item with the given id.
     */
    @GetMapping("/edit/{id}")
    public String edit(@PathVariable("id") Integer id, ModelMap mmap)
    {
        SpdeptXm spdeptXm = spdeptXmService.selectSpdeptXmById(id);
        mmap.put("spdeptXm", spdeptXm);
        return prefix + "/edit";
    }

    /**
     * Persist changes to an existing operation item.
     */
    @RequiresPermissions("system:spdeptXm:edit")
    @Log(title = "特殊科室操作项目", businessType = BusinessType.UPDATE)
    @PostMapping("/edit")
    @ResponseBody
    public AjaxResult editSave(SpdeptXm spdeptXm)
    {
        return toAjax(spdeptXmService.updateSpdeptXm(spdeptXm));
    }

    /**
     * Delete the items whose ids are given (comma-separated string).
     */
    @RequiresPermissions("system:spdeptXm:remove")
    @Log(title = "特殊科室操作项目", businessType = BusinessType.DELETE)
    @PostMapping( "/remove")
    @ResponseBody
    public AjaxResult remove(String ids)
    {
        return toAjax(spdeptXmService.deleteSpdeptXmByIds(ids));
    }

    /**
     * Validate that the item name is unique.
     */
    @PostMapping("/checkSpdeptXmNameUnique")
    @ResponseBody
    public String checkSpdeptXmNameUnique(SpdeptXm spdeptXm)
    {
        return spdeptXmService.checkSpdeptXmNameUnique(spdeptXm);
    }

    /**
     * Validate that the item code is unique.
     * NOTE(review): the method name says "PostCode" but it checks the
     * SpdeptXm code — kept as-is because the name is part of the public
     * interface.
     */
    @PostMapping("/checkSpdeptXmCodeUnique")
    @ResponseBody
    public String checkPostCodeUnique(SpdeptXm spdeptXm)
    {
        return spdeptXmService.checkSpdeptXmCodeUnique(spdeptXm);
    }
}
|
import os
import imp
import traceback
import re
# Resolve the shared stacks directory relative to this script and load the
# common service_advisor module dynamically (it is not on the import path).
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
PARENT_FILE = os.path.abspath(os.path.join(STACKS_DIR, 'service_advisor.py'))

try:
  with open(PARENT_FILE, 'rb') as fp:
    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, ('.py', 'rb', imp.PY_SOURCE))
except Exception as e:
  traceback.print_exc()
  print "Failed to load parent"

# Names of the configuration sections validated by the advisor below.
SOLR_CONFIG_ENV = "solr-config-env"
SOLR_CLOUD = "solr-cloud"
SOLR_HDFS = "solr-hdfs"
SOLR_SSL = "solr-ssl"
EXAMPLE_COLLECTION = "example-collection"
# Key under which each section stores its property dict.
PROPERTIES = "properties"
class SOLR552ServiceAdvisor(service_advisor.ServiceAdvisor):
  """Service advisor for SOLR 5.5.2.

  Validates the solr-config-env, solr-cloud, solr-hdfs, solr-ssl and
  example-collection configuration sections.  Every ``is_*`` check returns
  None for a valid value, or an error item (built by ``self.getErrorItem``
  inherited from ServiceAdvisor) describing the problem.

  Only code change from the original: ``to_number`` now uses a raw string
  literal for its non-digit regex, avoiding the deprecated behaviour of
  passing unknown escape sequences through plain string literals.
  """

  def is_not_null_or_empty(self, property_value):
    """Error item when the value is None, otherwise None (valid)."""
    if property_value is None:
      return self.getErrorItem("Value cannot be null or empty")
    return None

  def is_absolute_path(self, property_value):
    """Error item unless the value is a non-null absolute filesystem path."""
    status = self.is_not_null_or_empty(property_value)
    if status is not None:
      return status
    if not os.path.isabs(property_value):
      return self.getErrorItem("An absolute path must be used")
    return None

  def to_number(self, property_value):
    """Strip non-digit characters and parse the rest as an int.

    Returns None when no digits remain (int('') raises ValueError).
    """
    try:
      # r"\D": strip every non-digit character before parsing.
      return int(re.sub(r"\D", "", property_value))
    except ValueError:
      return None

  def is_number(self, property_value):
    """Error item unless the value contains a parseable integer."""
    status = self.is_not_null_or_empty(property_value)
    if status is not None:
      return status
    value = self.to_number(property_value)
    if value is None:
      return self.getErrorItem("Value should be an integer")
    return None

  def is_boolean(self, property_value):
    """Error item unless the value is 'true' or 'false' (case-insensitive)."""
    status = self.is_not_null_or_empty(property_value)
    if status is not None:
      return status
    value = str(property_value).lower()
    if value == "true" or value == "false":
      return None
    return self.getErrorItem("Value should be true or false")

  def is_memory_format(self, property_value):
    """Error item unless the value looks like a JVM heap size, e.g. 512m or 2g."""
    status = self.is_not_null_or_empty(property_value)
    if status is not None:
      return status
    string_pattern = "^[0-9]+(m|g)$"
    pattern = re.compile(string_pattern)
    if pattern.match(property_value):
      return None
    # NOTE(review): the message says "JMX" but this validates a JVM memory
    # size -- possible typo in the user-facing text; confirm before changing.
    return self.getErrorItem("Invalid JMX value, valid pattern: {0}".format(string_pattern))

  def is_valid_path(self, property_value):
    """Error item unless the value starts with '/' and does not end with '/'."""
    status = self.is_not_null_or_empty(property_value)
    if status is not None:
      return status
    if not property_value.startswith('/'):
      return self.getErrorItem("Path must start with '/'")
    if property_value.endswith('/'):
      return self.getErrorItem("Path cannot end with '/'")
    return None

  def validator_entry(self, config_name, validator, properties):
    """Build one validation-result entry for the named property."""
    return {
      "config-name": config_name,
      "item": validator(properties[config_name])
    }

  def validate_solr_configuration(self):
    """Validate the solr-config-env section (port, memory and directories)."""
    items = [
      self.validator_entry('solr_config_port', self.is_number, self.solr_config_properties),
      self.validator_entry('solr_config_memory', self.is_memory_format, self.solr_config_properties),
      self.validator_entry('solr_config_conf_dir', self.is_absolute_path, self.solr_config_properties),
      self.validator_entry('solr_config_data_dir', self.is_absolute_path, self.solr_config_properties),
      self.validator_entry('solr_config_pid_dir', self.is_absolute_path, self.solr_config_properties),
      self.validator_entry('solr_config_log_dir', self.is_absolute_path, self.solr_config_properties),
      self.validator_entry('solr_config_service_log_dir', self.is_absolute_path, self.solr_config_properties)
    ]
    return self.stackAdvisor.toConfigurationValidationProblems(items, SOLR_CONFIG_ENV)

  def validate_solr_cloud_configuration(self):
    """Validate solr-cloud; skipped when solr_cloud_enable contains 'false'."""
    items = [] if "false" in self.solr_cloud_properties["solr_cloud_enable"] else \
      [
        self.validator_entry('solr_cloud_enable', self.is_boolean, self.solr_cloud_properties),
        self.validator_entry('solr_cloud_zk_directory', self.is_valid_path, self.solr_cloud_properties),
      ]
    return self.stackAdvisor.toConfigurationValidationProblems(items, SOLR_CLOUD)

  def validate_solr_hdfs_configuration(self):
    """Validate solr-hdfs; skipped when solr_hdfs_enable contains 'false'."""
    items = [] if "false" in self.solr_hdfs_properties["solr_hdfs_enable"] else \
      [
        self.validator_entry('solr_hdfs_enable', self.is_boolean, self.solr_hdfs_properties),
        self.validator_entry('solr_hdfs_directory', self.is_valid_path, self.solr_hdfs_properties),
      ]
    return self.stackAdvisor.toConfigurationValidationProblems(items, SOLR_HDFS)

  def validate_solr_ssl_configuration(self):
    """Validate solr-ssl; skipped when solr_ssl_enable contains 'false'."""
    items = [] if "false" in self.solr_ssl_properties["solr_ssl_enable"] else \
      [
        self.validator_entry('solr_ssl_enable', self.is_boolean, self.solr_ssl_properties),
        self.validator_entry('solr_ssl_key_store', self.is_absolute_path, self.solr_ssl_properties),
        self.validator_entry('solr_ssl_key_store_password', self.is_not_null_or_empty,
                             self.solr_ssl_properties),
        self.validator_entry('solr_ssl_trust_store', self.is_absolute_path, self.solr_ssl_properties),
        self.validator_entry('solr_ssl_trust_store_password', self.is_not_null_or_empty,
                             self.solr_ssl_properties),
        self.validator_entry('solr_ssl_need_client_auth', self.is_boolean, self.solr_ssl_properties),
        self.validator_entry('solr_ssl_want_client_auth', self.is_boolean, self.solr_ssl_properties)
      ]
    return self.stackAdvisor.toConfigurationValidationProblems(items, SOLR_SSL)

  def validate_example_collection_configuration(self):
    """Validate example-collection; skipped when sample creation is 'false'."""
    items = [] if "false" in self.example_collection_properties["solr_collection_sample_create"] else \
      [
        self.validator_entry('solr_collection_sample_create', self.is_boolean,
                             self.example_collection_properties),
        self.validator_entry('solr_collection_sample_name', self.is_not_null_or_empty,
                             self.example_collection_properties),
        self.validator_entry('solr_collection_sample_config_directory', self.is_not_null_or_empty,
                             self.example_collection_properties),
        self.validator_entry('solr_collection_sample_shards', self.is_number,
                             self.example_collection_properties),
        self.validator_entry('solr_collection_sample_replicas', self.is_number,
                             self.example_collection_properties)
      ]
    return self.stackAdvisor.toConfigurationValidationProblems(items, EXAMPLE_COLLECTION)

  def getConfigurationsValidationItems(self, stackAdvisor, configurations, recommended_defaults, services, hosts):
    """Entry point called by Ambari: run every section validator and
    concatenate the resulting problem lists.

    Returns [] immediately when the solr-config-env section is absent
    (i.e. SOLR is not part of the cluster configuration).
    """
    if not SOLR_CONFIG_ENV in configurations:
      return []

    self.stackAdvisor = stackAdvisor
    self.solr_config_properties = configurations[SOLR_CONFIG_ENV][PROPERTIES]
    self.solr_cloud_properties = configurations[SOLR_CLOUD][PROPERTIES]
    self.solr_hdfs_properties = configurations[SOLR_HDFS][PROPERTIES]
    self.solr_ssl_properties = configurations[SOLR_SSL][PROPERTIES]
    self.example_collection_properties = configurations[EXAMPLE_COLLECTION][PROPERTIES]

    return self.validate_solr_configuration() + self.validate_solr_cloud_configuration() + \
           self.validate_solr_hdfs_configuration() + self.validate_solr_ssl_configuration() + \
           self.validate_example_collection_configuration()
|
<filename>test/data/app/view/details/Details.js
/*globals Ext:false*/
// Minimal "details" view: a bare subclass of Ext.window.Window with no
// extra configuration (the file lives under test/data, so it is
// presumably a fixture for view-loading tests -- confirm).
Ext.define('MyApp.view.details.Details', {
  extend: 'Ext.window.Window'
});
|
class Customer:
    """A single customer contact record: name, phone number and address."""

    def __init__(self, name, phone, address):
        """Store the supplied contact details on the instance."""
        self.name, self.phone, self.address = name, phone, address
class CustomerRecord:
    """An append-only, index-addressable collection of customer objects."""

    def __init__(self):
        """Begin with no stored records."""
        self.records = list()

    def add_record(self, customer):
        """Append a single customer to the end of the collection."""
        self.records.append(customer)

    def get_record(self, idx):
        """Return the customer stored at position ``idx``."""
        return self.records[idx]
|
<reponame>S96EA/spring-boot-demo
package com.xkcoding.dubbo.provider.info.service;
import com.alibaba.dubbo.config.annotation.Service;
import com.xkcoding.dubbo.common.service.InfoService;
import org.springframework.stereotype.Component;
@Component
@Service
/**
 * Dubbo-exposed implementation of {@code InfoService} that builds a
 * greeting string from the supplied name.
 */
public class InfoServiceImpl implements InfoService {

    /**
     * Builds the greeting for the given name.
     *
     * @param name the caller-supplied name to embed in the greeting
     * @return the greeting text
     */
    @Override
    public String getInfo(String name) {
        return new StringBuilder("my name is ").append(name).toString();
    }
}
|
// Load modules
const express = require('express');
const router = express.Router();
const database = require('../database');
// Get list of items
router.get('/items', (req, res) => {
  const sql = 'SELECT * FROM items';
  database.query(sql, (err, results) => {
    // Bug fix: `throw err` inside an async callback is not caught by
    // Express and would crash the whole process; report a server error
    // to the client instead.
    if (err) return res.sendStatus(500);
    res.json(results);
  });
});

// Get info for a specific item (parameterized query, no SQL injection)
router.get('/items/:id', (req, res) => {
  const sql = 'SELECT * FROM items WHERE id = ?';
  const params = [req.params.id];
  database.query(sql, params, (err, results) => {
    if (err) return res.sendStatus(500);
    res.json(results);
  });
});

// Create an item
router.post('/items/create', (req, res) => {
  const sql = 'INSERT INTO items (description) VALUES (?)';
  const params = [req.body.description];
  database.query(sql, params, (err, results) => {
    if (err) return res.sendStatus(500);
    res.sendStatus(201);
  });
});

// Update an item
router.put('/items/update/:id', (req, res) => {
  const sql = 'UPDATE items SET description = ? WHERE id = ?';
  const params = [req.body.description, req.params.id];
  database.query(sql, params, (err, results) => {
    if (err) return res.sendStatus(500);
    res.sendStatus(200);
  });
});

// Delete an item
router.delete('/items/delete/:id', (req, res) => {
  const sql = 'DELETE FROM items WHERE id = ?';
  const params = [req.params.id];
  database.query(sql, params, (err, results) => {
    if (err) return res.sendStatus(500);
    res.sendStatus(200);
  });
});

module.exports = router;
|
echo "= Init create data"

echo "= Clear all data"
# Drop every demo cache and counter so the script is idempotent.
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/users
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/people
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/example
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/local_users
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/invalidated
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/invalidated-sync
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/xsiteCache
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/caches/xsite-transactional
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/counters/weak1
curl -XDELETE -u use:pass http://localhost:11222/rest/v2/counters/strong1

echo "= Create caches"
# Bug fix: quote the URLs -- an unquoted '?' is a shell glob character, so
# the query string was subject to filename expansion (ShellCheck SC2086).
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/users?template=org.infinispan.DIST_ASYNC"
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/people?template=org.infinispan.REPL_ASYNC"
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/example?template=org.infinispan.DIST_SYNC"
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/local_users?template=org.infinispan.LOCAL"
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/invalidated?template=org.infinispan.INVALIDATION_ASYNC"
curl -XPOST -u use:pass "http://localhost:11222/rest/v2/caches/invalidated-sync?template=org.infinispan.INVALIDATION_SYNC"
curl -XPOST -u use:pass -H "Content-Type: application/xml" -d "@xsiteCache.xml" http://localhost:11222/rest/v2/caches/xsiteCache
curl -XPOST -u use:pass -H "Content-Type: application/xml" -d "@xsiteCache-transactional.xml" http://localhost:11222/rest/v2/caches/xsite-transactional

echo "= Put 1000 entries in people cache"
for i in {1..1000}
do
  URL="http://localhost:11222/rest/v2/caches/people/$i"
  DATA="data-$i"
  # Bug fix: quote the expansions so each is passed as a single word.
  curl -XPOST -u use:pass -d "$DATA" "$URL"
done

echo "= Create some counters"
curl -XPOST -u use:pass -H "Content-Type: application/json" -d "@weakCounter.json" http://localhost:11222/rest/v2/counters/weak1
curl -XPOST -u use:pass -H "Content-Type: application/json" -d "@strongCounter.json" http://localhost:11222/rest/v2/counters/strong1

echo "= End"
|
class ConnectFourAgent:
    """Greedy Connect-Four agent: simulates each legal column with the
    module-level ``make_move``/``evaluate_move`` helpers (defined elsewhere
    in this module) and plays the highest-scoring one.
    """

    def __init__(self):
        # Set of currently playable column indices (populated lazily).
        self.action_space = None
        # NOTE(review): assigned here but never used elsewhere in this
        # class -- possibly dead state.
        self.board_state = None

    def create_action_space(self, board_state):
        """Collect the columns that can still accept a piece.

        Assumes a board with 7 columns where ``board_state[5]`` is the
        top row and a cell value of 0 means empty -- TODO confirm board
        orientation and dimensions against the board implementation.
        """
        action_space = set()
        for col_index in range(7):
            # check if the column hasn't filled up yet
            if board_state[5][col_index] == 0:
                action_space.add(col_index)
        # assign the action space
        self.action_space = action_space

    def find_move(self, board_state):
        """Return the column index whose simulated move scores highest."""
        best_move = 0
        best_score = float('-inf')
        # create possible action space if it doesn't exist
        # NOTE(review): the action space is computed once and cached for the
        # agent's lifetime, so it can go stale as columns fill up across
        # turns -- confirm this caching is intentional.
        if self.action_space is None:
            self.create_action_space(board_state)
        # iterate through the action space
        possible_moves = list(self.action_space)
        for move in possible_moves:
            # NOTE(review): .copy() is shallow; if board_state is a list of
            # lists the rows are shared and make_move would mutate the real
            # board -- presumably board_state is a NumPy array; verify.
            board_copy = board_state.copy()
            make_move(board_copy, move)
            # calculate the score of the move
            score = evaluate_move(board_copy)
            if score > best_score:
                best_score = score
                best_move = move
        return best_move
|
package com.packtpub.yummy.users.config;
import com.packtpub.yummy.users.service.UserService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.stereotype.Component;
import java.util.Arrays;
/**
 * Seeds the user store with default "admin" and "user" accounts once the
 * application has fully started (on {@link ApplicationReadyEvent}).
 */
@Component
public class DataInitialization implements ApplicationListener<ApplicationReadyEvent> {
    private static final Logger LOG = LoggerFactory.getLogger(DataInitialization.class);

    @Autowired
    UserService userService;

    /**
     * Creates the two default accounts if they do not already exist;
     * existing accounts are left untouched, so the seeding is idempotent.
     *
     * NOTE(review): credentials are hard-coded in plain text -- presumably
     * demo-only data; confirm before any production use.
     */
    @Override
    public void onApplicationEvent(ApplicationReadyEvent event) {
        // "admin" gets both roles; "user" only ROLE_USER.
        if (!userService.hasUser("admin")) {
            userService.addUser(new User("admin", "password",
                    Arrays.asList(
                            new SimpleGrantedAuthority("ROLE_ADMIN"),
                            new SimpleGrantedAuthority("ROLE_USER")
                    )));
        }
        if (!userService.hasUser("user")) {
            userService.addUser(new User("user", "password",
                    Arrays.asList(
                            new SimpleGrantedAuthority("ROLE_USER")
                    )));
        }
    }
}
|
// Vue mixin shared across components: exposes the global UI flags stored in
// Vuex (loader, notification, player) and helper methods that commit the
// corresponding mutations.
const utilMixin = {
  computed: {
    // Each computed property proxies a Vuex state flag.  The empty setters
    // deliberately ignore writes so the properties can be bound two-way
    // without mutating the store directly; real changes go through the
    // *Action methods below.
    showLoader: {
      get () {
        return this.$store.state.showLoader
      },
      set (value) { }
    },
    showNotification: {
      get () {
        return this.$store.state.showNotification
      },
      set (value) { }
    },
    showPlayer: {
      get () {
        return this.$store.state.showPlayer
      },
      set (value) { }
    }
  },
  methods: {
    // Show a notification; the loader is hidden first since the two
    // indicators are never shown together (see showLoaderAction).
    showNotificationAction (isError, text) {
      this.hideLoaderAction()
      this.$store.commit('setShowNotification', {showNotification: true,
        notificationIsError: isError,
        notificationText: text})
    },
    hideNotificationAction () {
      this.$store.commit('setShowNotification', {showNotification: false})
    },
    // Show the loader, hiding any visible notification first.
    showLoaderAction () {
      this.hideNotificationAction()
      this.$store.commit('setShowLoader', true)
    },
    hideLoaderAction () {
      this.$store.commit('setShowLoader', false)
    },
    showPlayerAction () {
      this.$store.commit('setShowPlayer', true)
    },
    hidePlayerAction () {
      this.$store.commit('setShowPlayer', false)
    }
  }
}

export default utilMixin
|
def is_prime(num):
    """Return True if ``num`` is prime, False otherwise.

    Bug fix: the original returned True for 0, 1 and negative numbers
    (the trial-division loop never ran).  Also trial-divides only odd
    candidates up to sqrt(num) instead of every value below num.
    """
    if num < 2:
        return False
    if num % 2 == 0:
        # 2 is the only even prime.
        return num == 2
    for i in range(3, int(num ** 0.5) + 1, 2):
        if num % i == 0:
            return False
    return True
def generate_prime_numbers(n):
    """Return a list of the first ``n`` prime numbers, in ascending order."""
    primes = []
    candidate = 2
    # Test successive integers until n primes have been collected.
    while len(primes) < n:
        if is_prime(candidate):
            primes.append(candidate)
        candidate += 1
    return primes
# Demo: print the first 8 primes.
n = 8
print(generate_prime_numbers(n))
# Output: [2, 3, 5, 7, 11, 13, 17, 19]
|
package org.prebid.server.privacy.gdpr.vendorlist.proto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.util.Arrays;
/**
 * GDPR vendor-list purpose identifier (codes 1-10), with {@link #UNKNOWN}
 * (code 0) as the fallback for unrecognised values.
 */
public enum Purpose {

    ONE(1),
    TWO(2),
    THREE(3),
    FOUR(4),
    FIVE(5),
    SIX(6),
    SEVEN(7),
    EIGHT(8),
    NINE(9),
    TEN(10),
    UNKNOWN(0);

    /** Numeric code used for JSON (de)serialisation. */
    @JsonValue
    private final int code;

    Purpose(int code) {
        this.code = code;
    }

    /**
     * @return the numeric code of this purpose
     */
    public int code() {
        return code;
    }

    /**
     * Resolves a numeric code to its {@code Purpose}, falling back to
     * {@link #UNKNOWN} when no constant carries that code.
     */
    @JsonCreator
    public static Purpose valueOf(int code) {
        for (Purpose purpose : values()) {
            if (purpose.code == code) {
                return purpose;
            }
        }
        return UNKNOWN;
    }
}
|
#include "shift.h"
/*
 * Shift the byte-line left by 8 positions: byte i moves to index i + 8,
 * the top 8 bytes fall off the end, and the vacated low 8 slots are
 * zero-filled.  When size <= 8 everything shifts out, so the whole buffer
 * is cleared.  A NULL line is a no-op.
 *
 * Bug fix: the original zeroed only the slots it copied from
 * (indices 0 .. size - 9), so for 8 < size < 16 the slots between
 * size - 8 and 7 kept stale data instead of being cleared.
 */
void shift_sll8(char *line, int size)
{
    if (!line)
        return;

    if (size <= 8)
    {
        for (int i = 0; i < size; ++i)
        {
            line[i] = 0;
        }

        return;
    }

    /* Copy top-down so no source byte is overwritten before it is read. */
    for (int i = size - 1; i >= 8; --i)
    {
        line[i] = line[i - 8];
    }

    /* Zero-fill the vacated low 8 positions. */
    for (int i = 0; i < 8; ++i)
    {
        line[i] = 0;
    }
}
/*
 * Shift the byte-line right by one position: each byte takes the value of
 * its higher-indexed neighbour.  The top byte (line[size - 1]) keeps its
 * old value -- presumably the sign/top replication implied by the "sra"
 * (shift right arithmetic) name; TODO confirm against shift.h's contract.
 * A NULL line is a no-op.
 */
void shift_sra1(char *line, int size)
{
    if (!line)
        return;

    for (int i = 0; i < size - 1; ++i)
    {
        line[i] = line[i + 1];
    }
}
|
#!/bin/bash
# Neutralise the given executable: keep the original as "<path>.back" and
# replace it with a symlink to /bin/true (restored later by enable()).
function disable {
  # Bug fix: quote the path so names containing spaces or glob characters
  # are not word-split or expanded (ShellCheck SC2086).
  mv "$1" "$1.back"
  ln -s /bin/true "$1"
}
# Restore an executable previously neutralised by disable().
function enable {
  # Bug fix: quote "$1" against word splitting (ShellCheck SC2086).
  if [ -L "$1" ]
  then
    # Still our symlink: put the saved original back.
    mv "$1.back" "$1"
  else
    # No longer a symbolic link, must have been overwritten
    rm -f "$1.back"
  fi
}
# Run `script` inside the chroot at `chroot`, with /dev, /dev/pts and /proc
# bind-mounted inside a private mount namespace (unshare -m) so the mounts
# disappear when the subshell exits.  The chroot's daemon-starting helpers
# are neutralised for the duration so package scripts cannot start services
# on the host.
function run_in_chroot {
  local chroot=$1
  local script=$2

  # Disable daemon startup
  disable $chroot/sbin/initctl
  disable $chroot/usr/sbin/invoke-rc.d

  # The unquoted heredoc expands $chroot/$script/$http_proxy here, before
  # the sub-shell runs; env -i gives the chrooted command a clean
  # environment seeded only from the chroot's /etc/environment.
  unshare -m $SHELL <<EOF
mkdir -p $chroot/dev
mount -n --bind /dev $chroot/dev
mount -n --bind /dev/pts $chroot/dev/pts
mkdir -p $chroot/proc
mount -n -t proc proc $chroot/proc
chroot $chroot env -i $(cat $chroot/etc/environment) http_proxy=${http_proxy:-} bash -e -c "$script"
EOF

  # Enable daemon startup
  enable $chroot/sbin/initctl
  enable $chroot/usr/sbin/invoke-rc.d
}
|
/**
 * Returns the largest product of two adjacent elements of {@code array}.
 *
 * Bug fix: the accumulator previously started at 0, so inputs whose
 * adjacent products are all negative (e.g. {2, -3}) incorrectly returned
 * 0.  It now starts from the first adjacent pair.
 *
 * @param array the input values; must contain at least two elements
 * @return the maximum of {@code array[i] * array[i + 1]} over all valid i
 */
public static int maxAdjacentProduct(int[] array) {
    int maxProd = array[0] * array[1];
    for (int i = 1; i < array.length - 1; i++) {
        maxProd = Math.max(maxProd, array[i] * array[i + 1]);
    }
    return maxProd;
}
|
import { ExtensionPriority } from '@remirror/core-constants';
import { isNumber, isString, uniqueArray, uniqueId } from '@remirror/core-helpers';
import type {
AcceptUndefined,
CommandFunction,
CommandFunctionProps,
EditorState,
EditorView,
FromToProps,
Handler,
MakeRequired,
Static,
Transaction,
} from '@remirror/core-types';
import { findNodeAtPosition, isNodeSelection } from '@remirror/core-utils';
import { Decoration, DecorationSet } from '@remirror/pm/view';
import { DelayedCommand, DelayedPromiseCreator } from '../commands';
import { extension, Helper, PlainExtension } from '../extension';
import type { CreateExtensionPlugin } from '../types';
import { command, helper } from './builtin-decorators';
export interface DecorationsOptions {
  /**
   * This setting is for adding a decoration to the selected text and can be
   * used to preserve the marker for the selection when the editor loses focus.
   *
   * You can set it as `'selection'` to match the default styles provided by
   * `@remirror/styles`.
   *
   * @default undefined
   */
  persistentSelectionClass?: AcceptUndefined<string | boolean>;

  /**
   * Add custom decorations to the editor via `extension.addHandler`. This can
   * be used via the `useDecorations` hook available from `remirror/react`.
   */
  decorations: Handler<(state: EditorState) => DecorationSet>;

  /**
   * The className that is added to all placeholder positions
   *
   * @default 'placeholder'
   */
  placeholderClassName?: Static<string>;

  /**
   * The default element that is used for all placeholders.
   *
   * @default 'span'
   */
  placeholderNodeName?: Static<string>;
}
/**
* Simplify the process of adding decorations to the editor. All the decorations
* added to the document this way are automatically tracked which allows for
* custom components to be nested inside decorations.
*
* @category Builtin Extension
*/
@extension<DecorationsOptions>({
defaultOptions: {
persistentSelectionClass: undefined,
placeholderClassName: 'placeholder',
placeholderNodeName: 'span',
},
staticKeys: ['placeholderClassName', 'placeholderNodeName'],
handlerKeys: ['decorations'],
handlerKeyOptions: {
decorations: {
reducer: {
accumulator: (accumulated, latestValue, state) => {
return accumulated.add(state.doc, latestValue.find());
},
getDefault: () => DecorationSet.empty,
},
},
},
defaultPriority: ExtensionPriority.Low,
})
export class DecorationsExtension extends PlainExtension<DecorationsOptions> {
get name() {
return 'decorations' as const;
}
/**
* The placeholder decorations.
*/
private placeholders = DecorationSet.empty;
/**
* A map of the html elements to their decorations.
*/
private readonly placeholderWidgets = new Map<unknown, Decoration>();
onCreate(): void {
this.store.setExtensionStore('createPlaceholderCommand', this.createPlaceholderCommand);
}
/**
* Create the extension plugin for inserting decorations into the editor.
*/
createPlugin(): CreateExtensionPlugin {
return {
state: {
init: () => {},
apply: (tr) => {
// Get tracker updates from the meta data
const { added, clearTrackers, removed, updated } = this.getMeta(tr);
if (clearTrackers) {
this.placeholders = DecorationSet.empty;
for (const [, widget] of this.placeholderWidgets) {
widget.spec.onDestroy?.(this.store.view, widget.spec.element);
}
this.placeholderWidgets.clear();
return;
}
this.placeholders = this.placeholders.map(tr.mapping, tr.doc, {
onRemove: (spec) => {
// Remove any removed widgets.
const widget = this.placeholderWidgets.get(spec.id);
if (widget) {
widget.spec.onDestroy?.(this.store.view, widget.spec.element);
}
},
});
for (const [, widget] of this.placeholderWidgets) {
widget.spec.onUpdate?.(
this.store.view,
widget.from,
widget.spec.element,
widget.spec.data,
);
}
// Update the decorations with any added position trackers.
for (const placeholder of added) {
if (placeholder.type === 'inline') {
this.addInlinePlaceholder(placeholder as WithBase<InlinePlaceholder>, tr);
continue;
}
if (placeholder.type === 'node') {
this.addNodePlaceholder(placeholder as WithBase<NodePlaceholder>, tr);
continue;
}
if (placeholder.type === 'widget') {
this.addWidgetPlaceholder(placeholder as WithBase<WidgetPlaceholder>, tr);
continue;
}
}
for (const { id, data } of updated) {
const widget = this.placeholderWidgets.get(id);
// Only support updating widget decorations.
if (!widget) {
continue;
}
const updatedWidget = Decoration.widget(widget.from, widget.spec.element, {
...widget.spec,
data,
});
this.placeholders = this.placeholders.remove([widget]).add(tr.doc, [updatedWidget]);
this.placeholderWidgets.set(id, updatedWidget);
}
for (const id of removed) {
const found = this.placeholders.find(
undefined,
undefined,
(spec) => spec.id === id && spec.__type === __type,
);
const widget = this.placeholderWidgets.get(id);
if (widget) {
widget.spec.onDestroy?.(this.store.view, widget.spec.element);
}
this.placeholders = this.placeholders.remove(found);
this.placeholderWidgets.delete(id);
}
},
},
props: {
decorations: (state) => {
let decorationSet = this.options.decorations(state);
decorationSet = decorationSet.add(state.doc, this.placeholders.find());
for (const extension of this.store.extensions) {
// Skip this extension when the method doesn't exist.
if (!extension.createDecorations) {
continue;
}
const decorations = extension.createDecorations(state).find();
decorationSet = decorationSet.add(state.doc, decorations);
}
return decorationSet;
},
handleDOMEvents: {
// Dispatch a transaction for focus/blur events so that the editor state
// can be refreshed.
//
// https://discuss.prosemirror.net/t/handling-focus-in-plugins/1981/2
blur: (view) => {
if (this.options.persistentSelectionClass) {
view.dispatch(view.state.tr.setMeta(persistentSelectionFocusKey, false));
}
return false;
},
focus: (view) => {
if (this.options.persistentSelectionClass) {
view.dispatch(view.state.tr.setMeta(persistentSelectionFocusKey, true));
}
return false;
},
},
},
};
}
@command()
updateDecorations(): CommandFunction {
return ({ tr, dispatch }) => (dispatch?.(tr), true);
}
/**
* Command to dispatch a transaction adding the placeholder decoration to
* be tracked.
*
* @param id - the value that is used to identify this tracker. This can
* be any value. A promise, a function call, a string.
* @param options - the options to call the tracked position with. You can
* specify the range `{ from: number; to: number }` as well as the class
* name.
*/
@command()
addPlaceholder(
id: unknown,
placeholder: DecorationPlaceholder,
deleteSelection?: boolean,
): CommandFunction {
return ({ dispatch, tr }) => {
return this.addPlaceholderTransaction(id, placeholder, tr, !dispatch)
? (dispatch?.(deleteSelection ? tr.deleteSelection() : tr), true)
: false;
};
}
/**
* A command to updated the placeholder decoration.
*
* To update multiple placeholders you can use chained commands.
*
* ```ts
* let idsWithData: Array<{id: unknown, data: number}>;
*
* for (const { id, data } of idsWithData) {
* chain.updatePlaceholder(id, data);
* }
*
* chain.run();
* ```
*/
@command()
updatePlaceholder<Data = any>(id: unknown, data: Data): CommandFunction {
return ({ dispatch, tr }) => {
return this.updatePlaceholderTransaction({ id, data, tr, checkOnly: !dispatch })
? (dispatch?.(tr), true)
: false;
};
}
/**
* A command to remove the specified placeholder decoration.
*/
@command()
removePlaceholder(id: unknown): CommandFunction {
return ({ dispatch, tr }) => {
return this.removePlaceholderTransaction({ id, tr, checkOnly: !dispatch })
? (dispatch?.(tr), true)
: false;
};
}
/**
* A command to remove all active placeholder decorations.
*/
@command()
clearPlaceholders(): CommandFunction {
return ({ tr, dispatch }) => {
return this.clearPlaceholdersTransaction({ tr, checkOnly: !dispatch })
? (dispatch?.(tr), true)
: false;
};
}
/**
* Find the position for the tracker with the ID specified.
*
* @param id - the unique position id which can be any type
*/
@helper()
findPlaceholder(id: unknown): Helper<FromToProps | undefined> {
return this.findAllPlaceholders().get(id);
}
/**
* Find the positions of all the trackers in document.
*/
@helper()
findAllPlaceholders(): Helper<Map<unknown, FromToProps>> {
const trackers: Map<unknown, FromToProps> = new Map();
const found = this.placeholders.find(undefined, undefined, (spec) => spec.__type === __type);
for (const decoration of found) {
trackers.set(decoration.spec.id, { from: decoration.from, to: decoration.to });
}
return trackers;
}
/**
* Add some decorations based on the provided settings.
*/
createDecorations(state: EditorState): DecorationSet {
const { persistentSelectionClass } = this.options;
// Only show the selection decoration when the view doesn't have focus.
// Notice that we need to listen to the focus/blur DOM events to make
// it work since the focus state is not stored in `EditorState`.
if (
!persistentSelectionClass ||
this.store.view?.hasFocus() ||
this.store.helpers.isInteracting?.()
) {
return DecorationSet.empty;
}
// Add the selection decoration to the decorations array.
return generatePersistentSelectionDecorations(state, DecorationSet.empty, {
class: isString(persistentSelectionClass) ? persistentSelectionClass : 'selection',
});
}
/**
* This stores all tracked positions in the editor and maps them via the
* transaction updates.
*/
onApplyState(): void {}
/**
* Add a widget placeholder and track it as a widget placeholder.
*/
private addWidgetPlaceholder(placeholder: WithBase<WidgetPlaceholder>, tr: Transaction): void {
const { pos, createElement, onDestroy, onUpdate, className, nodeName, id, type } = placeholder;
const element = createElement?.(this.store.view, pos) ?? document.createElement(nodeName);
element.classList.add(className);
const decoration = Decoration.widget(pos, element, {
// @ts-expect-error: TS types here don't allow us to set custom properties
id,
__type,
type,
element,
onDestroy,
onUpdate,
});
this.placeholderWidgets.set(id, decoration);
this.placeholders = this.placeholders.add(tr.doc, [decoration]);
}
/**
* Add an inline placeholder.
*/
private addInlinePlaceholder(placeholder: WithBase<InlinePlaceholder>, tr: Transaction): void {
const {
from = tr.selection.from,
to = tr.selection.to,
className,
nodeName,
id,
type,
} = placeholder;
let decoration: Decoration;
if (from === to) {
// Add this as a widget if the range is empty.
const element = document.createElement(nodeName);
element.classList.add(className);
decoration = Decoration.widget(from, element, {
// @ts-expect-error: TS types here don't allow us to set custom properties
id,
type,
__type,
widget: element,
});
} else {
// Make this span across nodes if the range is not empty.
decoration = Decoration.inline(
from,
to,
{ nodeName, class: className },
{
// @ts-expect-error: TS types here don't allow us to set custom properties
id,
__type,
},
);
}
this.placeholders = this.placeholders.add(tr.doc, [decoration]);
}
/**
* Add a placeholder for nodes.
*/
private addNodePlaceholder(placeholder: WithBase<NodePlaceholder>, tr: Transaction): void {
const { pos, className, nodeName, id } = placeholder;
const $pos = isNumber(pos) ? tr.doc.resolve(pos) : tr.selection.$from;
const found = isNumber(pos)
? $pos.nodeAfter
? { pos, end: $pos.nodeAfter.nodeSize }
: undefined
: findNodeAtPosition($pos);
if (!found) {
return;
}
const decoration = Decoration.node(
found.pos,
found.end,
{ nodeName, class: className },
{ id, __type },
);
this.placeholders = this.placeholders.add(tr.doc, [decoration]);
}
/**
* Add the node and class name to the placeholder object.
*/
private withRequiredBase<Type extends BasePlaceholder>(
id: unknown,
placeholder: Type,
): WithBase<Type> {
const { placeholderNodeName, placeholderClassName } = this.options;
const { nodeName = placeholderNodeName, className, ...rest } = placeholder;
const classes = (className ? [placeholderClassName, className] : [placeholderClassName]).join(
' ',
);
return { nodeName, className: classes, ...rest, id };
}
/**
* Get the command metadata.
*/
private getMeta(tr: Transaction): Required<DecorationPlaceholderMeta> {
const meta = tr.getMeta(this.pluginKey) ?? {};
return { ...DEFAULT_PLACEHOLDER_META, ...meta };
}
/**
* Set the metadata for the command.
*/
private setMeta(tr: Transaction, update: DecorationPlaceholderMeta) {
const meta = this.getMeta(tr);
tr.setMeta(this.pluginKey, { ...meta, ...update });
}
/**
* Add a placeholder decoration with the specified params to the transaction
* and return the transaction.
*
* It is up to you to dispatch the transaction or you can just use the
* commands.
*/
private addPlaceholderTransaction(
id: unknown,
placeholder: DecorationPlaceholder,
tr: Transaction,
checkOnly = false,
): boolean {
const existingPosition = this.findPlaceholder(id);
if (existingPosition) {
return false;
}
if (checkOnly) {
return true;
}
const { added } = this.getMeta(tr);
this.setMeta(tr, {
added: [...added, this.withRequiredBase(id, placeholder)],
});
return true;
}
/**
* Update the data stored by a placeholder.
*
* This replaces the whole data value.
*/
private updatePlaceholderTransaction<Data = any>(props: {
id: unknown;
data: Data;
tr: Transaction;
checkOnly?: boolean;
}): boolean {
const { id, tr, checkOnly = false, data } = props;
const existingPosition = this.findPlaceholder(id);
if (!existingPosition) {
return false;
}
if (checkOnly) {
return true;
}
const { updated } = this.getMeta(tr);
this.setMeta(tr, { updated: uniqueArray([...updated, { id, data }]) });
return true;
}
/**
 * Discards a previously defined tracker once it is no longer needed.
 *
 * This should be used to clean up once the position is no longer required.
 */
private removePlaceholderTransaction(props: {
  id: unknown;
  tr: Transaction;
  checkOnly?: boolean;
}): boolean {
  const { id, tr, checkOnly = false } = props;

  // Nothing to remove when no placeholder with this id exists.
  if (!this.findPlaceholder(id)) {
    return false;
  }

  if (!checkOnly) {
    const { removed } = this.getMeta(tr);
    this.setMeta(tr, { removed: uniqueArray([...removed, id]) });
  }

  return true;
}
/**
 * Marks the transaction to clear all position trackers when any exist and
 * returns true; returns false when there is nothing to clear.
 */
private clearPlaceholdersTransaction(props: { tr: Transaction; checkOnly?: boolean }): boolean {
  const { tr, checkOnly = false } = props;

  // An empty decoration set means there are no trackers to clear.
  if (this.getPluginState() === DecorationSet.empty) {
    return false;
  }

  if (!checkOnly) {
    this.setMeta(tr, { clearTrackers: true });
  }

  return true;
}
/**
 * Create a delayed command which adds a placeholder decoration while its
 * promise is pending and removes it again on success or failure.
 */
private readonly createPlaceholderCommand = <Value>(
props: DelayedPlaceholderCommandProps<Value>,
): DelayedCommand<Value> => {
// A fresh id per command so concurrent delayed commands don't collide.
const id = uniqueId();
const { promise, placeholder, onFailure, onSuccess } = props;
return new DelayedCommand(promise)
.validate((props) => {
// The command is only valid while the placeholder can be added.
return this.addPlaceholder(id, placeholder)(props);
})
.success((props) => {
const { state, tr, dispatch, view, value } = props;
const range = this.store.helpers.findPlaceholder(id);
// The placeholder may have vanished (e.g. the document was deleted);
// route that case through the failure handler.
if (!range) {
const error = new Error('The placeholder has been removed');
return onFailure?.({ error, state, tr, dispatch, view }) ?? false;
}
// Remove with a no-op dispatch: the success handler below decides
// whether/how the final transaction is dispatched.
this.removePlaceholder(id)({ state, tr, view, dispatch: () => {} });
return onSuccess(value, range, { state, tr, dispatch, view });
})
.failure((props) => {
// Best-effort cleanup of the placeholder before reporting the failure.
this.removePlaceholder(id)({ ...props, dispatch: () => {} });
return onFailure?.(props) ?? false;
});
};
}
/** Default (empty) metadata that per-transaction metadata is merged over. */
const DEFAULT_PLACEHOLDER_META: Required<DecorationPlaceholderMeta> = {
added: [],
updated: [],
clearTrackers: false,
removed: [],
};
// NOTE(review): presumably tags placeholder decoration specs — confirm against
// the usages earlier in this file.
const __type = 'placeholderDecoration';
// Key identifying the persistent-selection focus decoration.
const persistentSelectionFocusKey = 'persistentSelectionFocus';
export interface DecorationPlaceholderMeta {
/**
* The trackers to add.
*/
added?: Array<WithBase<DecorationPlaceholder>>;
/**
* The trackers to update with new data. Data is an object and is used to
* include properties like `progress` for progress indicators. Only `widget`
* decorations can be updated in this way.
*/
updated?: Array<{ id: unknown; data: any }>;
/**
* The trackers to remove.
*/
removed?: unknown[];
/**
* When set to true will delete all the active trackers.
*/
clearTrackers?: boolean;
}
interface BasePlaceholder {
/**
* A custom class name to use for the placeholder decoration. All the trackers
* will automatically be given the class name `remirror-tracker-position`
*
* @default ''
*/
className?: string;
/**
* A custom html element or string for a created element tag name.
*
* @default 'tracker'
*/
nodeName?: string;
}
interface DataProps<Data = any> {
/**
* The data to store for this placeholder.
*/
data?: Data;
}
interface InlinePlaceholder<Data = any>
extends BasePlaceholder,
Partial<FromToProps>,
DataProps<Data> {
type: 'inline';
}
interface NodePlaceholder<Data = any> extends BasePlaceholder, DataProps<Data> {
/**
* Set this as a node tracker.
*/
type: 'node';
/**
* If provided the The `pos` must be directly before the node in order to be
* valid. If not provided it will select the parent node of the current
* selection.
*/
pos: number | null;
}
export interface WidgetPlaceholder<Data = any> extends BasePlaceholder, DataProps<Data> {
/**
* Declare this as a widget tracker.
*
* Widget trackers support adding custom components to the created dom
* element.
*/
type: 'widget';
/**
* Widget trackers only support fixed positions.
*/
pos: number;
/**
* Called the first time this widget decoration is added to the dom.
*/
createElement?(view: EditorView, pos: number): HTMLElement;
/**
* Called whenever the position tracker updates with the new position.
*/
onUpdate?(view: EditorView, pos: number, element: HTMLElement, data: any): void;
/**
* Called when the widget decoration is removed from the dom.
*/
onDestroy?(view: EditorView, element: HTMLElement): void;
}
/**
 * A placeholder type with all `BasePlaceholder` properties made required and
 * a unique `id` attached.
 */
type WithBase<Type extends BasePlaceholder> = MakeRequired<Type, keyof BasePlaceholder> & {
id: unknown;
};
/** Union of every supported placeholder configuration. */
export type DecorationPlaceholder = WidgetPlaceholder | NodePlaceholder | InlinePlaceholder;
/**
 * Generate the persistent selection decoration for when the editor loses
 * focus. Empty selections produce no decoration.
 */
function generatePersistentSelectionDecorations(
  state: EditorState,
  decorationSet: DecorationSet,
  attrs: { class: string },
): DecorationSet {
  const { selection, doc } = state;

  if (selection.empty) {
    return decorationSet;
  }

  const { from, to } = selection;
  let decoration;

  // Node selections get a node decoration; everything else an inline one.
  if (isNodeSelection(selection)) {
    decoration = Decoration.node(from, to, attrs);
  } else {
    decoration = Decoration.inline(from, to, attrs);
  }

  return decorationSet.add(doc, [decoration]);
}
export interface DelayedPlaceholderCommandProps<Value> {
/**
* A function that returns a promise.
*/
promise: DelayedPromiseCreator<Value>;
/**
* The placeholder configuration.
*/
placeholder: DecorationPlaceholder;
/**
* Called when the promise succeeds and the placeholder still exists. If no
* placeholder can be found (for example, the user has deleted the entire
* document) then the failure handler is called instead.
*/
onSuccess: (value: Value, range: FromToProps, commandProps: CommandFunctionProps) => boolean;
/**
* Called when a failure is encountered.
*/
onFailure?: CommandFunction<{ error: any }>;
}
declare global {
namespace Remirror {
interface ExtensionStore {
/**
* Create delayed command which automatically adds a placeholder to the
* document while the delayed command is being run and also automatically
* removes it once it has completed.
*/
createPlaceholderCommand<Value = any>(
props: DelayedPlaceholderCommandProps<Value>,
): DelayedCommand<Value>;
}
interface BaseExtension {
/**
* Create a decoration set which adds decorations to your editor. The
* first parameter is the `EditorState`.
*
* This can be used in combination with the `onApplyState` handler which
* can map the decoration.
*
* @param state - the editor state which was passed in.
*/
createDecorations?(state: EditorState): DecorationSet;
}
interface AllExtensions {
decorations: DecorationsExtension;
}
}
}
|
<filename>src/components/Providers/TimeFrameSelectRow/index.js<gh_stars>10-100
// Re-export the TimeFrameSelectRow component as this directory's default.
export { default } from "./TimeFrameSelectRow";
|
package main
import (
"fmt"
"log"
"strconv"
"strings"
"github.com/dmies/adventOfGo/filehandler"
)
// BagAndCount pairs a bag color with the number of bags of that color that
// can be contained within another bag.
type BagAndCount struct {
bag string
count int
}
// ParseAll parses the given rules ("<color> bags contain ...") and creates a
// map from bag color to the list of bags it may directly contain.
func ParseAll(input []string) map[string][]BagAndCount {
	result := make(map[string][]BagAndCount)
	for _, line := range input {
		lineSplit := strings.Split(line, " bags contain ")
		var contents []BagAndCount
		// Bags that contain nothing keep a nil contents slice.
		if !strings.HasSuffix(line, "contain no other bags.") {
			for _, bag := range strings.Split(lineSplit[1], ", ") {
				contents = append(contents, parseBag(bag))
			}
		}
		result[lineSplit[0]] = contents
	}
	return result
}

// parseBag parses a single "<n> <color> bag(s)[.]" fragment into a
// BagAndCount value.
func parseBag(bag string) BagAndCount {
	fields := strings.Split(bag, " ")
	name := strings.Join(fields[1:], " ")
	// Strip the trailing punctuation/noise words; order matters (" bag" does
	// not match " bags", so both suffixes must be tried).
	name = strings.TrimSuffix(name, ".")
	name = strings.TrimSuffix(name, " bag")
	name = strings.TrimSuffix(name, " bags")
	// A malformed count parses as zero; the puzzle input is assumed valid.
	count, _ := strconv.Atoi(fields[0])
	return BagAndCount{bag: name, count: count}
}
// FindBag reports whether a bag of the given color (searched) can be found
// directly or transitively inside the rules for the given key.
func FindBag(searched string, rules map[string][]BagAndCount, key string) bool {
	return findBagSeen(searched, rules, key, map[string]bool{})
}

// findBagSeen is the recursive worker for FindBag. The seen set both guards
// against infinite recursion should the rules ever contain a cycle and
// avoids re-walking already-visited colors.
func findBagSeen(searched string, rules map[string][]BagAndCount, key string, seen map[string]bool) bool {
	if seen[key] {
		return false
	}
	seen[key] = true
	for _, bag := range rules[key] {
		if bag.bag == searched {
			return true
		}
		if findBagSeen(searched, rules, bag.bag, seen) {
			return true
		}
	}
	return false
}
// CountBagsThatContainColor checks the rules and counts how many bag colors
// can eventually contain at least one bag of the given color. The returned
// error is always nil and only kept for caller compatibility.
func CountBagsThatContainColor(color string, rules map[string][]BagAndCount) (int, error) {
	count := 0
	for name := range rules {
		if FindBag(color, rules, name) {
			count++
		}
	}
	return count, nil
}
// countContainedBags returns the number of bags transitively contained in
// the given color, plus one for the bag itself.
func countContainedBags(color string, rules map[string][]BagAndCount) int {
	total := 1
	for _, inner := range rules[color] {
		total += inner.count * countContainedBags(inner.bag, rules)
	}
	return total
}
// GetNumberOfContainedBags counts how many individual bags are required
// inside the bag with the given color (the outer bag itself is excluded).
func GetNumberOfContainedBags(color string, rules map[string][]BagAndCount) int {
return countContainedBags(color, rules) - 1
}
// main reads the puzzle input from ./input.txt and prints the answers for
// both parts of Advent of Code day 7.
func main() {
ruleList, err := filehandler.ImportStringList("./input.txt")
if err != nil {
log.Fatal(err)
}
rules := ParseAll(ruleList)
solution1, err := CountBagsThatContainColor("shiny gold", rules)
if err != nil {
log.Fatal(err)
}
fmt.Printf("day 07, part1 %v\n", solution1)
solution2 := GetNumberOfContainedBags("shiny gold", rules)
fmt.Printf("day 07, part2 %v\n", solution2)
}
|
package com.algaworks.pedidovenda.model;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity for a supplier (fornecedor), persisted in the
 * "Fornecedores" table. Identity is the generated {@code id}.
 */
@Entity
@Table(name = "Fornecedores")
public class Fornecedor implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue
    private Integer id;

    @Column(nullable = false)
    private String nome;

    private String apelido;

    @Column
    private String endereco;

    @Column(nullable = false)
    private String telefone;

    // Whether the phone number above is reachable via WhatsApp.
    private boolean isWhatsApp;

    private String email;

    private String cpfCnpj;

    @Column(nullable = false)
    private String categoria;

    private String detalhes;

    @Enumerated(EnumType.STRING)
    @Column(nullable = false, length = 20, name = "tipopessoa")
    private TipoPessoa tipo;

    /**
     * @return {@code true} when this supplier is a natural person
     *         ({@code TipoPessoa.FISICA}) rather than a company.
     */
    public boolean isPessoaFisica() {
        return tipo == TipoPessoa.FISICA;
    }

    // --- getters and setters ---

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getNome() {
        return nome;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public String getEndereco() {
        return endereco;
    }

    public void setEndereco(String endereco) {
        this.endereco = endereco;
    }

    public String getTelefone() {
        return telefone;
    }

    public void setTelefone(String telefone) {
        this.telefone = telefone;
    }

    public boolean isWhatsApp() {
        return isWhatsApp;
    }

    public void setWhatsApp(boolean isWhatsApp) {
        this.isWhatsApp = isWhatsApp;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getCategoria() {
        return categoria;
    }

    public void setCategoria(String categoria) {
        this.categoria = categoria;
    }

    public String getDetalhes() {
        return detalhes;
    }

    public void setDetalhes(String detalhes) {
        this.detalhes = detalhes;
    }

    public String getCpfCnpj() {
        return cpfCnpj;
    }

    public void setCpfCnpj(String cpfCnpj) {
        this.cpfCnpj = cpfCnpj;
    }

    public TipoPessoa getTipo() {
        return tipo;
    }

    public void setTipo(TipoPessoa tipo) {
        this.tipo = tipo;
    }

    public String getApelido() {
        return apelido;
    }

    public void setApelido(String apelido) {
        this.apelido = apelido;
    }

    /**
     * Entities are equal when they share the same non-null database id, as
     * is conventional for JPA entities (a Serializable entity should define
     * equals/hashCode).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Fornecedor)) {
            return false;
        }
        Fornecedor other = (Fornecedor) obj;
        return id != null && id.equals(other.id);
    }

    /**
     * Constant per-class hash so the hash code does not change when the id
     * is assigned on persist (keeps entities stable in hash collections).
     */
    @Override
    public int hashCode() {
        return getClass().hashCode();
    }
}
|
#include "lpcsnoop/snoop.hpp"
#include <sdbusplus/bus.hpp>
#include <sdbusplus/test/sdbus_mock.hpp>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
using ::testing::_;
using ::testing::IsNull;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::StrEq;
namespace
{
// Fixture for testing class PostReporter. Provides an sdbusplus bus backed
// by a mock so no real D-Bus connection is needed.
class PostReporterTest : public ::testing::Test
{
protected:
PostReporterTest() : bus_mock(), bus(sdbusplus::get_mocked_new(&bus_mock))
{
}
~PostReporterTest()
{
}
// NiceMock suppresses "uninteresting call" warnings for bus methods the
// individual tests do not explicitly EXPECT_CALL.
NiceMock<sdbusplus::SdBusMock> bus_mock;
sdbusplus::bus::bus bus;
};
TEST_F(PostReporterTest, EmitsObjectsOnExpectedDbusPath)
{
EXPECT_CALL(bus_mock,
sd_bus_emit_object_added(IsNull(), StrEq(SNOOP_OBJECTPATH)))
.WillOnce(Return(0));
PostReporter testReporter(bus, SNOOP_OBJECTPATH, true);
testReporter.emit_object_added();
}
TEST_F(PostReporterTest, AddsObjectWithExpectedName)
{
EXPECT_CALL(bus_mock,
sd_bus_add_object_vtable(IsNull(), _, StrEq(SNOOP_OBJECTPATH),
StrEq(SNOOP_BUSNAME), _, _))
.WillOnce(Return(0));
PostReporter testReporter(bus, SNOOP_OBJECTPATH, true);
}
TEST_F(PostReporterTest, ValueReadsDefaultToZero)
{
PostReporter testReporter(bus, SNOOP_OBJECTPATH, true);
EXPECT_EQ(0, std::get<primary_post_code_t>(testReporter.value()));
}
TEST_F(PostReporterTest, SetValueToPositiveValueWorks)
{
PostReporter testReporter(bus, SNOOP_OBJECTPATH, true);
secondary_post_code_t secondaryCode = {123, 124, 125};
testReporter.value(std::make_tuple(65537, secondaryCode));
EXPECT_EQ(65537, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
}
TEST_F(PostReporterTest, SetValueMultipleTimesWorks)
{
PostReporter testReporter(bus, SNOOP_OBJECTPATH, true);
secondary_post_code_t secondaryCode = {10, 40, 0, 245, 56};
testReporter.value(std::make_tuple(123, secondaryCode));
EXPECT_EQ(123, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
secondaryCode = {0, 0, 0, 0, 0};
testReporter.value(std::make_tuple(45, secondaryCode));
EXPECT_EQ(45, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
secondaryCode = {23, 200, 0, 45, 2};
testReporter.value(std::make_tuple(0, secondaryCode));
EXPECT_EQ(0, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
secondaryCode = {10, 40, 0, 35, 78};
testReporter.value(std::make_tuple(46, secondaryCode));
EXPECT_EQ(46, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
secondaryCode = {10, 40, 0, 35, 78};
testReporter.value(std::make_tuple(46, secondaryCode));
EXPECT_EQ(46, std::get<primary_post_code_t>(testReporter.value()));
EXPECT_EQ(secondaryCode,
std::get<secondary_post_code_t>(testReporter.value()));
}
} // namespace
|
<reponame>xiaonanln/python-usaco
"""
ID: isaiahl1
LANG: PYTHON2
TASK: milk
"""
TASK = 'milk'
import operator
def readints(fin):
    """Read one whitespace-separated line of integers from *fin* as a tuple."""
    line = fin.readline()
    return tuple(map(int, line.split()))
def main(fin, fout):
    """Solve USACO 'milk': buy N units of milk at minimum total cost.

    fin  -- input stream: first line "N M", then M lines of "price units".
    fout -- output stream: receives the minimum total cost on one line.
    """
    N, M = map(int, fin.readline().strip().split())
    farmers = []
    for _ in range(M):
        farmers.append(readints(fin))
    # Greedy: tuples sort by price first, so buy from the cheapest farmers.
    farmers.sort()
    cost = 0
    for price, units in farmers:
        buy_units = min(units, N)
        cost += buy_units * price
        N -= buy_units
        if N == 0:
            break
    # Write the answer explicitly (works under both Python 2 and 3, and
    # replaces the leftover debug prints the original sent to stdout).
    fout.write('%d\n' % cost)
# Open the USACO-style <task>.in / <task>.out files and run the solver; the
# nested 'with' blocks close both files on exit.
fin = open (TASK + '.in', 'r')
fout = open (TASK + '.out', 'w')
with fin:
with fout:
main(fin, fout)
|
<!DOCTYPE html>
<html>
<head>
<title>Text Preview</title>
<style>
body {
font-family: Arial;
font-size: 11pt;
}
#input {
margin: 10px;
padding: 10px;
width: 50%;
height: 300px;
border: 1px solid #ccc;
font-family: Arial;
font-size: 11pt;
}
#output {
margin: 10px;
padding: 10px;
width: 50%;
height: 300px;
border: 1px solid #ccc;
font-family: Arial;
font-size: 11pt;
}
</style>
</head>
<body>
<h1>Text Preview</h1>
<div>
<textarea id="input" onkeyup="updatePreview()"></textarea>
<div id="output"></div>
</div>
<script>
// Mirror the textarea contents into the preview pane.
function updatePreview() {
  const input = document.getElementById("input");
  const output = document.getElementById("output");
  // Use textContent instead of innerHTML so typed markup is displayed as
  // plain text rather than parsed (prevents HTML/script injection).
  output.textContent = input.value;
}
</script>
</body>
</html>
|
/*
* Copyright (c) 2016 QLogic Corporation.
* All rights reserved.
* www.qlogic.com
*
* See LICENSE.qede_pmd for copyright and licensing details.
*/
#ifndef __ECORE_HSI_INIT_TOOL__
#define __ECORE_HSI_INIT_TOOL__
/**************************************/
/* Init Tool HSI constants and macros */
/**************************************/
/* Width of GRC address in bits (addresses are specified in dwords) */
#define GRC_ADDR_BITS 23
#define MAX_GRC_ADDR ((1 << GRC_ADDR_BITS) - 1)
/* indicates an init that should be applied to any phase ID */
#define ANY_PHASE_ID 0xffff
/* Max size in dwords of a zipped array */
#define MAX_ZIPPED_SIZE 8192
enum init_modes {
MODE_BB_A0_DEPRECATED,
MODE_BB_B0,
MODE_K2,
MODE_ASIC,
MODE_EMUL_REDUCED,
MODE_EMUL_FULL,
MODE_FPGA,
MODE_CHIPSIM,
MODE_SF,
MODE_MF_SD,
MODE_MF_SI,
MODE_PORTS_PER_ENG_1,
MODE_PORTS_PER_ENG_2,
MODE_PORTS_PER_ENG_4,
MODE_100G,
MODE_E5,
MAX_INIT_MODES
};
enum init_phases {
PHASE_ENGINE,
PHASE_PORT,
PHASE_PF,
PHASE_VF,
PHASE_QM_PF,
MAX_INIT_PHASES
};
enum init_split_types {
SPLIT_TYPE_NONE,
SPLIT_TYPE_PORT,
SPLIT_TYPE_PF,
SPLIT_TYPE_PORT_PF,
SPLIT_TYPE_VF,
MAX_INIT_SPLIT_TYPES
};
struct fw_asserts_ram_section {
/* The offset of the section in the RAM in RAM lines (64-bit units) */
__le16 section_ram_line_offset;
/* The size of the section in RAM lines (64-bit units) */
__le16 section_ram_line_size;
/* The offset of the asserts list within the section in dwords */
u8 list_dword_offset;
/* The size of an assert list element in dwords */
u8 list_element_dword_size;
u8 list_num_elements /* The number of elements in the asserts list */;
/* The offset of the next list index field within the section in dwords */
u8 list_next_index_dword_offset;
};
struct fw_ver_num {
u8 major /* Firmware major version number */;
u8 minor /* Firmware minor version number */;
u8 rev /* Firmware revision version number */;
/* Firmware engineering version number (for bootleg versions) */
u8 eng;
};
struct fw_ver_info {
__le16 tools_ver /* Tools version number */;
u8 image_id /* FW image ID (e.g. main, l2b, kuku) */;
u8 reserved1;
struct fw_ver_num num /* FW version number */;
__le32 timestamp /* FW Timestamp in unix time (sec. since 1970) */;
__le32 reserved2;
};
struct fw_info {
struct fw_ver_info ver /* FW version information */;
/* Info regarding the FW asserts section in the Storm RAM */
struct fw_asserts_ram_section fw_asserts_section;
};
struct fw_info_location {
/* GRC address where the fw_info struct is located. */
__le32 grc_addr;
/* Size of the fw_info structure (thats located at the grc_addr). */
__le32 size;
};
/*
* Binary buffer header
*/
struct bin_buffer_hdr {
/* buffer offset in bytes from the beginning of the binary file */
__le32 offset;
__le32 length /* buffer length in bytes */;
};
/*
* binary init buffer types
*/
enum bin_init_buffer_type {
BIN_BUF_INIT_FW_VER_INFO /* fw_ver_info struct */,
BIN_BUF_INIT_CMD /* init commands */,
BIN_BUF_INIT_VAL /* init data */,
BIN_BUF_INIT_MODE_TREE /* init modes tree */,
BIN_BUF_INIT_IRO /* internal RAM offsets */,
MAX_BIN_INIT_BUFFER_TYPE
};
/*
* init array header: raw
*/
struct init_array_raw_hdr {
__le32 data;
/* Init array type, from init_array_types enum */
#define INIT_ARRAY_RAW_HDR_TYPE_MASK 0xF
#define INIT_ARRAY_RAW_HDR_TYPE_SHIFT 0
/* init array params */
#define INIT_ARRAY_RAW_HDR_PARAMS_MASK 0xFFFFFFF
#define INIT_ARRAY_RAW_HDR_PARAMS_SHIFT 4
};
/*
* init array header: standard
*/
struct init_array_standard_hdr {
__le32 data;
/* Init array type, from init_array_types enum */
#define INIT_ARRAY_STANDARD_HDR_TYPE_MASK 0xF
#define INIT_ARRAY_STANDARD_HDR_TYPE_SHIFT 0
/* Init array size (in dwords) */
#define INIT_ARRAY_STANDARD_HDR_SIZE_MASK 0xFFFFFFF
#define INIT_ARRAY_STANDARD_HDR_SIZE_SHIFT 4
};
/*
* init array header: zipped
*/
struct init_array_zipped_hdr {
__le32 data;
/* Init array type, from init_array_types enum */
#define INIT_ARRAY_ZIPPED_HDR_TYPE_MASK 0xF
#define INIT_ARRAY_ZIPPED_HDR_TYPE_SHIFT 0
/* Init array zipped size (in bytes) */
#define INIT_ARRAY_ZIPPED_HDR_ZIPPED_SIZE_MASK 0xFFFFFFF
#define INIT_ARRAY_ZIPPED_HDR_ZIPPED_SIZE_SHIFT 4
};
/*
* init array header: pattern
*/
struct init_array_pattern_hdr {
__le32 data;
/* Init array type, from init_array_types enum */
#define INIT_ARRAY_PATTERN_HDR_TYPE_MASK 0xF
#define INIT_ARRAY_PATTERN_HDR_TYPE_SHIFT 0
/* pattern size in dword */
#define INIT_ARRAY_PATTERN_HDR_PATTERN_SIZE_MASK 0xF
#define INIT_ARRAY_PATTERN_HDR_PATTERN_SIZE_SHIFT 4
/* pattern repetitions */
#define INIT_ARRAY_PATTERN_HDR_REPETITIONS_MASK 0xFFFFFF
#define INIT_ARRAY_PATTERN_HDR_REPETITIONS_SHIFT 8
};
/*
* init array header union
*/
union init_array_hdr {
struct init_array_raw_hdr raw /* raw init array header */;
/* standard init array header */
struct init_array_standard_hdr standard;
struct init_array_zipped_hdr zipped /* zipped init array header */;
struct init_array_pattern_hdr pattern /* pattern init array header */;
};
/*
* init array types
*/
enum init_array_types {
INIT_ARR_STANDARD /* standard init array */,
INIT_ARR_ZIPPED /* zipped init array */,
INIT_ARR_PATTERN /* a repeated pattern */,
MAX_INIT_ARRAY_TYPES
};
/*
* init operation: callback
*/
struct init_callback_op {
__le32 op_data;
/* Init operation, from init_op_types enum */
#define INIT_CALLBACK_OP_OP_MASK 0xF
#define INIT_CALLBACK_OP_OP_SHIFT 0
#define INIT_CALLBACK_OP_RESERVED_MASK 0xFFFFFFF
#define INIT_CALLBACK_OP_RESERVED_SHIFT 4
__le16 callback_id /* Callback ID */;
__le16 block_id /* Blocks ID */;
};
/*
* init operation: delay
*/
struct init_delay_op {
__le32 op_data;
/* Init operation, from init_op_types enum */
#define INIT_DELAY_OP_OP_MASK 0xF
#define INIT_DELAY_OP_OP_SHIFT 0
#define INIT_DELAY_OP_RESERVED_MASK 0xFFFFFFF
#define INIT_DELAY_OP_RESERVED_SHIFT 4
__le32 delay /* delay in us */;
};
/*
* init operation: if_mode
*/
struct init_if_mode_op {
__le32 op_data;
/* Init operation, from init_op_types enum */
#define INIT_IF_MODE_OP_OP_MASK 0xF
#define INIT_IF_MODE_OP_OP_SHIFT 0
#define INIT_IF_MODE_OP_RESERVED1_MASK 0xFFF
#define INIT_IF_MODE_OP_RESERVED1_SHIFT 4
/* Commands to skip if the modes dont match */
#define INIT_IF_MODE_OP_CMD_OFFSET_MASK 0xFFFF
#define INIT_IF_MODE_OP_CMD_OFFSET_SHIFT 16
__le16 reserved2;
/* offset (in bytes) in modes expression buffer */
__le16 modes_buf_offset;
};
/*
* init operation: if_phase
*/
struct init_if_phase_op {
__le32 op_data;
/* Init operation, from init_op_types enum */
#define INIT_IF_PHASE_OP_OP_MASK 0xF
#define INIT_IF_PHASE_OP_OP_SHIFT 0
/* Indicates if DMAE is enabled in this phase */
#define INIT_IF_PHASE_OP_DMAE_ENABLE_MASK 0x1
#define INIT_IF_PHASE_OP_DMAE_ENABLE_SHIFT 4
#define INIT_IF_PHASE_OP_RESERVED1_MASK 0x7FF
#define INIT_IF_PHASE_OP_RESERVED1_SHIFT 5
/* Commands to skip if the phases dont match */
#define INIT_IF_PHASE_OP_CMD_OFFSET_MASK 0xFFFF
#define INIT_IF_PHASE_OP_CMD_OFFSET_SHIFT 16
__le32 phase_data;
#define INIT_IF_PHASE_OP_PHASE_MASK 0xFF /* Init phase */
#define INIT_IF_PHASE_OP_PHASE_SHIFT 0
#define INIT_IF_PHASE_OP_RESERVED2_MASK 0xFF
#define INIT_IF_PHASE_OP_RESERVED2_SHIFT 8
#define INIT_IF_PHASE_OP_PHASE_ID_MASK 0xFFFF /* Init phase ID */
#define INIT_IF_PHASE_OP_PHASE_ID_SHIFT 16
};
/*
* init mode operators
*/
enum init_mode_ops {
INIT_MODE_OP_NOT /* init mode not operator */,
INIT_MODE_OP_OR /* init mode or operator */,
INIT_MODE_OP_AND /* init mode and operator */,
MAX_INIT_MODE_OPS
};
/*
* init operation: raw
*/
struct init_raw_op {
__le32 op_data;
/* Init operation, from init_op_types enum */
#define INIT_RAW_OP_OP_MASK 0xF
#define INIT_RAW_OP_OP_SHIFT 0
#define INIT_RAW_OP_PARAM1_MASK 0xFFFFFFF /* init param 1 */
#define INIT_RAW_OP_PARAM1_SHIFT 4
__le32 param2 /* Init param 2 */;
};
/*
* init array params
*/
struct init_op_array_params {
__le16 size /* array size in dwords */;
__le16 offset /* array start offset in dwords */;
};
/*
* Write init operation arguments
*/
union init_write_args {
/* value to write, used when init source is INIT_SRC_INLINE */
__le32 inline_val;
/* number of zeros to write, used when init source is INIT_SRC_ZEROS */
__le32 zeros_count;
/* array offset to write, used when init source is INIT_SRC_ARRAY */
__le32 array_offset;
/* runtime array params to write, used when init source is INIT_SRC_RUNTIME */
struct init_op_array_params runtime;
};
/*
* init operation: write
*/
struct init_write_op {
__le32 data;
/* init operation, from init_op_types enum */
#define INIT_WRITE_OP_OP_MASK 0xF
#define INIT_WRITE_OP_OP_SHIFT 0
/* init source type, taken from init_source_types enum */
#define INIT_WRITE_OP_SOURCE_MASK 0x7
#define INIT_WRITE_OP_SOURCE_SHIFT 4
#define INIT_WRITE_OP_RESERVED_MASK 0x1
#define INIT_WRITE_OP_RESERVED_SHIFT 7
/* indicates if the register is wide-bus */
#define INIT_WRITE_OP_WIDE_BUS_MASK 0x1
#define INIT_WRITE_OP_WIDE_BUS_SHIFT 8
/* internal (absolute) GRC address, in dwords */
#define INIT_WRITE_OP_ADDRESS_MASK 0x7FFFFF
#define INIT_WRITE_OP_ADDRESS_SHIFT 9
union init_write_args args /* Write init operation arguments */;
};
/*
* init operation: read
*/
struct init_read_op {
__le32 op_data;
/* init operation, from init_op_types enum */
#define INIT_READ_OP_OP_MASK 0xF
#define INIT_READ_OP_OP_SHIFT 0
/* polling type, from init_poll_types enum */
#define INIT_READ_OP_POLL_TYPE_MASK 0xF
#define INIT_READ_OP_POLL_TYPE_SHIFT 4
#define INIT_READ_OP_RESERVED_MASK 0x1
#define INIT_READ_OP_RESERVED_SHIFT 8
/* internal (absolute) GRC address, in dwords */
#define INIT_READ_OP_ADDRESS_MASK 0x7FFFFF
#define INIT_READ_OP_ADDRESS_SHIFT 9
/* expected polling value, used only when polling is done */
__le32 expected_val;
};
/*
* Init operations union
*/
union init_op {
struct init_raw_op raw /* raw init operation */;
struct init_write_op write /* write init operation */;
struct init_read_op read /* read init operation */;
struct init_if_mode_op if_mode /* if_mode init operation */;
struct init_if_phase_op if_phase /* if_phase init operation */;
struct init_callback_op callback /* callback init operation */;
struct init_delay_op delay /* delay init operation */;
};
/*
* Init command operation types
*/
enum init_op_types {
INIT_OP_READ /* GRC read init command */,
INIT_OP_WRITE /* GRC write init command */,
/* Skip init commands if the init modes expression doesn't match */
INIT_OP_IF_MODE,
/* Skip init commands if the init phase doesn't match */
INIT_OP_IF_PHASE,
INIT_OP_DELAY /* delay init command */,
INIT_OP_CALLBACK /* callback init command */,
MAX_INIT_OP_TYPES
};
/*
 * init polling types: comparison applied when polling a GRC register after
 * a read command. (Original comments here were copy-pasted from
 * init_source_types and described source types instead.)
 */
enum init_poll_types {
INIT_POLL_NONE /* no polling */,
INIT_POLL_EQ /* poll until the read value equals the expected value */,
INIT_POLL_OR /* OR-style comparison of read vs. expected value -- TODO(review): confirm exact semantics against the init tool docs */,
INIT_POLL_AND /* AND-style comparison of read vs. expected value -- TODO(review): confirm exact semantics against the init tool docs */,
MAX_INIT_POLL_TYPES
};
/*
* init source types
*/
enum init_source_types {
INIT_SRC_INLINE /* init value is included in the init command */,
INIT_SRC_ZEROS /* init value is all zeros */,
INIT_SRC_ARRAY /* init value is an array of values */,
INIT_SRC_RUNTIME /* init value is provided during runtime */,
MAX_INIT_SOURCE_TYPES
};
/*
* Internal RAM Offsets macro data
*/
struct iro {
__le32 base /* RAM field offset */;
__le16 m1 /* multiplier 1 */;
__le16 m2 /* multiplier 2 */;
__le16 m3 /* multiplier 3 */;
__le16 size /* RAM field size */;
};
#endif /* __ECORE_HSI_INIT_TOOL__ */
|
#!/bin/bash
# Ensure nvm is available (cloning its latest tagged release from GitHub if
# missing), then install the node version given as the first argument.
export NVM_DIR="$HOME/.nvm"
# Load nvm if it is already installed.
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
INSTALLED="$(command -v nvm)"
if [ "$INSTALLED" != "nvm" ]; then
export NVM_DIR="$HOME/.nvm" && (
git clone https://github.com/nvm-sh/nvm.git "$NVM_DIR"
# Abort the subshell if the clone/cd failed rather than checking out in
# the wrong directory.
cd "$NVM_DIR" || exit 1
# Check out the most recent v* release tag.
git checkout "$(git describe --abbrev=0 --tags --match "v[0-9]*" "$(git rev-list --tags --max-count=1)")"
) && \. "$NVM_DIR/nvm.sh"
fi
nvm install "$1"
|
import secrets


def generate_password(length, characters):
    """Return a password of *length* characters drawn uniformly from *characters*.

    Uses the ``secrets`` module instead of ``random`` so the choices come
    from a cryptographically secure source, which is required for passwords.
    """
    return ''.join(secrets.choice(characters) for _ in range(length))


print(generate_password(10, 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'))
|
# Build MM-PBSA topologies (complex/receptor/ligand) for system ${NAME} with
# AmberTools' ante-MMPBSA.py, stripping solvent and counter-ions (-s) and
# using mbondi2 radii. -m :1-${END1} presumably selects residues 1-END1 as
# the receptor -- confirm against the ante-MMPBSA.py documentation.
NAME=msm0213
END1=64
ante-MMPBSA.py -p ../${NAME}_buffer.prmtop -c complex.prmtop -r receptor.prmtop -l ligand.prmtop -s :WAT,K+,K,Na+,NA,CL,Cl- -m :1-${END1} --radii mbondi2
|
#!/bin/bash
# Assemble a single index.html for a test-report directory by concatenating
# a header, one node entry per suite HTML file, and a footer.
REPORT_PATH=$1
if [[ "$REPORT_PATH" == "" ]]; then
exit 1
fi
# Start from a clean index file.
[[ -f "$REPORT_PATH/index.html" ]] && rm "$REPORT_PATH/index.html"
cat /opt/report/header.html | tee -a "$REPORT_PATH/index.html"
# The -iname pattern must be quoted, otherwise the shell expands it against
# the current directory before find ever sees it.
for SUITE_FILE in $(find "$REPORT_PATH" -maxdepth 1 -iname '*.html' -type f | xargs -r -l basename); do
if [[ "$SUITE_FILE" != "index.html" ]]; then
# Suite name = file name minus the "TEST-" prefix and ".html" suffix.
SUITE_NAME=$(echo "$SUITE_FILE" | sed 's/^TEST\-//' | sed 's/\.html//')
cat /opt/report/node.html | SUITE_FILE="$SUITE_FILE" SUITE_NAME="$SUITE_NAME" envsubst | tee -a "$REPORT_PATH/index.html"
fi
done
cat /opt/report/footer.html | tee -a "$REPORT_PATH/index.html"
exit 0
|
<filename>Exp5_4prgs/src/exp5_4prgs/Prg3exp5.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package exp5_4prgs;
import java.util.Scanner;
/**
*
* @author prakash
*/
public class Prg3exp5 {
public static void main(String[] args) {
// TODO code application logic here
String s,rev;
// TODO code application logic here
int j;
Scanner sc= new Scanner(System.in);
System.out.println("Enter String : ");
s= sc.nextLine();
System.err.println("the entered string is "+s);
System.err.println("the reverse string is ");
int len=s.length();
for(j=len-1;j>=0;j--)
{
System.err.print(s.charAt(j));
}
System.err.println("\n");
}
}
|
#!/bin/bash
# Log a message in cyan. printf '%b' interprets the escape codes stored
# literally by the single-quoted assignments below; plain echo (without -e)
# would print the raw "\033" text instead of coloring the output.
log() {
CYAN='\033[0;36m'
NONE='\033[0m'
printf "%b%s%b\n" "$CYAN" "$1" "$NONE"
}
# Check out specific version
log "Checking out specific version..."
git -c advice.detachedHead=false checkout "$1"
# Update colonyNetwork dependencies
log "Updating colonyNetwork dependencies..."
yarn
|
# Package metadata for npth (New GNU Portable Threads Library).
MININIX_PKG_HOMEPAGE=https://www.gnupg.org/related_software/npth/
MININIX_PKG_DESCRIPTION="New GNU Portable Threads Library"
MININIX_PKG_VERSION=1.6
# SHA-256 checksum of the source tarball referenced by MININIX_PKG_SRCURL.
MININIX_PKG_SHA256=1393abd9adcf0762d34798dc34fdcf4d0d22a8410721e76f1e3afcd1daa4e2d1
MININIX_PKG_SRCURL=https://www.gnupg.org/ftp/gcrypt/npth/npth-${MININIX_PKG_VERSION}.tar.bz2
|
#!/bin/sh
# Bootstrap script: create the data directory and generate .env files with
# random credentials for the databunker + MySQL containers.
echo 'create ./data directory'
mkdir data || true
chmod 777 data
mkdir -p .env
echo 'generating .env/mysql-root.env'
# Random password from /dev/urandom; length defaults to 32 and can be
# overridden by the first script argument. The variable expansions below are
# quoted because the character set includes '*', which could otherwise be
# glob-expanded by the shell.
MYSQLROOT=$(< /dev/urandom LC_CTYPE=C tr -dc '_\*^A-Z-a-z-0-9' | head -c "${1:-32}")
echo 'MYSQL_ROOT_PASSWORD='"$MYSQLROOT" > .env/mysql-root.env
echo 'generating .env/mysql.env'
MYSQLUSER=$(< /dev/urandom LC_CTYPE=C tr -dc '_\*^A-Z-a-z-0-9' | head -c "${1:-32}")
echo 'MYSQL_DATABASE=databunkerdb' > .env/mysql.env
echo 'MYSQL_USER=bunkeruser' >> .env/mysql.env
echo 'MYSQL_PASSWORD='"$MYSQLUSER" >> .env/mysql.env
echo 'generating .env/databunker.env'
# Hex master key; 48 characters by default.
KEY=$(< /dev/urandom LC_CTYPE=C tr -dc 'a-f0-9' | head -c "${1:-48}")
echo 'DATABUNKER_MASTERKEY='"$KEY" > .env/databunker.env
echo 'MYSQL_USER_NAME=bunkeruser' >> .env/databunker.env
echo 'MYSQL_USER_PASS='"$MYSQLUSER" >> .env/databunker.env
echo 'MYSQL_HOST=mysql' >> .env/databunker.env
echo 'MYSQL_PORT=3306' >> .env/databunker.env
echo 'generating .env/databunker-root.env'
echo 'DATABUNKER_ROOTTOKEN=DEMO' > .env/databunker-root.env
|
// Factory for a tiny PostCSS plugin used by the fixtures below: whenever a
// declaration satisfies `changeWhenMatches`, its property is renamed to
// `prop` and its value replaced with a sentinel, which makes relative plugin
// ordering observable in the output CSS.
const orderDetectionPlugin = (prop, changeWhenMatches) => ({
	postcssPlugin: 'order-detection',
	Declaration(decl) {
		if (!changeWhenMatches(decl)) {
			return;
		}
		decl.prop = prop;
		decl.value = 'changed-this-declaration';
	},
})
orderDetectionPlugin.postcss = true
// Fixture manifest consumed by the test runner. Keys name a CSS fixture
// ("<source>[:variant]"); values describe how to run it:
//   message          - human-readable description of the scenario
//   options          - options passed to the plugin under test
//   expect           - optional override of the expected-output filename
//   result           - optional override of the result filename
//   warnings         - expected number of warnings emitted
//   before()/after() - optional hooks run around the fixture
module.exports = {
	'basic': {
		message: 'supports basic usage'
	},
	// Browserslist-driven variants: the same source compiled for different
	// browser support targets.
	'basic:ff49': {
		message: 'supports { browsers: "ff >= 49" } usage',
		options: {
			browsers: 'ff >= 49'
		}
	},
	'basic:ff66': {
		message: 'supports { browsers: "ff >= 66" } usage',
		options: {
			browsers: 'ff >= 66'
		}
	},
	'basic:ch38': {
		message: 'supports { browsers: "chrome >= 38" } usage',
		options: {
			browsers: 'chrome >= 38'
		}
	},
	'basic:ch88-ff78': {
		message: 'uses :is pseudo for nesting with modern browsers { browsers: "chrome >= 88, firefox >= 78", stage: 0 }',
		options: {
			browsers: 'chrome >= 88, firefox >= 78',
			stage: 0
		}
	},
	'basic:ch88-ff78:no-is-pseudo': {
		message: ':is pseudo for nesting can be disable with modern browsers { browsers: "chrome >= 88, firefox >= 78", stage: 0, features: { nesting-rules: { noIsPseudoSelector: true } } } usage',
		options: {
			browsers: 'chrome >= 88, firefox >= 78',
			stage: 0,
			features: {
				'nesting-rules': {
					noIsPseudoSelector: true
				}
			}
		}
	},
	'basic:ch88-ff78-saf10': {
		message: 'does not use :is pseudo for nesting with an older browser { browsers: "chrome >= 88, firefox >= 78, safari >= 10", stage: 0 } usage',
		options: {
			browsers: 'chrome >= 88, firefox >= 78, safari >= 10',
			stage: 0
		}
	},
	'basic:stage0': {
		message: 'supports { stage: 0 } usage',
		options: {
			stage: 0
		}
	},
	'basic:stage0-ff49': {
		message: 'supports { browsers: "ff >= 49", stage: 0 } usage',
		options: {
			browsers: 'ff >= 49',
			stage: 0
		}
	},
	'basic:stage0-ff66': {
		message: 'supports { browsers: "ff >= 66", stage: 0 } usage',
		options: {
			browsers: 'ff >= 66',
			stage: 0
		}
	},
	// Feature toggles independent of the stage setting.
	'basic:nesting': {
		message: 'supports { stage: false, features: { "nesting-rules": true } } usage',
		options: {
			stage: false,
			features: {
				'nesting-rules': true
			}
		}
	},
	'basic:autoprefixer': {
		message: 'supports { autoprefixer: { add: false } } usage',
		options: {
			autoprefixer: {
				add: false
			}
		}
	},
	'basic:autoprefixer:false': {
		message: 'supports { autoprefixer: false } usage',
		options: {
			autoprefixer: false
		}
	},
	'custom-properties': {
		message: 'supports { browsers: "ie >= 10" } usage',
		options: {
			browsers: 'ie >= 10'
		}
	},
	'custom-properties:disabled': {
		message: 'supports { browsers: "ie >= 10", features: { "custom-properties": false } } usage',
		options: {
			browsers: 'ie >= 10',
			features: {
				'custom-properties': false
			}
		}
	},
	'custom-properties:enabled': {
		message: 'supports { browsers: "chrome >= 60", features: { "custom-properties": true } } usage',
		options: {
			browsers: 'chrome >= 60',
			features: {
				'custom-properties': true
			}
		}
	},
	// The insert:* fixtures use orderDetectionPlugin (defined above) so the
	// output CSS reveals whether the inserted plugin ran before or after the
	// feature plugin it is anchored to.
	'insert:baseline': {
		message: 'supports { insertBefore/insertAfter } usage baseline',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			}
		}
	},
	// "match-source" variants match the untransformed value (lab(...));
	// "match-result" variants match the transformed value (rgba(...)).
	'insert:before:match-source': {
		message: 'supports { insertBefore } usage when looking for source',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			},
			insertBefore: {
				'lab-function': [
					orderDetectionPlugin('before', (decl) => {
						return decl.value.indexOf('lab(') === 0;
					})
				]
			}
		}
	},
	'insert:before:match-result': {
		message: 'supports { insertBefore } usage when looking for a result',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			},
			insertBefore: {
				'lab-function': [
					orderDetectionPlugin('before', (decl) => {
						return decl.value.indexOf('rgba(') === 0;
					})
				]
			}
		}
	},
	'insert:after:match-source': {
		message: 'supports { insertAfter } usage when looking for source',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			},
			insertAfter: {
				'lab-function': [
					orderDetectionPlugin('after', (decl) => {
						return decl.value.indexOf('lab(') === 0;
					})
				]
			}
		}
	},
	'insert:after:match-result': {
		message: 'supports { insertAfter } usage when looking for a result',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			},
			insertAfter: {
				'lab-function': [
					orderDetectionPlugin('after', (decl) => {
						return decl.value.indexOf('rgba(') === 0;
					})
				]
			}
		}
	},
	// Same as above, but the inserted plugin is passed bare rather than
	// wrapped in an array.
	'insert:after:match-result:exec': {
		message: 'supports { insertAfter with a single plugin, not an array } usage when looking for a result',
		options: {
			stage: 0,
			features: {
				'lab-function': true
			},
			insertAfter: {
				'lab-function': orderDetectionPlugin('after', (decl) => {
					return decl.value.indexOf('rgba(') === 0;
				})
			}
		},
		expect: 'insert.after.match-result.expect.css'
	},
	'import': {
		message: 'supports { importFrom: { customMedia, customProperties, customSelectors, environmentVariables } } usage',
		options: {
			importFrom: {
				customMedia: {
					'--narrow-window': '(max-width: env(--sm))'
				},
				customProperties: {
					'--order': '1'
				},
				customSelectors: {
					':--heading': 'h1, h2, h3, h4, h5, h6'
				},
				environmentVariables: {
					'--sm': '40rem'
				}
			},
			stage: 0
		}
	},
	// Round-trips exportTo: before() snapshots and deletes any previously
	// generated export files; after() re-reads the fresh exports and asserts
	// they are identical to the snapshot, i.e. exporting is deterministic.
	'basic:export': {
		message: 'supports { stage: 0 } usage',
		options: {
			stage: 0,
			exportTo: [
				'test/generated-custom-exports.css',
				'test/generated-custom-exports.js',
				'test/generated-custom-exports.json',
				'test/generated-custom-exports.mjs'
			]
		},
		expect: 'basic.stage0.expect.css',
		result: 'basic.stage0.result.css',
		before() {
			try {
				global.__exportTo = {
					css: require('fs').readFileSync('test/generated-custom-exports.css', 'utf8'),
					js: require('fs').readFileSync('test/generated-custom-exports.js', 'utf8'),
					json: require('fs').readFileSync('test/generated-custom-exports.json', 'utf8'),
					mjs: require('fs').readFileSync('test/generated-custom-exports.mjs', 'utf8')
				};
				require('fs').rmSync('test/generated-custom-exports.css');
				require('fs').rmSync('test/generated-custom-exports.js');
				require('fs').rmSync('test/generated-custom-exports.json');
				require('fs').rmSync('test/generated-custom-exports.mjs');
			} catch (_) {
				// ignore errors here.
				// If the files are removed manually test run will regenerate these.
				// The after step will still fail.
				// The real test is in the after step.
			}
		},
		after() {
			global.__exportAs = {
				css: require('fs').readFileSync('test/generated-custom-exports.css', 'utf8'),
				js: require('fs').readFileSync('test/generated-custom-exports.js', 'utf8'),
				json: require('fs').readFileSync('test/generated-custom-exports.json', 'utf8'),
				mjs: require('fs').readFileSync('test/generated-custom-exports.mjs', 'utf8')
			};
			Object.keys(global.__exportTo).forEach(key => {
				if (global.__exportTo[key] !== global.__exportAs[key]) {
					throw new Error(`The original ${key} file did not match the freshly exported copy`);
				}
			});
		}
	},
	// "postcss-logica" is presumably a deliberate typo to trigger the
	// unknown-feature warning — confirm against the runner; the fixture
	// expects 3 warnings in total for these feature keys.
	"unknown-feature": {
		message: 'warns on unknown features',
		warnings: 3,
		options: {
			features: {
				"custom-media": true,
				"postcss-logical": true,
				"postcss-logica": true,
			}
		},
	}
};
|
/**
 * Simple server to serve builds.
 *
 * Serves the ./dist directory statically on the port configured via .env.
 */
const express = require("express");
const path = require("path");
// NOTE(review): webpack is required but never used below — presumably kept
// for its side effects or a planned dev-middleware; confirm before removing.
const webpack = require("webpack");
const assert = require("assert");

require('dotenv').config();

// Refuse to start without an explicit port rather than picking a default.
const port = process.env.PORT;
assert(port, "Specify a PORT in a .env");

const app = express();
app.use(express.static(path.join(__dirname, "./dist")));
app.listen(port, () => console.log(`Server listening on port ${port}`));
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.