text stringlengths 1 1.05M |
|---|
def sumOfN(n):
    """Read n integers from standard input and return their sum.

    Args:
        n: how many integers to read (one per line).

    Returns:
        The sum of the n integers entered.
    """
    total = 0
    for _ in range(n):
        # The original also stored each number in an unused list; dropped.
        total += int(input())
    return total
# Driver: read 6 numbers and print their sum.
# BUGFIX: the trailing "// 21" was C-style, not a valid Python comment.
n = 6
total = sumOfN(n)
print(total)  # e.g. inputs 1..6 -> 21
# Create an empty set to collect the distinct values.
# BUGFIX: restored the indentation that was lost in this snippet; as written
# the loop body was not indented and the script would not run.
result = set()
nums = [1, 2, 3, 4, 5, 2, 3]
# Iterate over the list
for num in nums:
    # Add each item to the set if it is not already present
    # (the membership test is redundant with set.add but kept for clarity).
    if num not in result:
        result.add(num)
# Print the resulting list
print(list(result))  # [1, 2, 3, 4, 5]
package cc.soham.togglesample;
import android.widget.ProgressBar;
/**
 * Simple interface to facilitate Espresso testing: exposes the screen's
 * ProgressBar so a UI test can inspect its state directly.
 * Presumably implemented by the Activity under test — confirm against callers.
 */
public interface ProgressBarInterface {
/** @return the ProgressBar this component displays. */
ProgressBar getProgressBar();
}
|
#include "Snake.h"
#include <iostream>
using namespace std;
// Build the initial snake: the head at (headY, headX) rendered as '>',
// travelling LEFT, with three BODY segments trailing at increasing x.
Snake::Snake(int headY, int headX)
:direction{LEFT}
{
snake.push_back(Point{headY, headX, '>'}); //add the head of the snake
for(int i=1; i<=3; i++)
snake.push_back(Point{headY, headX+i, BODY}); // initial body: three segments after the head
}
// Nothing to release explicitly; the std::list frees its nodes itself.
Snake::~Snake(){}
// Return true when the head occupies the same cell as any body segment.
bool Snake::isBitten(void){
    Point head = snake.front();
    auto part = snake.begin();
    for(++part; part != snake.end(); ++part){
        if(head.getX() == part->getX() && head.getY() == part->getY())
            return true;
    }
    return false;
}
bool Snake::hasBitSnack(int snackY, int snackX){
return snake.begin()->getY() == snackY
&& snake.begin()->getX() == snackX;
}
// True when the head has left the playing field on any side.
bool Snake::hasCrashedWall(void){
    int y = snake.front().getY();
    int x = snake.front().getX();
    if(y < GAME_TOP_WALL_Y || y > GAME_BOTTOM_WALL_Y)
        return true;
    return x < GAME_LEFT_WALL_X || x > GAME_RIGHT_WALL_X;
}
// Current number of segments, head included.
int Snake::getSize(void){
    return static_cast<int>(snake.size());
}
// Grow the snake by one segment appended past the current tail, continuing
// in whatever direction the last two segments already point.
void Snake::incSize(void){
auto tail = snake.end();
//since list::end() returns one element past the actual last one we will decrease by one the tail iterator
tail--; //now we actually pointing to the tail
int tailX = tail->getX();
int tailY = tail->getY();
//now we must determine the direction which is easy by just finding the coordinates of the previous to tail element
// NOTE: --tail modifies `tail` itself, which is safe here only because
// tailX/tailY were saved above; `prev` aliases the second-to-last segment.
auto prev = --tail;
int prevX = prev->getX();
int prevY = prev->getY();
if(prevY == tailY){
//if the 2 part are on the same 'height'
if (prevX < tailX) //if the tail continues to the left:
snake.push_back(Point{tailY, tailX + 1, BODY}); // add one part to the right of the tail
else if(prevX > tailX) //if the tail continues to the right:
snake.push_back(Point{tailY, tailX - 1, BODY}); // add one part to the left of the tail
}else{
if (prevY < tailY) //if the tail continues to the upper side:
snake.push_back(Point{tailY + 1, tailX, BODY}); // add one part facing down
else if (prevY > tailY) //if the tail continues to the lower side:
snake.push_back(Point{tailY - 1, tailX, BODY}); // add one part facing up
}
}
void Snake::updateHead(void){
auto head = snake.begin();
switch (this->direction)
{
case UP:
head->moveUp();
break;
case DOWN:
head->moveDown();
break;
case LEFT:
head->moveLeft();
break;
case RIGHT:
head->moveRight();
break;
}
}
void Snake::printSnake(void){
//We print each element of the snake-list
for(auto bodyPart : snake){
bodyPart.printImg();
}
refreshScreen(); //finally call the previously implemented function at Graphics.cpp
//to update the screen so the changes become noticed
}
// Advance the whole snake one cell: erase the tail glyph, shift every body
// segment into its predecessor's cell, move the head, then redraw.
void Snake::move(void){
//now delete the tail print since the snake moves forward
auto tail = snake.end();
tail--;
printChar(tail->getY(), tail->getX(), ' ');
//and now we have to update all the other nodes of the body
// NOTE: `--tail` also mutates `tail`, but bodyP1 was copied first, so
// bodyP1 is the last segment and bodyP2 the one just before it.
auto bodyP1 = tail;
auto bodyP2 = --tail;
while(bodyP2 != snake.begin()){
*bodyP1 = *bodyP2;
bodyP1--;
bodyP2--;
}
//update the previous to head node
// (the loop stops before copying the head; do that final copy here and
// restamp the segment with the BODY glyph)
auto headPrev = snake.begin();
headPrev++;
*headPrev = *snake.begin();
headPrev->setImg(BODY);
//based on direction, update the head
this->updateHead();
this->printSnake(); // print the snake and update the screen
}
//Move Functions:
//For the move functions we must change
void Snake::moveUp(void){
snake.begin()->setImg('v');
this->direction = UP;
this->move();
}
void Snake::moveDown(void){
snake.begin()->setImg('^');
this->direction = DOWN;
this->move();
}
void Snake::moveLeft(void){
snake.begin()->setImg('>');
this->direction = LEFT;
this->move();
}
void Snake::moveRight(void){
snake.begin()->setImg('<');
this->direction = RIGHT;
this->move();
} |
<reponame>carlosrojaso/chapter<filename>server/src/controllers/Messages/resolver.ts
import { Resolver, Mutation, Arg } from 'type-graphql';
import MailerService from '../../services/MailerService';
import { Email } from './Email';
import { SendEmailInputs } from './inputs';
@Resolver()
export class EmailResolver {
/**
 * GraphQL mutation: sends an email via MailerService and echoes back the
 * message details so the caller can display or verify what was sent.
 *
 * @param data recipients, subject, and HTML body of the message
 * @returns the Email fields as recorded on the MailerService instance
 */
@Mutation(() => Email) async sendEmail(
@Arg('data') data: SendEmailInputs,
): Promise<Email> {
const email = new MailerService(data.to, data.subject, data.html);
await email.sendEmail();
// Return the mailer's own state (not the raw input) so any fields the
// service derived, e.g. backupText, are included in the response.
return {
ourEmail: email.ourEmail,
emailList: email.emailList,
subject: email.subject,
htmlEmail: email.htmlEmail,
backupText: email.backupText,
};
}
}
|
package elasta.orm.entity;
import elasta.orm.entity.core.Entity;
import java.util.Map;
import java.util.Objects;
/**
 * Pairs an {@link Entity} with the map of its field-level dependency
 * information, keyed by field name.
 * Created by sohan on 3/17/2017.
 */
final public class DependencyTpl {
    final Entity entity;
    final Map<String, DependencyInfo> fieldToDependencyInfoMap;

    /**
     * @param entity                   the entity this template describes; must not be null
     * @param fieldToDependencyInfoMap field name -> dependency info; must not be null
     */
    public DependencyTpl(Entity entity, Map<String, DependencyInfo> fieldToDependencyInfoMap) {
        // requireNonNull returns its argument, so check and assign in one step.
        this.entity = Objects.requireNonNull(entity);
        this.fieldToDependencyInfoMap = Objects.requireNonNull(fieldToDependencyInfoMap);
    }

    public Entity getEntity() {
        return entity;
    }

    public Map<String, DependencyInfo> getFieldToDependencyInfoMap() {
        return fieldToDependencyInfoMap;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DependencyTpl that = (DependencyTpl) o;
        // Objects.equals covers the null handling the hand-rolled version spelled out.
        return Objects.equals(entity, that.entity)
                && Objects.equals(fieldToDependencyInfoMap, that.fieldToDependencyInfoMap);
    }

    @Override
    public int hashCode() {
        return Objects.hash(entity, fieldToDependencyInfoMap);
    }

    @Override
    public String toString() {
        return "DependencyTpl{" +
            "entity=" + entity +
            ", fieldToDependencyInfoMap=" + fieldToDependencyInfoMap +
            '}';
    }

    /** Registers dependencyInfo under its field's name, replacing any previous entry. */
    public void add(DependencyInfo dependencyInfo) {
        fieldToDependencyInfoMap.put(
            dependencyInfo.getField().getName(),
            dependencyInfo
        );
    }
}
|
#!/usr/bin/env bash
# Stress test: concurrently query, attach, and detach two external
# dictionaries to shake out races in dictionary loading/reloading.
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../shell_config.sh
set -e
# Create two MergeTree source tables (disjoint key ranges) and two
# dictionaries over them: dict1 with the FLAT layout, dict2 with a CACHE layout.
$CLICKHOUSE_CLIENT -n -q "
DROP DATABASE IF EXISTS database_for_dict;
DROP TABLE IF EXISTS table_for_dict1;
DROP TABLE IF EXISTS table_for_dict2;
CREATE TABLE table_for_dict1 (key_column UInt64, value_column String) ENGINE = MergeTree ORDER BY key_column;
CREATE TABLE table_for_dict2 (key_column UInt64, value_column String) ENGINE = MergeTree ORDER BY key_column;
INSERT INTO table_for_dict1 SELECT number, toString(number) from numbers(1000);
INSERT INTO table_for_dict2 SELECT number, toString(number) from numbers(1000, 1000);
CREATE DATABASE database_for_dict;
CREATE DICTIONARY database_for_dict.dict1 (key_column UInt64, value_column String) PRIMARY KEY key_column SOURCE(CLICKHOUSE(HOST 'localhost' PORT 9000 USER 'default' TABLE 'table_for_dict1' PASSWORD '' DB '$CLICKHOUSE_DATABASE')) LIFETIME(MIN 1 MAX 5) LAYOUT(FLAT());
CREATE DICTIONARY database_for_dict.dict2 (key_column UInt64, value_column String) PRIMARY KEY key_column SOURCE(CLICKHOUSE(HOST 'localhost' PORT 9000 USER 'default' TABLE 'table_for_dict2' PASSWORD '' DB '$CLICKHOUSE_DATABASE')) LIFETIME(MIN 1 MAX 5) LAYOUT(CACHE(SIZE_IN_CELLS 150));
"
# Each thread* helper loops forever issuing one kind of statement; the callers
# bound them with `timeout`, so the loops only need to keep re-issuing work.
function thread1()
{
    while true; do $CLICKHOUSE_CLIENT --query "SELECT * FROM system.dictionaries FORMAT Null"; done
}
function thread2()
{
    # BUGFIX: was `CLICKHOUSE_CLIENT` without `$` — "command not found", so the
    # ATTACH was never executed.
    while true; do $CLICKHOUSE_CLIENT --query "ATTACH DICTIONARY database_for_dict.dict1" ||: ; done
}
function thread3()
{
    # BUGFIX: same missing `$` as thread2.
    while true; do $CLICKHOUSE_CLIENT --query "ATTACH DICTIONARY database_for_dict.dict2" ||:; done
}
function thread4()
{
    while true; do $CLICKHOUSE_CLIENT -n -q "
SELECT * FROM database_for_dict.dict1 FORMAT Null;
SELECT * FROM database_for_dict.dict2 FORMAT Null;
" ||: ; done
}
function thread5()
{
    # BUGFIX: the queries read `from numbers(1000) FROM FORMAT Null`, which is
    # not valid SQL; the intended form is `FROM numbers(1000) FORMAT Null`.
    while true; do $CLICKHOUSE_CLIENT -n -q "
SELECT dictGetString('database_for_dict.dict1', 'value_column', toUInt64(number)) FROM numbers(1000) FORMAT Null;
SELECT dictGetString('database_for_dict.dict2', 'value_column', toUInt64(number)) FROM numbers(1000) FORMAT Null;
" ||: ; done
}
function thread6()
{
    while true; do $CLICKHOUSE_CLIENT -q "DETACH DICTIONARY database_for_dict.dict1"; done
}
function thread7()
{
    while true; do $CLICKHOUSE_CLIENT -q "DETACH DICTIONARY database_for_dict.dict2"; done
}
# Make the workers visible to the `bash -c` subshells spawned below.
for fn in thread1 thread2 thread3 thread4 thread5 thread6 thread7; do
    export -f "$fn"
done
TIMEOUT=10
# Launch four concurrent copies of every worker; each copy is killed after
# $TIMEOUT seconds. Launch order matches the original unrolled list:
# thread1..thread7, repeated four times.
for _ in 1 2 3 4; do
    for fn in thread1 thread2 thread3 thread4 thread5 thread6 thread7; do
        timeout $TIMEOUT bash -c "$fn" 2> /dev/null &
    done
done
wait
$CLICKHOUSE_CLIENT -q "SELECT 'Still alive'"
# Re-attach both dictionaries (a detach may have been the last racing op).
$CLICKHOUSE_CLIENT -q "ATTACH DICTIONARY database_for_dict.dict1"
$CLICKHOUSE_CLIENT -q "ATTACH DICTIONARY database_for_dict.dict2"
$CLICKHOUSE_CLIENT -n -q "
DROP TABLE table_for_dict1;
DROP TABLE table_for_dict2;
DROP DATABASE database_for_dict;
"
|
#!/bin/bash
#
# Deploy a jar, source jar, and javadoc jar to Sonatype's snapshot repo.
#
# Adapted from https://coderwall.com/p/9b_lfq and
# http://benlimmer.com/2013/12/26/automatically-publish-javadoc-to-gh-pages-with-travis-ci/
SLUG="dropbox/Store"
JDK="oraclejdk8"
BRANCH="main"
set -e
# Only deploy from CI builds of the canonical repo, the chosen JDK, and the
# main branch — and never from pull request builds.
if [ "$TRAVIS_REPO_SLUG" != "$SLUG" ]; then
echo "Skipping snapshot deployment: wrong repository. Expected '$SLUG' but was '$TRAVIS_REPO_SLUG'."
elif [ "$TRAVIS_JDK_VERSION" != "$JDK" ]; then
echo "Skipping snapshot deployment: wrong JDK. Expected '$JDK' but was '$TRAVIS_JDK_VERSION'."
elif [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
echo "Skipping snapshot deployment: was pull request."
elif [ "$TRAVIS_BRANCH" != "$BRANCH" ]; then
echo "Skipping snapshot deployment: wrong branch. Expected '$BRANCH' but was '$TRAVIS_BRANCH'."
else
echo "Deploying store..."
# Decrypt the GPG keyring used for artifact signing, then upload.
openssl aes-256-cbc -md sha256 -d -in tools/release/secring.gpg.aes -out tools/release/secring.gpg -k "${ENCRYPT_KEY}"
./gradlew uploadArchives -PSONATYPE_USERNAME="${SONATYPE_USERNAME}" -PSONATYPE_PASSWORD="${SONATYPE_PASSWORD}" -PsigningKeyId="${SIGNING_ID}" -PsigningPassword="${SIGNING_PASSWORD}"
echo "Store deployed!"
fi
public int max(int a, int b) {
return (a > b) ? a : b;
} |
import React from 'react';
// Stand-in components — presumably module mocks for a gantt-chart library used
// in tests; confirm against the jest config that maps to this file.
// Renders a plain fragment instead of the real chart.
export const Gantt = () => <>Gantt Chart Mock</>;
// Pass-through theme provider: simply renders its children.
export const DefaultTheme: React.FC = ({ children }) => <>{children}</>;
|
#include <bits/stdc++.h>
using namespace std;
// sort function
void sortArray(vector<string> &arr)
{
sort(arr.begin(), arr.end());
}
// driver program
// driver program: sort a fixed list of fruit names and print one per line
int main()
{
    vector<string> arr = {"Apple", "Banana", "Cherry"};
    // sort the array in place
    sortArray(arr);
    // print the sorted array with a range-for instead of index arithmetic
    for (const auto &fruit : arr)
        cout << fruit << "\n";
    return 0;
}
package aufgabe9_6;
/** Unary operators of the expression language; currently only negation. */
public enum Unop {
    // utf8 sample comment (translated from German): "Little head into the
    // water, little tail up high." — kept to exercise UTF-8 source handling.
    Minus
}
|
#!/usr/bin/bash -ex
# Build the Ctrax Windows installer under cygwin: freeze the Python app with
# py2exe, collect native DLLs and data files into dist/, build the NSIS
# installer, then run it and launch the installed app as a smoke test.
cd ..
installer="./Ctrax-"`cat version.txt | tr -d '[:space:]'`"-installer.exe"
rm -f $installer
python setup_py2exe.py build
python setup_py2exe.py install
python setup_py2exe.py py2exe
# Bundle runtime dependencies and resources next to the frozen executable.
cp maintain/dlls/* dist/
cp build/lib.win32-2.7/*pyd dist/
cp -R xrc dist/
cp -R icons dist/
cp -R mpl-data dist/
# Package dist/ into the installer, then install and launch it.
/cygdrive/c/Program\ Files\ \(x86\)/NSIS/makensis.exe setup.nsi
$installer
/cygdrive/c/Program\ Files\ \(x86\)/Ctrax-0.5/Ctrax.exe
|
import net.patowen.tempotool.TickerSource;
public class BeatMackTickerSource implements TickerSource {
private BeatFunction beatFunction;
private boolean tickerRunning;
private Thread tickerThread;
public BeatMackTickerSource(BeatFunction beatFunction) {
this.beatFunction = beatFunction;
tickerRunning = false;
}
public void startTicker() {
if (!tickerRunning) {
tickerRunning = true;
tickerThread = new Thread(() -> {
int beatNumber = 1;
while (tickerRunning) {
double nextBeatInterval = beatFunction.getNextBeatInterval();
try {
Thread.sleep((long) (nextBeatInterval * 1000)); // Convert seconds to milliseconds
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
System.out.println("Beat " + beatNumber + " at " + System.currentTimeMillis());
beatNumber++;
}
});
tickerThread.start();
}
}
} |
/**
 * Return the largest Fibonacci number that is <= num.
 * E.g. getFibonacciNumber(500) === 377 (the sequence runs ... 233, 377, 610).
 * For num < 0 the loop never runs and the result is undefined, as before.
 *
 * @param {number} num inclusive upper bound
 * @returns {number|undefined} largest Fibonacci number not exceeding num
 */
const getFibonacciNumber = (num) => {
  let first = 0,
    second = 1,
    result;
  while (first <= num) {
    result = first; // remember the last Fibonacci value that still fit
    first = second;
    second = result + second;
  }
  return result;
};

// BUGFIX (comment only): the original claimed "=> 521", but 521 is not a
// Fibonacci number; the actual result is 377.
getFibonacciNumber(500); // => 377
def compare_versions(version1: str, version2: str) -> int:
    """Compare two dotted version strings on their first three components.

    Missing components are treated as 0, so "1.2" == "1.2.0".

    Returns:
        1 if version1 > version2, -1 if version1 < version2, else 0.
    """
    v1_parts = list(map(int, version1.split('.')))
    # BUGFIX: the original line below was missing its closing parenthesis,
    # which made the whole function a SyntaxError.
    v2_parts = list(map(int, version2.split('.')))
    # Pad both to three components so short versions compare correctly.
    while len(v1_parts) < 3:
        v1_parts.append(0)
    while len(v2_parts) < 3:
        v2_parts.append(0)
    for i in range(3):
        if v1_parts[i] > v2_parts[i]:
            return 1
        elif v1_parts[i] < v2_parts[i]:
            return -1
    return 0
import { Test, TestingModule } from '@nestjs/testing';
import { PageService } from '../../services/page.service';
import { getModelToken } from '@nestjs/mongoose';
import { Query, Model } from 'mongoose';
import { Block, BlockDocument } from '../../schemas/block.schema';
import { CreateBlockDto } from '../../dto/create-block.dto';
import { BlockDTOs } from '../../dto/extra-models/block-models';
import { mockPage, mockPageDocument, mockCreatePageDto } from '../mocks/page';
import {
HigherOrderBlockDocument,
HigherOrderBlock,
} from '../../schemas/higher-order-block.schema';
import { DatabaseService } from '../../services/database.service';
import { mockDatabase, mockDatabaseDocument } from '../mocks/database';
// Unit tests for PageService.insertOne validation: the Mongoose models are
// replaced with jest stubs, so only the service's own field checks run.
describe('PageService', () => {
let pageService: PageService;
let databaseService: DatabaseService;
let higherOrderBlockModel: Model<HigherOrderBlock>;
let blockModel: Model<Block>;
// Build a fresh Nest testing module per test with stubbed model tokens.
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
providers: [
PageService,
DatabaseService,
{
provide: getModelToken('HigherOrderBlock'),
useValue: {
new: jest.fn(),
constructor: jest.fn(),
find: jest.fn(),
findOne: jest.fn(),
update: jest.fn(),
create: jest.fn(),
remove: jest.fn(),
save: jest.fn(),
exec: jest.fn(),
},
},
{
provide: getModelToken('Block'),
useValue: {
new: jest.fn(),
constructor: jest.fn(),
exec: jest.fn(),
},
},
],
}).compile();
pageService = module.get<PageService>(PageService);
databaseService = module.get<DatabaseService>(DatabaseService);
higherOrderBlockModel = module.get<Model<HigherOrderBlock>>(
getModelToken('HigherOrderBlock'),
);
blockModel = module.get<Model<Block>>(getModelToken('Block'));
});
it('should be defined', () => {
expect(pageService).toBeDefined();
});
describe('insertOne', () => {
// A page without a database parent may only carry the title property;
// the extra number property must be rejected.
it('throws error if parent is not database, and user tries to create page with properites other than title property', async () => {
const pageDto = mockCreatePageDto('id-hello', 'Hello World', {
title: {
type: 'title',
title: [
{
type: 'text',
plain_text: '<NAME>',
text: { text: 'Hello World' },
},
],
},
invalid_field: {
type: 'number',
number: 5,
},
});
await expect(async () => {
await pageService.insertOne(pageDto);
}).rejects.toThrow(
'Invalid Field Exception: Pages with parents of type page can only use title property',
);
});
// With a database parent, property names must match the parent database's
// schema; databaseService.findOne is mocked to supply that schema.
it('throws error if not given valid properties of parent database', async () => {
const databaseInterface = mockDatabase();
jest
.spyOn(databaseService, 'findOne')
.mockImplementation(async () =>
mockDatabaseDocument(databaseInterface),
);
const pageDto = mockCreatePageDto(
'id-hello',
'Hello World',
{
title: {
type: 'title',
title: [
{
type: 'text',
plain_text: 'Hello World',
text: { text: 'Hello World' },
},
],
},
invalid_field: {
type: 'number',
number: 5,
},
},
{ database_id: 'database-id', type: 'database' },
);
await expect(async () => {
await pageService.insertOne(pageDto);
}).rejects.toThrow(
'Invalid Field exception: {invalid_field} not valid field(s) on database: {title, valid_field}',
);
});
// it('If failes creating block children, if something goes wrong throw exception', async () => {});
});
afterEach(() => {
jest.clearAllMocks();
});
});
|
#!/usr/bin/env bash
# Deploy with the Node version pinned in .nvmrc, then restore whatever
# version was active beforehand.
NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
currentNodeVersion=$(nvm current)
requiredNodeVersion=$(head -n 1 .nvmrc)
# ROBUSTNESS: quote the expansions so unexpected whitespace in the version
# strings cannot word-split the command line.
nvm use "${requiredNodeVersion}"
npm run deploy
nvm use "${currentNodeVersion}"
|
# Run the job list for this Lustre experiment, at most 5 jobs concurrently.
parallel --jobs 5 < ./results/exp_lustre_rw/run-3/lustre_8n_6t_6d_1000f_617m_5i/jobs/jobs_n2.txt
|
#!/bin/sh
# Export all rows of the listed Django apps (sites, zues) as an indented JSON
# fixture; --natural-foreign keeps foreign keys portable across databases.
python manage.py dumpdata --all --natural-foreign --indent 2 sites zues > zues/fixtures/demo_data.json
|
def sort_ascending(arr):
    """Return a new list containing arr's items in ascending order."""
    return sorted(arr)


print(sort_ascending([5, 4, 1, 6, 9]))  # [1, 4, 5, 6, 9]
// Style factory (JSS-style): given the active theme, return the class map for
// a card that flips in 3D (rotateY + backface-visibility suggest a flip card).
export default (theme) => ({
// container hosting the 3D flip transition
card: {
transformStyle: 'preserve-3d',
transition: 'transform 0.15s ease-in-out',
// WebKit needs the prefixed property
WebkitTransformStyle: 'preserve-3d',
},
// text shared by both faces; pointerEvents 'none' lets clicks fall through
faceCommon: {
fontWeight: 700,
marginTop: '20px',
marginLeft: '20px',
pointerEvents: 'none',
},
icon: {
fontSize: '40px',
color: theme.palette.primary.main,
},
avatar: {
position: 'absolute',
border: '3px solid black',
color: theme.palette.white,
backgroundColor: theme.palette.colors.coconut600,
},
// hide whichever face is rotated away from the viewer
cardContent: {
backfaceVisibility: 'hidden',
WebkitBackfaceVisibility: 'hidden',
},
// the face that starts rotated 180 degrees (shown when the card is flipped)
faceUp: {
transform: 'rotateY(180deg)',
},
});
|
<gh_stars>0
import React, { PureComponent } from 'react';
import RootScene from './src/RootScene';

// Application entry point: renders the navigation root only.
// PureComponent is harmless here since App receives no props or state.
export default class App extends PureComponent {
render() {
return (
<RootScene />
);
}
}
|
#!/bin/sh
# List every TODO/FIXME in the tree with file:line, colored, skipping binary
# files (-I) and the tools/ and .git/ directories.
grep -EIHnr --color --exclude-dir={tools,.git} 'TODO|FIXME' .
|
# Generated pipeline step: GATK GermlineCNVCaller in COHORT mode over scatter
# interval list 095, consuming the per-sample read counts from step 02 and the
# contig ploidy calls from step 04.
cd '/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_095'
set -o pipefail
# NOTE(review): this second cd repeats the first; kept as generated.
cd /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_095
# Pin MKL/OpenMP to one thread each to avoid oversubscription.
export MKL_NUM_THREADS=1
export OMP_NUM_THREADS=1
# One long continued command follows; comments cannot be placed inside it.
gatk --java-options "-Xmx40G" GermlineCNVCaller \
--run-mode COHORT \
-L /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_1_scatterIntervals/result/lindsay_exomeseq_3772.95.interval_list \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_09B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_12B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_06.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_09.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_10.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_12.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_18.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_19.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_23.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_27.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_175_33.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_181.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_181F1.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_196.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_196F1.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_196F2.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_23B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_03.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_09.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_13.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_15.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_21.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_22.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_37.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_38.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_39.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_273_40.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_31B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_38B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_42B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_56B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_60B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_64B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_67B.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_C04.count.hdf5 \
--input /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_02_CollectReadCounts/result/P_C08.count.hdf5 \
--contig-ploidy-calls /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_04_DetermineGermlineContigPloidyCohortMode/result/lindsay_exomeseq_3772-calls \
--interval-merging-rule OVERLAPPING_ONLY \
--output . \
--output-prefix gcc \
--verbosity DEBUG \
--p-alt 1e-6 \
--p-active 1e-2 \
--cnv-coherence-length 10000.0 \
--class-coherence-length 10000.0 \
--max-copy-number 5 \
--max-bias-factors 5 \
--mapping-error-rate 0.01 \
--interval-psi-scale 0.001 \
--sample-psi-scale 0.0001 \
--depth-correction-tau 10000.0 \
--log-mean-bias-standard-deviation 0.1 \
--init-ard-rel-unexplained-variance 0.1 \
--num-gc-bins 20 \
--gc-curve-standard-deviation 1.0 \
--copy-number-posterior-expectation-mode HYBRID \
--enable-bias-factors true \
--active-class-padding-hybrid-mode 50000 \
--learning-rate 0.05 \
--adamax-beta-1 0.9 \
--adamax-beta-2 0.99 \
--log-emission-samples-per-round 50 \
--log-emission-sampling-median-rel-error 0.005 \
--log-emission-sampling-rounds 10 \
--max-advi-iter-first-epoch 5000 \
--max-advi-iter-subsequent-epochs 100 \
--min-training-epochs 10 \
--max-training-epochs 100 \
--initial-temperature 2.0 \
--num-thermal-advi-iters 2500 \
--convergence-snr-averaging-window 500 \
--convergence-snr-trigger-threshold 0.1 \
--convergence-snr-countdown-window 10 \
--max-calling-iters 10 \
--caller-update-convergence-threshold 0.001 \
--caller-internal-admixing-rate 0.75 \
--caller-external-admixing-rate 1.00 \
--disable-annealing false
# Remove per-run caches left behind by the tool's Python/Theano backend.
rm -rf .cache .conda .config .theano
|
package org.datadryad.api;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCValue;
import org.dspace.content.authority.Concept;
import org.dspace.content.authority.Scheme;
import org.dspace.core.Context;
import org.dspace.core.ConfigurationManager;
import org.datadryad.rest.storage.StorageException;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.lang.*;
import java.lang.Exception;
import java.sql.SQLException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.datadryad.api.DryadJournalConcept.PAYMENT_PLAN;
import static org.datadryad.api.DryadJournalConcept.WEBSITE;
import org.dspace.content.Item;
/**
*
* @author <NAME> <<EMAIL>>
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class DryadFunderConcept extends DryadOrganizationConcept {
public static final String FUNDER_ID = "identifier";
public static final String ALT_LABEL = "altLabel";
public static final String COUNTRY = "country";
public static final String NSF_ID = "http://dx.doi.org/10.13039/100000001";
private static Logger log = Logger.getLogger(DryadFunderConcept.class);
static {
metadataProperties.setProperty(FUNDER_ID, "funder.identifier");
metadataProperties.setProperty(ALT_LABEL, "funder.altLabel");
metadataProperties.setProperty(COUNTRY, "funder.country");
defaultMetadataValues.setProperty(metadataProperties.getProperty(FUNDER_ID), "");
defaultMetadataValues.setProperty(metadataProperties.getProperty(ALT_LABEL), "");
defaultMetadataValues.setProperty(metadataProperties.getProperty(COUNTRY), "");
}
{
schemeName = ConfigurationManager.getProperty("solrauthority.searchscheme.dryad_fundingEntity");
}
public DryadFunderConcept() {
Context context = null;
try {
context = new Context();
create(context);
context.commit();
for (String prop : metadataProperties.stringPropertyNames()) {
String mdString = metadataProperties.getProperty(prop);
this.setConceptMetadataValue(mdString, defaultMetadataValues.getProperty(mdString));
}
} catch (Exception e) {
log.error("Couldn't make new concept: " + e.getMessage());
if (context != null) {
context.abort();
}
}
} // JAXB needs this
public DryadFunderConcept(Context context, Concept concept) {
super();
setUnderlyingConcept(context, concept);
fullName = getConceptMetadataValue(metadataProperties.getProperty(FULLNAME));
}
public DryadFunderConcept(Context context, String fullName) throws StorageException {
this.setFullName(fullName);
try {
context.commit();
} catch (Exception e) {
log.error("exception " + e.getMessage());
}
}
public String getFunderId() {
return getConceptMetadataValue(metadataProperties.getProperty(FUNDER_ID));
}
public void setFunderId(String value) {
setConceptMetadataValue(metadataProperties.getProperty(FUNDER_ID), value);
}
public String getAltLabel() {
return getConceptMetadataValue(metadataProperties.getProperty(FUNDER_ID));
}
public void setAltLabel(String value) {
setConceptMetadataValue(metadataProperties.getProperty(ALT_LABEL), value);
}
public void addAltLabel(String value) {
addConceptMetadataValue(metadataProperties.getProperty(ALT_LABEL), value);
}
public String getCountry() {
return getConceptMetadataValue(metadataProperties.getProperty(COUNTRY));
}
public void setCountry(String value) {
setConceptMetadataValue(metadataProperties.getProperty(COUNTRY), value);
}
@JsonIgnore
public static Boolean conceptIsValidFunder(Concept concept) {
return ((concept != null) && (concept.getSingleMetadata(metadataProperties.getProperty(FUNDER_ID)) != null));
}
public static DryadFunderConcept getFunderConceptMatchingFunderID(Context context, String funderID) {
DryadFunderConcept funderConcept = null;
Concept[] concepts = Concept.searchByMetadata(context, metadataProperties.getProperty(FUNDER_ID), funderID);
if (concepts.length > 0) {
funderConcept = new DryadFunderConcept(context, concepts[0]);
}
return funderConcept;
}
// In order to get all of the information into the fundingEntity string, the authority value will be the FundRef ID,
// while the value will be in the format "<grant number>@<funder name> (<country>)".
public static DCValue createFundingEntityMetadata(DryadFunderConcept funderConcept, String grantNumber, int confidence) {
DCValue result = new DCValue();
result.schema = "dryad";
result.element = "fundingEntity";
result.value = grantNumber + "@" + funderConcept.getFullName() + " (" + funderConcept.getCountry() + ")";
result.authority = funderConcept.getFunderId();
result.confidence = confidence;
return result;
}
public static String getFunderNameFromFundingEntity(DCValue fundingMetadata) {
Matcher matcher = Pattern.compile("(.*?)@(.+)\\s*(\\(.*\\))").matcher(fundingMetadata.value);
String result = null;
if (matcher.find()) {
result = matcher.group(2);
}
return result;
}
/**
 * Extracts the country segment — including the surrounding parentheses,
 * e.g. "(United States)" — from a packed fundingEntity value of the form
 * "&lt;grant number&gt;@&lt;funder name&gt; (&lt;country&gt;)". Returns null on no match.
 */
public static String getCountryFromFundingEntity(DCValue fundingMetadata) {
    Matcher matcher = Pattern.compile("(.*?)@(.+)\\s*(\\(.*\\))").matcher(fundingMetadata.value);
    String result = null;
    if (matcher.find()) {
        result = matcher.group(3);
    }
    return result;
}
/**
 * Extracts the grant number — everything before the first '@' — from a
 * packed fundingEntity value. Returns null when the value does not match.
 */
public static String getGrantNumberFromFundingEntity(DCValue fundingMetadata) {
    Pattern packedForm = Pattern.compile("(.*?)@(.+)\\s*(\\(.*\\))");
    Matcher matcher = packedForm.matcher(fundingMetadata.value);
    if (!matcher.find()) {
        return null;
    }
    return matcher.group(1);
}
}
|
<reponame>NicholasBlaskey/svg-rasterizer
package main
import (
"bytes"
"encoding/base64"
"encoding/xml"
"fmt"
"image"
"image/png"
"io/ioutil"
"math"
"math/rand"
"net/http"
"strconv"
"strings"
"syscall/js"
mgl "github.com/go-gl/mathgl/mgl32"
"github.com/nicholasblaskey/dat-gui-go-wasm/datGUI"
"github.com/nicholasblaskey/svg-rasterizer/board"
"github.com/nicholasblaskey/svg-rasterizer/triangulate"
)
// Color is an RGBA color with each channel normalized to [0, 1].
type Color struct {
	r float32
	g float32
	b float32
	a float32
}
// maxOfThree returns the largest of three float32 values.
func maxOfThree(x, y, z float32) float32 {
	inner := math.Max(float64(y), float64(z))
	return float32(math.Max(float64(x), inner))
}
// minOfThree returns the smallest of three float32 values.
func minOfThree(x, y, z float32) float32 {
	inner := math.Min(float64(y), float64(z))
	return float32(math.Min(float64(x), inner))
}
// crossProduct returns the scalar (z-component) cross product of the 2-D
// vectors (x1, y1) and (x2, y2).
func crossProduct(x1, y1, x2, y2 float32) float32 {
	lhs := x1 * y2
	rhs := y1 * x2
	return lhs - rhs
}
// parseColor parses a "#rrggbb" (or bare "rrggbb") hex color string into a
// Color with channels in [0, 1]. Alpha is always 1. Empty or too-short
// input yields opaque black (previously a short malformed string panicked
// on the slice expressions below). Hex parse errors leave a channel at 0.
func parseColor(col string) Color {
	if len(col) == 6 { // Add in # if missing
		col = "#" + col
	}
	if len(col) < 7 { // Covers "" and malformed short strings.
		return Color{0, 0, 0, 1.0}
	}
	// bitSize 16 comfortably holds 0-255; the original used 9, which
	// happened to work (int9 max is 255) but was obscure.
	r, _ := strconv.ParseInt(col[1:3], 16, 16)
	g, _ := strconv.ParseInt(col[3:5], 16, 16)
	b, _ := strconv.ParseInt(col[5:7], 16, 16)
	return Color{
		float32(r) / 255.0,
		float32(g) / 255.0,
		float32(b) / 255.0,
		1.0,
	}
}
// rasterizer renders a parsed SVG into an RGBA byte buffer displayed on a
// board.Board bound to a JS canvas.
type rasterizer struct {
	board *board.Board
	svg   *Svg
	// pixels is row-major RGBA, 4 bytes per pixel.
	pixels       []byte
	widthPixels  int
	heightPixels int
	width        float32
	height       float32
	// sampleRate is the per-axis supersampling factor used by Draw for
	// anti-aliasing.
	sampleRate   int
	samplePixels int
	// orig* snapshot the display-resolution dimensions while Draw
	// temporarily multiplies the working dimensions by sampleRate.
	origWidthPixels  int
	origHeightPixels int
	origWidth        float32
	origHeight       float32
	// unscaled* hold the dimensions before the user's target scale is
	// applied (set by SetSvg, consumed by SetTargetScale).
	unscaledWidthPixels  int
	unscaledHeightPixels int
	unscaledWidth        float32
	unscaledHeight       float32
	// Byte offsets (into pixels) and colors queued by drawPixel; these are
	// composited after downsampling, i.e. they bypass anti-aliasing.
	pointsToFill        []int
	colorOfPointsToFill []Color
	canvas js.Value
	scale  float32
}
// Svg mirrors the subset of SVG this rasterizer understands. Groups reuse
// Svg recursively; each node's Transform is parsed and composed with its
// children's while rasterizing.
type Svg struct {
	XMLName   xml.Name
	Width     string     `xml:"width,attr"`
	Height    string     `xml:"height,attr"`
	ViewBox   string     `xml:"viewBox,attr"`
	Rects     []Rect     `xml:"rect"`
	Lines     []Line     `xml:"line"`
	Polylines []Polyline `xml:"polyline"`
	Polygons  []Polygon  `xml:"polygon"`
	Circles   []Circle   `xml:"circle"`
	Groups    []*Svg     `xml:"g"`
	Images    []*Image   `xml:"image"`
	Transform string     `xml:"transform,attr"`
	// transformMatrix is the parsed Transform composed with ancestors'.
	transformMatrix mgl.Mat3
}
// Rect is an SVG <rect>. A zero- or unit-sized rect is treated as a single
// point by rasterize.
type Rect struct {
	X      float32 `xml:"x,attr"`
	Y      float32 `xml:"y,attr"`
	Fill   string  `xml:"fill,attr"`
	Stroke string  `xml:"stroke,attr"`
	Width  float32 `xml:"width,attr"`
	Height float32 `xml:"height,attr"`
	// Fixed tag typo: was "stroke-opacityt", so the attribute silently
	// never unmarshaled and stroke opacity always stayed 0.
	StrokeOpacity float32 `xml:"stroke-opacity,attr"`
	FillOpacity   float32 `xml:"fill-opacity,attr"`
	Transform     string  `xml:"transform,attr"`
	transformMatrix mgl.Mat3
}
// rasterize draws the rect: degenerate (point-sized) rects collapse to one
// pixel; otherwise a 1px outline is drawn with direct pixels and the
// interior is filled with anti-aliased sample points.
func (s *Rect) rasterize(r *rasterizer) {
	col := parseColor(s.Fill)
	col.a = s.FillOpacity
	if col.a == 0.0 { // Missing fill-opacity defaults to opaque.
		col.a = 1.0
	}
	// If either width or height is 0 or 1 assume we have a single point.
	transformed := r.transform([]float32{s.X, s.Y}, s.transformMatrix, false)
	x, y := transformed[0], transformed[1]
	if s.Width == 0.0 || s.Height == 0.0 || (s.Width == 1.0 && s.Height == 1.0) {
		r.drawPixel(x, y, col)
		return
	}
	// Otherwise we have a full on rectangle.
	// Draw rectangle border.
	w := s.Width * r.scale
	h := s.Height * r.scale
	outlineCol := parseColor(s.Stroke)
	// Fixed: stroke-opacity was previously assigned to col.a, clobbering
	// the fill color's alpha while leaving the outline fully opaque.
	outlineCol.a = s.StrokeOpacity
	if outlineCol.a == 0.0 { // Missing stroke-opacity defaults to opaque.
		outlineCol.a = 1.0
	}
	for i := float32(0); i < w; i++ {
		// NOTE(review): top edge uses y+1.0 while the left edge uses x+0;
		// looks asymmetric — confirm the intended offset.
		r.drawPixel(x+i, y+1.0, outlineCol)
		r.drawPixel(x+i, y+h-1, outlineCol)
	}
	for i := float32(0); i < h; i++ {
		r.drawPixel(x+0, y+i, outlineCol)
		r.drawPixel(x+w-1, y+i, outlineCol)
	}
	// Interior is filled in supersampled space.
	w *= float32(r.sampleRate)
	h *= float32(r.sampleRate)
	x *= float32(r.sampleRate)
	y *= float32(r.sampleRate)
	// Draw inside of rectangle.
	for x0 := x; x0 < x+w; x0++ {
		for y0 := y; y0 < y+h; y0++ {
			r.drawPoint(x0, y0, col)
		}
	}
}
// blendColors composites col over the destination pixel (red, g, b, a)
// (0-255 per channel) using premultiplied-alpha "over": C' = Cs + (1-As)Cd.
// NOTE(review): the result is written back premultiplied while the
// destination is read as straight alpha — confirm this asymmetry is
// intended for the white-background buffer it operates on.
func blendColors(col Color, red, g, b, a byte) (byte, byte, byte, byte) {
	// Normalize destination to [0,1] and premultiply by its alpha.
	aPrimeA := float32(a) / 0xFF
	aPrimeR := float32(red) / 0xFF * aPrimeA
	aPrimeG := float32(g) / 0xFF * aPrimeA
	aPrimeB := float32(b) / 0xFF * aPrimeA
	// Premultiply the source color.
	bPrimeR := col.r * col.a
	bPrimeG := col.g * col.a
	bPrimeB := col.b * col.a
	bPrimeA := col.a
	// Source-over compositing.
	cPrimeR := bPrimeR + (1-bPrimeA)*aPrimeR
	cPrimeG := bPrimeG + (1-bPrimeA)*aPrimeG
	cPrimeB := bPrimeB + (1-bPrimeA)*aPrimeB
	cPrimeA := bPrimeA + (1-bPrimeA)*aPrimeA
	return byte(cPrimeR * 0xFF), byte(cPrimeG * 0xFF),
		byte(cPrimeB * 0xFF), byte(cPrimeA * 0xFF)
}
// drawPoint blends a sample into the (possibly supersampled) working
// buffer; these samples are later downsampled, i.e. anti-aliased.
// Out-of-bounds samples are silently dropped.
func (r *rasterizer) drawPoint(x, y float32, col Color) {
	px := int(x * float32(r.widthPixels) / r.width)
	// The pixel grid's y axis points down while SVG-space y points up here.
	py := r.heightPixels - int(y*float32(r.heightPixels)/r.height)
	if px < 0 || px >= r.widthPixels || py < 0 || py >= r.heightPixels {
		return
	}
	idx := (px + py*r.widthPixels) * 4
	red, g, b, a := blendColors(col,
		r.pixels[idx], r.pixels[idx+1], r.pixels[idx+2], r.pixels[idx+3])
	r.pixels[idx] = red
	r.pixels[idx+1] = g
	r.pixels[idx+2] = b
	r.pixels[idx+3] = a
}
// drawPixel queues a display-resolution pixel to be composited after the
// anti-aliased buffer has been resolved (so it bypasses supersampling).
// Out-of-bounds pixels are silently dropped.
func (r *rasterizer) drawPixel(x, y float32, col Color) {
	px := int(x * float32(r.origWidthPixels) / r.origWidth)
	py := r.origHeightPixels - int(y*float32(r.origHeightPixels)/r.origHeight)
	if px < 0 || px >= r.origWidthPixels || py < 0 || py >= r.origHeightPixels {
		return
	}
	offset := (px + py*r.origWidthPixels) * 4
	r.pointsToFill = append(r.pointsToFill, offset)
	r.colorOfPointsToFill = append(r.colorOfPointsToFill, col)
}
// Line is an SVG <line>. Note Fill is populated from the "stroke"
// attribute — SVG lines have no fill.
type Line struct {
	X1   float32 `xml:"x1,attr"`
	Y1   float32 `xml:"y1,attr"`
	X2   float32 `xml:"x2,attr"`
	Y2   float32 `xml:"y2,attr"`
	Fill string  `xml:"stroke,attr"`
	Transform       string `xml:"transform,attr"`
	transformMatrix mgl.Mat3
}
// round rounds half-up via truncation. Note int() truncates toward zero,
// so negative inputs round toward zero (round(-1.5) == -1, unlike
// math.Round); acceptable here since raster coordinates are non-negative.
func round(x float32) float32 {
	return float32(int(x + 0.5))
}
// fpart returns the fractional part of x, truncating toward zero.
func fpart(x float32) float32 {
	whole := float32(int(x))
	return x - whole
}
// rfpart returns one minus the (toward-zero truncated) fractional part of x.
func rfpart(x float32) float32 {
	whole := float32(int(x))
	return 1.0 - (x - whole)
}
// drawLine draws a line from (x0, y0) to (x1, y1) in display-pixel space
// using a single-coverage variant of Xiaolin Wu's algorithm: one pixel per
// column (or per row, when steep), without coverage weighting.
// Uses a single strain of Xiaolin since it seems to give the best results.
// The two strains makes the colors look odd however revisit this after antialiasing.
// Not sure if the resolution is just too low.
func (r *rasterizer) drawLine(x0, y0, x1, y1 float32, col Color) {
	// Work in x-major order: swap axes for steep lines, then ensure we
	// always draw left to right.
	steep := math.Abs(float64(y1-y0)) > math.Abs(float64(x1-x0))
	if steep {
		x0, y0 = y0, x0
		x1, y1 = y1, x1
	}
	if x0 > x1 {
		x0, x1 = x1, x0
		y0, y1 = y1, y0
	}
	dx := x1 - x0
	dy := y1 - y0
	gradient := dy / dx
	if dx == 0.0 { // Degenerate (vertical) case: avoid a NaN gradient.
		gradient = 1.0
	}
	// Handle first endpoint
	xend := round(x0)
	yend := y0 + gradient*(xend-x0)
	xpxl1 := xend // This will be used in the main loop
	ypxl1 := float32(int(yend))
	if steep {
		r.drawPixel(ypxl1, xpxl1, col)
	} else {
		r.drawPixel(xpxl1, ypxl1, col)
	}
	intery := yend + gradient // first y-intersection for the main loop
	// Handle second endpoint
	xend = round(x1)
	yend = y1 + gradient*(xend-x1)
	xpxl2 := xend // This will be used in the main loop
	ypxl2 := float32(int(yend))
	if steep {
		r.drawPixel(ypxl2, xpxl2, col)
	} else {
		r.drawPixel(xpxl2, ypxl2, col)
	}
	// Main loop
	if steep {
		for x := xpxl1 + 1; x <= xpxl2-1; x++ {
			r.drawPixel(intery, x, col)
			intery += gradient
		}
	} else {
		for x := xpxl1 + 1; x <= xpxl2-1; x++ {
			r.drawPixel(x, intery, col)
			intery += gradient
		}
	}
}
// rasterize transforms the line's endpoints and draws the segment in its
// stroke color.
func (s *Line) rasterize(r *rasterizer) {
	endpoints := r.transform([]float32{s.X1, s.Y1, s.X2, s.Y2}, s.transformMatrix, false)
	strokeCol := parseColor(s.Fill)
	r.drawLine(endpoints[0], endpoints[1], endpoints[2], endpoints[3], strokeCol)
}
// Polyline is an SVG <polyline>: a stroked open path through the points in
// Points ("x,y x,y ...").
type Polyline struct {
	Stroke string `xml:"stroke,attr"`
	Points string `xml:"points,attr"`
	Transform       string `xml:"transform,attr"`
	transformMatrix mgl.Mat3
}
// rasterize parses the whitespace-separated "x,y" point list and draws a
// line segment between each consecutive pair of points. Malformed
// coordinates are fatal (panic).
func (s *Polyline) rasterize(r *rasterizer) {
	col := parseColor(s.Stroke)
	pointsFloat := []float32{}
	points := strings.Split(strings.Trim(s.Points, " \n\r\t"), " ")
	for _, p := range points {
		xy := strings.Split(strings.Trim(p, "\n\r\t "), ",")
		x, err1 := strconv.ParseFloat(xy[0], 32)
		y, err2 := strconv.ParseFloat(xy[1], 32)
		if err1 != nil || err2 != nil {
			if err1 != nil {
				panic(err1)
			}
			panic(err2)
		}
		pointsFloat = append(pointsFloat, float32(x), float32(y))
	}
	pointsFloat = r.transform(pointsFloat, s.transformMatrix, false)
	// Connect consecutive points; an open path, so no closing segment.
	for i := 0; i < len(pointsFloat)/2-1; i++ {
		r.drawLine(pointsFloat[i*2], pointsFloat[i*2+1],
			pointsFloat[(i+1)*2], pointsFloat[(i+1)*2+1], col)
	}
}
// Circle is an SVG <circle> centered at (Cx, Cy) with radius R.
type Circle struct {
	Cx float32 `xml:"cx,attr"`
	Cy float32 `xml:"cy,attr"`
	R  float32 `xml:"r,attr"`
	// Fixed malformed struct tag: was `xml:'fill,attr"` (mismatched
	// quotes), so the fill attribute never unmarshaled.
	Fill string `xml:"fill,attr"`
	Transform       string `xml:"transform,attr"`
	transformMatrix mgl.Mat3
}
// rasterize fills the circle by testing every sample point in its bounding
// box against the radius. Uses squared distances to avoid a sqrt per pixel
// (equivalent for radius >= 0 up to float rounding at the boundary).
func (s *Circle) rasterize(r *rasterizer) {
	center := r.transform([]float32{s.Cx, s.Cy}, s.transformMatrix, true)
	cx := center[0]
	cy := center[1]
	// Radius in supersampled, scaled space to match the transformed center.
	radius := s.R * float32(r.sampleRate) * r.scale
	col := parseColor(s.Fill)
	radiusSq := radius * radius
	minX, maxX := cx-radius, cx+radius
	minY, maxY := cy-radius, cy+radius
	for x := float32(int(minX)); x <= maxX; x++ {
		for y := float32(int(minY)); y <= maxY; y++ {
			dx, dy := cx-x, cy-y
			if dx*dx+dy*dy <= radiusSq {
				r.drawPoint(x, y, col)
			}
		}
	}
}
// Polygon is an SVG <polygon>: a closed, optionally stroked and filled
// shape through the points in Points ("x,y x,y ...").
type Polygon struct {
	Fill          string  `xml:"fill,attr"`
	Stroke        string  `xml:"stroke,attr"`
	Points        string  `xml:"points,attr"`
	FillOpacity   float32 `xml:"fill-opacity,attr"`
	StrokeOpacity float32 `xml:"stroke-opacity,attr"`
	Transform       string `xml:"transform,attr"`
	transformMatrix mgl.Mat3
}
// parseTransform converts an SVG transform attribute into a 3x3 matrix.
// Supports "matrix(a b c d e f)", "translate(x [y])" and "scale(x [y])".
// Per the SVG spec, arguments may be separated by commas and/or whitespace,
// and the second translate/scale argument is optional (ty defaults to 0,
// sy to sx) — the previous version only accepted one fixed separator and
// panicked on single-argument forms. Unknown/empty input yields identity.
func parseTransform(trans string) mgl.Mat3 {
	// Split an argument list on commas and/or whitespace; panic on a
	// malformed number (matching the file's fail-fast style).
	parseArgs := func(list string) []float32 {
		fields := strings.FieldsFunc(list, func(c rune) bool {
			return c == ',' || c == ' ' || c == '\t' || c == '\n' || c == '\r'
		})
		args := []float32{}
		for _, f := range fields {
			v, err := strconv.ParseFloat(f, 32)
			if err != nil {
				panic(err)
			}
			args = append(args, float32(v))
		}
		return args
	}
	if strings.Contains(trans, "matrix") { // Matrix transformation case
		trans = strings.TrimPrefix(trans, "matrix(")
		trans = strings.Trim(trans, " )\n\t\r")
		p := parseArgs(trans)
		// mgl.Mat3 is column-major: columns are (a,b,0), (c,d,0), (e,f,1).
		mat := mgl.Ident3()
		mat[0], mat[1] = p[0], p[1]
		mat[3], mat[4] = p[2], p[3]
		mat[6], mat[7] = p[4], p[5]
		return mat
	} else if strings.Contains(trans, "translate") {
		trans = strings.TrimPrefix(trans, "translate(")
		trans = strings.Trim(trans, " )\n\t\r")
		p := parseArgs(trans)
		x := p[0]
		y := float32(0) // SVG: omitted ty defaults to 0.
		if len(p) > 1 {
			y = p[1]
		}
		return mgl.Translate2D(x, y)
	} else if strings.Contains(trans, "scale(") {
		trans = strings.TrimPrefix(trans, "scale(")
		trans = strings.Trim(trans, " )\n\t\r")
		p := parseArgs(trans)
		x := p[0]
		y := x // SVG: omitted sy defaults to sx.
		if len(p) > 1 {
			y = p[1]
		}
		return mgl.Scale2D(x, y)
	}
	return mgl.Ident3()
}
// transform applies trans to each (x, y) pair in points IN PLACE, then
// scales by the rasterizer's scale, and — for anti-aliased geometry only —
// by the supersample rate. Returns the same slice for convenience.
// The sample-rate branch is loop-invariant, so it is hoisted out of the
// loop (previously recomputed per point).
func (r *rasterizer) transform(points []float32, trans mgl.Mat3, isAliased bool) []float32 {
	sampleRate := float32(1.0)
	if isAliased {
		sampleRate = float32(r.sampleRate)
	}
	for i := 0; i < len(points); i += 2 {
		xyz := mgl.Vec3{points[i], points[i+1], 1.0}
		transformed := trans.Mul3x1(xyz)
		points[i] = transformed[0] * r.scale * sampleRate
		points[i+1] = transformed[1] * r.scale * sampleRate
	}
	return points
}
// pointsToTriangles parses an SVG "points" attribute ("x,y x,y ..."),
// applies the transform (in supersampled space), triangulates the polygon,
// and sorts each triangle's vertices by ascending y. Returns the triangles
// plus the transformed flat coordinate list. Malformed coordinates panic.
func (r *rasterizer) pointsToTriangles(in string,
	transformation mgl.Mat3) ([]*triangulate.Triangle, []float32) {
	points := strings.Split(strings.Trim(in, " "), " ")
	pointsFloat := []float32{}
	for _, p := range points {
		xy := strings.Split(strings.Trim(p, "\n\r\t "), ",")
		x, err1 := strconv.ParseFloat(xy[0], 32)
		y, err2 := strconv.ParseFloat(xy[1], 32)
		if err1 != nil || err2 != nil {
			if err1 != nil {
				panic(err1)
			}
			panic(err2)
		}
		pointsFloat = append(pointsFloat, float32(x), float32(y))
	}
	pointsFloat = r.transform(pointsFloat, transformation, true)
	triangles := triangulate.Triangulate(pointsFloat)
	for _, t := range triangles {
		// Sort triangle such that y1 < y2 < y3
		if t.Y1 > t.Y3 {
			t.X1, t.Y1, t.X3, t.Y3 = t.X3, t.Y3, t.X1, t.Y1
		}
		if t.Y1 > t.Y2 {
			t.X1, t.Y1, t.X2, t.Y2 = t.X2, t.Y2, t.X1, t.Y1
		}
		if t.Y2 > t.Y3 {
			t.X2, t.Y2, t.X3, t.Y3 = t.X3, t.Y3, t.X2, t.Y2
		}
	}
	return triangles, pointsFloat
}
// rasterize fills (and optionally outlines) the polygon; currently always
// delegates to the bounding-box/barycentric approach.
func (s *Polygon) rasterize(r *rasterizer) {
	s.boundingBoxApproach(r)
}
// boundingBoxApproach fills the polygon by triangulating it, then testing
// every sample in each triangle's bounding box with barycentric
// coordinates; afterwards it draws the (non-anti-aliased) outline if a
// stroke color is set.
func (s *Polygon) boundingBoxApproach(r *rasterizer) {
	triangles, points := r.pointsToTriangles(s.Points, s.transformMatrix)
	// Draw each triangle
	col := parseColor(s.Fill)
	col.a = s.FillOpacity
	if col.a == 0.0 { // Handle missing opacity provided.
		col.a = 1.0
	}
	for _, t := range triangles {
		minX := minOfThree(t.X1, t.X2, t.X3)
		maxX := maxOfThree(t.X1, t.X2, t.X3)
		minY := minOfThree(t.Y1, t.Y2, t.Y3)
		maxY := maxOfThree(t.Y1, t.Y2, t.Y3)
		// Edge vectors from vertex 1 for the barycentric test.
		vsX1, vsY1 := t.X2-t.X1, t.Y2-t.Y1
		vsX2, vsY2 := t.X3-t.X1, t.Y3-t.Y1
		for x := float32(int(minX)); x <= maxX; x++ {
			for y := float32(int(minY)); y <= maxY; y++ {
				//for x := float32(minX); x <= maxX; x++ {
				//	for y := float32(minY); y <= maxY; y++ {
				qx, qy := x-t.X1, y-t.Y1
				// NOTE: these s and t shadow the receiver s and the loop
				// variable t for the rest of this inner scope.
				s := crossProduct(qx, qy, vsX2, vsY2) / crossProduct(vsX1, vsY1, vsX2, vsY2)
				t := crossProduct(vsX1, vsY1, qx, qy) / crossProduct(vsX1, vsY1, vsX2, vsY2)
				if s >= 0 && t >= 0 && s+t <= 1 {
					r.drawPoint(x, y, col)
				}
			}
		}
	}
	// Draw the outline if it exists.
	if s.Stroke == "" {
		return
	}
	outlineCol := parseColor(s.Stroke)
	outlineCol.a = s.StrokeOpacity
	if outlineCol.a == 0.0 {
		outlineCol.a = 1.0
	}
	// Points were transformed in supersampled space, but drawLine plots
	// display pixels, so divide the sample rate back out. The modulo wraps
	// the last segment back to the first point, closing the polygon.
	for i := 0; i < len(points); i += 2 {
		p1X, p1Y := points[i], points[i+1]
		p2X, p2Y := points[(i+2)%len(points)], points[(i+3)%len(points)]
		r.drawLine(p1X/float32(r.sampleRate), p1Y/float32(r.sampleRate),
			p2X/float32(r.sampleRate), p2Y/float32(r.sampleRate), outlineCol)
	}
}
// Image is an SVG <image> whose Href is assumed to be a base64-encoded PNG
// data URI. mipMaps is populated by loadImagesAndCreateMipMaps before
// drawing.
type Image struct {
	X      int    `xml:"x,attr"`
	Y      int    `xml:"y,attr"`
	Width  int    `xml:"width,attr"`
	Height int    `xml:"height,attr"`
	Href   string `xml:"href,attr"` // Assume all images of base64 png encoded
	mipMaps []mip
	Transform       string `xml:"transform,attr"`
	transformMatrix mgl.Mat3
	//imageSizeX int // Width of image loaded
	//imageSizeY int // Height of image loaded
}
// mip is one level of a mipmap chain: a w x h image stored as row-major
// RGBA bytes.
type mip struct {
	w    int
	h    int
	data []byte
}
// At returns the texel at (x, y) as a normalized Color, clamping
// coordinates to the image edges (clamp-to-edge addressing).
func (m *mip) At(x, y int) Color {
	if x < 0 {
		x = 0
	} else if x >= m.w {
		x = m.w - 1
	}
	if y < 0 {
		y = 0
	} else if y >= m.h {
		y = m.h - 1
	}
	base := (x + y*m.w) * 4
	norm := func(b byte) float32 { return float32(b) / 0xFF }
	return Color{
		norm(m.data[base]),
		norm(m.data[base+1]),
		norm(m.data[base+2]),
		norm(m.data[base+3]),
	}
}
// Must be a power of two image
// generateMipMaps builds a mip chain from img: level 0 is the full image,
// each following level is a 2x box-filtered downsample.
// NOTE(review): the loop halves both dimensions and stops when either
// reaches 1, so non-square or non-power-of-two inputs get a truncated
// chain — confirm inputs are square powers of two as the comment implies.
func generateMipMaps(img image.Image) []mip {
	bounds := img.Bounds()
	w := bounds.Max.X - bounds.Min.X
	h := bounds.Max.Y - bounds.Min.Y
	// Get original mip. RGBA() yields 16-bit channels; rescale to bytes.
	// NOTE(review): samples At(x, y) without adding bounds.Min — fine for
	// images decoded with a zero origin, as PNGs are.
	mips := []mip{mip{w, h, make([]byte, w*h*4)}}
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			r, g, b, a := img.At(x, y).RGBA()
			i := (x + y*w) * 4
			mips[0].data[i] = byte(float32(r) / 0xFFFF * 0xFF)
			mips[0].data[i+1] = byte(float32(g) / 0xFFFF * 0xFF)
			mips[0].data[i+2] = byte(float32(b) / 0xFFFF * 0xFF)
			mips[0].data[i+3] = byte(float32(a) / 0xFFFF * 0xFF)
		}
	}
	for w > 1 && h > 1 {
		buff := downSampleBuffer(mips[len(mips)-1].data, 2, w, h)
		w /= 2
		h /= 2
		mips = append(mips, mip{w, h, buff})
	}
	return mips
}
// rasterize draws the image by bilinearly sampling the base mip level for
// every covered display pixel. Only the rasterizer's scale is applied;
// the element's own transformMatrix is not used here.
func (s *Image) rasterize(r *rasterizer) {
	for x := int(float32(s.X) * r.scale); x < int(float32(s.X+s.Width)*r.scale); x++ {
		for y := int(float32(s.Y) * r.scale); y < int(float32(s.Y+s.Height)*r.scale); y++ {
			//col := s.sampleNearest(s.mipMaps[0], float32(x), float32(y))
			// Sample in unscaled element coordinates.
			col := s.sampleBilinear(s.mipMaps[0], float32(x)/r.scale, float32(y)/r.scale)
			r.drawPixel(float32(x), float32(y), col)
		}
	}
}
// sampleNearest maps element coordinates to texel space and returns the
// nearest texel.
// NOTE(review): this subtracts (s.X + 0.5) while sampleBilinear adds 0.5
// after subtracting s.X — one of the half-texel offsets looks inverted;
// confirm which is intended.
func (s *Image) sampleNearest(img mip, x, y float32) Color {
	x -= float32(s.X) + 0.5
	y -= float32(s.Y) + 0.5
	x = x / float32(s.Width) * float32(img.w)
	y = y / float32(s.Height) * float32(img.h)
	return img.At(int(x), int(y))
}
// blendColor linearly interpolates every channel of two colors;
// amount == 1 yields c0, amount == 0 yields c1.
func blendColor(c0, c1 Color, amount float32) Color {
	return Color{
		r: blend(c0.r, c1.r, amount),
		g: blend(c0.g, c1.g, amount),
		b: blend(c0.b, c1.b, amount),
		a: blend(c0.a, c1.a, amount),
	}
}
// blend returns the weighted average of x0 and x1, giving x0 the weight
// `amount` and x1 the remainder.
func blend(x0, x1, amount float32) float32 {
	weighted := x0 * amount
	complement := x1 * (1 - amount)
	return weighted + complement
}
// sampleBilinear samples the mip at (x, y) in element coordinates with
// bilinear filtering: map into texel space, fetch the four surrounding
// texels, lerp horizontally (tt) then vertically (st).
func (s *Image) sampleBilinear(img mip, x, y float32) Color {
	x = x - float32(s.X) + 0.5
	y = y - float32(s.Y) + 0.5
	x = x / float32(s.Width) * float32(img.w)
	y = y / float32(s.Height) * float32(img.h)
	// Fractional weights relative to the neighboring texel centers.
	tt := x - float32(int(x+0.5)) + 0.5
	st := y - float32(int(y+0.5)) + 0.5
	f00 := img.At(int(x-0.5), int(y+0.5))
	f01 := img.At(int(x-0.5), int(y-0.5))
	f10 := img.At(int(x+0.5), int(y+0.5))
	f11 := img.At(int(x+0.5), int(y-0.5))
	c0 := blendColor(f00, f10, tt)
	c1 := blendColor(f01, f11, tt)
	c := blendColor(c0, c1, st)
	return c
}
// New creates a rasterizer bound to canvas and loads the SVG at filePath.
// Fixed: board-creation errors previously panicked despite the error
// return, and SetSvg errors were silently dropped; both now propagate.
func New(canvas js.Value, filePath string) (*rasterizer, error) {
	r := &rasterizer{
		canvas: canvas,
		scale:  1.0,
	}
	b, err := board.New(r.canvas)
	if err != nil {
		return nil, err
	}
	r.board = b
	if err := r.SetSvg(filePath); err != nil {
		return nil, err
	}
	b.EnablePixelInspector(true)
	return r, nil
}
// SetSvg fetches the SVG at filePath, parses it, sizes the board and
// canvas to its pixel resolution, precomputes image mipmaps, and draws it
// at sample rate 1. Returns the XML decode error, if any.
func (r *rasterizer) SetSvg(filePath string) error {
	// Get xml file and parse it.
	fileString := getFile(filePath)
	buf := bytes.NewBuffer([]byte(fileString))
	dec := xml.NewDecoder(buf)
	var svg Svg
	if err := dec.Decode(&svg); err != nil {
		return err
	}
	r.svg = &svg
	// Calculate drawing info. Parse errors are deliberately ignored; a
	// malformed width/height simply yields 0.
	width, _ := strconv.ParseFloat(strings.Split(svg.Width, "px")[0], 64)
	height, _ := strconv.ParseFloat(strings.Split(svg.Height, "px")[0], 64)
	if svg.ViewBox != "" { // Has a viewBox: its w/h defines the pixel grid.
		viewBox := strings.Split(svg.ViewBox, " ")
		widthPixels, _ := strconv.ParseFloat(viewBox[2], 64)
		heightPixels, _ := strconv.ParseFloat(viewBox[3], 64)
		r.widthPixels = int(widthPixels)
		r.heightPixels = int(heightPixels)
	} else {
		r.widthPixels, r.heightPixels = int(width), int(height)
	}
	r.width = float32(width)
	r.height = float32(height)
	// Update board.
	r.board.SetWidthHeight(r.widthPixels, r.heightPixels)
	r.board.ResetView()
	r.canvas.Set("width", r.widthPixels)
	r.canvas.Set("height", r.heightPixels)
	// Calculate mip maps for all images.
	loadImagesAndCreateMipMaps(r.svg)
	r.sampleRate = 1
	r.Draw()
	// Remember the 100%-scale dimensions for SetTargetScale.
	r.unscaledWidth = r.width
	r.unscaledHeight = r.height
	r.unscaledWidthPixels = r.widthPixels
	r.unscaledHeightPixels = r.heightPixels
	return nil
}
// SetTargetScale re-rasterizes at the given scale factor (1.0 == 100%).
// All dimensions derive from the unscaled originals so repeated calls do
// not compound. (Removed a leftover debug println of the scale value.)
func (r *rasterizer) SetTargetScale(scale float32) {
	r.scale = scale
	r.widthPixels = int(float32(r.unscaledWidthPixels) * scale)
	r.heightPixels = int(float32(r.unscaledHeightPixels) * scale)
	r.width = r.unscaledWidth * scale
	r.height = r.unscaledHeight * scale
	r.board.SetWidthHeight(r.widthPixels, r.heightPixels)
	r.Draw()
}
// loadImagesAndCreateMipMaps decodes every <image> element's base64 PNG
// data URI and precomputes its mip chain, recursing into groups.
// Decode failures panic (fail-fast; see the inline TODO).
func loadImagesAndCreateMipMaps(curSvg *Svg) {
	for _, imgSvg := range curSvg.Images {
		// Load the image.
		baseImage := strings.Split(imgSvg.Href, ",")[1] // Only works for data:image/png;base64,...
		decoded, err := base64.StdEncoding.DecodeString(baseImage)
		if err != nil { // Remove this.
			panic(err)
		}
		reader := bytes.NewReader(decoded)
		img, err := png.Decode(reader)
		if err != nil {
			panic(err)
		}
		imgSvg.mipMaps = generateMipMaps(img)
	}
	for _, g := range curSvg.Groups {
		loadImagesAndCreateMipMaps(g)
	}
}
// downSampleBuffer box-filters a w x h RGBA byte buffer down by sampleRate
// per axis, averaging each sampleRate x sampleRate block into one pixel.
// Fixed: each source byte was previously divided by the block area BEFORE
// accumulating, so low channel values truncated to zero (e.g. four samples
// of 3 averaged to 0, not 3), causing banding; we now accumulate in ints
// and divide once.
func downSampleBuffer(from []byte, sampleRate int, w, h int) []byte {
	targetW := w / sampleRate
	targetH := h / sampleRate
	sums := make([]int, targetW*targetH*4)
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			i := (x/sampleRate + y/sampleRate*targetW) * 4
			j := (x + y*w) * 4
			sums[i] += int(from[j])
			sums[i+1] += int(from[j+1])
			sums[i+2] += int(from[j+2])
			sums[i+3] += int(from[j+3])
		}
	}
	area := sampleRate * sampleRate
	target := make([]byte, len(sums))
	for i, s := range sums {
		target[i] = byte(s / area)
	}
	return target
}
// Draw rasterizes the whole SVG into r.pixels and pushes it to the board.
// It temporarily inflates the working dimensions by sampleRate for
// supersampled anti-aliasing, downsamples, then composites the
// non-anti-aliased points/lines queued by drawPixel on top.
func (r *rasterizer) Draw() {
	// Snapshot display-resolution dimensions; restored at the end.
	r.origWidthPixels, r.origHeightPixels = r.widthPixels, r.heightPixels
	r.origWidth, r.origHeight = r.width, r.height
	r.pointsToFill = []int{}
	r.colorOfPointsToFill = []Color{}
	// Work in supersampled space.
	r.widthPixels *= r.sampleRate
	r.heightPixels *= r.sampleRate
	r.width *= float32(r.sampleRate)
	r.height *= float32(r.sampleRate)
	// Start from an all-white, fully opaque buffer.
	r.pixels = make([]byte, 4*r.widthPixels*r.heightPixels)
	for i := 0; i < len(r.pixels); i++ {
		r.pixels[i] = 255
	}
	r.svg.transformMatrix = parseTransform(r.svg.Transform) // Can an SVG element have a transform??
	r.svg.rasterize(r)
	if r.sampleRate > 1 { // Anti aliasing
		r.pixels = downSampleBuffer(r.pixels, r.sampleRate, r.widthPixels, r.heightPixels)
	}
	// Fill points/lines that we aren't antialiasing on.
	for i, point := range r.pointsToFill {
		red := r.pixels[point]
		g := r.pixels[point+1]
		b := r.pixels[point+2]
		a := r.pixels[point+3]
		red, g, b, a = blendColors(r.colorOfPointsToFill[i], red, g, b, a)
		r.pixels[point] = red
		r.pixels[point+1] = g
		r.pixels[point+2] = b
		r.pixels[point+3] = a
	}
	r.board.SetPixels(r.pixels)
	// Restore display-resolution dimensions.
	r.widthPixels, r.heightPixels = r.origWidthPixels, r.origHeightPixels
	r.width, r.height = r.origWidth, r.origHeight
}
// rasterize draws every element of this SVG node, composing each child's
// parsed transform with this node's (parent * child) before drawing, and
// recurses into groups. Rects/Lines/Polylines/Circles/Polygons are value
// slices, so the range variable is a copy and the parsed tree is never
// mutated; Groups and Images are pointer slices, so their matrices are
// written back.
func (s *Svg) rasterize(r *rasterizer) {
	for _, rect := range s.Rects {
		rect.transformMatrix = parseTransform(rect.Transform)
		rect.transformMatrix = s.transformMatrix.Mul3(rect.transformMatrix)
		rect.rasterize(r)
	}
	for _, polyline := range s.Polylines {
		polyline.transformMatrix = parseTransform(polyline.Transform)
		polyline.transformMatrix = s.transformMatrix.Mul3(polyline.transformMatrix)
		polyline.rasterize(r)
	}
	for _, line := range s.Lines {
		line.transformMatrix = parseTransform(line.Transform)
		line.transformMatrix = s.transformMatrix.Mul3(line.transformMatrix)
		line.rasterize(r)
	}
	for _, circle := range s.Circles {
		circle.transformMatrix = parseTransform(circle.Transform)
		circle.transformMatrix = s.transformMatrix.Mul3(circle.transformMatrix)
		circle.rasterize(r)
	}
	for _, polygon := range s.Polygons {
		polygon.transformMatrix = parseTransform(polygon.Transform)
		polygon.transformMatrix = s.transformMatrix.Mul3(polygon.transformMatrix)
		polygon.rasterize(r)
	}
	for _, group := range s.Groups {
		group.transformMatrix = parseTransform(group.Transform)
		group.transformMatrix = s.transformMatrix.Mul3(group.transformMatrix)
		group.rasterize(r)
	}
	for _, image := range s.Images {
		image.transformMatrix = parseTransform(image.Transform)
		image.transformMatrix = s.transformMatrix.Mul3(image.transformMatrix)
		image.rasterize(r)
	}
}
func getUrl(filePath string) string {
loc := js.Global().Get("location")
url := loc.Get("protocol").String() + "//" +
loc.Get("hostname").String() + ":" +
loc.Get("port").String()
return url + filePath
}
// getFile fetches url and returns its body with carriage returns stripped.
// Network or read failures panic (fail-fast, matching the file's style).
// Fixed: the response body was never closed, leaking the connection.
func getFile(url string) string {
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	s := string(b)
	return strings.ReplaceAll(s, "\r", "")
}
// testType backs a clickable dat.GUI button: only Fun is used by
// addSvgToGUI; the other fields are unused placeholders.
type testType struct {
	X   int
	Y   bool
	Z   float32
	W   string
	Fun func()
}
// addSvgToGUI adds a button named after the SVG file that loads it, plus a
// small thumbnail preview attached to the button's row.
func addSvgToGUI(gui *datGUI.GUI, path string, r *rasterizer, onSvgLoad func()) {
	obj := testType{Fun: func() {
		// Run asynchronously: SetSvg performs a blocking HTTP fetch, which
		// must not stall the JS event loop.
		go func() {
			r.SetSvg(path)
			onSvgLoad()
		}()
	}}
	// Button label is the file name without directory or ".svg" suffix.
	split := strings.Split(path, "/")
	name := strings.TrimSuffix(split[len(split)-1], ".svg")
	funController := gui.Add(&obj, "Fun").Name(name)
	// White-background thumbnail floated to the right of the row.
	svgIcon := js.Global().Get("document").Call("createElement", "img")
	svgIcon.Set("background-color", "white")
	svgIcon.Get("style").Set("background-color", "white")
	svgIcon.Get("style").Set("float", "right")
	height := 75
	svgIcon.Set("src", path)
	svgIcon.Set("height", height)
	funController.JSController.Get("__li").Get("style").Set("height", height)
	funController.JSController.Get("domElement").Get("parentElement").Call("appendChild", svgIcon)
}
// createSvgFolders builds the "svg images" folder tree in the GUI: one
// subfolder per sample category and a load button per SVG file.
func createSvgFolders(gui *datGUI.GUI, r *rasterizer, onSvgLoad func()) {
	// Inject CSS so closed dat.GUI folders actually hide their contents.
	style := js.Global().Get("document").Call("createElement", "style")
	style.Set("innerHTML", `
ul.closed > :not(li.title) {
display: none;
}`)
	js.Global().Get("document").Get("head").Call("appendChild", style)
	folderNames := []string{"basic", "alpha", "illustration", "hardcore"}
	// svgFiles[i] lists the files inside folderNames[i].
	svgFiles := [][]string{
		[]string{"test1", "test2", "test3", "test4", "test5", "test6", "test7"},
		[]string{"01_prism", "02_cube", "03_buckyball", "04_scotty", "05_sphere"},
		[]string{"01_sketchpad", "02_hexes", "03_circle", "04_sun", "05_lion",
			"06_sphere", "07_lines", "08_monkeytree", "09_kochcurve"},
		[]string{"01_degenerate_square1", "02_degenerate_square2"},
	}
	svgImagesGUI := gui.AddFolder("svg images")
	svgImagesGUI.Open()
	for i, folder := range folderNames {
		folderGUI := svgImagesGUI.AddFolder(folder)
		//if folder == "alpha" {
		if folder == "illustration" { // Default category starts open.
			folderGUI.Open()
		}
		for _, svgFile := range svgFiles[i] {
			addSvgToGUI(folderGUI, getUrl("/svg/"+folder+"/"+svgFile+".svg"), r, onSvgLoad)
		}
	}
}
// guiValues backs the dat.GUI controls; fields are bound by name in
// createGui. Scale fields are in percent.
type guiValues struct {
	SuperSampleRate int
	TargetScale     float32 // percent; rasterization resolution
	CanvasScale     float32 // percent; CSS/canvas display size only
	WidthHeightPixelInspect int
	PixelInspectorOn        bool
	PixelInspectorScale     float32 // percent of the canvas
}
// createGui builds the dat.GUI control panel: pixel-inspector controls,
// rasterizer settings (supersampling plus target/canvas scale), and the
// SVG sample-file browser.
func createGui(r *rasterizer) {
	gui := datGUI.New()
	gui.JSGUI.Set("width", 300)
	guiVals := guiValues{
		SuperSampleRate:         1,
		TargetScale:             100,
		CanvasScale:             100,
		PixelInspectorOn:        true,
		PixelInspectorScale:     30,
		WidthHeightPixelInspect: 25,
	}
	// Pixel inspector GUI
	pixelGui := gui.AddFolder("Pixel inspector")
	pixelGui.Open()
	pixelGui.Add(&guiVals, "PixelInspectorOn").Name("Inspector on?").OnChange(func() {
		r.board.EnablePixelInspector(guiVals.PixelInspectorOn)
	})
	pixelGui.Add(&guiVals, "PixelInspectorScale").Min(5).Max(
		80).Name("Inspector size").OnChange(func() {
		// Slider is in percent; the board expects a 0-1 fraction.
		r.board.SetInspectorSize(guiVals.PixelInspectorScale / 100.0)
	})
	pixelGui.Add(&guiVals, "WidthHeightPixelInspect").Min(1).Max(
		100).Name("Width Height (px)").OnChange(func() {
		r.board.SetWidthHeightPixelInspector(guiVals.WidthHeightPixelInspect)
	})
	// Rasterizer GUI
	rasterizerGui := gui.AddFolder("Rasterizer settings")
	rasterizerGui.Open()
	rasterizerGui.Add(&guiVals,
		"SuperSampleRate").Min(1).Max(8).Name("Super sample rate").OnChange(func() {
		if r.sampleRate == guiVals.SuperSampleRate { // Skip redundant redraws.
			return
		}
		r.sampleRate = guiVals.SuperSampleRate
		r.Draw()
	})
	// The displayed canvas size combines both percent sliders.
	setCanvasScale := func() {
		scaleVal := (guiVals.CanvasScale / 100.0) * (guiVals.TargetScale / 100.0)
		r.canvas.Set("width", scaleVal*float32(r.unscaledWidthPixels))
		r.canvas.Set("height", scaleVal*float32(r.unscaledHeightPixels))
		r.board.Draw()
	}
	targetScaleController := rasterizerGui.Add(&guiVals,
		"TargetScale").Min(1).Max(200).Step(
		0.1).Name("Target scale %").OnChange(func() {
		r.SetTargetScale(guiVals.TargetScale / 100.0)
		setCanvasScale()
	})
	canvasScaleController := rasterizerGui.Add(&guiVals,
		"CanvasScale").Min(1).Max(500).Step(
		0.1).Name("Canvas scale %").OnChange(func() {
		setCanvasScale()
	})
	// Loading a new SVG resets both scales to 100%.
	onSvgLoad := func() {
		canvasScaleController.SetValue(100)
		targetScaleController.SetValue(100)
	}
	// SVG options GUI
	createSvgFolders(gui, r, onSvgLoad)
}
func main() {
document := js.Global().Get("document")
canvas := document.Call("getElementById", "webgl")
canvas.Get("style").Set("border-style", "solid")
//r, err := New(canvas, "/svg/basic/test1.svg")
//r, err := New(canvas, "/svg/basic/test2.svg")
//r, err := New(canvas, "/svg/basic/test3.svg")
//r, err := New(canvas, "/svg/basic/test4.svg")
//r, err := New(canvas, "/svg/basic/test5.svg")
//r, err := New(canvas, "/svg/basic/test6.svg")
//r, err := New(canvas, "/svg/basic/test7.svg")
//r, err := New(canvas, "/svg/alpha/01_prism.svg")
//r, err := New(canvas, "/svg/alpha/02_cube.svg")
//r, err := New(canvas, "/svg/alpha/03_buckyball.svg")
//r, err := New(canvas, "/svg/alpha/04_scotty.svg")
//r, err := New(canvas, "/svg/alpha/05_sphere.svg")
//r, err := New(canvas, getUrl("/svg/illustration/01_sketchpad.svg"))
//r, err := New(canvas, "/svg/illustration/02_hexes.svg")
//r, err := New(canvas, "/svg/illustration/03_circle.svg")
//r, err := New(canvas, "/svg/illustration/04_sun.svg")
r, err := New(canvas, "/svg/illustration/05_lion.svg")
//r, err := New(canvas, "/svg/illustration/06_sphere.svg")
//r, err := New(canvas, "/svg/illustration/07_lines.svg")
//r, err := New(canvas, "/svg/illustration/08_monkeytree.svg")
//r, err := New(canvas, "/svg/illustration/09_kochcurve.svg")
//r, err := New(canvas, "/svg/hardcore/01_degenerate_square1.svg")
//r, err := New(canvas, "/svg/hardcore/02_degenerate_square2.svg")
//r.SetSvg("/svg/illustration/01_sketchpad.svg")
createGui(r)
if err != nil {
panic(err)
}
_ = r
/*
canvas.Set("height", 900)
canvas.Set("width", 900)
*/
fmt.Println("starting", rand.Int31n(256))
<-make(chan bool) // Prevent program from exiting
}
|
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'

# Print the path of every episode file with non-whitespace content.
# Fixed: $file is now quoted so paths containing spaces or glob
# characters don't word-split or re-expand.
for file in anime/anime/episodes/*; do
	if [[ -n $(cat "$file") ]]; then
		echo "$file"
	fi
done
|
-- Coordinators table: one row per coordinator.
CREATE TABLE Coordenador
(
-- Auto-incrementing surrogate key (SQL Server IDENTITY syntax).
id_coordenador BIGINT PRIMARY KEY IDENTITY(1,1),
nome NVARCHAR(60) NOT NULL,
email NVARCHAR(60) NOT NULL,
-- Optional phone number stored as digits only.
telefone BIGINT
)
set -e
set -x

basedir=bench_tables_trees
resdir=$basedir/results
mkdir -p "$resdir"

# For every benchmark .biom table, time both implementations (su and sk)
# on each UniFrac variant, then verify their distance matrices agree.
# Fixed: all expansions are now quoted so paths with spaces or glob
# characters don't word-split; the brace expansion is replaced with a
# plain word list (equivalent, and quotable).
for f in "$basedir"/*.biom
do
	bench=${basedir}/$(basename "$f" .biom)
	res=${resdir}/$(basename "$f" .biom)
	for method in unweighted weighted_normalized weighted_unnormalized
	do
		/usr/bin/time -l ./su "${bench}.tre" "${bench}.biom" "$method" > "${res}.${method}.su.dm" 2> "${res}.${method}.su.stats"
		/usr/bin/time -l ./sk "${bench}.tre" "${bench}.biom" "$method" > "${res}.${method}.sk.dm" 2> "${res}.${method}.sk.stats"
		python compare_dms.py "${res}.${method}.sk.dm" "${res}.${method}.su.dm"
	done
done
|
<reponame>urjc-cloudapps-fjvela/p3-mastermind<gh_stars>0
package mastermind.server.dispatchers;
import mastermind.controllers.GameController;
/**
 * Dispatcher for the "get blacks" request: reads an attempt position from
 * the connection, asks the GameController for the number of black pegs at
 * that position, and sends the count back.
 */
public class GetBlacksDispatcher extends Dispatcher {
    public GetBlacksDispatcher(GameController gameController) {
        super(gameController);
    }

    @Override
    public void dispatch() {
        // Protocol: receive the attempt index, reply with its black-peg count.
        int position = tcpip.receiveInt();
        int blacks = ((GameController) this.acceptorController).getBlacks(position);
        tcpip.send(blacks);
    }
}
|
#define __USE_GNU
#include "Etterna/Globals/global.h"
#include <cstdio>
#include <cstring>
#include <cerrno>
#include <sys/types.h>
#include <sys/wait.h>
#include <sys/select.h>
#include "Backtrace.h"
#include "BacktraceNames.h"
#include "RageUtil/Utils/RageUtil.h"
#include "CrashHandler.h"
#include "CrashHandlerInternal.h"
#include "RageUtil/Misc/RageLog.h" /* for RageLog::GetAdditionalLog, etc. only */
#include "Etterna/Globals/ProductInfo.h"
#include "arch/ArchHooks/ArchHooks.h"
#ifdef __APPLE__
#include "archutils/Darwin/Crash.h"
#endif
#include "ver.h"
/* Forward declaration; defined below. */
bool
child_read(int fd, void* p, int size);
/* argv[0] of the crashed process; used to suppress the main module's name
 * in stack traces. */
const char* g_pCrashHandlerArgv0 = NULL;
/* Write a human-readable stack trace for the NULL-terminated backtrace
 * pointer list to `out`, demangling each frame and stopping once the
 * process entry point (__libc_start_main) is reached. */
static void
output_stack_trace(FILE* out, const void** BacktracePointers)
{
	if (BacktracePointers[0] == BACKTRACE_METHOD_NOT_AVAILABLE) {
		fprintf(out, "No backtrace method available.\n");
		return;
	}
	if (!BacktracePointers[0]) {
		fprintf(out, "Backtrace was empty.\n");
		return;
	}
	for (int i = 0; BacktracePointers[i]; ++i) {
		BacktraceNames bn;
		bn.FromAddr(const_cast<void*>(BacktracePointers[i]));
		bn.Demangle();
		/* Don't show the main module name. */
		if (bn.File == g_pCrashHandlerArgv0 && !bn.Symbol.empty())
			bn.File = "";
		/* Frames below the entry point are uninteresting. */
		if (bn.Symbol == "__libc_start_main")
			break;
		fprintf(out, "%s\n", bn.Format().c_str());
	}
}
/* Read exactly `size` bytes from `fd` into `p`, retrying on EINTR.
 * Returns false (after printing a diagnostic to stderr) on a read error
 * or a premature EOF from the parent. */
bool
child_read(int fd, void* p, int size)
{
	char* dest = static_cast<char*>(p);
	int total = 0;
	while (total < size) {
		const int ret = read(fd, dest + total, size - total);
		if (ret > 0) {
			total += ret;
			continue;
		}
		if (ret == 0) {
			fprintf(stderr, "Crash handler: EOF communicating with parent.\n");
			return false;
		}
		/* ret == -1: retry interrupted reads, fail on anything else. */
		if (errno == EINTR)
			continue;
		fprintf(stderr,
				"Crash handler: error communicating with parent: %s\n",
				strerror(errno));
		return false;
	}
	return true;
}
/* Once we get here, we should be safe to do whatever we want;
 * heavyweights like malloc and RString are OK. (Don't crash!)
 *
 * Runs in the forked crash-handler child: reads the crash report pieces
 * from fd 3 (written by the crashed parent), waits briefly for the parent
 * to finish, then writes crashinfo.txt and tells the user where it is. */
static void
child_process()
{
	/* 1. Read the CrashData. */
	CrashData crash;
	if (!child_read(3, &crash, sizeof(CrashData)))
		return;
	/* 2. Read info. */
	int size;
	if (!child_read(3, &size, sizeof(size)))
		return;
	char* Info = new char[size];
	if (!child_read(3, Info, size))
		return;
	/* 3. Read AdditionalLog. */
	if (!child_read(3, &size, sizeof(size)))
		return;
	char* AdditionalLog = new char[size];
	if (!child_read(3, AdditionalLog, size))
		return;
	/* 4. Read RecentLogs. */
	int cnt = 0;
	if (!child_read(3, &cnt, sizeof(cnt)))
		return;
	char* Recent[1024];
	/* Clamp a corrupt count so a bad parent can't overflow Recent[]. */
	if (cnt < 0)
		cnt = 0;
	if (cnt > 1024)
		cnt = 1024;
	for (int i = 0; i < cnt; ++i) {
		if (!child_read(3, &size, sizeof(size)))
			return;
		Recent[i] = new char[size];
		if (!child_read(3, Recent[i], size))
			return;
	}
	/* 5. Read CHECKPOINTs. */
	if (!child_read(3, &size, sizeof(size)))
		return;
	char* temp = new char[size];
	if (!child_read(3, temp, size))
		return;
	vector<RString> Checkpoints;
	split(temp, "$$", Checkpoints);
	delete[] temp;
	/* 6. Read the crashed thread's name. */
	if (!child_read(3, &size, sizeof(size)))
		return;
	temp = new char[size];
	if (!child_read(3, temp, size))
		return;
	const RString CrashedThread(temp);
	delete[] temp;
	/* Wait for the parent to either finish cleaning up or die. */
	fd_set rs;
	struct timeval timeout = { 5, 0 }; // 5 seconds
	FD_ZERO(&rs);
	FD_SET(3, &rs);
	int ret = select(4, &rs, NULL, NULL, &timeout);
	if (ret == 0) {
		fputs("Timeout exceeded.\n", stderr);
	} else if ((ret == -1 && errno != EPIPE) || ret != 1) {
		fprintf(stderr,
				"Unexpected return from select() result: %d (%s)\n",
				ret,
				strerror(errno));
		// Keep going.
	} else {
		char x;
		// No need to check FD_ISSET( 3, &rs ) because it was the only
		// descriptor in the set.
		ret = read(3, &x, sizeof(x));
		if (ret > 0) {
			fprintf(stderr, "Unexpected child read() result: %i\n", ret);
			/* keep going */
		} else if ((ret == -1 && errno != EPIPE) || ret != 0) {
			/* We expect an EOF or EPIPE. What happened? */
			fprintf(stderr,
					"Unexpected child read() result: %i (%s)\n",
					ret,
					strerror(errno));
			/* keep going */
		}
	}
	/* Pick a writable location for the report. */
	RString sCrashInfoPath = "/tmp";
#ifdef __APPLE__
	sCrashInfoPath = CrashHandler::GetLogsDirectory();
#else
	const char* home = getenv("HOME");
	if (home)
		sCrashInfoPath = home;
#endif
	sCrashInfoPath += "/crashinfo.txt";
	FILE* CrashDump = fopen(sCrashInfoPath.c_str(), "w+");
	if (CrashDump == NULL) {
		/* Previously this concatenated the path into the format argument,
		 * passing a string object through fprintf's varargs (undefined
		 * behavior); use a real format string instead. */
		fprintf(stderr,
				"Couldn't open %s: %s\n",
				sCrashInfoPath.c_str(),
				strerror(errno));
		exit(1);
	}
	fprintf(CrashDump, "%s%s crash report", PRODUCT_FAMILY, product_version);
	fprintf(CrashDump, " (build %s)", ::version_git_hash);
	fprintf(CrashDump, "\n");
	fprintf(CrashDump, "--------------------------------------\n");
	fprintf(CrashDump, "\n");
	RString reason;
	switch (crash.type) {
		case CrashData::SIGNAL: {
			reason = ssprintf("%s - %s",
							  SignalName(crash.signal),
							  SignalCodeName(crash.signal, crash.si.si_code));
			/* Linux puts the PID that sent the signal in si_addr for SI_USER.
			 */
			if (crash.si.si_code == SI_USER) {
				reason += ssprintf(" from pid %li", (long)crash.si.si_addr);
			} else {
				switch (crash.signal) {
					case SIGILL:
					case SIGFPE:
					case SIGSEGV:
					case SIGBUS:
						reason += ssprintf(" at 0x%0*lx",
										   int(sizeof(void*) * 2),
										   (unsigned long)crash.si.si_addr);
						break;
					default:
						break;
				}
			}
			/* BUGFIX: this break used to sit inside the else branch above,
			 * so the SI_USER path fell through into FORCE_CRASH and
			 * overwrote `reason` with stale crash.reason data. */
			break;
		}
		case CrashData::FORCE_CRASH:
			crash.reason[sizeof(crash.reason) - 1] = 0;
			reason = crash.reason;
			break;
	}
	fprintf(CrashDump, "Architecture: %s\n", HOOKS->GetArchName().c_str());
	fprintf(CrashDump, "Crash reason: %s\n", reason.c_str());
	fprintf(CrashDump, "Crashed thread: %s\n\n", CrashedThread.c_str());
	fprintf(CrashDump, "Checkpoints:\n");
	for (unsigned i = 0; i < Checkpoints.size(); ++i)
		fputs(Checkpoints[i].c_str(), CrashDump);
	fprintf(CrashDump, "\n");
	for (int i = 0; i < CrashData::MAX_BACKTRACE_THREADS; ++i) {
		if (!crash.BacktracePointers[i][0])
			break;
		fprintf(CrashDump, "Thread: %s\n", crash.m_ThreadName[i]);
		output_stack_trace(CrashDump, crash.BacktracePointers[i]);
		fprintf(CrashDump, "\n");
	}
	fprintf(CrashDump, "Static log:\n");
	fprintf(CrashDump, "%s", Info);
	fprintf(CrashDump, "%s", AdditionalLog);
	fprintf(CrashDump, "\nPartial log:\n");
	for (int i = 0; i < cnt; ++i)
		fprintf(CrashDump, "%s\n", Recent[i]);
	fprintf(CrashDump, "\n");
	fprintf(CrashDump, "-- End of report\n");
	fclose(CrashDump);
#ifdef __APPLE__
	CrashHandler::InformUserOfCrash(sCrashInfoPath);
#else
	/* stdout may have been inadvertently closed by the crash in the parent;
	 * write to /dev/tty instead. */
	FILE* tty = fopen("/dev/tty", "w");
	if (tty == NULL)
		tty = stderr;
	/* Build the message first; fputs needs a C string, not the result of
	 * string concatenation passed directly. */
	const RString sMessage =
	  "\n" PRODUCT_ID " has crashed. Debug information has been output to\n"
	  "\n"
	  "  " +
	  sCrashInfoPath +
	  "\n"
	  "\n"
	  "Please report a bug at:\n"
	  "\n"
	  "  " REPORT_BUG_URL "\n"
	  "\n";
	fputs(sMessage.c_str(), tty);
#endif
}
/* Entry-point hook: if the process was launched as the crash-handler child
 * (single magic argument), run the report writer and exit instead of
 * starting the game. Also records argv[0] so stack traces can elide the
 * main module name. */
void
CrashHandler::CrashHandlerHandleArgs(int argc, char* argv[])
{
	g_pCrashHandlerArgv0 = argv[0];
	const bool bIsCrashChild =
	  (argc == 2 && strcmp(argv[1], CHILD_MAGIC_PARAMETER) == 0);
	if (!bIsCrashChild)
		return;
	child_process();
	exit(0);
}
|
# Reverse the word order of `astring`, collapsing runs of whitespace to a
# single space (split() with no args splits on any whitespace).
revwords = ' '.join(reversed(astring.split()))
|
################################################
# Rebuild 1000 times to shake out flaky/intermittent build failures;
# abort on the first failing build.
i=1
while [ "$i" -le 1000 ]
do
    echo "Test # $i:\n"
    make clean; make || exit 1 ;
    i=$((i + 1))
done
|
#!/bin/bash
#restart commands
# restartNode: emit an alert, then stop and restart the local Phala node.
function restartNode(){
    echo "检测到卡顿超时!重启节点!" # alert message; replace with your own alerting command
    phala stop node # stop the node; depends on your deployment
    phala start # start the node; depends on your deployment
}
#update commands
# updateNode: called when repeated restarts did not help — alert, stop the
# node, replace the khala-node docker image with the latest one, restart.
function updateNode() {
    echo "重启多次无效!更新节点!" # alert message; replace with your own alerting command
    phala stop node # stop the node; depends on your deployment
    docker image rm phalanetwork/khala-node # remove the old node image
    docker pull phalanetwork/khala-node # pull the new node image
    phala start # start the node; depends on your deployment
}
#check if synced
# isSynced ISSYNCING — print a sync-status label.
#   $1 is the node's system_health.isSyncing value ("true"/"false");
#   empty/unset when the node did not respond.
# BUGFIX: the old test `[ -n $1 -o $1 = "false" ]` was true for ANY
# non-empty $1 (including "true"), so a still-syncing node was reported
# as synced. Only "false" means the node has caught up.
function isSynced(){
    if [ -z "$1" ]; then
        echo "未启动"
    elif [ "$1" = "false" ]; then
        # NOTE(review): plain `echo` does not interpret \E escapes, so this
        # prints the sequence literally — confirm `echo -e` was intended.
        echo "\E[1;32m已同步\E[0m"
    else
        echo "同步中"
    fi
}
#need sudo
if [ $(id -u) -ne 0 ]; then
    echo "请使用sudo运行!"
    exit 1
fi
#need jq (used below to parse the node's JSON-RPC replies)
if ! type jq > /dev/null; then
    apt-get install -y jq
fi
#var
node_ip="127.0.0.1"
khala_block_last_check=0 # khala height seen on the previous iteration
kusama_block_last_check=0 # kusama height seen on the previous iteration
node_stuck_count=0 # consecutive checks without block progress
restart_count=0 # restarts performed since the node last made progress
#reads var
read -p "检测区块未增加几分钟后重启? (直接回车默认5分)" stuck_times
if [ -z $stuck_times ]; then stuck_times=5; fi
read -p "重启几次后未解决,更新节点? (直接回车默认3次)" restart_times
if [ -z $restart_times ]; then restart_times=3; fi
# Main monitor loop: poll the local node once a minute, print a status
# table, and restart (then update) the node when block height stalls.
while true; do
    #get_node_version (khala RPC on port 9933; strip quotes and build suffix)
    node_system_version=$(curl -sH "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "system_version", "params":[]}' http://${node_ip}:9933 | jq '.result' | tr -d '"' | cut -d'-' -f1)
    if [ -z $node_system_version ]; then node_system_version="节点未响应"; fi
    #get_khala_info
    node_khala_system_health=$(curl -sH "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "system_health", "params":[]}' http://${node_ip}:9933 | jq '.result')
    node_khala_system_health_isSyncing=$(echo $node_khala_system_health | jq '.isSyncing')
    node_khala_system_health_peers=$(echo $node_khala_system_health | jq '.peers')
    node_khala_system_syncState=$(curl -sH "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "system_syncState", "params":[]}' http://${node_ip}:9933 | jq '.result')
    node_khala_system_syncState_currentBlock=$(echo $node_khala_system_syncState | jq '.currentBlock')
    node_khala_system_syncState_highestBlock=$(echo $node_khala_system_syncState | jq '.highestBlock')
    node_khala_synced=$(isSynced $node_khala_system_health_isSyncing)
    #get_kusama_info (relay-chain RPC on port 9934)
    node_kusama_system_health=$(curl -sH "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "system_health", "params":[]}' http://${node_ip}:9934 | jq '.result')
    node_kusama_system_health_isSyncing=$(echo $node_kusama_system_health | jq '.isSyncing')
    node_kusama_system_health_peers=$(echo $node_kusama_system_health | jq '.peers')
    node_kusama_system_syncState=$(curl -sH "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "system_syncState", "params":[]}' http://${node_ip}:9934 | jq '.result')
    node_kusama_system_syncState_currentBlock=$(echo $node_kusama_system_syncState | jq '.currentBlock')
    node_kusama_system_syncState_highestBlock=$(echo $node_kusama_system_syncState | jq '.highestBlock')
    node_kusama_synced=$(isSynced $node_kusama_system_health_isSyncing)
    #get node ip length (used to size the table's top border)
    node_ip_length=${#node_ip}
    hyphen=""
    for i in `seq 0 $node_ip_length`; do hyphen="-$hyphen"; done
    #print info
    printf "
--$hyphen--
$node_ip |
----------------------------------------------------------------------
节点版本 | khala节点 | 当前高度 | 最高高度 | 对等点数量 |
----------------------------------------------------------------------
%-8s | $node_khala_synced | %-10s | %-10s | %-10s |
----------------------------------------------------------------------
 | ksm节点 | 当前高度 | 最高高度 | 对等点数量 |
----------------------------------------------------------------------
 | $node_kusama_synced | %-10s | %-10s | %-10s |
----------------------------------------------------------------------" $node_system_version $node_khala_system_syncState_currentBlock $node_khala_system_syncState_highestBlock $node_khala_system_health_peers $node_kusama_system_syncState_currentBlock $node_kusama_system_syncState_highestBlock $node_kusama_system_health_peers
    #if getting info fails, pretend height 1 so the diff below is positive
    #exactly once and the baseline is reset
    if [ -z ${node_khala_system_syncState_currentBlock} ]; then
        node_khala_system_syncState_currentBlock=1
        khala_block_last_check=0
    fi
    if [ -z ${node_kusama_system_syncState_currentBlock} ]; then
        node_kusama_system_syncState_currentBlock=1
        kusama_block_last_check=0
    fi
    #compare block value
    khala_diff=`expr $node_khala_system_syncState_currentBlock - $khala_block_last_check`
    kusama_diff=`expr $node_kusama_system_syncState_currentBlock - $kusama_block_last_check`
    #save last check value
    khala_block_last_check=$node_khala_system_syncState_currentBlock
    kusama_block_last_check=$node_kusama_system_syncState_currentBlock
    #if stuck (either chain made no progress), increase node_stuck_count
    if [ $khala_diff -lt 1 -o $kusama_diff -lt 1 ]; then
        node_stuck_count=`expr $node_stuck_count + 1`
    else
        node_stuck_count=0
        restart_count=0
    fi
    printf "
---------------------------------
卡顿计数 | $node_stuck_count | 重启计数 | $restart_count |
"
    #if stuck too long, restart node
    if [ $node_stuck_count -ge $stuck_times ]; then
        restartNode
        restart_count=`expr $restart_count + 1`
        node_stuck_count=0
        #waiting 5 mins for node fully restarted
        for i in `seq 300 -1 1`
        do
            echo -ne "--- ${i}s 等待重启完成 ---\r"
            sleep 1
        done
    fi
    #if restart not work, try update node
    if [ $restart_count -ge $restart_times ]; then
        updateNode
        restart_count=0
        #waiting 5 mins for node fully restarted
        for i in `seq 300 -1 1`
        do
            echo -ne "--- ${i}s 等待重启完成 ---\r"
            sleep 1
        done
    fi
    #check every 60s
    for i in `seq 60 -1 1`
    do
        echo -ne "--- ${i}s 刷新 ---\r"
        sleep 1
    done
done
|
// Wraps a filter element (selector or DOM node) and wires up its
// selected-state toggling.
export class Filters {
  constructor (filter) {
    this.filter = filter
  }

  // On click, flip the 'selected' class on the clicked element.
  // toggleClass is exactly equivalent to the original
  // hasClass ? removeClass : addClass branch.
  filterClick () {
    $(this.filter).on('click', function () {
      $(this).toggleClass('selected')
    })
  }
}
import { extend, localize, configure } from 'plugins/vee-validate'
import { required, min, max, email, confirmed, numeric } from 'vee-validate/dist/rules'
import dictionary from '../locales/plugins/vee-validate'
// Apply Bootstrap-style CSS classes to fields after validation.
configure({
  classes: {
    valid: 'is-valid',
    invalid: 'is-invalid'
  }
})
// Patterns used by the custom rules below.
// - name: letters (incl. Latin-1 accents), words joined by single spaces/hyphens
// - phone: French national format, 10 digits starting 0[1-9]
// - zipCode: exactly 5 digits
const regex = {
  name: new RegExp('^([a-zA-ZÀ-ÖØ-öø-ÿ]+)([\\s\\-]?([a-zA-ZÀ-ÖØ-öø-ÿ]+))*$'),
  phone: new RegExp('^0([1-9])([0-9]{8})$'),
  zipCode: new RegExp('^[0-9]{5}$')
}
// Register translated messages and default to French.
localize(dictionary)
localize('fr')
// Generic rules
extend('required', {
  ...required
})
// Text rules
extend('firstName', {
  validate: (value) => {
    return regex.name.test(value)
  }
})
extend('lastName', {
  validate: (value) => {
    return regex.name.test(value)
  }
})
extend('min', {
  ...min
})
extend('max', {
  ...max
})
extend('email', {
  ...email
})
extend('confirmed', {
  ...confirmed
})
extend('numeric', {
  ...numeric
})
extend('phone', {
  validate: (value) => {
    return regex.phone.test(value)
  }
})
extend('zipCode', {
  validate: (value) => {
    return regex.zipCode.test(value)
  }
})
extend('addressRequiredDefault', {
  // Street and city required
  // Value is an address object; both `address` and `city` must be set.
  validate: (value) => {
    const wrongValues = ['', null, undefined]
    const isValid = !wrongValues.includes(value.address) && !wrongValues.includes(value.city)
    return {
      required: true,
      valid: isValid
    }
  },
  computesRequired: true
})
// Case-insensitive membership test against the allowed promo codes
// passed as the rule's parameter list.
extend('promoCode', {
  validate: (value, availableValues) => availableValues.includes(value.toUpperCase())
})
|
/**
*/
package PhotosMetaModel;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>User p</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link PhotosMetaModel.User_p#getExecute <em>Execute</em>}</li>
* <li>{@link PhotosMetaModel.User_p#getPrivilege <em>Privilege</em>}</li>
* <li>{@link PhotosMetaModel.User_p#getUsername <em>Username</em>}</li>
* <li>{@link PhotosMetaModel.User_p#getPassword <em>Password</em>}</li>
* </ul>
*
* @see PhotosMetaModel.PhotosMetaModelPackage#getUser_p()
* @model
* @generated
*/
public interface User_p extends EObject {
	/**
	 * Returns the value of the '<em><b>Execute</b></em>' containment reference list.
	 * The list contents are of type {@link PhotosMetaModel.Query}.
	 * <!-- begin-user-doc -->
	 * The queries this user has executed; owned (containment) by the user.
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Execute</em>' containment reference list.
	 * @see PhotosMetaModel.PhotosMetaModelPackage#getUser_p_Execute()
	 * @model containment="true"
	 * @generated
	 */
	EList<Query> getExecute();
	/**
	 * Returns the value of the '<em><b>Privilege</b></em>' containment reference list.
	 * The list contents are of type {@link PhotosMetaModel.Privilege}.
	 * <!-- begin-user-doc -->
	 * The privileges granted to this user; owned (containment) by the user.
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Privilege</em>' containment reference list.
	 * @see PhotosMetaModel.PhotosMetaModelPackage#getUser_p_Privilege()
	 * @model containment="true"
	 * @generated
	 */
	EList<Privilege> getPrivilege();
	/**
	 * Returns the value of the '<em><b>Username</b></em>' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Username</em>' attribute.
	 * @see #setUsername(String)
	 * @see PhotosMetaModel.PhotosMetaModelPackage#getUser_p_Username()
	 * @model
	 * @generated
	 */
	String getUsername();
	/**
	 * Sets the value of the '{@link PhotosMetaModel.User_p#getUsername <em>Username</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Username</em>' attribute.
	 * @see #getUsername()
	 * @generated
	 */
	void setUsername(String value);
	/**
	 * Returns the value of the '<em><b>Password</b></em>' attribute.
	 * <!-- begin-user-doc -->
	 * NOTE(review): the model stores the password as a plain String
	 * attribute — confirm that hashing/encryption happens elsewhere.
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Password</em>' attribute.
	 * @see #setPassword(String)
	 * @see PhotosMetaModel.PhotosMetaModelPackage#getUser_p_Password()
	 * @model
	 * @generated
	 */
	String getPassword();
	/**
	 * Sets the value of the '{@link PhotosMetaModel.User_p#getPassword <em>Password</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Password</em>' attribute.
	 * @see #getPassword()
	 * @generated
	 */
	void setPassword(String value);
} // User_p
|
// Source: smagill/opensphere-desktop
/**
* Viewers are responsible for managing how model coordinates are transformed to
* be displayed on the screen.
*/
package io.opensphere.core.viewer.impl;
|
package question1;
/**
 * Exception thrown when an operation is attempted on an empty stack
 * ("pile vide").
 *
 * @author (your name)
 * @version (a version number or a date)
 */
public class PileVideException extends Exception {
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception carrying the descriptive message.
     * The original constructor printed to stdout instead, which left
     * {@code getMessage()} null and made the error invisible to callers
     * that log the exception.
     */
    public PileVideException() {
        super("la pile est vide!");
    }
}
|
class Proposal:
    """A proposal submitted for review."""

    def __init__(self, name, details):
        self.name = name          # proposal title / submitter handle
        self.details = details    # free-form description


class ProposalStatus:
    """Review state of a proposal, tracked per reviewer."""

    def __init__(self, proposal, reviewer, proposal_status):
        self.proposal = proposal
        self.reviewer = reviewer
        self.proposal_status = proposal_status


class Feedback:
    """Feedback entry attached to a proposal."""

    def __init__(self, proposal):
        self.proposal = proposal


def process_proposal(instance: Proposal) -> None:
    """Print the proposal's details and create its initial review records.

    The original code called ``ProposalStatus.objects.create(...)`` and
    ``Feedback.objects.create(...)`` — Django manager API — which raises
    ``AttributeError`` here because these are plain Python classes, so the
    instances are now constructed directly.
    """
    print('--------------------')
    print('Proposal Details:')
    print(f'Name: {instance.name}')
    print(f'Details: {instance.details}')
    print('--------------------')
    # NOTE(review): the original reviewer was `instance.name.organizer`;
    # plain values have no `.organizer`, so fall back to `instance.name`
    # when the attribute is absent — confirm the intended reviewer source.
    reviewer = getattr(instance.name, 'organizer', instance.name)
    ProposalStatus(proposal=instance,
                   reviewer=reviewer,
                   proposal_status='to_be_reviewed')
    Feedback(proposal=instance)
from django import forms
class SupportForm(forms.Form):
    """Support/contact form: who is asking, their message, and a reply address."""

    # Requester's username; the id targets the support page's CSS/JS hooks.
    username = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'id': 'support_page_username'}))
    # Free-form message body, rendered as a 5-row textarea.
    message = forms.CharField(widget=forms.Textarea(attrs={'rows': 5}))
    # Reply-to address; rendered as <input type="email"> for browser-side checks.
    sender = forms.EmailField(widget=forms.TextInput(attrs={'id': 'support_page_email', 'type': 'email'}))
|
# Web-app nodes whose mounted storage-account share we want to classify.
# $1 is the environment suffix used in the resource names.
nodes=( "inp" "dex" "out" "int")
for i in "${nodes[@]}"
do
    # Share name of the first storage-account mount on app-dfx-node-<i>-<env>.
    # NOTE(review): without `-o tsv` the az output is JSON-quoted, which is
    # why the pattern below matches a trailing `a"` — confirm intentional.
    shareName=$(az webapp config storage-account list -g rg-dfx-api-$1 -n app-dfx-node-$i-$1 --query "[0].value.shareName")
    # Shares ending in ...a" belong to slot A; everything else is slot B.
    if [[ $shareName == *a\" ]]
    then
        echo "$i: A"
    else
        echo "$i: B"
    fi
done
|
/*global
angular
*/
(function () {
    'use strict';
    // Route configuration for the app: an abstract "main" layout state with
    // nested pages, plus 404/error handling.
    angular.module('routing.main', ['ui.router']).config(['$urlRouterProvider', '$stateProvider', function ($urlRouterProvider, $stateProvider) {
        // Treat an empty path as the home route.
        $urlRouterProvider.when('', '/');
        // Any unknown URL goes to the 404 state while keeping the URL intact.
        $urlRouterProvider.otherwise(function ($injector, $location) {
            var state = $injector.get('$state');
            state.go('404');
            return $location.path();
        });
        $stateProvider
            // Abstract parent providing the common layout/controller.
            .state('main', {
                abstract: true,
                url: '',
                controller: 'MainCtrl as vm',
                templateUrl: 'views/main.html'
            })
            // HOME STATES AND NESTED VIEWS ========================================
            .state('main.home', {
                url: '/',
                controller: 'HomeCtrl as vm',
                templateUrl: 'views/home.html',
                resolve: {
                    // Preload the six most recently updated recipes.
                    // NOTE(review): `mit: 6` looks like a typo for `limit: 6` —
                    // confirm against recipeService's expected query params.
                    recipeList: ['recipeService', function (recipeService) {
                        return recipeService.getRecipeList({where: {id: {'!': ''}}, mit: 6, sort: 'updatedAt DESC'}).$promise.then(function (recipeList) {
                            return recipeList;
                        });
                    }]
                }
            })
            // ABOUT PAGE =================================
            .state('main.about', {
                url: '/about',
                templateUrl: 'views/about.html'
            })
            .state('main.impressum', {
                url: '/impressum',
                templateUrl: 'views/impressum.html'
            })
            // LOGIN PAGE =================================
            .state('main.login', {
                url: '/login',
                templateUrl: 'views/login.html'
            })
            .state('main.logout', {
                url: '/logout',
                controller: 'LogoutCtrl'
            })
            // Standalone (non-main) state used by the otherwise() handler above.
            .state('404', {
                templateUrl: '/404.html'
            })
            .state('main.error', {
                url: '/error',
                templateUrl: '/error.html',
                controller: 'ErrorpageCtrl as vm',
                params: {
                    error: {}
                }
            });
    }]);
}());
|
module.exports = {
appExtend: 'hello'
} |
#! @shell@
# NixOS stage-1 init: runs from the initial ramdisk with only the tools in
# @extraUtils@ available; mounts the real root on $targetRoot and execs stage 2.
targetRoot=/mnt-root
console=tty1
extraUtils="@extraUtils@"
export LD_LIBRARY_PATH=@extraUtils@/lib
export PATH=@extraUtils@/bin
ln -s @extraUtils@/bin /bin
# Stop LVM complaining about fd3
export LVM_SUPPRESS_FD_WARNINGS=true
# Error handler, installed via the exit trap below: optionally panic
# (boot.panic_on_fail), otherwise offer the user a rescue shell / reboot.
fail() {
    if [ -n "$panicOnFail" ]; then exit 1; fi
    @preFailCommands@
    # If starting stage 2 failed, allow the user to repair the problem
    # in an interactive shell.
    cat <<EOF
An error occurred in stage 1 of the boot process, which must mount the
root filesystem on \`$targetRoot' and then start stage 2. Press one
of the following keys:
EOF
    if [ -n "$allowShell" ]; then cat <<EOF
  i) to launch an interactive shell
  f) to start an interactive shell having pid 1 (needed if you want to
     start stage 2's init manually)
EOF
    fi
    cat <<EOF
  r) to reboot immediately
  *) to ignore the error and continue
EOF
    read reply
    if [ -n "$allowShell" -a "$reply" = f ]; then
        exec setsid @shell@ -c "exec @shell@ < /dev/$console >/dev/$console 2>/dev/$console"
    elif [ -n "$allowShell" -a "$reply" = i ]; then
        echo "Starting interactive shell..."
        setsid @shell@ -c "@shell@ < /dev/$console >/dev/$console 2>/dev/$console" || fail
    elif [ "$reply" = r ]; then
        echo "Rebooting..."
        reboot -f
    else
        echo "Continuing..."
    fi
}
trap 'fail' 0
# Print a greeting.
echo
echo "[1;32m<<< NixOS Stage 1 >>>[0m"
echo
# Make several required directories.
mkdir -p /etc/udev
touch /etc/fstab # to shut up mount
ln -s /proc/mounts /etc/mtab # to shut up mke2fs
touch /etc/udev/hwdb.bin # to shut up udev
touch /etc/initrd-release
# Mount special file systems.
# specialMount DEVICE MOUNTPOINT OPTIONS FSTYPE — invoked by
# @earlyMountScript@ to mount /proc, /sys, /dev, /run early in boot.
specialMount() {
  local device="$1"
  local mountPoint="$2"
  local options="$3"
  local fsType="$4"
  mkdir -m 0755 -p "$mountPoint"
  mount -n -t "$fsType" -o "$options" "$device" "$mountPoint"
}
source @earlyMountScript@
# Log the script output to /dev/kmsg or /run/log/stage-1-init.log.
mkdir -p /tmp
mkfifo /tmp/stage-1-init.log.fifo
logOutFd=8 && logErrFd=9
# Save the original stdout/stderr on fds 8/9 so they can be restored
# just before stage 2 is started.
eval "exec $logOutFd>&1 $logErrFd>&2"
if test -w /dev/kmsg; then
    # Copy the fifo both to the original stdout and to the kernel log.
    tee -i < /tmp/stage-1-init.log.fifo /proc/self/fd/"$logOutFd" | while read -r line; do
        if test -n "$line"; then
            echo "<7>stage-1-init: $line" > /dev/kmsg
        fi
    done &
else
    mkdir -p /run/log
    tee -i < /tmp/stage-1-init.log.fifo /run/log/stage-1-init.log &
fi
exec > /tmp/stage-1-init.log.fifo 2>&1
# Process the kernel command line.
export stage2Init=/init
for o in $(cat /proc/cmdline); do
    case $o in
        console=*)
            # Keep only the device part of e.g. console=ttyS0,115200 for
            # the rescue-shell redirections in fail().
            set -- $(IFS==; echo $o)
            params=$2
            set -- $(IFS=,; echo $params)
            console=$1
            ;;
        init=*)
            set -- $(IFS==; echo $o)
            stage2Init=$2
            ;;
        boot.trace|debugtrace)
            # Show each command.
            set -x
            ;;
        boot.shell_on_fail)
            allowShell=1
            ;;
        boot.debug1|debug1) # stop right away
            allowShell=1
            fail
            ;;
        boot.debug1devices) # stop after loading modules and creating device nodes
            allowShell=1
            debug1devices=1
            ;;
        boot.debug1mounts) # stop after mounting file systems
            allowShell=1
            debug1mounts=1
            ;;
        boot.panic_on_fail|stage1panic=1)
            panicOnFail=1
            ;;
        root=*)
            # If a root device is specified on the kernel command
            # line, make it available through the symlink /dev/root.
            # Recognise LABEL= and UUID= to support UNetbootin.
            set -- $(IFS==; echo $o)
            if [ $2 = "LABEL" ]; then
                root="/dev/disk/by-label/$3"
            elif [ $2 = "UUID" ]; then
                root="/dev/disk/by-uuid/$3"
            else
                root=$2
            fi
            ln -s "$root" /dev/root
            ;;
    esac
done
# Set hostid before modules are loaded.
# This is needed by the spl/zfs modules.
@setHostId@
# Load the required kernel modules.
mkdir -p /lib
ln -s @modulesClosure@/lib/modules /lib/modules
# Point the kernel's module auto-loader at our modprobe.
echo @extraUtils@/bin/modprobe > /proc/sys/kernel/modprobe
for i in @kernelModules@; do
    echo "loading module $(basename $i)..."
    modprobe $i
done
# Create device nodes in /dev.
@preDeviceCommands@
echo "running udev..."
mkdir -p /etc/udev
ln -sfn @udevRules@ /etc/udev/rules.d
mkdir -p /dev/.mdadm
systemd-udevd --daemon
udevadm trigger --action=add
udevadm settle
# XXX: Use case usb->lvm will still fail, usb->luks->lvm is covered
@preLVMCommands@
echo "starting device mapper and LVM..."
lvm vgchange -ay
if test -n "$debug1devices"; then fail; fi
@postDeviceCommands@
# Return true if the machine is on AC power, or if we can't determine
# whether it's on AC power.
onACPower() {
    # Each leading `!` makes a missing/unreadable ACPI battery tree count
    # as "on AC"; only an explicit "discharging" state returns false.
    ! test -d "/proc/acpi/battery" ||
    ! ls /proc/acpi/battery/BAT[0-9]* > /dev/null 2>&1 ||
    ! cat /proc/acpi/battery/BAT*/state | grep "^charging state" | grep -q "discharg"
}
# Check the specified file system, if appropriate.
# checkFS DEVICE FSTYPE — fsck DEVICE unless the fs type or power state
# makes that pointless/unsafe; reboots or fails on serious fsck results.
checkFS() {
    local device="$1"
    local fsType="$2"
    # Only check block devices.
    if [ ! -b "$device" ]; then return 0; fi
    # Don't check ROM filesystems.
    if [ "$fsType" = iso9660 -o "$fsType" = udf ]; then return 0; fi
    # Don't check resilient COWs as they validate the fs structures at mount time
    if [ "$fsType" = btrfs -o "$fsType" = zfs ]; then return 0; fi
    # Skip fsck for inherently readonly filesystems.
    if [ "$fsType" = squashfs ]; then return 0; fi
    # If we couldn't figure out the FS type, then skip fsck.
    if [ "$fsType" = auto ]; then
        echo 'cannot check filesystem with type "auto"!'
        return 0
    fi
    # Optionally, skip fsck on journaling filesystems. This option is
    # a hack - it's mostly because e2fsck on ext3 takes much longer to
    # recover the journal than the ext3 implementation in the kernel
    # does (minutes versus seconds).
    if test -z "@checkJournalingFS@" -a \
        \( "$fsType" = ext3 -o "$fsType" = ext4 -o "$fsType" = reiserfs \
        -o "$fsType" = xfs -o "$fsType" = jfs -o "$fsType" = f2fs \)
    then
        return 0
    fi
    # Don't run `fsck' if the machine is on battery power. !!! Is
    # this a good idea?
    if ! onACPower; then
        echo "on battery power, so no \`fsck' will be performed on \`$device'"
        return 0
    fi
    echo "checking $device..."
    fsckFlags=
    if test "$fsType" != "btrfs"; then
        # -V: verbose; -a: repair automatically without prompting.
        fsckFlags="-V -a"
    fi
    fsck $fsckFlags "$device"
    fsckResult=$?
    # fsck exit-status bit 2: errors were corrected, reboot required.
    if test $(($fsckResult | 2)) = $fsckResult; then
        echo "fsck finished, rebooting..."
        sleep 3
        reboot -f
    fi
    # fsck exit-status bit 4: errors left uncorrected.
    if test $(($fsckResult | 4)) = $fsckResult; then
        echo "$device has unrepaired errors, please fix them manually."
        fail
    fi
    # Status >= 8 indicates an operational or usage error.
    if test $fsckResult -ge 8; then
        echo "fsck on $device failed."
        fail
    fi
    return 0
}
# Function for mounting a file system.
# mountFS DEVICE MOUNTPOINT OPTIONS FSTYPE — fsck, optionally resize, then
# mount DEVICE under /mnt-root; triggers lustration when requested.
mountFS() {
    local device="$1"
    local mountPoint="$2"
    local options="$3"
    local fsType="$4"
    if [ "$fsType" = auto ]; then
        fsType=$(blkid -o value -s TYPE "$device")
        if [ -z "$fsType" ]; then fsType=auto; fi
    fi
    # Filter out x- options, which busybox doesn't do yet.
    local optionsFiltered="$(IFS=,; for i in $options; do if [ "${i:0:2}" != "x-" ]; then echo -n $i,; fi; done)"
    echo "$device /mnt-root$mountPoint $fsType $optionsFiltered" >> /etc/fstab
    checkFS "$device" "$fsType"
    # Optionally resize the filesystem.
    case $options in
        *x-nixos.autoresize*)
            if [ "$fsType" = ext2 -o "$fsType" = ext3 -o "$fsType" = ext4 ]; then
                echo "resizing $device..."
                resize2fs "$device"
            fi
            ;;
    esac
    # Create backing directories for unionfs-fuse.
    if [ "$fsType" = unionfs-fuse ]; then
        for i in $(IFS=:; echo ${options##*,dirs=}); do
            mkdir -m 0700 -p /mnt-root"${i%=*}"
        done
    fi
    echo "mounting $device on $mountPoint..."
    mkdir -p "/mnt-root$mountPoint"
    # For CIFS mounts, retry a few times before giving up.
    local n=0
    while true; do
        mount "/mnt-root$mountPoint" && break
        if [ "$fsType" != cifs -o "$n" -ge 10 ]; then fail; break; fi
        echo "retrying..."
        n=$((n + 1))
    done
    # Lustrate (wipe impurities from) the root fs when the marker exists.
    [ "$mountPoint" == "/" ] &&
    [ -f "/mnt-root/etc/NIXOS_LUSTRATE" ] &&
    lustrateRoot "/mnt-root"
}
# lustrateRoot ROOT — move everything except /nix and /boot into
# ROOT/old-root, then restore the paths listed in /old-root/etc/NIXOS_LUSTRATE.
lustrateRoot () {
    local root="$1"
    echo
    echo -e "\e[1;33m<<< NixOS is now lustrating the root filesystem (cruft goes to /old-root) >>>\e[0m"
    echo
    mkdir -m 0755 -p "$root/old-root.tmp"
    echo
    echo "Moving impurities out of the way:"
    for d in "$root"/*
    do
        [ "$d" == "$root/nix" ] && continue
        [ "$d" == "$root/boot" ] && continue # Don't render the system unbootable
        [ "$d" == "$root/old-root.tmp" ] && continue
        mv -v "$d" "$root/old-root.tmp"
    done
    # Use .tmp to make sure subsequent invokations don't clash
    mv -v "$root/old-root.tmp" "$root/old-root"
    mkdir -m 0755 -p "$root/etc"
    touch "$root/etc/NIXOS"
    # fd 4 reads the list of paths the user wants to keep.
    exec 4< "$root/old-root/etc/NIXOS_LUSTRATE"
    echo
    echo "Restoring selected impurities:"
    while read -u 4 keeper; do
        dirname="$(dirname "$keeper")"
        mkdir -m 0755 -p "$root/$dirname"
        cp -av "$root/old-root/$keeper" "$root/$keeper"
    done
    exec 4>&-
}
# Function for waiting a device to appear.
# waitDevice DEVICE — poll up to 20 s for DEVICE to show up, re-activating
# LVM and re-triggering udev between attempts; non-zero status on timeout.
waitDevice() {
    local device="$1"
    # USB storage devices tend to appear with some delay. It would be
    # great if we had a way to synchronously wait for them, but
    # alas... So just wait for a few seconds for the device to
    # appear.
    if test ! -e $device; then
        echo -n "waiting for device $device to appear..."
        try=20
        while [ $try -gt 0 ]; do
            sleep 1
            # also re-try lvm activation now that new block devices might have appeared
            lvm vgchange -ay
            # and tell udev to create nodes for the new LVs
            udevadm trigger --action=add
            if test -e $device; then break; fi
            echo -n "."
            try=$((try - 1))
        done
        echo
        [ $try -ne 0 ]
    fi
}
# Try to resume - all modules are loaded now.
if test -e /sys/power/tuxonice/resume; then
    if test -n "$(cat /sys/power/tuxonice/resume)"; then
        echo 0 > /sys/power/tuxonice/user_interface/enabled
        echo 1 > /sys/power/tuxonice/do_resume || echo "failed to resume..."
    fi
fi
if test -e /sys/power/resume -a -e /sys/power/disk; then
    if test -n "@resumeDevice@" && waitDevice "@resumeDevice@"; then
        resumeDev="@resumeDevice@"
        resumeInfo="$(udevadm info -q property "$resumeDev" )"
    else
        for sd in @resumeDevices@; do
            # Try to detect resume device. According to Ubuntu bug:
            # https://bugs.launchpad.net/ubuntu/+source/pm-utils/+bug/923326/comments/1
            # when there are multiple swap devices, we can't know where the hibernate
            # image will reside. We can check all of them for swsuspend blkid.
            if waitDevice "$sd"; then
                resumeInfo="$(udevadm info -q property "$sd")"
                if [ "$(echo "$resumeInfo" | sed -n 's/^ID_FS_TYPE=//p')" = "swsuspend" ]; then
                    resumeDev="$sd"
                    break
                fi
            fi
        done
    fi
    if test -n "$resumeDev"; then
        # The kernel expects the resume device as "major:minor".
        resumeMajor="$(echo "$resumeInfo" | sed -n 's/^MAJOR=//p')"
        resumeMinor="$(echo "$resumeInfo" | sed -n 's/^MINOR=//p')"
        echo "$resumeMajor:$resumeMinor" > /sys/power/resume 2> /dev/null || echo "failed to resume..."
    fi
fi
# Try to find and mount the root device.
mkdir -p $targetRoot
# fd 3 reads the build-time filesystem table: groups of four lines
# (mount point, device, fs type, options).
exec 3< @fsInfo@
while read -u 3 mountPoint; do
    read -u 3 device
    read -u 3 fsType
    read -u 3 options
    # !!! Really quick hack to support bind mounts, i.e., where the
    # "device" should be taken relative to /mnt-root, not /. Assume
    # that every device that starts with / but doesn't start with /dev
    # is a bind mount.
    pseudoDevice=
    case $device in
        /dev/*)
            ;;
        //*)
            # Don't touch SMB/CIFS paths.
            pseudoDevice=1
            ;;
        /*)
            device=/mnt-root$device
            ;;
        *)
            # Not an absolute path; assume that it's a pseudo-device
            # like an NFS path (e.g. "server:/path").
            pseudoDevice=1
            ;;
    esac
    if test -z "$pseudoDevice" && ! waitDevice "$device"; then
        # If it doesn't appear, try to mount it anyway (and
        # probably fail). This is a fallback for non-device "devices"
        # that we don't properly recognise.
        echo "Timed out waiting for device $device, trying to mount anyway."
    fi
    # Wait once more for the udev queue to empty, just in case it's
    # doing something with $device right now.
    udevadm settle
    mountFS "$device" "$mountPoint" "$options" "$fsType"
done
exec 3>&-
@postMountCommands@
# Emit a udev rule for /dev/root to prevent systemd from complaining.
if [ -e /mnt-root/iso ]; then
    eval $(udevadm info --export --export-prefix=ROOT_ --device-id-of-file=/mnt-root/iso)
else
    eval $(udevadm info --export --export-prefix=ROOT_ --device-id-of-file=$targetRoot)
fi
if [ "$ROOT_MAJOR" -a "$ROOT_MINOR" -a "$ROOT_MAJOR" != 0 ]; then
    mkdir -p /run/udev/rules.d
    echo 'ACTION=="add|change", SUBSYSTEM=="block", ENV{MAJOR}=="'$ROOT_MAJOR'", ENV{MINOR}=="'$ROOT_MINOR'", SYMLINK+="root"' > /run/udev/rules.d/61-dev-root-link.rules
fi
# Stop udevd.
udevadm control --exit
# Reset the logging file descriptors.
# Do this just before pkill, which will kill the tee process.
exec 1>&$logOutFd 2>&$logErrFd
eval "exec $logOutFd>&- $logErrFd>&-"
# Kill any remaining processes, just to be sure we're not taking any
# with us into stage 2. But keep storage daemons like unionfs-fuse.
#
# Storage daemons are distinguished by an @ in front of their command line:
# https://www.freedesktop.org/wiki/Software/systemd/RootStorageDaemons/
#
# BUGFIX: this code runs at the top level, not inside a function, so
# `local pidsToKill=...` was an error (`local` is only valid inside a
# shell function); use a plain variable assignment instead.
pidsToKill="$(pgrep -v -f '^@')"
for pid in $pidsToKill; do
    # Make sure we don't kill kernel processes, see #15226 and:
    # http://stackoverflow.com/questions/12213445/identifying-kernel-threads
    readlink "/proc/$pid/exe" &> /dev/null || continue
    # Try to avoid killing ourselves.
    [ $pid -eq $$ ] && continue
    kill -9 "$pid"
done
if test -n "$debug1mounts"; then fail; fi
# Restore /proc/sys/kernel/modprobe to its original value.
echo /sbin/modprobe > /proc/sys/kernel/modprobe
# Start stage 2. `switch_root' deletes all files in the ramfs on the
# current root. Note that $stage2Init might be an absolute symlink,
# in which case "-e" won't work because we're not in the chroot yet.
# NOTE(review): with `! -L` this guard only trips for a dangling symlink;
# the comment suggests `-L` (no `!`) was intended so a missing,
# non-symlink init is what gets reported — verify against upstream.
if ! test -e "$targetRoot/$stage2Init" -o ! -L "$targetRoot/$stage2Init"; then
    echo "stage 2 init script ($targetRoot/$stage2Init) not found"
    fail
fi
mkdir -m 0755 -p $targetRoot/proc $targetRoot/sys $targetRoot/dev $targetRoot/run
mount --move /proc $targetRoot/proc
mount --move /sys $targetRoot/sys
mount --move /dev $targetRoot/dev
mount --move /run $targetRoot/run
# env -i: hand stage 2 a clean environment.
exec env -i $(type -P switch_root) "$targetRoot" "$stage2Init"
fail # should never be reached
|
// By KRT girl xiplus
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
// Counters for points sharing the same vertical line (v1, keyed by x),
// horizontal line (v2, keyed by y), anti-diagonal (v3, keyed by x+y) and
// main diagonal (v4, keyed by x-y offset by 100010 to keep indices >= 0).
int v1[100010],v2[100010],v3[200020],v4[200020];
int main(){
	ios::sync_with_stdio(false);
	cin.tie(0);
	// T test cases; each gives N points and a threshold K.
	// (note: `d` is declared but never used)
	int T,N,K,x,y,d;
	cin>>T;
	while(T--){
		// Reset all line counters for this test case.
		memset(v1,0,sizeof(v1));
		memset(v2,0,sizeof(v2));
		memset(v3,0,sizeof(v3));
		memset(v4,0,sizeof(v4));
		cin>>N>>K;
		int i=0;
		bool ans=false;
		// Read points one at a time; stop at the first point whose
		// row/column/diagonal accumulates at least K points.
		while(i<N){
			i++;
			cin>>x>>y;
			v1[x]++;
			if(v1[x]>=K){
				ans=true;
				break;
			}
			v2[y]++;
			if(v2[y]>=K){
				ans=true;
				break;
			}
			v3[x+y]++;
			if(v3[x+y]>=K){
				ans=true;
				break;
			}
			v4[100010+x-y]++;
			if(v4[100010+x-y]>=K){
				ans=true;
				break;
			}
		}
		// Print the 1-based index of the decisive point, or -1 if none.
		if(ans)cout<<i<<endl;
		else cout<<-1<<endl;
		// Drain the unread points so the next test case starts at the
		// right position in the input stream.
		while(i<N){
			i++;
			cin>>x>>y;
		}
	}
}
// Source: eddiewentw/pgxbuilder
package pgxbuilder
import "strings"
// Delete starts a delete statement targeting the given table.
func Delete(table string) *Query {
	q := Query{
		stmt:  stmtDelete,
		table: table,
	}
	return &q
}
// toDelete renders the query as a DELETE statement, appending the WHERE
// clause and any RETURNING columns configured on the query.
func (q Query) toDelete() string {
	sql := "DELETE FROM " + q.table + q.toWhere()
	if len(q.returning) > 0 {
		sql += " RETURNING " + strings.Join(q.returning, ", ")
	}
	return sql
}
|
#!/bin/bash
# Repeatedly invoke the stack's Lambda function with event.json and print each
# response, pausing 2 seconds between invocations. Stop with Ctrl-C.
set -eo pipefail

# Resolve the function's physical name from the CloudFormation stack resource.
FUNCTION=$(aws cloudformation describe-stack-resource --stack-name blank-java --logical-resource-id function --query 'StackResourceDetail.PhysicalResourceId' --output text)

while true; do
  # Quote the expansion so an unexpected value cannot word-split the command.
  aws lambda invoke --function-name "$FUNCTION" --payload file://event.json out.json
  cat out.json
  echo ""
  sleep 2
done
|
#!/bin/bash
# Benchmark driver: runs the FedBench query mix against FedX-HiBISCus and
# prints one result line per run. Paths below are installation-specific.
folder=/home/roott/queries/fedBench
FEDX_HIBISCUS_HOME=/home/roott/FedX-HiBISCus
configFileHibiscus=${FEDX_HIBISCUS_HOME}/config.properties
ODYSSEY_HOME=/home/roott/federatedOptimizer
proxyFederationFile=/home/roott/tmp/proxyFederation
cold=true   # when "true", delete FedX's cache.db before every run (cold cache)
s=`seq 1 11`
l=""        # accumulates the list of query names to execute
n=10        # repetitions per query
w=1800      # per-run timeout in seconds
# Build the query list: LD1..LD11, then CD1..CD7 and LS1..LS7.
for i in ${s}; do
    l="${l} LD${i}"
done
s=`seq 1 7`
for i in ${s}; do
    l="${l} CD${i}"
done
for i in ${s}; do
    l="${l} LS${i}"
done
# Run each query ${n} times. Every run starts fresh SPARQL proxies, optionally
# wipes the FedX cache, executes the query under a ${w}-second timeout, and
# prints: query, #selected-sources, #selection-queries, planning ms, runtime ms,
# proxy traffic info, #results.
for query in ${l}; do
    f=0   # number of timed-out runs for this query so far
    rm ${FEDX_HIBISCUS_HOME}/cache.db
    for j in `seq 1 ${n}`; do
        # Start one proxy per endpoint; they record per-source traffic.
        cd ${ODYSSEY_HOME}/scripts
        tmpFile=`./startProxies2.sh "172.19.2.123 172.19.2.106 172.19.2.100 172.19.2.115 172.19.2.107 172.19.2.118 172.19.2.111 172.19.2.113 172.19.2.120" 3030`
        sleep 2s
        cd ${FEDX_HIBISCUS_HOME}
        # Remove stale source-selection summaries from previous runs.
        if [ -f ${FEDX_HIBISCUS_HOME}/summaries*?/* ]; then
            rm ${FEDX_HIBISCUS_HOME}/summaries*?/*
        fi
        # Cold runs must not reuse the FedX source-selection cache.
        if [ "$cold" = "true" ] && [ -f cache.db ]; then
            rm cache.db
        fi
        # Execute the query: engine output -> outputFile, time stats -> timeFile.
        /usr/bin/time -f "%e %P %t %M" timeout ${w}s ./cli.sh -c ${configFileHibiscus} @q ${folder}/${query} > outputFile 2> timeFile
        # Planning time in ms; -1 when missing from the engine output.
        x=`grep "planning=" outputFile`
        y=`echo ${x##*planning=}`
        if [ -n "$y" ]; then
            s=`echo ${y%%ms*}`
        else
            s=-1
        fi
        # Number of selected sources / source-selection queries from the plan.
        ${ODYSSEY_HOME}/scripts/processFedXPlansNSS.sh outputFile > xxx
        nss=`cat xxx`
        ${ODYSSEY_HOME}/scripts/processFedXPlansNSQ.sh outputFile > xxx
        ns=`cat xxx`
        rm xxx
        # Elapsed wall-clock seconds: first field of /usr/bin/time's last line.
        x=`tail -n 1 timeFile`
        y=`echo ${x%% *}`
        x=`echo ${y%%.*}`
        if [ "$x" -ge "$w" ]; then
            # Timed out: convert elapsed seconds to ms, count partial results.
            t=`echo $y`
            t=`echo "scale=2; $t*1000" | bc`
            f=$(($f+1))
            nr=`grep "^\[" outputFile | grep "\]$" | wc -l | sed 's/^[ ^t]*//' | cut -d' ' -f1`
        else
            # Completed: take duration and result count from the engine output.
            x=`grep "duration=" outputFile`
            y=`echo ${x##*duration=}`
            t=`echo ${y%%ms*}`
            x=`grep "results=" outputFile`
            nr=`echo ${x##*results=}`
        fi
        # Tear down the proxies and collect the per-endpoint traffic counters.
        cd ${ODYSSEY_HOME}/scripts
        ./killAll.sh ${proxyFederationFile}
        sleep 10s
        pi=`./processProxyInfo.sh ${tmpFile} 0 8`
        echo "${query} ${nss} ${ns} ${s} ${t} ${pi} ${nr}"
        # Give up on this query after two timed-out runs.
        if [ "$f" -ge "2" ]; then
            break
        fi
    done
done
|
<reponame>itsoo/rest-client
package com.cupshe.restclient;
import com.cupshe.ak.text.StringUtils;
import com.cupshe.restclient.exception.NoSupportMethodException;
import com.cupshe.restclient.lang.RestClient;
import com.cupshe.restclient.parser.PathVariableExpressionParser;
import lombok.SneakyThrows;
import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.lang.NonNull;
import org.springframework.util.Assert;
import org.springframework.util.ReflectionUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
/**
 * AssertBeforeRegister
 *
 * <p>Validates {@code @RestClient}-annotated interfaces before their bean
 * definitions are registered, and detects duplicate bean names.
 *
 * @author zxy
 */
class AssertBeforeRegister {

    /**
     * Maps bean name -> fully-qualified class name of the first registrant.
     *
     * <p>Kept {@code final}: the previous implementation nulled this field in
     * {@link #clearCheckedRegisterCache()} behind a non-volatile double-checked
     * lock, so a concurrent or subsequent call to
     * {@link #assertSingletonRegister} could dereference {@code null}. Keeping
     * the (empty) map alive is cheap and removes that failure mode.
     */
    private static final Map<String, String> registeredBeans = new ConcurrentHashMap<>(32);

    /**
     * Asserts that a bean name is only ever registered by a single class.
     *
     * @param beanName  the bean name being registered
     * @param className the fully-qualified class registering it
     * @throws BeanDefinitionStoreException if the name was already claimed by a different class
     */
    static void assertSingletonRegister(String beanName, String className) {
        String repClassName = registeredBeans.computeIfAbsent(beanName, k -> className);
        if (StringUtils.isNotEquals(repClassName, className)) {
            String message = "Annotation-specified bean name '{}' for bean class [{}] conflicts with existing, " +
                    "non-compatible bean definition of same name and class [{}].";
            // repClassName is the value stored in the map; reuse it instead of
            // performing a second (potentially racy) lookup.
            throw new BeanDefinitionStoreException(
                    StringUtils.getFormatString(message, beanName, className, repClassName));
        }
    }

    /**
     * Clears the duplicate-name cache once registration has finished.
     */
    static void clearCheckedRegisterCache() {
        registeredBeans.clear();
    }

    /**
     * Loads the class and asserts it is a valid {@code @RestClient} interface.
     *
     * @param className fully-qualified interface name
     * @return the {@link RestClient} annotation found on the class
     */
    @NonNull
    @SneakyThrows
    static RestClient assertAndGetAnnotation(String className) {
        return assertRestClientIsInterface(Class.forName(className));
    }

    /**
     * Asserts the class is an interface annotated with {@code @RestClient} and
     * that the annotation and every declared method are well-formed.
     *
     * @param clazz candidate interface
     * @return the validated {@link RestClient} annotation
     */
    @NonNull
    static RestClient assertRestClientIsInterface(Class<?> clazz) {
        assertIsTrue(clazz.isInterface(), clazz, "@RestClient component can only be interface.");
        RestClient ann = AnnotationUtils.findAnnotation(clazz, RestClient.class);
        Assert.notNull(ann, clazz.getCanonicalName() + ": Cannot found interface with @RestClient.");
        assertNameOrValueIsNotEmpty(ann, clazz);
        assertMaxAutoRetriesValue(ann, clazz);
        assertFallbackClass(ann, clazz);
        // assert all methods
        for (Method method : ReflectionUtils.getDeclaredMethods(clazz)) {
            assertRequestBodyOnlyOne(method);
            assertRequestMappingMethod(method);
            assertRequestMappingPath(method);
            assertXxxMappingOnlyOne(method);
            assertPathVariableParams(method);
        }

        return ann;
    }

    /** Asserts the annotation's 'name'/'value' attribute is non-blank. */
    static void assertNameOrValueIsNotEmpty(RestClient ann, Class<?> clazz) {
        assertIsTrue(StringUtils.isNotBlank(ann.name()), clazz, "@RestClient 'name' or 'value' cannot be all empty.");
    }

    /** Asserts 'maxAutoRetries' is non-negative. */
    static void assertMaxAutoRetriesValue(RestClient ann, Class<?> clazz) {
        boolean checkAutoRetries = ann.maxAutoRetries() >= 0;
        assertIsTrue(checkAutoRetries, clazz, "@RestClient 'maxAutoRetries' range [0, Integer.MAX_VALUE].");
    }

    /**
     * Asserts the fallback class (when declared) implements the annotated
     * interface, is a concrete class, and carries at most one supported annotation.
     */
    static void assertFallbackClass(RestClient ann, Class<?> clazz) {
        Class<?> fallback = ann.fallback();
        if (fallback == void.class) {
            // void.class is the annotation's "no fallback configured" marker.
            return;
        }

        boolean checkIsSubclass = clazz.isAssignableFrom(fallback);
        assertIsTrue(checkIsSubclass, clazz, "Fallback class must implement the interface annotated by @RestClient.");
        boolean checkClassTyped = !fallback.isInterface() && !Modifier.isAbstract(fallback.getModifiers());
        assertIsTrue(checkClassTyped, clazz, "Fallback class cannot be interface or abstract class.");
        long count = Arrays.stream(fallback.getDeclaredAnnotations())
                .filter(t -> SupportedAnnotations.isSupport(t.annotationType()))
                .count();
        String types = SupportedAnnotations.supportTypes();
        String message = StringUtils.getFormatString("Fallback annotation only supports one of [{}].", types);
        assertIsTrue(count <= 1L, clazz, message);
    }

    /** Asserts at most one parameter is annotated with {@code @RequestBody}. */
    static void assertRequestBodyOnlyOne(Method method) {
        long count = Arrays.stream(method.getParameters())
                .filter(t -> AnnotationUtils.findAnnotation(t, RequestBody.class) != null)
                .count();
        assertIsTrue(count <= 1L, method, "@RequestBody of the method cannot have that more than one.");
    }

    /** Asserts the request mapping declares exactly one HTTP method. */
    static void assertRequestMappingMethod(Method method) {
        RequestMapping ann = getRequestMappingOfMethod(method);
        assertIsTrue(ann.method().length > 0, method, "@RequestMapping property 'method' cannot be empty.");
        assertIsTrue(ann.method().length == 1, method, "@RequestMapping property 'method' can only one.");
    }

    /** Asserts the request mapping declares at most one path. */
    static void assertRequestMappingPath(Method method) {
        AnnotationMethodAttribute attr = AnnotationMethodAttribute.of(method);
        assertIsTrue(attr.paths.length <= 1, method, "@RequestMapping 'path' or 'value' is only one param.");
    }

    /** Asserts the method carries exactly one supported mapping annotation. */
    static void assertXxxMappingOnlyOne(Method method) {
        int count = 0;
        for (Annotation ann : method.getDeclaredAnnotations()) {
            try {
                // of() throws NoSupportMethodException for annotations that are
                // not mapping annotations; only the supported ones are counted.
                AnnotationMethodAttribute.of(ann, false);
                count++;
            } catch (NoSupportMethodException ignore) {}
        }

        assertIsTrue(count == 1, method, "@RequestMapping is required and only one.");
    }

    /**
     * Asserts the path's placeholder delimiters are balanced and that their
     * count matches the number of {@code @PathVariable} parameters.
     */
    static void assertPathVariableParams(Method method) {
        AnnotationMethodAttribute attr = AnnotationMethodAttribute.of(method);
        long pvCnt1 = StringUtils.findSubstringCountOf(attr.path, PathVariableExpressionParser.EXPRESSION_DELIMITER_PREFIX);
        long pvCnt2 = StringUtils.findSubstringCountOf(attr.path, PathVariableExpressionParser.EXPRESSION_DELIMITER_SUFFIX);
        long mpsCnt = Arrays.stream(method.getParameters())
                .filter(t -> AnnotationUtils.findAnnotation(t, PathVariable.class) != null)
                .count();
        assertIsTrue(pvCnt1 == pvCnt2, method, "@RequestMapping 'path' format error.");
        assertIsTrue(pvCnt1 == mpsCnt, method, "Wrong params map to request @PathVariable.");
    }

    /** Finds the method's {@code @RequestMapping} (meta-annotations included) or fails. */
    @NonNull
    private static RequestMapping getRequestMappingOfMethod(Method method) {
        RequestMapping result = AnnotationUtils.findAnnotation(method, RequestMapping.class);
        Assert.notNull(result, method.toGenericString() + ": Cannot found anyone @RequestMapping class.");
        return result;
    }

    private static void assertIsTrue(boolean expr, Class<?> clazz, String message) {
        Assert.isTrue(expr, clazz.getCanonicalName() + ": " + message);
    }

    private static void assertIsTrue(boolean expr, Method method, String message) {
        Assert.isTrue(expr, method.toGenericString() + ": " + message);
    }
}
|
<gh_stars>0
// Connect back to the host that served this page (URL argument left commented out).
var socket = io.connect();//'http://nodejs-mongo-persistent-test-project-2-uniek.a3c1.starter-us-west-1.openshiftapps.com:8000')
// Pick a random hue (0-359) and announce it to the server.
setcolor(Math.floor(Math.random()*360));
// Read the room name typed by the user and ask the server to join that room.
function enterroom() {
  const field = document.getElementById('roomnameinput');
  socket.emit('roomJoinReq', field.value);
}
// Remember the chosen hue on the socket and broadcast it to the server.
function setcolor(hue) {
  socket.color = hue;
  socket.emit('setColor', hue);
}
|
package com.oven.wsdl;
import java.net.MalformedURLException;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.WebEndpoint;
import javax.xml.ws.WebServiceClient;
import javax.xml.ws.WebServiceFeature;
import javax.xml.ws.Service;
/**
* This class was generated by Apache CXF 3.2.5
* 2020-08-17T18:00:14.094+08:00
* Generated source version: 3.2.5
*
*/
@WebServiceClient(name = "userService",
                  wsdlLocation = "http://localhost:8080/ws/user?wsdl",
                  targetNamespace = "wsdl.oven.com")
public class UserService extends Service {

    // Resolved once by the static initializer below; may remain null only if
    // the URL literal fails to parse (effectively unreachable for a constant).
    public final static URL WSDL_LOCATION;
    // Service and port QNames as declared in the WSDL.
    public final static QName SERVICE = new QName("wsdl.oven.com", "userService");
    public final static QName UserPortName = new QName("wsdl.oven.com", "userPortName");
    static {
        URL url = null;
        try {
            url = new URL("http://localhost:8080/ws/user?wsdl");
        } catch (MalformedURLException e) {
            java.util.logging.Logger.getLogger(UserService.class.getName())
                .log(java.util.logging.Level.INFO,
                     "Can not initialize the default wsdl from {0}", "http://localhost:8080/ws/user?wsdl");
        }
        WSDL_LOCATION = url;
    }

    /** Creates the service from a custom WSDL location with the default service QName. */
    public UserService(URL wsdlLocation) {
        super(wsdlLocation, SERVICE);
    }

    /** Creates the service from a custom WSDL location and service QName. */
    public UserService(URL wsdlLocation, QName serviceName) {
        super(wsdlLocation, serviceName);
    }

    /** Creates the service from the default (build-time) WSDL location. */
    public UserService() {
        super(WSDL_LOCATION, SERVICE);
    }

    /** Creates the service with the default WSDL location and the given features. */
    public UserService(WebServiceFeature ... features) {
        super(WSDL_LOCATION, SERVICE, features);
    }

    /** Creates the service from a custom WSDL location with the given features. */
    public UserService(URL wsdlLocation, WebServiceFeature ... features) {
        super(wsdlLocation, SERVICE, features);
    }

    /** Creates the service from a custom WSDL location, service QName and features. */
    public UserService(URL wsdlLocation, QName serviceName, WebServiceFeature ... features) {
        super(wsdlLocation, serviceName, features);
    }

    /**
     * Returns a proxy for the "userPortName" port.
     *
     * @return
     *     returns UserPortType
     */
    @WebEndpoint(name = "userPortName")
    public UserPortType getUserPortName() {
        return super.getPort(UserPortName, UserPortType.class);
    }

    /**
     * Returns a proxy for the "userPortName" port configured with the given features.
     *
     * @param features
     *     A list of {@link WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
     * @return
     *     returns UserPortType
     */
    @WebEndpoint(name = "userPortName")
    public UserPortType getUserPortName(WebServiceFeature... features) {
        return super.getPort(UserPortName, UserPortType.class, features);
    }

}
|
#!/bin/bash
# conda-build script for the R "storr" package.

set -o errexit -o pipefail

# Platforms listed here build the package natively with R CMD INSTALL.
if [[ ${target_platform} =~ linux.* ]] || [[ ${target_platform} == win-32 ]] || [[ ${target_platform} == win-64 ]] || [[ ${target_platform} == osx-64 ]]; then
  export DISABLE_AUTOBREW=1
  ${R} CMD INSTALL --build .
else
  # Otherwise install the pre-built package tree directly into the R library.
  mkdir -p "${PREFIX}"/lib/R/library/storr
  mv ./* "${PREFIX}"/lib/R/library/storr
  # NOTE(review): osx-64 is already handled by the branch above, so this inner
  # block appears unreachable — confirm which branch osx-64 should take.
  if [[ ${target_platform} == osx-64 ]]; then
    pushd "${PREFIX}"
    # Rewrite hard-coded dylib install names in every shared object so they
    # resolve inside the conda prefix instead of system/MRO locations.
    for libdir in lib/R/lib lib/R/modules lib/R/library lib/R/bin/exec sysroot/usr/lib; do
      pushd "${libdir}" || exit 1
      while IFS= read -r -d '' SHARED_LIB
      do
        echo "fixing SHARED_LIB ${SHARED_LIB}"
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5.0-MRO/Resources/lib/libR.dylib "${PREFIX}"/lib/R/lib/libR.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libR.dylib "${PREFIX}"/lib/R/lib/libR.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/clang4/lib/libomp.dylib "${PREFIX}"/lib/libomp.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/gfortran/lib/libgfortran.3.dylib "${PREFIX}"/lib/libgfortran.3.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libquadmath.0.dylib "${PREFIX}"/lib/libquadmath.0.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/local/gfortran/lib/libquadmath.0.dylib "${PREFIX}"/lib/libquadmath.0.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libgfortran.3.dylib "${PREFIX}"/lib/libgfortran.3.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libgcc_s.1.dylib "${PREFIX}"/lib/libgcc_s.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libiconv.2.dylib "${PREFIX}"/sysroot/usr/lib/libiconv.2.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libncurses.5.4.dylib "${PREFIX}"/sysroot/usr/lib/libncurses.5.4.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libicucore.A.dylib "${PREFIX}"/sysroot/usr/lib/libicucore.A.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libexpat.1.dylib "${PREFIX}"/lib/libexpat.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libcurl.4.dylib "${PREFIX}"/lib/libcurl.4.dylib "${SHARED_LIB}" || true
        install_name_tool -change /usr/lib/libc++.1.dylib "${PREFIX}"/lib/libc++.1.dylib "${SHARED_LIB}" || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libc++.1.dylib "${PREFIX}"/lib/libc++.1.dylib "${SHARED_LIB}" || true
      done < <(find . \( -type f -iname "*.dylib" -or -iname "*.so" -or -iname "R" \) -print0)
      popd
    done
    popd
  fi
fi
|
#include "context.h"

// Out-of-class definition of the static member declared in context.h.
// Starts out null; presumably initialized lazily elsewhere — TODO confirm.
zmq::context_t* Context::s_contextLocal = nullptr;
-- Return the most frequently occurring customer name and its occurrence count.
-- Note: with LIMIT 1, ties for the highest count return an arbitrary winner.
SELECT name, COUNT(name) AS Count
FROM Customers
GROUP BY name
ORDER BY Count DESC
LIMIT 1;
<reponame>houstonmc/houston-slack
# https://get.slack.help/hc/en-us/articles/216360827-Changing-your-username
# Usernames can be up to 21 characters long. They can contain lowercase letters
# a to z (without accents), and numbers 0 to 9. We hope to make usernames more
# customizable in future!
# Resolve a "{{slack.user}}" mention to the matching Houston user record.
Attentive::Entity.define "houston.user", "{{slack.user}}" do |match|
  slack_username = match["slack.user"].username
  User.find_by_slack_username(slack_username)
end
|
import { inject } from "./inject";
export const promptsFn = () => {
  const prompts = require("prompts");

  // Interactive questions; answers are collected into one object keyed by
  // each question's `name` and handed to inject().
  const properties = [
    {
      type: "text",
      name: "projectName",
      message: "Project name:",
      // Error message kept in sync with the regex: letters, digits,
      // whitespace, hyphens and underscores are accepted.
      validate: (projectName: string) =>
        projectName.match(/^[a-zA-Z0-9\s\-_]+$/)
          ? true
          : "Project name must only contain letters, numbers, spaces, hyphens or underscores",
    },
    {
      type: "text",
      name: "projectAuthor",
      message: "Author name: ",
    },
    {
      // NOTE(review): the answer key is misspelled ("porjectLicense");
      // left unchanged because downstream consumers (inject) may rely on it.
      type: "text",
      name: "porjectLicense",
      message: "License (MIT):",
    },
    {
      type: "select",
      name: "databaseType",
      message: "Pick a Database Type:",
      choices: [
        { title: "MySQL", value: "mysql" },
        { title: "PostgreSQL", value: "postgres" },
        { title: "SQLite", value: "sqlite" },
        { title: "MSSql", value: "mssql" },
        { title: "Oracle", value: "oracle" },
        { title: "CockroachDB", value: "cockroachdb" },
        { title: "MariaDB", value: "mariadb" },
      ],
      initial: 0,
    },
  ];

  // Fire-and-forget async IIFE: prompt the user, then scaffold via inject().
  (async () => {
    const result = await prompts(properties);
    inject(result);
  })();
};
|
/*
* == BSD2 LICENSE ==
* Copyright (c) 2017, Tidepool Project
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the associated License, which is identical to the BSD 2-Clause
* License as published by the Open Source Initiative at opensource.org.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the License for more details.
*
* You should have received a copy of the License along with this program; if
* not, you can obtain one from Tidepool Project at tidepool.org.
* == BSD2 LICENSE ==
*/
import _ from 'lodash';
import { formatInsulin, formatDecimalNumber } from './format';
import { ONE_HR } from './datetime';
/**
 * getBasalSequences
 * @param {Array} basals - Array of preprocessed Tidepool basal objects
 *
 * @return {Array} Array of Arrays where each component Array is a sequence of
 * basals of the same subType to be rendered as a unit
 */
export function getBasalSequences(basals) {
  const sequences = [];
  let seq = [basals[0]];
  let prev = basals[0];

  for (let i = 1; i < basals.length; i++) {
    const next = basals[i];
    // Start a new sequence whenever the subType changes, the previous basal
    // ended discontinuously, or the upcoming basal delivers at a zero rate.
    if (next.subType !== prev.subType || prev.discontinuousEnd || next.rate === 0) {
      sequences.push(seq);
      seq = [];
    }
    seq.push(next);
    prev = next;
  }
  sequences.push(seq);

  return sequences;
}
/**
 * getBasalPathGroupType
 * @param {Object} datum - single basal datum
 * @return {String} the path group type: 'automated' or 'manual'
 */
export function getBasalPathGroupType(datum = {}) {
  // A datum counts as automated if either its own delivery type or its
  // suppressed basal's delivery type is 'automated'. `subType` wins over
  // `deliveryType` when it is defined (mirrors lodash's _.get default rule).
  const suppressed = datum.suppressed || {};
  const deliveryType = datum.subType !== undefined ? datum.subType : datum.deliveryType;
  const suppressedDeliveryType = suppressed.subType !== undefined
    ? suppressed.subType
    : suppressed.deliveryType;
  return (deliveryType === 'automated' || suppressedDeliveryType === 'automated')
    ? 'automated'
    : 'manual';
}
/**
 * getBasalPathGroups
 * @param {Array} basals - Array of preprocessed Tidepool basal objects
 * @return {Array} groups of alternating 'automated' and 'manual' datums
 */
export function getBasalPathGroups(basals) {
  const groups = [];
  let activeType;

  basals.forEach(datum => {
    const pathType = getBasalPathGroupType(datum);
    // Open a new group every time the path type flips.
    if (pathType !== activeType) {
      activeType = pathType;
      groups.push([]);
    }
    groups[groups.length - 1].push(datum);
  });

  return groups;
}
/**
 * Get the start and end indexes and datetimes of basal datums within a given time range
 * @param {Array} data Array of Tidepool basal data
 * @param {String} s ISO date string for the start of the range
 * @param {String} e ISO date string for the end of the range
 * @param {Boolean} optionalExtents If true, allow basal gaps at start and end extents of the range.
 * @returns {Object} The start and end datetimes and indexes
 */
export function getEndpoints(data, s, e, optionalExtents = false) {
  const start = new Date(s);
  const end = new Date(e);

  // First segment that covers the range start (coverage of the lower bound is
  // waived when optionalExtents is set).
  let startIndex = -1;
  for (let i = 0; i < data.length; i++) {
    const segment = data[i];
    if ((optionalExtents || new Date(segment.normalTime).valueOf() <= start)
      && (start <= new Date(segment.normalEnd).valueOf())) {
      startIndex = i;
      break;
    }
  }

  // Last segment that covers the range end (upper-bound coverage waived when
  // optionalExtents is set).
  let endIndex = -1;
  for (let i = data.length - 1; i >= 0; i--) {
    const segment = data[i];
    if ((new Date(segment.normalTime).valueOf() <= end)
      && (optionalExtents || end <= new Date(segment.normalEnd).valueOf())) {
      endIndex = i;
      break;
    }
  }

  return {
    start: {
      datetime: start.toISOString(),
      index: startIndex,
    },
    end: {
      datetime: end.toISOString(),
      index: endIndex,
    },
  };
}
/**
 * Get durations of basal groups within a given span of time
 * @param {Array} data Array of Tidepool basal data
 * @param {String} s ISO date string for the start of the range
 * @param {String} e ISO date string for the end of the range
 * @returns {Object} The durations (in ms) keyed by basal group type
 */
export function getGroupDurations(data, s, e) {
  const { start, end } = getEndpoints(data, s, e, true);
  const durations = {
    automated: 0,
    manual: 0,
  };

  if (start.index >= 0 && end.index >= 0) {
    const rangeStart = new Date(start.datetime);
    const rangeEnd = new Date(end.datetime);

    // First segment may begin before the range start; clip its duration.
    let segment = data[start.index];
    durations[getBasalPathGroupType(segment)] = Math.min(
      new Date(segment.normalEnd) - rangeStart,
      segment.duration
    );

    // Middle segments count in full.
    for (let i = start.index + 1; i < end.index; i++) {
      segment = data[i];
      durations[getBasalPathGroupType(segment)] += segment.duration;
    }

    // Last segment may extend past the range end; clip its duration.
    segment = data[end.index];
    durations[getBasalPathGroupType(segment)] += Math.min(
      rangeEnd - new Date(segment.normalTime),
      segment.duration
    );
  }

  return durations;
}
/**
 * Calculate the total insulin dose delivered in a given basal segment
 * @param {Number} duration Duration of segment in milliseconds
 * @param {Number} rate Basal rate of segment (units/hour)
 * @return {Number} Dose in units, rounded to 3 decimal places
 */
export function getSegmentDose(duration, rate) {
  return parseFloat(formatDecimalNumber((duration / ONE_HR) * rate, 3));
}
/**
 * Get total basal delivered for a given time range
 * @param {Array} data Array of Tidepool basal data
 * @param {Array} endpoints ISO date strings for the start, end of the range, in that order
 * @return Formatted total insulin dose (per formatInsulin)
 */
export function getTotalBasalFromEndpoints(data, endpoints) {
  const rangeStart = new Date(endpoints[0]);
  const rangeEnd = new Date(endpoints[1]);
  let dose = 0;

  data.forEach((datum, index) => {
    let duration = datum.duration;
    if (index === 0) {
      // First segment may begin before the range start; clip its duration.
      duration = Math.min(new Date(datum.normalEnd) - rangeStart, datum.duration);
    } else if (index === data.length - 1) {
      // Last segment may extend past the range end; clip its duration.
      duration = Math.min(rangeEnd - new Date(datum.normalTime), datum.duration);
    }
    dose += getSegmentDose(duration, datum.rate);
  });

  return formatInsulin(dose);
}
/**
 * Get automated and manual basal delivery time for a given time range
 * @param {Array} data Array of Tidepool basal data
 * @param {Array} endpoints ISO date strings for the start, end of the range, in that order
 * @return {Object} durations in ms keyed by basal group type ('automated'/'manual')
 */
export function getBasalGroupDurationsFromEndpoints(data, endpoints) {
  const rangeStart = new Date(endpoints[0]);
  const rangeEnd = new Date(endpoints[1]);
  const durations = {
    automated: 0,
    manual: 0,
  };

  data.forEach((datum, index) => {
    let duration = datum.duration;
    if (index === 0) {
      // First segment may begin before the range start; clip its duration.
      duration = Math.min(new Date(datum.normalEnd) - rangeStart, datum.duration);
    } else if (index === data.length - 1) {
      // Last segment may extend past the range end; clip its duration.
      duration = Math.min(rangeEnd - new Date(datum.normalTime), datum.duration);
    }
    durations[getBasalPathGroupType(datum)] += duration;
  });

  return durations;
}
|
#!/bin/bash
: << EOF
API 性能测试脚本,会自动执行 wrk 命令,采集数据、分析数据并调用 gnuplot 画图
使用方式 ( 测试 API 性能):
1. 启动 promotion (8080端口)
2. 执行测试脚本: ./wrktest.sh
脚本会生成 _wrk.dat 的数据文件,每列含义为:
并发数 QPS 平均响应时间 成功率
使用方式 (对比2次测试结果)
1. 执行命令: ./wrktest.sh diff promotion1_wrk.dat http_wrk.dat
> Note: 需要确保系统安装了 wrk 和 gnuplot 工具
EOF
# (The heredoc above is the original Chinese usage text: this script drives wrk
# across a list of concurrency levels, writes "<concurrency> <QPS> <avg latency>
# <success rate>" lines into *_wrk.dat, plots them with gnuplot, and in "diff"
# mode compares two previously generated .dat files. Requires wrk and gnuplot.)
t1="alpha" # name of the red line in the comparison chart
t2="http" # name of the pink line in the comparison chart
jobname="alpha" # name of this test run (used as output-file prefix)
## wrk parameter configuration
d="300s"
concurrent="200 500 1000 3000 5000 10000 15000 20000 25000 50000 100000 200000 500000 1000000"
threads=144
# Target URL: first CLI argument, or the local health endpoint by default.
if [ "$1" != "" ];then
url="$1"
else
url="http://127.0.0.1:8080/sd/health"
fi
cmd="wrk --latency -t$threads -d$d -T30s $url"
# Output artifacts derived from the job name.
apiperformance="${jobname}_performance.png"
apisuccessrate="${jobname}_success_rate.png"
datfile="${jobname}_wrk.dat"
# functions

# convertPlotData <raw wrk output>
# Parses one wrk run's output with awk and appends a single line to $datfile:
#   <connections> <QPS> <avg latency in ms> <success rate %>
# convertLatency normalizes wrk's us/ms/s latency suffixes to milliseconds.
function convertPlotData()
{
echo "$1" | awk -v datfile="$datfile" ' {
if ($0 ~ "Running") {
common_time=$2
}
if ($0 ~ "connections") {
connections=$4
common_threads=$1
}
if ($0 ~ "Latency ") {
avg_latency=convertLatency($2)
}
if ($0 ~ "50%") {
p50=convertLatency($2)
}
if ($0 ~ "75%") {
p75=convertLatency($2)
}
if ($0 ~ "90%") {
p90=convertLatency($2)
}
if ($0 ~ "99%") {
p99=convertLatency($2)
}
if ($0 ~ "Requests/sec") {
qps=$2
}
if ($0 ~ "requests in") {
allrequest=$1
}
if ($0 ~ "Socket errors") {
err=$4+$6+$8+$10
}
}
END {
rate=sprintf("%.2f", (allrequest-err)*100/allrequest)
print connections,qps,avg_latency,rate >> datfile
}
function convertLatency(s) {
if (s ~ "us") {
sub("us", "", s)
return s/1000
}
if (s ~ "ms") {
sub("ms", "", s)
return s
}
if (s ~ "s") {
sub("s", "", s)
return s * 1000
}
}
'
}
# prepare
# Removes any stale data file from a previous run so results don't accumulate.
function prepare()
{
# Quote the expansion so a filename containing spaces cannot word-split.
rm -f "$datfile"
}
# plot
# Renders $datfile with gnuplot into two PNGs: QPS + average latency
# ($apiperformance) and success rate ($apisuccessrate). The heredoc below is
# passed verbatim to gnuplot; its "#" lines are gnuplot comments (some Chinese).
function plot() {
gnuplot << EOF
set terminal png enhanced #输出格式为png文件
set output "$apiperformance" #指定数据文件名称
set title "QPS & TTLB\nRunning: 300s\nThreads: $threads"
set ylabel 'QPS'
set xlabel 'Concurrent'
set y2label 'Average Latency (ms)'
set key top left vertical noreverse spacing 1.2 box
set tics out nomirror
set border 3 front
set style line 1 linecolor rgb '#00ff00' linewidth 2 linetype 3 pointtype 2
set style line 2 linecolor rgb '#ff0000' linewidth 1 linetype 3 pointtype 2
set style data linespoints
set grid #显示网格
set xtics nomirror rotate #by 90#只需要一个x轴
set mxtics 5
set mytics 5 #可以增加分刻度
set ytics nomirror
set y2tics
set autoscale y
set autoscale y2
plot "$datfile" using 2:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#EE0000" axis x1y1 t "QPS","$datfile" using 3:xticlabels(1) w lp pt 5 ps 1 lc rgbcolor "#0000CD" axis x2y2 t "Avg Latency (ms)"
unset y2tics
unset y2label
set ytics nomirror
set yrange[0:100]
set output "$apisuccessrate" #指定数据文件名称
set title "Success Rate\nRunning: 300s\nThreads: $threads"
plot "$datfile" using 4:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#F62817" t "Success Rate"
EOF
}
# plotDiff <name1> <name2>
# Renders /tmp/plot_diff.dat (two runs joined column-wise) into comparison PNGs.
# NOTE(review): the positional arguments are ignored; line labels and output
# filenames come from the globals $t1 and $t2 — confirm whether the arguments
# were meant to be used instead.
function plotDiff()
{
gnuplot << EOF
set terminal png enhanced #输出格式为png文件
set output "${t1}_$t2.qps.diff.png" #指定数据文件名称
set title "QPS & TTLB\nRunning: 300s\nThreads: $threads"
set xlabel 'Concurrent'
set ylabel 'QPS'
set y2label 'Average Latency (ms)'
set key below left vertical noreverse spacing 1.2 box autotitle columnheader
set tics out nomirror
set border 3 front
set style line 1 linecolor rgb '#00ff00' linewidth 2 linetype 3 pointtype 2
set style line 2 linecolor rgb '#ff0000' linewidth 1 linetype 3 pointtype 2
set style data linespoints
#set border 3 lt 3 lw 2 #这会让你的坐标图的border更好看
set grid #显示网格
set xtics nomirror rotate #by 90#只需要一个x轴
set mxtics 5
set mytics 5 #可以增加分刻度
set ytics nomirror
set y2tics
#set pointsize 0.4 #点的像素大小
#set datafile separator '\t' #数据文件的字段用\t分开
set autoscale y
set autoscale y2
#设置图像的大小 为标准大小的2倍
#set size 2.3,2
plot "/tmp/plot_diff.dat" using 2:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#EE0000" axis x1y1 t "$t1 QPS","/tmp/plot_diff.dat" using 5:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#EE82EE" axis x1y1 t "$t2 QPS","/tmp/plot_diff.dat" using 3:xticlabels(1) w lp pt 5 ps 1 lc rgbcolor "#0000CD" axis x2y2 t "$t1 Avg Latency (ms)", "/tmp/plot_diff.dat" using 6:xticlabels(1) w lp pt 5 ps 1 lc rgbcolor "#6495ED" axis x2y2 t "$t2 Avg Latency (ms)"
unset y2tics
unset y2label
set ytics nomirror
set yrange[0:100]
set title "Success Rate\nRunning: 300s\nThreads: $threads"
set output "${t1}_$t2.success_rate.diff.png" #指定数据文件名称
plot "/tmp/plot_diff.dat" using 4:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#EE0000" t "$t1 Success Rate","/tmp/plot_diff.dat" using 7:xticlabels(1) w lp pt 7 ps 1 lc rgbcolor "#EE82EE" t "$t2 Success Rate"
EOF
}
# "diff" mode: join two .dat files column-wise and plot them against each other.
if [ "$1" == "diff" ];then
join $2 $3 > /tmp/plot_diff.dat
plotDiff `basename $2` `basename $3`
exit 0
fi

prepare

# Run wrk once per concurrency level; each run appends one line to $datfile.
for c in $concurrent
do
wrkcmd="$cmd -c $c"
echo -e "\nRunning wrk command: $wrkcmd"
result=`eval $wrkcmd`
convertPlotData "$result"
done

echo -e "\nNow plot according to $datfile"
plot &> /dev/null
echo -e "QPS graphic file is: $apiperformance\nSuccess rate graphic file is: $apisuccessrate"
|
<reponame>jaumefe/Informatica_1
// Perimeter of a rectangle with base b and height h.
function perimeter(b, h) {
  return (2 * b) + (2 * h);
}
# Forward local port 8080 to port 443 of the Argo CD API server service in the
# "argocd" namespace, using the "argocd-debugging" kubectl context.
kubectl --context argocd-debugging port-forward service/argo-cd-argocd-server -n argocd 8080:443
|
package javafx.animation;
import com.sun.scenario.animation.SplineInterpolator;
/**
* The abstract class defines several {@code interpolate} methods, which are
* used to calculate interpolated values. Various built-in implementations of
* this class are offered. Applications may choose to implement their own
* {@code Interpolator} to get custom interpolation behavior.
* <p>
* A custom {@code Interpolator} has to be defined in terms of a "
* {@link #curve(double) curve()}".
*/
public abstract class Interpolator {
    // Tolerance used by DISCRETE's curve() when testing whether t has reached 1.0.
    private static final double EPSILON = 1e-12;

    /**
     * The constructor of {@code Interpolator}.
     */
    protected Interpolator() {
    }
/**
* Built-in interpolator that provides discrete time interpolation. The
* return value of {@code interpolate()} is {@code endValue} only when the
* input {@code fraction} is 1.0, and {@code startValue} otherwise.
*/
public static final Interpolator DISCRETE = new Interpolator() {
@Override
protected double curve(double t) {
return (Math.abs(t - 1.0) < EPSILON) ? 1.0 : 0.0;
}
@Override
public String toString() {
return "Interpolator.DISCRETE";
}
};
    /**
     * Built-in interpolator that provides linear time interpolation. The return
     * value of {@code interpolate()} is {@code startValue} + ({@code endValue}
     * - {@code startValue}) * {@code fraction}.
     */
    public static final Interpolator LINEAR = new Interpolator() {
        @Override
        protected double curve(double t) {
            // Identity curve: output equals input.
            return t;
        }

        @Override
        public String toString() {
            return "Interpolator.LINEAR";
        }
    };
/*
* Easing is calculated with the following algorithm (taken from SMIL 3.0
* specs). The result is clamped because of possible rounding errors.
*
* double runRate = 1.0 / (1.0 - acceleration/2.0 - deceleration/2.0); if
* (fraction < acceleration) { double averageRunRate = runRate * (fraction /
* acceleration) / 2; fraction *= averageRunRate; } else if (fraction > (1.0
* - deceleration)) { // time spent in deceleration portion double tdec =
* fraction - (1.0 - deceleration); // proportion of tdec to total
* deceleration time double pdec = tdec / deceleration; fraction = runRate *
* (1.0 - ( acceleration / 2) - deceleration + tdec * (2 - pdec) / 2); }
* else { fraction = runRate * (fraction - (acceleration / 2)); }
*/
    /**
     * Built-in interpolator instance that provides ease in/out behavior.
     * <p>
     * An ease-both interpolator will make an animation start slow, then
     * accelerate and slow down again towards the end, all in a smooth manner.
     * <p>
     * The implementation uses the algorithm for easing defined in SMIL 3.0
     * with an acceleration and deceleration factor of 0.2, respectively.
     */
    public static final Interpolator EASE_BOTH = new Interpolator() {
        @Override
        protected double curve(double t) {
            // See the SMIL 3.1 specification for details on this calculation
            // acceleration = 0.2, deceleration = 0.2
            // Piecewise: quadratic ease-in on [0, 0.2), linear in the middle,
            // quadratic ease-out on (0.8, 1]; clamped to absorb rounding error.
            return Interpolator.clamp((t < 0.2) ? 3.125 * t * t
                    : (t > 0.8) ? -3.125 * t * t + 6.25 * t - 2.125
                            : 1.25 * t - 0.125);
        }

        @Override
        public String toString() {
            return "Interpolator.EASE_BOTH";
        }
    };
    /**
     * Built-in interpolator instance that provides ease in behavior.
     * <p>
     * An ease-in interpolator will make an animation start slow and then
     * accelerate smoothly.
     * <p>
     * The implementation uses the algorithm for easing defined in SMIL 3.0
     * with an acceleration factor of 0.2.
     */
    public static final Interpolator EASE_IN = new Interpolator() {
        // Precomputed SMIL coefficients for acceleration = 0.2:
        // quadratic S1*t*t on [0, 0.2), then linear S3*t - S4.
        private static final double S1 = 25.0 / 9.0;
        private static final double S3 = 10.0 / 9.0;
        private static final double S4 = 1.0 / 9.0;

        @Override
        protected double curve(double t) {
            // See the SMIL 3.1 specification for details on this calculation
            // acceleration = 0.2, deceleration = 0.0
            return Interpolator.clamp((t < 0.2) ? S1 * t * t : S3 * t - S4);
        }

        @Override
        public String toString() {
            return "Interpolator.EASE_IN";
        }
    };
    /**
     * Built-in interpolator instance that provides ease out behavior.
     * <p>
     * An ease-out interpolator will make an animation slow down toward the
     * end smoothly.
     * <p>
     * The implementation uses the algorithm for easing defined in SMIL 3.0
     * with an deceleration factor of 0.2.
     */
    public static final Interpolator EASE_OUT = new Interpolator() {
        // Precomputed SMIL coefficients for deceleration = 0.2:
        // linear S4*t until t = 0.8, then quadratic S1*t*t + S2*t + S3.
        private static final double S1 = -25.0 / 9.0;
        private static final double S2 = 50.0 / 9.0;
        private static final double S3 = -16.0 / 9.0;
        private static final double S4 = 10.0 / 9.0;

        @Override
        protected double curve(double t) {
            // See the SMIL 3.1 specification for details on this calculation
            // acceleration = 0.2, deceleration = 0.0
            return Interpolator.clamp((t > 0.8) ? S1 * t * t + S2 * t + S3 : S4 * t);
        }

        @Override
        public String toString() {
            return "Interpolator.EASE_OUT";
        }
    };
/**
 * Creates an {@code Interpolator}, which {@link #curve(double) curve()} is
 * shaped using the spline control points defined by ({@code x1}, {@code y1}
 * ) and ({@code x2}, {@code y2}). The anchor points of the spline are
 * implicitly defined as ({@code 0.0}, {@code 0.0}) and ({@code 1.0},
 * {@code 1.0}).
 *
 * @param x1
 *            x coordinate of the first control point
 * @param y1
 *            y coordinate of the first control point
 * @param x2
 *            x coordinate of the second control point
 * @param y2
 *            y coordinate of the second control point
 * @return A spline interpolator
 */
public static Interpolator SPLINE(double x1, double y1, double x2, double y2) {
    // Pure factory: all curve-shaping logic lives in SplineInterpolator.
    return new SplineInterpolator(x1, y1, x2, y2);
}
/**
 * This method takes two {@code Objects} along with a {@code fraction}
 * between {@code 0.0} and {@code 1.0} and returns the interpolated value.
 * <p>
 * If both {@code Objects} implement {@code Number}, their values are
 * interpolated. If {@code startValue} implements {@link Interpolatable} the
 * calculation defined in {@link Interpolatable#interpolate(Object, double)
 * interpolate()} is used. If neither of these conditions are met, a
 * discrete interpolation is used, i.e. {@code endValue} is returned if and
 * only if {@code fraction} is {@code 1.0}, otherwise {@code startValue} is
 * returned.
 * <p>
 * Before calculating the interpolated value, the fraction is altered
 * according to the function defined in {@link #curve(double) curve()}.
 *
 * @param startValue
 *            start value
 * @param endValue
 *            end value
 * @param fraction
 *            a value between 0.0 and 1.0
 * @return interpolated value
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Object interpolate(Object startValue, Object endValue, double fraction) {
    if (startValue instanceof Number && endValue instanceof Number) {
        double start = ((Number) startValue).doubleValue();
        double end = ((Number) endValue).doubleValue();
        double val = start + (end - start) * curve(fraction);
        // Return the widest boxed type present among the two inputs:
        // Double > Float > Long > Integer (the Integer case is the
        // fall-through default).
        if ((startValue instanceof Double) || (endValue instanceof Double))
            return val;
        if ((startValue instanceof Float) || (endValue instanceof Float))
            return (float) val;
        if ((startValue instanceof Long) || (endValue instanceof Long))
            return Math.round(val);
        return (int) Math.round(val);
    }
    // Both sides must be Interpolatable; the delegate gets the curved
    // fraction, not the raw one.
    if ((startValue instanceof Interpolatable) && (endValue instanceof Interpolatable))
        return ((Interpolatable) startValue).interpolate(endValue, curve(fraction));
    // discrete: switch to endValue only at the very end of the interval
    return (curve(fraction) == 1.0) ? endValue : startValue;
}
/**
 * This method takes two {@code boolean} values along with a
 * {@code fraction} between {@code 0.0} and {@code 1.0} and returns the
 * interpolated value.
 * <p>
 * Before calculating the interpolated value, the fraction is altered
 * according to the function defined in {@link #curve(double) curve()}.
 *
 * @param startValue
 *            the first data point
 * @param endValue
 *            the second data point
 * @param fraction
 *            the fraction in {@code [0.0...1.0]}
 * @return {@code endValue} if the curved fraction is within {@code EPSILON}
 *         of {@code 1.0}, otherwise {@code startValue}
 */
public boolean interpolate(boolean startValue, boolean endValue, double fraction) {
    // Booleans interpolate discretely: flip to endValue only at the very
    // end of the interval (within EPSILON to absorb rounding in curve()).
    return (Math.abs(curve(fraction) - 1.0) < EPSILON) ? endValue : startValue;
}
/**
 * This method takes two {@code double} values along with a {@code fraction}
 * between {@code 0.0} and {@code 1.0} and returns the interpolated value.
 * <p>
 * Before calculating the interpolated value, the fraction is altered
 * according to the function defined in {@link #curve(double) curve()}.
 *
 * @param startValue
 *            the first data point
 * @param endValue
 *            the second data point
 * @param fraction
 *            the fraction in {@code [0.0...1.0]}
 * @return the linear interpolation between the data points at the curved
 *         fraction
 */
public double interpolate(double startValue, double endValue, double fraction) {
    return startValue + (endValue - startValue) * curve(fraction);
}
/**
 * This method takes two {@code int} values along with a {@code fraction}
 * between {@code 0.0} and {@code 1.0} and returns the interpolated value.
 * <p>
 * Before calculating the interpolated value, the fraction is altered
 * according to the function defined in {@link #curve(double) curve()}.
 *
 * @param startValue
 *            the first data point
 * @param endValue
 *            the second data point
 * @param fraction
 *            the fraction in {@code [0.0...1.0]}
 * @return {@code startValue} plus the curved fraction of the difference,
 *         rounded to the nearest {@code int}
 */
public int interpolate(int startValue, int endValue, double fraction) {
    return startValue + (int) Math.round((endValue - startValue) * curve(fraction));
}
/**
 * This method takes two {@code long} values along with a {@code fraction}
 * between {@code 0.0} and {@code 1.0} and returns the interpolated value.
 * (The original javadoc incorrectly said {@code int}.)
 * <p>
 * Before calculating the interpolated value, the fraction is altered
 * according to the function defined in {@link #curve(double) curve()}.
 * <p>
 * NOTE: the difference is computed in {@code double}, so differences whose
 * magnitude exceeds 2^53 lose precision.
 *
 * @param startValue
 *            the first data point
 * @param endValue
 *            the second data point
 * @param fraction
 *            the fraction in {@code [0.0...1.0]}
 * @return {@code startValue} plus the curved fraction of the difference,
 *         rounded to the nearest {@code long}
 */
public long interpolate(long startValue, long endValue, double fraction) {
    return startValue + Math.round((endValue - startValue) * curve(fraction));
}
// Clamps t to the unit interval [0.0, 1.0]. NaN falls through both
// comparisons and is returned unchanged.
private static double clamp(double t) {
    return (t < 0.0) ? 0.0 : (t > 1.0) ? 1.0 : t;
}

/**
 * Mapping from [0.0..1.0] to itself.
 *
 * @param t
 *            time, but normalized to the range [0.0..1.0], where 0.0 is the
 *            start of the current interval, while 1.0 is the end of the
 *            current interval. Usually a function that increases
 *            monotonically.
 * @return the curved fraction used to shape the interpolation
 */
protected abstract double curve(double t);
}
|
<reponame>Tfarcenim/TerminaledRedstone
package tfar.terminaledredstone;
import net.minecraft.client.util.ITooltipFlag;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResult;
import net.minecraft.util.Hand;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.StringTextComponent;
import net.minecraft.world.World;
import javax.annotation.Nullable;
import java.util.List;
public class RedstoneWireSpoolItem extends Item {
public RedstoneWireSpoolItem(Properties properties) {
super(properties);
}
@Override
public void addInformation(ItemStack stack, @Nullable World worldIn, List<ITextComponent> tooltip, ITooltipFlag flagIn) {
if (stack.hasTag())
tooltip.add(new StringTextComponent(NBTUtil.readBlockPos(stack.getTag()).toString()));
}
@Override
public ActionResult<ItemStack> onItemRightClick(World worldIn, PlayerEntity playerIn, Hand handIn) {
return super.onItemRightClick(worldIn, playerIn, handIn);
}
}
|
#!/bin/bash
# SLURM batch job: train double-DDPG (soft target copy, action-noise
# exploration) on RoboschoolInvertedPendulumSwingup-v1, random seed 5, run 5.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolInvertedPendulumSwingup-v1_ddpg_softcopy_action_noise_seed5_run5_%N-%j.out # %N for node name, %j for jobID
# Load the toolchain required by the training script, then run inside the
# CPU TensorFlow virtualenv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
# Results are written under Double_DDPG_Results_no_monitor; the flags select
# a continuous action space and the double-DDPG variant.
python ./ddpg_discrete_action.py --env RoboschoolInvertedPendulumSwingup-v1 --random-seed 5 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolInvertedPendulumSwingup-v1/ddpg_softcopy_action_noise_seed5_run5 --continuous-act-space-flag --double-ddpg-flag
|
import {default as ObliteratorIterator} from './iterator.js';

/**
 * Returns an iterator over a numeric range. The three overloads mirror
 * Python's `range`: `range(end)`, `range(start, end)`, and
 * `range(start, end, step)`.
 */
export default function range(end: number): ObliteratorIterator<number>;
export default function range(start: number, end: number): ObliteratorIterator<number>;
export default function range(start: number, end: number, step: number): ObliteratorIterator<number>;
|
# Termux build recipe for libsoup, GNOME's HTTP client/server library.
TERMUX_PKG_HOMEPAGE=https://wiki.gnome.org/action/show/Projects/libsoup
TERMUX_PKG_DESCRIPTION="HTTP client and server library"
TERMUX_PKG_LICENSE="LGPL-2.0"
TERMUX_PKG_VERSION=2.66.2
TERMUX_PKG_REVISION=1
# The GNOME mirror groups tarballs by the MAJOR.MINOR prefix of the version.
TERMUX_PKG_SRCURL=https://ftp.gnome.org/pub/GNOME/sources/libsoup/${TERMUX_PKG_VERSION:0:4}/libsoup-$TERMUX_PKG_VERSION.tar.xz
TERMUX_PKG_SHA256=bd2ea602eba642509672812f3c99b77cbec2f3de02ba1cc8cb7206bf7de0ae2a
# Vala bindings and GSSAPI support are disabled for this build.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="-Dvapi=false -Dgssapi=false"
TERMUX_PKG_DEPENDS="glib, libpsl, libsqlite, libxml2"
# This package supersedes the old split libsoup-dev package.
TERMUX_PKG_BREAKS="libsoup-dev"
TERMUX_PKG_REPLACES="libsoup-dev"
|
#!/bin/sh
# Rebase a release branch onto the latest origin/master and force-push it.
# Usage: rebase-release.sh <release-branch-suffix>

RELEASE_BRANCH=$1

# Require the branch suffix; an empty value would silently operate on the
# literal branch "release-".
if [ -z "$RELEASE_BRANCH" ]; then
    echo "usage: $0 <release-branch-suffix>" >&2
    exit 1
fi

# Abort on the first failing command (e.g. checkout of a missing branch).
set -e

git checkout "release-${RELEASE_BRANCH}"
git fetch origin master

# BUG FIX: the original prompt referenced the undefined variable ${VERSION};
# it now names the branch actually being pushed.
echo "Press ENTER to force push the rebased ${RELEASE_BRANCH} branch to GitHub"
# POSIX sh requires a variable name for read; the value is discarded.
read confirmation

git rebase origin/master && git push -f origin "release-${RELEASE_BRANCH}"
|
package com.allendowney.thinkdast;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode;
import org.jsoup.select.Elements;
public class WikiPhilosophy {

    // NOTE(review): neither of these fields is referenced anywhere in this
    // file; presumably leftovers from an earlier implementation — confirm
    // before removing.
    final static List<String> visited = new ArrayList<String>();
    final static WikiFetcher wf = new WikiFetcher();

    /**
     * Tests a conjecture about Wikipedia and Philosophy.
     *
     * https://en.wikipedia.org/wiki/Wikipedia:Getting_to_Philosophy
     *
     * 1. Clicking on the first non-parenthesized, non-italicized link
     * 2. Ignoring external links, links to the current page, or red links
     * 3. Stopping when reaching "Philosophy", a page with no links or a page
     * that does not exist, or when a loop occurs
     *
     * @param args
     * @throws IOException
     */
    // Maximum number of links to follow before giving up.
    private static int staticLimit = 30;

    public static void main(String[] args) throws IOException {
        String destination = "https://en.wikipedia.org/wiki/Philosophy";
        String source = "https://en.wikipedia.org/wiki/Cumberland,_Maryland";
        testConjecture(destination, source, new ArrayList<String>(), staticLimit);
    }

    /**
     * Starts from given URL and follows first link until it finds the
     * destination or exceeds the limit.
     *
     * @param destination URL at which to stop (success).
     * @param source URL of the page to examine on this step.
     * @param list pages already followed, used to skip repeated links.
     * @param limit remaining number of hops before giving up.
     * @throws IOException
     */
    public static void testConjecture(String destination, String source, ArrayList<String> list, int limit) throws IOException {
        // How many hops have been taken so far (for the success message).
        int iteration = staticLimit - limit;
        if (limit < 0) {
            System.out.println("Out of limit iteration");
            return;
        }
        if (destination.equals(source)) {
            System.out.println("Finished with " + iteration + " iteration");
            return;
        }
        // getFirstLink() returns the sentinel "sorry" when a page yields no
        // usable link.
        if (source.equals("sorry")){
            System.out.println("Sorry, no links");
            return;
        }
        String url = getFirstLink(source, list);
        list.add(source);
        testConjecture(destination, url, list, --limit);
    }

    /**
     * Fetches the page and returns the first acceptable link, scanning
     * paragraphs first and bulleted lists second; returns the sentinel
     * "sorry" when neither yields a link.
     */
    private static String getFirstLink(String source, ArrayList<String> list) throws IOException {
        Connection conn = Jsoup.connect(source);
        Document doc = conn.get();
        // Wikipedia keeps the article body inside #mw-content-text.
        Element content = doc.getElementById("mw-content-text");
        String newSurce = getFirstLinkBySelect(content, "p", list);
        if (newSurce == null) newSurce = getFirstLinkBySelect(content, "ul", list);
        if (newSurce == null) return "sorry";
        else return newSurce;
    }

    /**
     * Walks the nodes of every element matching {@code query} in DOM order
     * and returns the absolute URL of the first wiki link that is not a
     * fragment reference, not inside a span/small parent, not already
     * followed, and not inside parentheses; returns null if none qualifies.
     */
    private static String getFirstLinkBySelect(Element content, String query, ArrayList<String> list){
        Elements elements = content.select(query);
        for(Node node : elements){
            Iterable<Node> iterable = new WikiNodeIterable(node);
            Iterator<Node> iterator = iterable.iterator();
            // True while the walker is between '(' and ')' in the text.
            // NOTE(review): a single flag does not handle nested
            // parentheses — confirm this is acceptable for the exercise.
            boolean isQuotes = false;
            while (iterator.hasNext()){
                node = iterator.next();
                boolean isLink = node.nodeName().equals("a");
                boolean isWiki = node.absUrl("href").contains("wikipedia.org");
                // Fragment links such as citation anchors ("#cite_note-...").
                boolean isRef = node.attr("href").contains("#");
                //boolean isCursive = node.parent().attr("class").equals("IPA nopopups noexcerpt") || node.attr("class").equals("IPA nopopups noexcerpt");
                boolean isCursive = node.parent().nodeName().equals("span");
                boolean isSmall = node.parent().nodeName().equals("small");
                boolean isRepeat = list.contains(node.absUrl("href"));
                if (node instanceof TextNode){
                    if (((TextNode) node).getWholeText().contains("(")) isQuotes = true;
                    if (((TextNode) node).getWholeText().contains(")")) isQuotes = false;
                }
                if(isLink && isWiki && !isRef && !isCursive && !isRepeat && !isSmall && !isQuotes){
                    System.out.printf("\"%s\", \n", node.absUrl("href"));
                    return node.absUrl("href");
                }
            }
        }
        return null;
    }

    /**
     * Depth-first walk that prints the first wiki link found while skipping
     * italic ("i") and "span" subtrees.
     * NOTE(review): this method is never called from this file.
     */
    private static void recursiveDFS(Node node) {
        boolean isCursive = node.nodeName().equals("i");
        boolean isRed = node.nodeName().equals("span");
        boolean isLink = node.nodeName().equals("a");
        boolean isWiki = node.absUrl("href").contains("wikipedia.org");
        if (!isCursive && !isRed){
            if (isLink && isWiki){
                System.out.println(node.absUrl("href"));
                return;
            }
            for (Node child: node.childNodes()) {
                recursiveDFS(child);
            }
        }
    }
}
|
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class NaiveBayes {
// Assume that the emails are stored in a list called "emails".
public static void main(String[] args) {
HashMap<String, Integer> wordCounts;
List<String> labels;
// Create two dictionaries to store the word counts for spam and non-spam emails.
HashMap<String, Integer> spamWordCounts = new HashMap<>();
HashMap<String, Integer> nonSpamWordCounts = new HashMap<>();
// Create a list to store the labels.
List<String> labels = new ArrayList<>();
// Loop over the emails.
for (String email : emails) {
// Split the email into words.
String[] words = email.split(" ");
wordCounts = new HashMap<>();
// Count each word in the email.
for (String word : words) {
Integer count = wordCounts.get(word);
if (count == null) {
count = 0;
}
count++;
wordCounts.put(word, count);
}
// Classify the email as spam or non-spam.
double probabilitySpam = 0.0;
double probabilityNonSpam = 0.0;
// Calculate the probability of the email being spam or non-spam.
// ...
// Assign the label to the email.
String label = (probabilitySpam > probabilityNonSpam) ? "spam" : "not spam";
labels.add(label);
// Update the word counts in the dictionaries.
for (String word : words) {
if (label.equals("spam")) {
Integer count = spamWordCounts.get(word);
if (count == null) {
count = 0;
}
count++;
spamWordCounts.put(word, count);
} else {
Integer count = nonSpamWordCounts.get(word);
if (count == null) {
count = 0;
}
count++;
nonSpamWordCounts.put(word, count);
}
}
}
// Print the labels.
System.out.println(labels);
// Print the word counts.
System.out.println("spam word counts:");
for (String word : spamWordCounts.keySet()) {
System.out.println(word + ": " + spamWordCounts.get(word));
}
System.out.println("non-spam word counts:");
for (String word : nonSpamWordCounts.keySet()) {
System.out.println(word + ": " + nonSpamWordCounts.get(word));
}
}
} |
package com.mycompany.app;
import java.io.Serializable;
/**
 * Serializable game actor with a grid position and hit points bounded by a
 * fixed maximum of 10.
 */
public class Player implements Serializable {

    int x;      // horizontal grid coordinate
    int y;      // vertical grid coordinate
    int hp;     // current hit points
    int maxHP;  // upper bound for hp

    /** Creates a player at the given coordinates with full health. */
    public Player(int _x, int _y) {
        this.x = _x;
        this.y = _y;
        this.maxHP = 10;
        this.hp = this.maxHP;
    }

    /** @return the current x coordinate */
    public int getX() {
        return this.x;
    }

    /** @return the current y coordinate */
    public int getY() {
        return this.y;
    }

    /** @return the current hit points */
    public int getHP() {
        return this.hp;
    }

    /** Moves the player horizontally. */
    public void setX(int _x) {
        this.x = _x;
    }

    /** Moves the player vertically. */
    public void setY(int _y) {
        this.y = _y;
    }

    /**
     * Sets the hit points, clamping values above maxHP down to maxHP.
     * Values of zero or below are stored unchanged.
     *
     * @return true if alive (hp above zero), false if dead
     */
    public boolean setHP(int _hp) {
        this.hp = _hp;
        if (this.hp <= 0) {
            return false;
        }
        this.hp = Math.min(this.hp, this.maxHP);
        return true;
    }
}
|
//
//  ViewController.h
//  Echoes
//
//  Created by <NAME> on 04.06.2015.
//  Copyright (c) 2015 SO MANY APPS. All rights reserved.
//

#import <UIKit/UIKit.h>

// Root view controller of the Echoes app. Exposes no public API beyond
// what UIViewController already provides.
@interface ViewController : UIViewController

@end
|
<reponame>CommonsBuild/pollen-cred-fetcher
import fetchPollenData from './utils'
import connectDB from './config/db'
import Account from './models/Account'
import CredParticipant from './models/CredParticipant'
// Fetches the latest pollen/cred data and synchronizes the Account and
// CredParticipant collections with it, then exits the process.
const updatePollenData = async (): Promise<void> => {
  try {
    await connectDB()

    console.log(`${Date.now()}: Fetching data...`)
    const pollenData = await fetchPollenData()
    const { accounts, credParticipants } = pollenData

    console.log(`${Date.now()}: Updating DB entries for ledger accounts...`)
    for (const account of accounts) {
      const { identity } = account
      const aliases = identity.aliases.map(alias => alias.address)
      const foundAccount = await Account.findOne({ 'identity.id': identity.id })
      if (!foundAccount) {
        // BUG FIX: this write was not awaited, so process.exit() below could
        // fire before it completed.
        await Account.create({
          'identity.id': identity.id,
          'identity.subtype': identity.subtype,
          'identity.aliases': aliases
        })
      } else if (JSON.stringify(foundAccount.identity.aliases) !== JSON.stringify(aliases)) {
        // BUG FIX: the original compared the two arrays with !==, a
        // reference comparison that is always true (aliases is freshly
        // built), so every account was rewritten on every run. Compare
        // serialized contents instead (order-sensitive), and await the write.
        await foundAccount.updateOne({ 'identity.aliases': aliases })
      }
    }

    console.log(`${Date.now()}: Removing DB entries for accounts that are no longer in the ledger...`)
    const dbAccounts = await Account.find({})
    for (const dbAccount of dbAccounts) {
      if (!accounts.find(account => account.identity.id === dbAccount.identity.id))
        await dbAccount.deleteOne()
    }

    console.log(`${Date.now()}: Updating DB entries for credgraph participants...`)
    for (const participant of credParticipants) {
      // Participants below 1 cred are not tracked.
      if (participant.cred < 1) continue
      const { credPerInterval, cred, id } = participant
      await CredParticipant.findOneAndUpdate(
        { id },
        {
          cred,
          // Only the two most recent intervals are persisted.
          credPerInterval: credPerInterval.slice(-2)
        },
        { upsert: true }
      )
    }

    console.log(`${Date.now()}: Removing DB entries for participants that are no longer in the credGraph...`)
    const dbParticipants = await CredParticipant.find({})
    for (const dbParticipant of dbParticipants) {
      if (!credParticipants.find(participant => participant.id === dbParticipant.id))
        await dbParticipant.deleteOne()
    }

    console.log(`${Date.now()}: DB entries updated.`)
    process.exit()
  } catch (err) {
    console.log(err)
    process.exit(1)
  }
}

updatePollenData()
|
"""
Algorithm to sort a given list of numbers in ascending order
"""
def sort_numbers(nums):
# Return if list is empty or single element
if len(nums) <= 1:
return nums
# Split list in two
mid = len(nums) // 2
left_list = nums[:mid]
right_list = nums[mid:]
# Sort both left and right lists
left_list = sort_numbers(left_list)
right_list = sort_numbers(right_list)
# Merge both sorted lists and return
return sorted_list(left_list, right_list)
def sorted_list(left_list, right_list):
    """Merge two already-sorted lists into one new ascending list.

    Args:
        left_list: first sorted list.
        right_list: second sorted list.

    Returns:
        A new list containing every element of both inputs in ascending
        order. The merge is stable: on ties, elements of ``left_list``
        come first.
    """
    # The accumulator is named "merged" rather than "sorted_list" — the
    # original shadowed the function's own name with a local variable.
    merged = []
    left_index = right_index = 0
    len_left, len_right = len(left_list), len(right_list)
    # Repeatedly take the smaller head element; "<=" keeps the merge stable.
    while left_index < len_left and right_index < len_right:
        if left_list[left_index] <= right_list[right_index]:
            merged.append(left_list[left_index])
            left_index += 1
        else:
            merged.append(right_list[right_index])
            right_index += 1
    # At most one of these slices is non-empty; extending with an empty
    # slice is a no-op, so no truthiness guards are needed.
    merged.extend(left_list[left_index:])
    merged.extend(right_list[right_index:])
    return merged
package dev.arkav.openoryx.game;
import dev.arkav.openoryx.game.models.Vector2;
import dev.arkav.openoryx.net.data.MoveRecord;
import java.util.ArrayList;
@SuppressWarnings("ALL")
public class MoveRecords {
public int lastClearTime;
public ArrayList<MoveRecord> records;
public MoveRecords() {
this.lastClearTime = -1;
this.records = new ArrayList<>();
}
public void addRecord(int time, Vector2 pos) {
this.addRecord(time, pos.x, pos.y);
}
public void addRecord(int time, float x, float y) {
if (this.lastClearTime < 0) return;
int id = this.getId(time);
if (id < 1 || id > 10) return;
if (this.records.size() == 0) {
MoveRecord record = new MoveRecord(time, x, y);
this.records.add(record);
}
MoveRecord current = this.records.get(this.records.size() - 1);
int currentId = this.getId(current.time);
if (id != currentId) {
MoveRecord record = new MoveRecord(time, x, y);
this.records.add(record);
}
int score = this.getScore(id, time);
int currentScore = this.getScore(currentId, current.time);
if (score < currentScore) {
current.time = time;
current.x = x;
current.y = y;
}
}
public void clear(int time) {
this.records.clear();
this.lastClearTime = time;
}
private int getId(int time) {
return (time - this.lastClearTime + 50) / 100;
}
private int getScore(int id, int time) {
return Math.round(Math.abs(time - this.lastClearTime - id * 100));
}
}
|
/* Print a 3x3 integer matrix ("array", defined elsewhere): one row per
 * line, columns separated by tabs. */
for (int i = 0; i < 3; i++){
    for (int j = 0; j < 3; j++){
        printf("%d\t", array[i][j]);
    }
    printf("\n");
}
class Base:
    """Minimal base class that stores a single value on the instance."""

    def __init__(self, x):
        # x is stored as-is; no validation or conversion is performed.
        self.x = x
# create a subclass
class SubClass(Base):
    """Extends ``Base`` with a second stored value ``y``."""

    def __init__(self, x, y):
        # Delegate x to Base's initializer, then store y locally.
        super().__init__(x)
        self.y = y
<filename>apps/studio/src/components/dashboard/LinkDraggableList.tsx
import React, { FC, useState, memo } from "react";
import Skeleton from "react-loading-skeleton";
import EmptyShell from "../EmptyShell";
import useLinks from "~/utils/hooks/queries/useLinks";
import useReorderLink from "~/utils/hooks/mutations/useReorderLink";
import { DragDropContext, Droppable, DropResult } from "react-beautiful-dnd";
import { useSafeLayoutEffect } from "@linkto/core";
import { FilePlus } from "@linkto/gamut";
import { LinkCard } from "~/components/dashboard";
import { useAppContext } from "~/data/context";
import { getLinksWithOrder, reorderList } from "~/utils/misc/orderLinks";
// Renders the user's links as a drag-and-drop reorderable list and persists
// a new ordering through the reorder mutation.
const LinkDraggableList: FC = () => {
  const { links, isLoading, isError } = useLinks();
  const { addLinkDialog } = useAppContext();
  const { mutate } = useReorderLink();
  // Local, order-annotated copy of the query data; dragging mutates this
  // copy optimistically before the server round-trip.
  const [linksList, setLinksList] = useState([]);

  // Handles a completed drag: reorder the local list first, then send the
  // new order to the server via the mutation.
  const handleDragEnd = ({ source, destination }: DropResult) => {
    // Dragged outside of container -> cancel
    if (!destination) return;
    // Dragged at the same position -> cancel
    if (destination.index === source.index) return;
    const list = reorderList(linksList, source.index, destination.index);
    setLinksList(list);
    mutate(list);
  };

  // Re-sync the local list whenever fresh query data arrives.
  useSafeLayoutEffect(() => {
    if (links && !isLoading && !isError) {
      setLinksList(getLinksWithOrder(links));
    }
  }, [links]);

  // Loading state: show three card-sized skeleton placeholders.
  if (isLoading) {
    return (
      <Skeleton
        count={3}
        height={136}
        containerClassName="mt-2 block space-y-4"
      />
    );
  }

  // Empty state: prompt the user to create their first link.
  if (!isLoading && links.length < 1) {
    return (
      <EmptyShell
        text="Begin by adding your first link"
        icon={FilePlus}
        onPress={addLinkDialog.onOpen}
      />
    );
  }

  return (
    <DragDropContext onDragEnd={handleDragEnd}>
      <Droppable droppableId="droppable-link__list">
        {({ innerRef, droppableProps, placeholder }) => {
          return (
            <ul
              className="mt-6 xs:mt-8 space-y-4"
              ref={innerRef}
              {...droppableProps}
            >
              {linksList.map((l) => (
                <LinkCard key={l.id} {...l} />
              ))}
              {placeholder}
            </ul>
          );
        }}
      </Droppable>
    </DragDropContext>
  );
};

export default memo(LinkDraggableList);
|
<reponame>mc18g13/teensy-drone<filename>calibration-cli/lib/MARGCalibrationHandler/MARGCalibrationHandler.h
#ifndef MARG_CALIBRATION_HANDLER
#define MARG_CALIBRATION_HANDLER

#include <SPI.h>
#include <MARG.h>
#include <arm_math.h>
#include <map>

#include "MenuOptionHandler.h"

#define SPI_CLOCK 8000000 // 8MHz clock works.
#define SS_PIN 10
#define INT_PIN 3
#define LED 13

// Menu option that drives calibration of a MARG (magnetic, angular rate,
// gravity) sensor unit. The eepromStartAddress parameters suggest results
// are persisted to EEPROM — TODO confirm against the implementation.
class MARGCalibrationHandler : public MenuOptionHandler {
public:
    MARG m_marg;  // the sensor unit being calibrated

    MARGCalibrationHandler();

    virtual void setup() override;
    virtual void printTitle() override;

    // Widens the [min, max] range to include input if it lies outside.
    void checkMaxAndMinAndSet(float32_t input, float32_t& max, float32_t& min);
    // Single-bound variant of the above.
    void checkMaxAndMinAndSetForOneValue(float32_t input, float32_t& maxOrMin);

    // Runs a calibration over the given MARG data member function, writing
    // the result starting at eepromStartAddress.
    void calibrate(Vector (MARG::*data)(), int eepromStartAddress);
    // Calibrates a single vector component (index) of the given data source.
    void calibrateOneValue(int index, Vector (MARG::*data)(), int eepromStartAddress);

    void calibrateAcceleration();
    void readCalibrationValues();
    void printTestValues();
    void writeManualValuesToCalibration();
    void calibrateMagnitometer();  // note: "magnetometer" is misspelled in this API name
};

#endif
<gh_stars>0
package com.foxconn.iot.service.impl;
import java.util.ArrayList;
import java.util.List;
import javax.transaction.Transactional;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import com.foxconn.iot.dto.ApplicationVersionDto;
import com.foxconn.iot.dto.UpdateRecordDto;
import com.foxconn.iot.dto.UpdateTaskDto;
import com.foxconn.iot.entity.ApplicationEntity;
import com.foxconn.iot.entity.ApplicationVersionEntity;
import com.foxconn.iot.entity.UpdateRecordEntity;
import com.foxconn.iot.entity.UpdateRecordVo;
import com.foxconn.iot.entity.UpdateTaskEntity;
import com.foxconn.iot.exception.BizException;
import com.foxconn.iot.repository.ApplicationVersionRepository;
import com.foxconn.iot.repository.UpdateRecordRepository;
import com.foxconn.iot.repository.UpdateTaskRepository;
import com.foxconn.iot.service.UpdateTaskService;
import com.foxconn.iot.support.Snowflaker;
@Service
public class UpdateTaskServiceImpl implements UpdateTaskService {
@Autowired
private UpdateTaskRepository updateTaskRepository;
@Autowired
private ApplicationVersionRepository applicationVersionRepository;
@Autowired
private UpdateRecordRepository updateRecordRepository;
@Override
@Transactional
public void create(UpdateTaskDto task) {
ApplicationVersionEntity version = applicationVersionRepository.findById(task.getVersionId());
if (version == null) {
throw new BizException("Invalid application version");
}
List<UpdateTaskEntity> entities = new ArrayList<>();
for (String sn : task.getSns()) {
/** 只保留最新版本的升级 */
updateTaskRepository.deleteBySn(sn);
UpdateTaskEntity entity = new UpdateTaskEntity();
entity.setId(Snowflaker.getId());
entity.setSn(sn);
entity.setVersion(version);
entities.add(entity);
}
updateTaskRepository.saveAll(entities);
}
@Override
public Page<UpdateRecordDto> queryReady(Pageable pageable) {
Page<UpdateRecordVo> vos = updateTaskRepository.query(pageable);
List<UpdateRecordDto> dtos = new ArrayList<>();
for (UpdateRecordVo vo : vos.getContent()) {
UpdateRecordDto dto = new UpdateRecordDto();
BeanUtils.copyProperties(vo, dto);
dtos.add(dto);
}
return new PageImpl<>(dtos, pageable, vos.getTotalElements());
}
@Override
public Page<UpdateRecordDto> queryReady(long version, Pageable pageable) {
Page<UpdateRecordVo> vos = updateTaskRepository.query(version, pageable);
List<UpdateRecordDto> dtos = new ArrayList<>();
for (UpdateRecordVo vo : vos.getContent()) {
UpdateRecordDto dto = new UpdateRecordDto();
BeanUtils.copyProperties(vo, dto);
dtos.add(dto);
}
return new PageImpl<>(dtos, pageable, vos.getTotalElements());
}
@Override
public ApplicationVersionDto ready(String sn) {
ApplicationVersionEntity version = updateTaskRepository.queryBySn(sn);
ApplicationVersionDto dto = new ApplicationVersionDto();
BeanUtils.copyProperties(version, dto);
return dto;
}
@Override
public Page<UpdateRecordDto> queryComplete(Pageable pageable) {
Page<UpdateRecordEntity> entities = updateRecordRepository.query(pageable);
List<UpdateRecordDto> dtos = new ArrayList<>();
for (UpdateRecordEntity entity : entities.getContent()) {
UpdateRecordDto dto = new UpdateRecordDto();
BeanUtils.copyProperties(entity, dto);
dtos.add(dto);
}
return new PageImpl<>(dtos, pageable, entities.getTotalElements());
}
@Override
public Page<UpdateRecordDto> queryComplete(long version, Pageable pageable) {
Page<UpdateRecordEntity> entities = updateRecordRepository.query(version, pageable);
List<UpdateRecordDto> dtos = new ArrayList<>();
for (UpdateRecordEntity entity : entities.getContent()) {
UpdateRecordDto dto = new UpdateRecordDto();
BeanUtils.copyProperties(entity, dto);
dtos.add(dto);
}
return new PageImpl<>(dtos, pageable, entities.getTotalElements());
}
@Override
@Transactional
public void complete(String sn) {
ApplicationVersionEntity version = updateTaskRepository.queryBySn(sn);
if (version == null) {
throw new BizException("Update task not found");
}
ApplicationEntity application = applicationVersionRepository.queryApplicationById(version.getId());
if (application == null) {
throw new BizException("Invalid application");
}
UpdateRecordEntity record = new UpdateRecordEntity();
record.setId(Snowflaker.getId());
record.setSn(sn);
record.setAppId(application.getAppId());
record.setApplicationName(application.getName());
record.setVersionId(version.getId());
record.setVersion(version.getVersion());
record.setLink(version.getLink());
updateRecordRepository.save(record);
updateTaskRepository.deleteBySn(sn);
}
}
|
package io.opensphere.core.util.swing.tags;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.RenderingHints;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.event.ChangeEvent;
import javax.swing.event.MouseInputAdapter;
import io.opensphere.core.util.collections.New;
/**
* A field in which a pre-defined set of tags may be chosen by the user with a
* popup menu.
*/
public class JTagField extends JPanel
{
/** The unique identifier used for serialization. */
private static final long serialVersionUID = 6039752385919432581L;

/** The values selected by the user, in the order they were selected. */
private final List<String> myOrderedSelections;

/** A dictionary of tags selected by the user, keyed by tag text. */
private final Map<String, JTag> myValues;

/** A dictionary of menu items, keyed by the choice text they represent. */
private final Map<String, JMenuItem> myMenuItems;

/** The popup menu shown when the user clicks the component. */
private final JPopupMenu myPopupMenu;

/** The choices available to the user. */
private final List<String> myChoices;

/** The set of change listeners registered for notification. */
private final Set<Consumer<ChangeEvent>> myChangeListeners;

/** The color to use for tags within the field. */
private Color myTagColor;
/**
 * Creates a new tag field using the supplied list of choices. Tags are
 * drawn in the default color, {@code JTag.DEFAULT_COLOR}.
 *
 * @param choices the choices with which to populate the tag field.
 */
public JTagField(List<String> choices)
{
    this(choices, JTag.DEFAULT_COLOR);
}
/**
 * Creates a new tag field using the supplied list of choices and the
 * supplied tag color.
 *
 * @param choices the choices with which to populate the tag field.
 * @param tagColor the color to use for tags within the field.
 */
public JTagField(List<String> choices, Color tagColor)
{
    // Wrapping flow layout so tags flow onto additional rows as the
    // field fills up.
    super(new WrappedFlowLayout(FlowLayout.LEFT, 5, 1));
    myChoices = choices;
    myTagColor = tagColor;
    setPreferredSize(new Dimension(500, 28));
    setMinimumSize(new Dimension(25, 28));
    setBorder(new RoundedBorder(Color.GRAY));
    getInsets().set(0, 0, 0, 0);
    myChangeListeners = New.set();
    myValues = New.map();
    myOrderedSelections = New.list();
    myMenuItems = New.map();
    myPopupMenu = new JPopupMenu();
    // One menu item per choice; the popup is (re)assembled afterwards.
    choices.forEach(value -> createMenuItem(value));
    rebuildPopup();
    // A click anywhere in the field opens the choice popup below the
    // clicked x position, but only while the field is enabled.
    addMouseListener(new MouseInputAdapter()
    {
        @Override
        public void mouseReleased(MouseEvent e)
        {
            if (isEnabled())
            {
                myPopupMenu.show(JTagField.this, e.getX(), getHeight());
            }
        }
    });
}
/**
 * {@inheritDoc}
 * <p>
 * Also propagates the enabled state to every tag currently shown in the
 * field before updating the field itself.
 *
 * @see javax.swing.JComponent#setEnabled(boolean)
 */
@Override
public void setEnabled(boolean enabled)
{
    myValues.values().forEach(tag -> tag.setEnabled(enabled));
    super.setEnabled(enabled);
}
/**
 * Sets the value of the {@link #myTagColor} field. The new color only
 * affects tags added after this call; tags already in the field keep the
 * color they were created with.
 *
 * @param tagColor the value to store in the {@link #myTagColor} field.
 */
public void setTagColor(Color tagColor)
{
    myTagColor = tagColor;
}
/**
 * Registers the supplied listener to receive change events. Listeners are
 * kept in a set, so registering the same listener twice has no effect.
 *
 * @param listener the listener to register.
 */
public void addChangeListener(Consumer<ChangeEvent> listener)
{
    myChangeListeners.add(listener);
}
/**
 * De-registers the supplied listener from receiving change events. No-op
 * if the listener was never registered.
 *
 * @param listener the listener to de-register.
 */
public void removeChangeListener(Consumer<ChangeEvent> listener)
{
    myChangeListeners.remove(listener);
}
/**
 * Notifies all registered listeners that a change to the selected data set
 * has taken place. A single {@link ChangeEvent}, sourced from this field,
 * is shared by every listener.
 */
protected void notifyChange()
{
    final ChangeEvent event = new ChangeEvent(this);
    for (Consumer<ChangeEvent> listener : myChangeListeners)
    {
        listener.accept(event);
    }
}
/**
 * Sets the collection of available choices to the supplied {@link List}.
 * This method has side-effects, and modifies the menu items, selected
 * values, and ordered selection collections. The popup menu is rebuilt to
 * reflect the new collection of available choices. The collection of
 * selected values ({@link #myValues}) is modified to retain only those
 * items which are valid in the new collection of available choices, and the
 * ordered collection of selected values ({@link #myOrderedSelections}) is
 * likewise modified to retain only the new valid choices.
 *
 * @param choices the collection of available choices.
 */
public void setChoices(List<String> choices)
{
    // Mutate the existing list in place rather than replacing the
    // reference (myChoices is final).
    myChoices.clear();
    myChoices.addAll(choices);
    myMenuItems.clear();
    myChoices.forEach(value -> createMenuItem(value));
    // remove all invalid choices from the selected values:
    myValues.keySet().retainAll(myChoices);
    myOrderedSelections.retainAll(myChoices);
    rebuildPopup();
}
/**
 * Gets the items selected by the user, in the order they were selected.
 * <p>
 * NOTE(review): this returns the live internal list, not a copy — external
 * mutation would corrupt the field's state. Confirm whether a defensive
 * copy is intended before changing callers.
 *
 * @return the ordered list of items selected by the user.
 */
public List<String> getSelectedItems()
{
    return myOrderedSelections;
}
/**
 * Builds the popup entry for the supplied choice and registers it in
 * {@link #myMenuItems}. The entry is not attached to the popup menu here;
 * that happens in {@link #rebuildPopup()}.
 *
 * @param value the choice the menu item represents.
 */
protected void createMenuItem(String value)
{
    JMenuItem item = new StayOpenMenuItem(value);
    // Choosing the entry selects the corresponding tag in the field.
    item.addActionListener(event -> add(value));
    myMenuItems.put(value, item);
}
/**
 * Adds the supplied item to the tag field if it is not already present, and
 * if it is a valid choice within the tag set. Delegates to
 * {@link #add(String, Color)} with a null color (the tag's default text
 * color is used). This method has a side-effect of rebuilding the popup
 * menu to reflect the unchosen options.
 *
 * @param text the text to add to the tag field.
 */
public void add(String text)
{
    add(text, (Color)null);
}
/**
 * Adds the supplied item to the tag field with the given color, if it is
 * not already present and is a valid choice within the tag set. Has the
 * side-effect of rebuilding the popup menu to reflect the unchosen options,
 * revalidating/repainting the component, and notifying change listeners.
 *
 * @param text the text to add to the tag field.
 * @param textColor the color of the text in the tag field, may be null.
 */
public void add(String text, Color textColor)
{
    if (!myValues.containsKey(text) && myChoices.contains(text))
    {
        myOrderedSelections.add(text);
        // Keep a local reference instead of re-fetching from the map.
        JTag tag = new JTag(text, this::remove, myTagColor, textColor);
        myValues.put(text, tag);
        add(tag);
        rebuildPopup();
        revalidate();
        repaint();
        notifyChange();
    }
}
/**
 * Removes the named item from the tag list if it is currently selected; if
 * it is not selected, no action is taken. Has the side-effect of rebuilding
 * the popup menu (so the item becomes choosable again), revalidating and
 * repainting the component, and notifying change listeners.
 *
 * @param text the text to remove from the tag field.
 */
public void remove(String text)
{
    // Map#remove returns null when the key is absent, which doubles as the
    // "not selected" guard.
    JTag tag = myValues.remove(text);
    if (tag == null)
    {
        return;
    }
    myOrderedSelections.remove(text);
    remove(tag);
    rebuildPopup();
    revalidate();
    repaint();
    notifyChange();
}
/**
 * Rebuilds the popup menu with the items still available for selection.
 * Choices already selected are omitted; the original order of
 * {@link #myChoices} is preserved.
 */
public void rebuildPopup()
{
    myPopupMenu.removeAll();
    for (String choice : myChoices)
    {
        if (!myValues.containsKey(choice))
        {
            myPopupMenu.add(myMenuItems.get(choice));
        }
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Fills the component's inner area (inside the insets) with a rounded
 * rectangle in the background color, with antialiasing enabled, then paints
 * children and border as usual.
 *
 * @see javax.swing.JComponent#paint(java.awt.Graphics)
 */
@Override
public void paint(Graphics g)
{
    Insets insets = getInsets();
    int x = insets.left;
    int y = insets.top;
    int width = getWidth() - insets.left - insets.right;
    int height = getHeight() - insets.top - insets.bottom;
    // Cast once instead of repeating the cast on every call.
    Graphics2D g2d = (Graphics2D)g;
    g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    g2d.setColor(getBackground());
    g2d.fillRoundRect(x, y, width, height, 12, 12);
    paintChildren(g);
    paintBorder(g);
}
}
|
#!/bin/sh
# Generate a self-signed cert for the Application Gateway listener and
# package it (with its key) as a password-less PKCS#12 bundle.
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -out appgw.crt -keyout appgw.key -subj "/CN=bicycle.contoso.com/O=Contoso Bicycle"
openssl pkcs12 -export -out appgw.pfx -in appgw.crt -inkey appgw.key -passout pass:
export APP_GATEWAY_LISTENER_CERTIFICATE=$(base64 -w 0 < appgw.pfx)
# Generate a self-signed wildcard cert for the in-cluster (Traefik) ingress.
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -out traefik-ingress-internal-aks-ingress-contoso-com-tls.crt -keyout traefik-ingress-internal-aks-ingress-contoso-com-tls.key -subj "/CN=*.aks-ingress.contoso.com/O=Contoso Aks Ingress"
export AKS_INGRESS_CONTROLLER_CERTIFICATE_BASE64=$(base64 -w 0 < traefik-ingress-internal-aks-ingress-contoso-com-tls.crt)
# NOTE(review): label now matches the variable actually exported above; the
# AKS variable carries a _BASE64 suffix but this one does not — confirm which
# naming downstream consumers expect.
echo Secret APP_GATEWAY_LISTENER_CERTIFICATE
echo $APP_GATEWAY_LISTENER_CERTIFICATE
# `echo \n` printed a literal "n" under /bin/sh; emit a blank line instead.
echo ""
echo AKS_INGRESS_CONTROLLER_CERTIFICATE_BASE64
echo $AKS_INGRESS_CONTROLLER_CERTIFICATE_BASE64
|
#!/bin/bash
# Collect Cloudflare credentials interactively and append them to
# /root/mail.conf, then print a summary.
cd /root
echo " Add ID Cloudlare "
echo ""
# BUGFIX: the prompt previously interpolated "$domain" before it was set,
# producing a garbled prompt.
read -e -p " Masukan Domain :" domain
read -e -p " Masukan Email Cloudflare :" email
read -e -p " Masukan Api Key :" key
echo -e "domain=$domain" >> /root/mail.conf
echo -e "email=$email" >> /root/mail.conf
echo -e "key=$key" >> /root/mail.conf
clear
echo "Done"
echo "Your ID Cloudflare"
echo -e "==============================="
echo "DOMAIN : $domain"
echo "Email : $email"
echo "Api Key : $key"
echo -e "==============================="
|
<filename>src/templates/visit-page.js<gh_stars>0
import React from "react";
// import PropTypes from "prop-types";
import { graphql } from "gatsby";
import Layout from "../components/Layout";
import Hero from "../components/Hero";
import Fade from "react-reveal/Fade";
import { ParallaxProvider } from "react-scroll-parallax";
// import { Parallax } from "react-scroll-parallax";
import Line from "../components/Line";
import HugeText from "../components/HugeText";
import ImageTextSection from "../components/ImageTextSection";
import ContactForm from "../components/ContactForm";
export const VisitPageTemplate = ({ frontmatter }) => (
<div>
<ParallaxProvider>
<Hero
image={frontmatter.heroImage}
content={{
heading: frontmatter.heading,
subheading: frontmatter.subheading
}}
/>
<section className="relative mb-12 lg:mb-20">
<div className="home-scroll-line"></div>
<Line mobile={4} desk={4} />
<HugeText text="Information" start="-10" finish="-60" />
<Fade>
<div className="text-center mx-auto max-w-3xl px-8 mb-12 lg:mb-16">
<h4 className="uppercase tracking-widest text-green mb-4 max-w-lg mx-auto">
How to find the memorial
</h4>
<h2 className="text-3xl lg:text-4xl mb-6">
Location and Contact Information
</h2>
</div>
</Fade>
<div className="mx-auto max-w-3xl flex items-center">
<div className="w-full lg:w-1/2">
<div className="p-4">
<div className="mb-4">
<span className="text-green">
THIS IS THE PLACE HERITAGE PARK
</span>
<br />
2601 Sunnyside Ave
<br />
Salt Lake City, UT 84081
</div>
<div className="mb-4">
<span className="text-green">Phone:</span> 801-582-1847
</div>
<div>
<span className="text-green">UTA Bus Service:</span>
<br />
UTA provides service to This Is The Place Heritage Park and
Hogle Zoo during the summer months.{" "}
<a
href="http://www.rideuta.com/"
target="_blank"
rel="noopener noreferrer"
>
Visit the UTA website
</a>{" "}
to find out more information about finding the best route for
you.
</div>
</div>
</div>
<div className="w-full lg:w-1/2">
<div className="max-w-md mx-auto">
<ContactForm />
</div>
</div>
</div>
</section>
<section className="mb-12 lg:mb-20">
<div className="container mx-auto">
<iframe
title="map"
src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3022.4268631217524!2d-111.81799798459387!3d40.7526352793275!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x87525e2bd2441443%3A0xdf4e3ffc5c0526e4!2sThis+Is+The+Place+Heritage+Park!5e0!3m2!1sen!2sus!4v1562954879631!5m2!1sen!2sus"
width="600"
height="450"
frameborder="0"
style={{ border: "0", margin: "auto" }}
allowfullscreen
></iframe>
</div>
</section>
{frontmatter.sections.map((section, index) => (
<ImageTextSection key={index} section={section} index={index} />
))}
</ParallaxProvider>
</div>
);
// VisitPageTemplate.propTypes = {
// heroImage: PropTypes.oneOfType([PropTypes.object, PropTypes.string]),
// title: PropTypes.string,
// heading: PropTypes.string,
// };
const VisitPage = ({ data }) => {
// console.log(data);
const { frontmatter } = data.markdownRemark;
return (
<Layout>
<VisitPageTemplate frontmatter={frontmatter} />
</Layout>
);
};
// VisitPage.propTypes = {
// data: PropTypes.shape({
// markdownRemark: PropTypes.shape({
// frontmatter: PropTypes.object
// }),
// introBg: PropTypes.object,
// donorsBg: PropTypes.object
// })
// };
export default VisitPage;
// Page query: selects the markdown node with templateKey "visit-page" and
// pulls the hero image plus each section's image as fluid image sharps.
// (The query text is runtime GraphQL — do not edit casually.)
export const pageQuery = graphql`
  query VisitPageTemplate {
    markdownRemark(frontmatter: { templateKey: { eq: "visit-page" } }) {
      frontmatter {
        title
        heading
        heroImage {
          childImageSharp {
            fluid(maxWidth: 2048, quality: 100) {
              ...GatsbyImageSharpFluid_withWebp
            }
          }
        }
        sections {
          heading
          description
          hugetext
          image {
            childImageSharp {
              fluid(maxWidth: 800, quality: 100) {
                ...GatsbyImageSharpFluid_withWebp
              }
            }
          }
        }
      }
    }
  }
`;
// gallery {
// image {
// childImageSharp {
// fluid(maxWidth: 600, quality: 90) {
// ...GatsbyImageSharpFluid_withWebp
// }
// }
// }
// }
// export const aboutPageQuery = graphql`
// query VisitPage($id: String!) {
// markdownRemark(id: { eq: $id }) {
// html
// frontmatter {
// title
// }
// }
// }
// `
|
def is_armstrong_number(number):
    """Return True if `number` is an Armstrong (narcissistic) number.

    An Armstrong number equals the sum of its digits, each raised to the
    power of the digit count (e.g. 371 = 3**3 + 7**3 + 1**3).
    """
    digits = str(number)
    power = len(digits)
    # Sum every digit raised to the number of digits, then compare.
    total = sum(int(d) ** power for d in digits)
    return total == int(digits)
if __name__ == '__main__':
    print(is_armstrong_number(371))
# Output:
# True
# BUGFIX: the expected output was previously written as a bare `True`
# expression statement; it is now a comment so it no longer executes.
/*
* Small jpeg decoder library (Internal header)
*
* Copyright (c) 2006, <NAME> <<EMAIL>>
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* - Neither the name of the author nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef __TINYJPEG_INTERNAL_H_
#define __TINYJPEG_INTERNAL_H_
/* #include <setjmp.h> */
/* Compile-time switches: extra consistency checks and debug tracing. */
#define SANITY_CHECK 0
#define TJ_DEBUG 0
/* NOTE(review): fixed-width integer types are emulated with #defines rather
 * than <stdint.h>; these mappings assume the usual 32/64-bit ABIs — confirm
 * for the target platform, and beware of clashes if <stdint.h> is ever
 * included alongside this header. */
#define uint8_t unsigned char
#define uint16_t unsigned short
#define uint32_t unsigned int
#define int16_t short
/* Size (in ints) of the hand-rolled jmp_buf below. */
#define _JBLEN 256
#define HUFFMAN_BITS_SIZE 256
/* Huffman fast-path: a direct lookup table indexed by the next
 * HUFFMAN_HASH_NBITS bits of the stream. */
#define HUFFMAN_HASH_NBITS 9
#define HUFFMAN_HASH_SIZE (1UL << HUFFMAN_HASH_NBITS)
#define HUFFMAN_HASH_MASK (HUFFMAN_HASH_SIZE - 1)
#define HUFFMAN_TABLES 4
/* Y, Cb, Cr */
#define COMPONENTS 3
#define JPEG_MAX_WIDTH 2048
#define JPEG_MAX_HEIGHT 2048
struct jdec_private;
struct huffman_table {
  /* Fast look up table, using HUFFMAN_HASH_NBITS bits we can have directly the symbol,
   * if the symbol is <0, then we need to look into the tree table */
  short int lookup[HUFFMAN_HASH_SIZE];
  /* code size: give the number of bits of a symbol is encoded */
  unsigned char code_size[HUFFMAN_HASH_SIZE];
  /* some place to store value that is not encoded in the lookup table
   * FIXME: Calculate if 256 value is enough to store all values
   */
  uint16_t slowtable[16-HUFFMAN_HASH_NBITS][256];
};
/* Per-component decode state: sampling factors, table pointers, and the
 * current 8x8 block of DCT coefficients. */
struct component {
  unsigned int Hfactor;
  unsigned int Vfactor;
  float *Q_table; /* Pointer to the quantisation table to use */
  struct huffman_table *AC_table;
  struct huffman_table *DC_table;
  short int previous_DC; /* Previous DC coefficient */
  short int DCT[64]; /* DCT coef */
#if SANITY_CHECK
  unsigned int cid;
#endif
};
/* NOTE(review): a local jmp_buf replacing <setjmp.h> (see the commented-out
 * include above); it must stay layout-compatible with whatever implements
 * the setjmp/longjmp declared at the bottom of this header — confirm. */
typedef struct _jmp_buf {
  int _jb[_JBLEN + 1];
} jmp_buf[1];
/* Per-sampling-mode MCU decode and colorspace conversion hooks. */
typedef void (*decode_MCU_fct) (struct jdec_private *priv);
typedef void (*convert_colorspace_fct) (struct jdec_private *priv);
/* Full decoder state: input stream position, bit reservoir, quantisation
 * and Huffman tables, restart-marker tracking, and per-MCU scratch space. */
struct jdec_private {
  /* Public variables */
  uint8_t *components[COMPONENTS];
  unsigned int width, height; /* Size of the image */
  unsigned int flags;
  /* Private variables */
  const unsigned char *stream_begin, *stream_end;
  unsigned int stream_length;
  const unsigned char *stream; /* Pointer to the current stream */
  unsigned int reservoir, nbits_in_reservoir;
  struct component component_infos[COMPONENTS];
  float Q_tables[COMPONENTS][64]; /* quantization tables */
  struct huffman_table HTDC[HUFFMAN_TABLES]; /* DC huffman tables */
  struct huffman_table HTAC[HUFFMAN_TABLES]; /* AC huffman tables */
  int default_huffman_table_initialized;
  int restart_interval;
  int restarts_to_go; /* MCUs left in this restart interval */
  int last_rst_marker_seen; /* Rst marker is incremented each time */
  /* Temp space used after the IDCT to store each components */
  uint8_t Y[64*4], Cr[64], Cb[64];
  jmp_buf jump_state;
  /* Internal Pointer use for colorspace conversion, do not modify it !!! */
  uint8_t *plane[COMPONENTS];
};
/* Branch-prediction hints; no-ops on compilers without __builtin_expect. */
#if defined(__GNUC__) && (__GNUC__ > 3) && defined(__OPTIMIZE__)
#define __likely(x) __builtin_expect(!!(x), 1)
#define __unlikely(x) __builtin_expect(!!(x), 0)
#else
#define __likely(x) (x)
#define __unlikely(x) (x)
#endif
#define IDCT tinyjpeg_idct_float
void tinyjpeg_idct_float (struct component *compptr, uint8_t *output_buf, int stride);
/* Local setjmp/longjmp declarations paired with the jmp_buf typedef above. */
int setjmp(jmp_buf env);
void longjmp(jmp_buf env, int val);
#endif
|
// GET handler: returns all restaurants, or only those matching
// req.params.name when a name parameter is supplied.
// BUGFIX: the condition was inverted — a supplied name ran the unfiltered
// query while a missing name filtered on `undefined`.
module.exports.restaurants_get = async function(req, res, next){
    let restaurants;
    if(req.params.name)
        restaurants = await req.models.Restaurant.findAll({where: {name: req.params.name}}).catch(err => next(err));
    else restaurants = await req.models.Restaurant.findAll().catch(err => next(err));
    return res.send(restaurants);
}
// POST handler: creates restaurants from the request body.
// Accepts either `req.body.restaurants` (array -> bulkCreate) or
// `req.body.restaurant` (single object -> create).
// NOTE(review): when neither key is present, `created` stays undefined and
// res.send(undefined) sends an empty response — confirm this is intended
// rather than a 400.
module.exports.restaurants_add = async function(req, res, next){
    let created;
    if(req.body.restaurants)
        created = await req.models.Restaurant.bulkCreate(req.body.restaurants).catch(err => next(err));
    else if(req.body.restaurant)
        created = await req.models.Restaurant.create(req.body.restaurant).catch(err => next(err));
    return res.send(created);
}
|
#!/bin/bash
# Runs the selected graph kernels directly on the HOST machine (not in the
# simulated gem5 HOST/PIM systems).
# NOTE(review): create_scenario/clonedir/run/returntopwd/print_msg come from
# UTILS/default_params.sh (sourced below) — confirm before editing.
VALUES0=( sgraph_page_rank ) # sgraph_bfs sgraph_page_rank sgraph_teenage_follower sgraph_bellman_ford )
# This scenario runs the graph application on the real HOST machine (not in the simulated HOST/PIM systems in gem5)
for V0 in ${VALUES0[*]}
do
	source UTILS/default_params.sh
	create_scenario "$0/$*" "$V0" "ARMv$1 + HMC2011 + Linux (VExpress_EMM) + PIM(ARMv7)"
	export OFFLOADED_KERNEL_NAME=$V0	# Kernel name to offload (Look in SMC/SW/PIM/kernels)
	#*******
	# Build and run the host-only application in a cloned working directory.
	clonedir $HOST_SW_DIR/app/host_only
	run ./build-x64.sh
	./main
	returntopwd
	#*******
done
print_msg "Done!"
|
# Exports (gut, tracr) header pairs for one line of a dugio file to CSV.
# Usage: <script> <input.dugio> <linename>
input=$1
#input=103_Prod_CGG_GuidedWave_P2_deep_tfd_ch100.dugio
linename=$2
# CSV header row, then the sorted/deduplicated key pairs.
echo "gut,tracr" > csv/$linename.csv
dugio read file=$input query=shot:1221-13813[160] line=$linename |dugethw key=tracr,gut output=geom|sort -n |uniq |awk '{print $2","$1}' >> csv/$linename.csv
#echo "gut,tracr" > RY05-5568I4-170_cable5_gun2.csv
#dugio read file=$input query=* line=RY05-5568I4-170_cable5_gun2 |dugethw key=tracr,gut output=geom|sort -n |uniq |awk '{print $2","$1}' >> RY05-5568I4-170_cable5_gun2.csv
#echo "gut,tracr" > RY05-6272P1-015_cable3_gun1.csv
#dugio read file=$input query=* line=RY05-6272P1-015_cable3_gun1 |dugethw key=tracr,gut output=geom|sort -n |uniq |awk '{print $2","$1}' >> RY05-6272P1-015_cable3_gun1.csv
|
package org.museautomation.ui.step.actions;
import org.museautomation.core.step.*;
import org.museautomation.ui.extend.actions.*;
/**
 * An undoable edit that changes a step's type identifier, remembering the
 * previous type so the change can be reverted.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
public class ChangeStepTypeAction extends UndoableAction
{
    private final StepConfiguration _step;
    private final String _new_type;
    // The type the step had before execute(); valid only while the action
    // is in the executed (not undone) state.
    private String _old_type;

    public ChangeStepTypeAction(StepConfiguration step, String new_type)
    {
        _step = step;
        _new_type = new_type;
    }

    @Override
    protected boolean executeImplementation()
    {
        // Capture the current type before overwriting it, so undo can restore.
        _old_type = _step.getType();
        _step.setType(_new_type);
        return true;
    }

    @Override
    protected boolean undoImplementation()
    {
        _step.setType(_old_type);
        _old_type = null;
        return true;
    }
}
|
/**
* <p>Copyright: Copyright (c) 2019</p>
*
* <h3>License</h3>
*
* Copyright (c) 2019 by <NAME>. <br>
* All rights reserved. <br>
*
* <p>Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* <ul>
* <li> Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* <li> Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* <li> Neither the name of the copyright owners, their employers, nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
* </ul>
* <p>THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
*
* @author <A HREF="http://disi.unal.edu.co/profesores/jgomezpe"> <NAME> </A>
* (E-mail: <A HREF="mailto:<EMAIL>"><EMAIL></A> )
* @version 1.0
*/
package lifya.stringify;
import java.util.Base64;
import java.util.Base64.Encoder;
import speco.array.Array;
import speco.array.ArrayStringifier;
import java.util.HashMap;
import speco.object.JSONfyable;
import speco.json.JSON;
/**
 * <p>Stringifies (stores into a String) an object.</p>
 * <p>Dispatches on runtime type: Stringifyable, arrays, HashMaps, JSON,
 * JSONfyable, Strings, characters, and Base64 for byte blobs.</p>
 */
public class Stringifier {
    /**
     * Stringifies the object by delegating to its own stringify() method.
     * @param obj Object to be stringified
     * @return Stringified version of the object
     */
    public static String apply(Stringifyable obj) { return obj.stringify(); }

    /**
     * Stringifies an array (including primitive arrays) with the associated
     * formatting characters.
     * @param array Array to be stringified
     * @param OPEN Array opening character
     * @param CLOSE Array closing character
     * @param SEPARATOR Array elements separating character
     * @return Stringified version of the array
     */
    public static String apply(Object array, char OPEN, char CLOSE, char SEPARATOR) {
        ArrayStringifier str = new ArrayStringifier(OPEN,CLOSE,SEPARATOR);
        return str.apply(array);
    }

    /**
     * Stringifies an array with '[', ']', and ',' as formatting characters
     * @param array Array to be stringified
     * @return Stringified version of the array
     */
    public static String apply(Array<?> array) { return apply(array,'[', ']', ','); }

    /**
     * Stringifies an array with the associated formatting characters
     * @param array Array to be stringified
     * @param OPEN Array opening character
     * @param CLOSE Array closing character
     * @param SEPARATOR Array elements separating character
     * @return Stringified version of the array
     */
    public static String apply(Array<?> array, char OPEN, char CLOSE, char SEPARATOR) {
        ArrayStringifier str = new ArrayStringifier(OPEN,CLOSE,SEPARATOR);
        return str.apply(array);
    }

    /**
     * Stringifies a hashmap with '{', '}', ',' and ':' as formatting characters
     * @param map HashMap to be stringified
     * @return Stringified version of the hashmap
     */
    public static String apply(HashMap<String, Object> map) {
        return apply(map, '{', '}', ',', ':');
    }

    /**
     * Stringifies a hashmap with the associated formatting characters.
     * A '\0' OPEN or CLOSE character suppresses that delimiter.
     * @param map HashMap to be stringified
     * @param OPEN Map opening character
     * @param CLOSE Map closing character
     * @param SEPARATOR Map entries separating character
     * @param ASSIGN key/value assign character
     * @return Stringified version of the hashmap
     */
    public static String apply(HashMap<String, Object> map,
            char OPEN, char CLOSE, char SEPARATOR, char ASSIGN) {
        StringBuilder sb = new StringBuilder();
        boolean flag = false; // true once the first entry is written
        if( OPEN != '\0' ) sb.append(OPEN);
        for( String key:map.keySet() ) {
            if( flag ) sb.append(SEPARATOR);
            sb.append(apply(key));
            sb.append(ASSIGN);
            sb.append(apply(map.get(key)));
            flag = true;
        }
        if( CLOSE != '\0' ) sb.append(CLOSE);
        return sb.toString();
    }

    /**
     * Stringifies a JSON object as {"key":value,...}.
     * @param json JSON to be stringified
     * @return Stringified version of the JSON
     */
    public static String apply(JSON json) {
        StringBuilder sb = new StringBuilder();
        boolean flag = false; // true once the first entry is written
        sb.append('{');
        for( String key:json.keys() ) {
            if( flag ) sb.append(',');
            sb.append(apply(key));
            sb.append(':');
            sb.append(apply(json.get(key)));
            flag = true;
        }
        sb.append('}');
        return sb.toString();
    }

    /**
     * Stringifies a JSONfyable object by stringifying its JSON form.
     * @param json JSONfyable object to be stringified
     * @return Stringified version of the JSONfyable object
     */
    public static String apply(JSONfyable json) { return apply(json.json()); }

    /**
     * Stringifies an arbitrary object, dispatching on its runtime type.
     * NOTE(review): the order of instanceof checks determines precedence for
     * objects implementing more than one of these interfaces; the final
     * fallback is Object#toString().
     * @param obj Object to be stringified
     * @return Stringified version of the object ("null" for null)
     */
    @SuppressWarnings("unchecked")
    public static String apply( Object obj ){
        if(obj==null) return "null";
        if(obj.getClass().isArray()) return apply(obj, '[', ']', ',');
        if(obj instanceof String) return apply((String)obj);
        if(obj instanceof Stringifyable) return apply((Stringifyable)obj);
        if(obj instanceof Array) return apply((Array<?>)obj);
        if(obj instanceof JSONfyable) return apply((JSONfyable)obj);
        if(obj instanceof HashMap) return apply((HashMap<String,Object>)obj);
        if(obj instanceof JSON) return apply((JSON)obj);
        return obj.toString();
    }

    /**
     * Stringifies a String using '"' as the quotation character.
     * @param str String to be stringified
     * @return Stringified (quoted and escaped) version of the String
     */
    public static String apply(String str) { return apply(str, '"'); }

    /**
     * Stringifies a String using the provided character as quotation,
     * escaping backslashes, control characters, non-Latin-1 characters
     * (as \\uXXXX) and the quotation character itself.
     * @param str String to be stringified
     * @param quotation Character used as quotation for the string
     * @return Stringified version of the String
     */
    public static String apply(String str, char quotation) {
        StringBuilder sb = new StringBuilder();
        sb.append(quotation);
        for( int i=0; i<str.length(); i++ ){
            char c = str.charAt(i);
            switch( c ){
            case '\\': sb.append("\\\\"); break;
            case '\b': sb.append("\\b"); break;
            case '\f': sb.append("\\f"); break;
            case '\n': sb.append("\\n"); break;
            case '\r': sb.append("\\r"); break;
            case '\t': sb.append("\\t"); break;
            default:
                // Escape other control chars and anything above Latin-1.
                if( c < 32 || c > 255 ){
                    sb.append("\\u");
                    sb.append(Integer.toHexString((int)c));
                }else if(c==quotation)
                    sb.append("\\"+quotation);
                else
                    sb.append(c);
                break;
            }
        }
        sb.append(quotation);
        return sb.toString();
    }

    /**
     * Stringifies a character as a one-character quoted string.
     * @param c Character to stringify
     * @return Stringified version of the character
     */
    public static String apply(Character c) { return apply(""+c, '"'); }

    /**
     * Stringifies a blob (byte array) using MIME Base64 encoding.
     * NOTE(review): the previous comment claimed a leading '#' marker
     * identifies blobs, but no marker is written here — confirm against
     * whatever parses this output.
     * @param blob Byte array/blob to stringify
     * @return Stringified (MIME Base64) version of the blob/byte array
     */
    public static String apply(byte[] blob) {
        Encoder enc = Base64.getMimeEncoder();
        return enc.encodeToString(blob);
    }
}
#!/bin/bash
# Build and serve the documentation site locally.
# Run from the repository root regardless of the invocation directory.
cd $(dirname $0)/..
# Generate API docs (dokka) and validate code snippets (ank).
./gradlew dokka :docs:runAnk
# Install Jekyll dependencies into a local bundle, then serve the docs.
bundle install --gemfile docs/Gemfile --path vendor/bundle
BUNDLE_GEMFILE=docs/Gemfile bundle exec jekyll serve -s docs
|
# Training configuration for the GMLSTM forecasting experiment.
# BUGFIX: duplicated ELMO_* assignments have been removed; in particular
# ELMO_OPTION_FILE was first set to the *weights* .hdf5 file and then
# silently overwritten with the correct options .json — only the correct
# (final) values are kept below.
# the data dir contains those corpus
# this folder will be created, and all the models and results will be listed there.
CONFIG_DIR=/forecasting/u0n-1_s0n/concat_p_only_8/
WORK_DIR=$ROOT_DIR/Expt/workdir/$CONFIG_DIR/
TRAINING_DIR=$WORK_DIR/training/
# cluster strategy for psyc dataset
CLUSTER_STRATEGY=MISC11_ML
INPUT_DIR=psyc_${CLUSTER_STRATEGY}_17_padding
# use pretrained word and char embedding using prepare.sh
VOCAB_DIR=$DATA_DIR/vocab_g_u0n-1_s0n_u_8/
mkdir -p $VOCAB_DIR
# Use ../../utils/preprocess.py to compute score for each paragraph
#TRAIN_FILENAME1=${INPUT_DIR}/train_sc20.json
TRAIN_FILENAME1=${INPUT_DIR}/train.json
#DEV_FILENAME1=${INPUT_DIR}/dev_sc10.json
DEV_FILENAME1=${INPUT_DIR}/dev.json
TEST_FILENAME1=${INPUT_DIR}/test.json
#TEST_FILENAME1=${INPUT_DIR}/dev_sc20.json
RO_TRAIN_FILE1=$RO_DATA_DIR/$TRAIN_FILENAME1
RO_DEV_FILE1=$RO_DATA_DIR/$DEV_FILENAME1
RO_TEST_FILE1=$RO_DATA_DIR/$TEST_FILENAME1
TRAIN_FILE1=$DATA_DIR/prep_data/rmstop_0_rpt_no/$TRAIN_FILENAME1
DEV_FILE1=$DATA_DIR/prep_data/rmstop_0_rpt_no/$DEV_FILENAME1
TEST_FILE1=$DATA_DIR/prep_data/rmstop_0_rpt_no/$TEST_FILENAME1
ALGO="GMLSTM"
LEARNING_RATE=0.0001
#WEIGHT_DECAY=0.0001
WEIGHT_DECAY=0.0
BATCH_SIZE=64
CONTEXT_WINDOW=8
QUESTION_WINDOW=0
HIDDEN_SIZE=64
EPOCH=100
STEPS_PER_CHECKPOINT=100
# DROP_KEEP_PROB in (0, 1], 1 is no dropout
DROP_KEEP_PROB=0.3
USE_SELFATT=
USE_CHAR_EMBED=
MAX_NUM_CHAR_TO_KEEP_FORWARD=4
MAX_NUM_CHAR_TO_KEEP_BACKWARD=4
#USE_CHAR_EMBED=x
# Whether to train the original embedding.
TRAIN_EMBEDDING=
# max_grad_norm / max(global_norm, max_grad_norm), set to inf to disable.
MAX_GRAD_NORM=5
# leave it empty to use random initial WORD_EMB
WORD_EMB_FILE=$RO_DATA_DIR/glove.840B.300d.txt
# WORD_EMB_FILE=
#WORD_EMB_FILE=$DATA_DIR/vectors_words.txt
CHAR_EMB_FILE=
#CHAR_EMB_FILE=$RO_DATA_DIR/glove.840B.300d-char.txt
#CHAR_EMB_FILE=$DATA_DIR/vectors_chars.txt
EMA=0.9999
MAX_P_LEN=600
MAX_Q_LEN=60
NUM_FILTERS=25
ACC_SUM_PROB=0.9
#flat Context-aware question attention
FLAT_C_Q_ATT=
# pos_weight for balanced cross entropy
POS_WEIGHT=0.9
# set gama = 0, decay to standard cross entropy
# key for tokenization to use
TOKEN_KEY_TO_USE=tokenized_utterance
# whether adding p encoding to decode
DECODE_P=
# whether adding q encoding to decode
DECODE_Q=
# TOPK, a list of integers for K values in Recall@K
TOPK_LIST=1,2,3,5,10
# TOPM_FOR_ACC_PROB, with ACC_SUM_PROB in topM
TOPM_FOR_ACC_PROB=5
# WORD_EMBED_SIZE, default 300, exclusive with WORD_EMB_FILE
WORD_EMBED_SIZE=300
# CHAR_EMBED_SIZE, default 100, exclusive with CHAR_EMB_FILE
CHAR_EMBED_SIZE=300
# flat Question-aware context attention
FLAT_Q_C_ATT=
# Dropout keep prob for embedding, 1.0=no_dropout
DROPOUT_KEEP_PROB_EMB=0.7
# Method to encode the dialogue
DIAL_ENCODE=CONCAT
# max_length for a single utterance
MAX_U_LEN=60
# whether to hierarchy_q_pu_att
HIERARCHY_Q_PU_ATT=
# self-att for hierarchy, only can be useful when dial_encode=HIERARCHY
USE_HIERARCHY_SELFATT=
# ema_decay is decay ratio for EMA, 0.0 to disable, 0.9999+ to enable
EMA_DECAY=0.0
# loss_func, default=X_ENTROPY
LOSS_FUNC=X_ENTROPY
# rnn_type, bi-lstm, bi-gru, bi-rnn, lstm, gru, rnn
RNN_TYPE=bi-gru
# whether to use shared encode layer for utterance
USE_SHARED_ENCODING=
# all training files to use
TRAIN_FILES=$TRAIN_FILE1
#TRAIN_FILES=`find ${TRAIN_FILE1}_splits -name "split*" | tr '\n' ','`
# all dev files to use
DEV_FILES=$DEV_FILE1
# all test files to use
TEST_FILES=$TEST_FILE1
# pw_alpha * pairwise_hinge_loss + x_entropy
# elmo pretrained LM weight file
ELMO_WEIGHT_FILE=$DATA_DIR/elmo_2x4096_512_2048cnn_2xhighway_5.5B_weights.hdf5
# elmo embedding output size to be projected into
ELMO_EMB_OUTPUT=128
# whether use character elmo embedding
USE_CHARACTER_ELMO=x
# positions to inject elmo, keep empty to disable
ELMO_POSITIONS=
# elmo option json file
ELMO_OPTION_FILE=$DATA_DIR/elmo_2x4096_512_2048cnn_2xhighway_5.5B_options.json
# elmo vocabulary file to write and read
ELMO_VOCAB_FILE=$DATA_DIR/elmo_vocab_file
# elmo max num of characters
# input used to decode
DECODE_INPUTS=p_final
#MEMNET PARAMS
GATED_MEMNET=x
PASSAGE_HOPS=2
MEMNET_SHARE_WEIGHTS=x
# whether to use concat p
USE_CONCAT_P=x
# decode_func to use for multiclass decoding
DECODE_FUC=FC
# flat Context-ware response attention
FLAT_C_R_ATT=
# flat response-ware context attention
FLAT_R_C_ATT=
# whether to hierarchy_r_pu_att
HIERARCHY_R_PU_ATT=
# whether adding r encoding to cnn decode
DECODE_R=
# r pass memnet hops
RESPONSE_HOPS=2
# use response utterance or not
USE_RESPONSE_U=
# decode goal
DECODE_GOAL=ALL_LABEL
# Whether to use speaker embedding
USE_SPEAKER_EMBEDDING=
# Whether to use label embedding
USE_LABEL_EMBEDDING=
# dim of label embedding
LABEL_EMBED_DIM=32
# dim of speaker embedding
SPEAKER_EMBED_DIM=8
# filter sizes for cnn
FILTER_SIZES=3,4,5
# whether to decode r with flatten pu_labels embdding
DECODE_R_WITH_FLATTEN_PU_LABELS=
# whether to use response speaker info
USE_RESPONSE_S=
# whether to train speaker emb
TRAIN_SPEAKER_EMBEDDING=x
# whether to train label emb
TRAIN_LABEL_EMBEDDING=x
# dropout keep rate for MLP
DROPOUT_KEEP_PROB_MLP=0.8
# num_attention_heads for snt seq attention
NUM_ATT_HEAD=4
# snt-levl attention algorithm, leave empty for disabling
SNT_ATT_ALGO=
# snt-levl attention hops
SNT_SEQ_HOPS=2
# snt rnn type, for snt-lvl rnn
SNT_RNN_TYPE=bi-gru
# loss_weights for each label, sep with comma, can be float
LOSS_WEIGHTS=1.0,1.0,0.25,0.75,0.75,0.25,0.75,1.0,1.0,1.0,1.0
# focal loss gama for each label, sep with comma, int
FOCAL_LOSS_GAMA=0,0,0,0,0,0,0,0,0,0,0
# use response in context seq, without its label
USE_R_IN_SEQ=
# how to combine the final input states
DECODE_COMBINE=additive
#config file for P model
P_MODEL_CONFIG=
#config file for T model
T_MODEL_CONFIG=
# whether use r in word matching
USE_R_IN_WM_SEQ=
# whether use batch normalization
USE_BATCH_NORM=
|
#!/usr/bin/env sh
# Deploy the built site to the gh-pages branch via a throwaway git repo.
# abort on errors
set -e
# build
npm run build
# navigate into the build output directory
cd dist
# initialize a fresh repository containing only the build output
git init
git add -A
git commit -m 'deploy'
# force-push: the gh-pages history is disposable and fully replaced each deploy
git push -f https://github.com/xwellingtonx/advanced-pet.git master:gh-pages
cd -
#!/usr/bin/env bash
# CI bootstrap: install bazelisk as the `bazel` binary.
wget http://hlomodule.oss-cn-zhangjiakou.aliyuncs.com/tao_compiler/tools/bazelisk/v1.7.5/bazelisk-linux-amd64 -O /usr/local/bin/bazel
chmod +x /usr/local/bin/bazel
# This is a workaround for GFW.
# python3 packages
pip3 install numpy oss2 filelock
# replace system git with git wrapper
sys_git=$(which git)
# Only wrap once: the original binary is preserved at <git>.orig
if [ ! -f ${sys_git}.orig ];then
    mv $sys_git ${sys_git}.orig
    cp platform_alibaba/ci_build/install/git_wrapper.py $sys_git
fi
|
import Vue from 'vue'
import VueRouter, {RouteConfig} from 'vue-router'
import {authService} from "@/services"
import Layout from "@/components/Layout"
import Signin from '@/views/Signin'
import Signup from "@/views/Signup"
import Account from "@/views/Account"
import Comic from "@/views/comic/Comic"
import ComicList from "@/views/comic/ComicList"
import ComicDetails from "@/views/comic/ComicDetails"
/**
 * Route table: /signin and /signup are standalone; everything else is
 * nested under the authenticated Layout shell (see router.beforeEach).
 * @type {Array<RouteConfig>}
 */
const routes = [
  {
    path: '/signin',
    name: 'signin',
    props: true,
    component: Signin
  },
  {
    path: '/signup',
    name: 'signup',
    component: Signup
  },
  {
    path: '/',
    redirect: "/comics",
    component: Layout,
    // Layout receives the current user; evaluated when the route renders.
    props: router => ({
      user: authService.getCurrentUser()
    }),
    children: [
      {
        path: "/comics",
        name: "home",
        component: Comic,
        children: [
          {
            path: "/comics",
            name: "comics",
            component: ComicList,
          },
          {
            // :comicId is forwarded to ComicDetails as a prop (props: true).
            path: "/comics/:comicId",
            name: "comic-details",
            props: true,
            component: ComicDetails,
          },
        ],
      },
      {
        path: "/account",
        name: "account",
        component: Account,
      },
    ]
  }
]
// Register the router plugin before instantiating the router.
Vue.use(VueRouter)
// Application router (default hash mode; no explicit history mode set).
const router = new VueRouter({
  routes
})
router.beforeEach((to, from, next) => {
switch (to.name) {
case "signin":
case "signup":
next()
break
default:
authService.isAuthenticated().then( _ => {
next()
}).catch( async reason => {
await authService.signout()
reason ? next({name: "signin", params: {reason}}) : next({name: "signin"})
})
}
})
export default router
|
package io.cattle.platform.api.serviceproxy;
import io.cattle.platform.api.instance.ContainerProxyActionHandler;
import io.cattle.platform.api.resource.AbstractNoOpResourceManager;
import io.cattle.platform.api.utils.ApiUtils;
import io.cattle.platform.core.addon.K8sClientConfig;
import io.cattle.platform.core.constants.ClusterConstants;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.dao.ClusterDao;
import io.cattle.platform.core.dao.ServiceDao;
import io.cattle.platform.core.model.Cluster;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.docker.api.model.HostAccess;
import io.cattle.platform.docker.api.model.ServiceProxy;
import io.cattle.platform.hostapi.HostApiAccess;
import io.cattle.platform.hostapi.HostApiService;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import io.github.ibuildthecloud.gdapi.validation.ReferenceValidator;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Resource manager that resolves a service-proxy request either to a
 * container of the named service, or — for the special
 * "k8s-api.&lt;clusterId&gt;" service name — to a proxied connection to the
 * cluster's Kubernetes API endpoint through a Rancher agent host.
 */
public class ServiceProxyManager extends AbstractNoOpResourceManager {

    ServiceDao serviceDao;
    ClusterDao clusterDao;
    ContainerProxyActionHandler actionHandler;
    ObjectManager objectManager;
    ReferenceValidator referenceValidator;
    HostApiService apiService;

    public ServiceProxyManager(ServiceDao serviceDao, ClusterDao clusterDao, ContainerProxyActionHandler actionHandler,
            ObjectManager objectManager, ReferenceValidator referenceValidator, HostApiService apiService) {
        this.serviceDao = serviceDao;
        this.clusterDao = clusterDao;
        this.actionHandler = actionHandler;
        this.objectManager = objectManager;
        this.referenceValidator = referenceValidator;
        this.apiService = apiService;
    }

    /**
     * Resolves the requested service name and proxies to one of its
     * containers. Responds 404 when the name is blank, the service is not
     * found, or the service has no instances.
     */
    @Override
    public Object create(String type, ApiRequest request) {
        ServiceProxy proxy = request.proxyRequestObject(ServiceProxy.class);
        String requestedName = proxy.getService();
        if (StringUtils.isBlank(requestedName)) {
            request.setResponseCode(ResponseCodes.NOT_FOUND);
            return null;
        }

        // Names may be qualified as "stack.service"; split on the first dot only.
        String[] segments = StringUtils.split(requestedName, ".", 2);
        boolean qualified = segments.length == 2;

        // Special name "k8s-api.<clusterId>" proxies to the cluster API instead.
        if (qualified && "k8s-api".equals(segments[0])) {
            return accessCluster(request, ApiContext.getContext().getIdFormatter().parseId(segments[1]));
        }

        Service service = qualified
                ? serviceDao.findServiceByName(ApiUtils.getPolicy().getAccountId(), segments[1], segments[0])
                : serviceDao.findServiceByName(ApiUtils.getPolicy().getAccountId(), segments[0]);

        if (service != null) {
            List<Long> instanceIds = DataAccessor.fieldLongList(service, ServiceConstants.FIELD_INSTANCE_IDS);
            if (!instanceIds.isEmpty()) {
                // Proxy through the first instance of the service.
                Instance target = objectManager.loadResource(Instance.class, instanceIds.get(0));
                return actionHandler.perform(target, request);
            }
        }

        request.setResponseCode(ResponseCodes.NOT_FOUND);
        return null;
    }

    /** Looks up a cluster by id, returning null when the id resolves to nothing. */
    private Cluster getCluster(String id) {
        Object candidate = referenceValidator.getById(ClusterConstants.TYPE, id);
        return candidate instanceof Cluster ? (Cluster) candidate : null;
    }

    /**
     * Builds a proxied HostAccess to the cluster's Kubernetes API via any
     * Rancher agent on the cluster. Returns null when the cluster, its
     * client config/address, the agent, or the host API access is missing.
     */
    private Object accessCluster(ApiRequest request, String id) {
        Cluster cluster = getCluster(id);
        if (cluster == null) {
            return null;
        }

        K8sClientConfig clientConfig = DataAccessor.field(cluster, ClusterConstants.FIELD_K8S_CLIENT_CONFIG, K8sClientConfig.class);
        if (clientConfig == null || StringUtils.isBlank(clientConfig.getAddress())) {
            return null;
        }

        Instance agent = clusterDao.getAnyRancherAgent(cluster);
        if (agent == null) {
            return null;
        }

        // Heuristic: addresses ending in "443" are assumed to speak TLS.
        String scheme;
        if (clientConfig.getAddress().endsWith("443")) {
            scheme = "https";
        } else {
            scheme = "http";
        }
        Map<String, Object> proxyData = CollectionUtils.asMap(
                "scheme", scheme,
                "address", clientConfig.getAddress());

        Date expiration = new Date(System.currentTimeMillis() + ContainerProxyActionHandler.EXPIRE_SECONDS.get() * 1000);
        HostApiAccess apiAccess = apiService.getAccess(request, agent.getHostId(),
                CollectionUtils.asMap("proxy", proxyData),
                expiration, ContainerProxyActionHandler.HOST_PROXY_PATH.get());
        if (apiAccess == null) {
            return null;
        }

        // The host API returns a websocket URL; the proxy endpoint is reached
        // over HTTP(S), so rewrite the leading "ws" scheme prefix.
        return new HostAccess(apiAccess.getUrl().replaceFirst("ws", "http"), apiAccess.getAuthenticationToken());
    }
}
|
<filename>src/main/java/net/jamsimulator/jams/gui/action/defaults/explorerelement/folder/FolderActionRegions.java
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.gui.action.defaults.explorerelement.folder;
import net.jamsimulator.jams.gui.action.context.ContextRegion;
import net.jamsimulator.jams.gui.action.context.ContextSubmenu;
import net.jamsimulator.jams.language.Messages;
/**
 * Declares the context-menu regions (and the "new" submenu) used by the
 * folder explorer element's actions. The third constructor argument is the
 * region's numeric position within the menu.
 */
public class FolderActionRegions {

    public static final ContextRegion NEW = new ContextRegion("new", null, 0);
    public static final ContextRegion CLIPBOARD = new ContextRegion("clipboard", null, 1);
    // NOTE(review): OTHER and ASSEMBLER share position 2 — presumably their
    // relative order is resolved elsewhere; confirm this is intentional.
    public static final ContextRegion OTHER = new ContextRegion("other", null, 2);
    // Very large position so "show" entries always sort last.
    public static final ContextRegion SHOW = new ContextRegion("show", null, 999);
    public static final ContextRegion ASSEMBLER = new ContextRegion("assembler", null, 2);

    // Submenu anchored in the NEW region, with a general region inside it.
    public static final ContextSubmenu NEW_SUBMENU = new ContextSubmenu("new", NEW, Messages.ACTION_FOLDER_EXPLORER_ELEMENT_NEW);
    public static final ContextRegion NEW_GENERAL = new ContextRegion("general", NEW_SUBMENU, 0);
}
|
<gh_stars>0
// Package entry point: expose the Libra constructor from the library module.
const libra = require( './lib/libra' );

module.exports = libra.Libra;
|
import React from 'react'
import PropTypes from 'prop-types'
import styled from 'styled-components'
import { media } from '~src/components/variable/mixin'
import Card from '~src/components/molecules/card'
// Full-bleed horizontal scroller: the negative horizontal margin
// (calc(50% - 50vw)) stretches the box to the viewport width while it
// stays centered in its column; content scrolls on the x axis only.
const Wrapper = styled.div`
  margin: -16px calc(50% - 50vw);
  overflow-x: scroll;
  overflow-y: hidden;
  padding: 16px 0;
  position: relative;
`

// Centered flex row of cards whose max width and gutters grow with the
// viewport (media.xs/s/ms/l breakpoints). The ::after pseudo-element adds
// trailing padding so the last card is not flush against the scroll edge.
const Container = styled.div`
  display: flex;
  margin: 0 auto;
  max-width: 640px;
  width: calc(100% - 16px);
  ${media.xs} {
    width: calc(100% - 32px);
  }
  ${media.s} {
    width: calc(100% - 48px);
  }
  ${media.ms} {
    max-width: 690px;
    width: calc(100% - 64px);
  }
  ${media.l} {
    max-width: 960px;
  }
  &::after {
    content: '';
    display: block;
    padding: 4px;
    ${media.xs} {
      padding: 8px;
    }
    ${media.s} {
      padding: 12px;
    }
    ${media.ms} {
      padding: 16px calc((100vw - 100%) / 4);
    }
  }
`
export default function Best({ edges }) {
const cards = edges.map((edge, i) => <Card key={i} edge={edge} />)
return (
<Wrapper>
<Container>{cards}</Container>
</Wrapper>
)
}
Best.propTypes = {
edges: PropTypes.array,
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.