text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
# Build the CI Docker image from the repository root so the Docker build
# context contains the whole repo.
# Abort on any failure: if git rev-parse fails, we must not pushd into "".
set -euo pipefail
REPO_ROOT="$(git rev-parse --show-toplevel)"
pushd "$REPO_ROOT"
docker build -f docker/ci/Dockerfile -t banano-ci:latest .
popd
|
///<reference path='.\rule.ts' />
///<reference path='.\rulesEngine.ts' />
///<reference path='.\nodes.ts' />
///<reference path='..\compilation\conditionVisitor.ts' />
///<reference path='.\consequences\consequence.ts' />
module Treaty {
    export module Rules {
        // Accumulates rules and produces a RulesEngine with all of them compiled in.
        export class RulesEngineBuilder {
            private rules: Rule[] = [];

            // Queue a rule to be compiled into the next engine built.
            public addRule(rule: Rule): void {
                this.rules.push(rule);
            }

            // Create a fresh engine and compile every queued rule into it.
            public build(): RulesEngine {
                var engine = new Treaty.Rules.RulesEngine();
                this.compileRules(engine);
                return engine;
            }

            private compileRules(engine: RulesEngine): void {
                var compiler = new Treaty.Compilation.RuleCompiler(engine);
                for (var i = 0; i < this.rules.length; i++) {
                    compiler.compile(this.rules[i]);
                }
            }
        }
    }
}
|
<html>
<head>
    <title>Random Greeting</title>
</head>
<body>
    <script>
        // Pick one greeting at random and write it into the document.
        const greetings = ["Hello!", "Hi!", "Good morning!", "Good afternoon!", "Good evening!"];
        const chosen = greetings[Math.floor(Math.random() * greetings.length)];
        document.write(chosen);
    </script>
</body>
</html>
|
#!/bin/bash
# Desktop session setup: wallpaper plus Synaptics touchpad tweaks.
# Short pause so the X session is ready before setting the wallpaper.
sleep 0.2
feh --bg-fill /home/brandon/.config/images/firefly.png
# Disable touchpad input for 1s after keyboard activity (keys incl. repeats).
syndaemon -i 1 -KRd -t
synclient VertScrollDelta=-27 # Reverse synaptics natural scroll
|
echo "=== Acquiring datasets ==="
echo "---"
mkdir -p save
mkdir -p data
cd data

echo "- Downloading WikiText-2 (WT2)"
wget --quiet --continue https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-v1.zip
unzip -q wikitext-2-v1.zip
cd wikitext-2
mv wiki.train.tokens train.txt
mv wiki.valid.tokens valid.txt
mv wiki.test.tokens test.txt
cd ..

# BUGFIX: this section was mislabeled "(WT2)" although it fetches WikiText-103.
echo "- Downloading WikiText-103 (WT103)"
wget --continue https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-103-v1.zip
unzip -q wikitext-103-v1.zip
cd wikitext-103
mv wiki.train.tokens train.txt
mv wiki.valid.tokens valid.txt
mv wiki.test.tokens test.txt
cd ..

echo "- Downloading enwik8 (Character)"
mkdir -p enwik8
cd enwik8
wget --continue http://mattmahoney.net/dc/enwik8.zip
python prep_enwik8.py
cd ..

echo "- Downloading Penn Treebank (PTB)"
wget --quiet --continue http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
tar -xzf simple-examples.tgz
mkdir -p penn
cd penn
mv ../simple-examples/data/ptb.train.txt train.txt
mv ../simple-examples/data/ptb.test.txt test.txt
mv ../simple-examples/data/ptb.valid.txt valid.txt
cd ..

echo "- Downloading Penn Treebank (Character)"
mkdir -p pennchar
cd pennchar
mv ../simple-examples/data/ptb.char.train.txt train.txt
mv ../simple-examples/data/ptb.char.test.txt test.txt
mv ../simple-examples/data/ptb.char.valid.txt valid.txt
cd ..
rm -rf simple-examples/

# BUGFIX: removed a duplicated WikiText-2 download/unzip block that re-fetched
# the same archive, re-ran the renames, and left the shell inside data/wikitext-2.
echo "---"
echo "Happy language modeling :)"
|
import { writeFileSync } from "fs";

/**
 * Extracts sample test cases from a problem page's HTML and writes them to
 * disk as alternating input/answer files (test_1.txt, test_1__answer.txt, ...).
 *
 * @param html            raw HTML of the problem page
 * @param testFileDirPath directory the test files are written into
 */
export const outputTestCase = (html: string, testFileDirPath: string) => {
  const cheerio = require('cheerio');
  const $ = cheerio.load(html);
  // BUGFIX: annotate as string[]; a bare [] infers never[] under strict TS
  // and rejects every push below.
  const testCases: string[] = [];
  $('pre', '.lang-ja').each((i, element) => {
    // Skips the first matched <pre> — presumably not a sample block on these
    // pages; confirm against the page structure.
    if (i !== 0) testCases.push($(element).text());
  });
  testCases.forEach((content, i) => {
    // Samples alternate input/answer: index 0 -> test_1, 1 -> test_1__answer, ...
    const fileName: string =
      i % 2 === 0 ? `test_${i / 2 + 1}.txt` : `test_${Math.floor(i / 2) + 1}__answer.txt`;
    // Flag 'wx' fails if the file already exists, so existing cases are never clobbered.
    writeFileSync(`${testFileDirPath}/${fileName}`, content, { flag: 'wx' });
  });
};
|
import {
GET_ERRORS_SIGNUP,
GET_ERRORS_SIGNIN,
} from "../../actions/authActions/types";
const initialState = { errorSignin: "", errorSignup: "" };
// Auth-error reducer: records the most recent signin/signup error payloads.
export default function (state = initialState, action) {
  if (action.type === GET_ERRORS_SIGNIN) {
    return { ...state, errorSignin: action.payload };
  }
  if (action.type === GET_ERRORS_SIGNUP) {
    return { ...state, errorSignup: action.payload };
  }
  // Any other action: hand back the existing state unchanged.
  return state;
}
|
<gh_stars>0
import {useEffect} from 'react';
import {useRouteMatch, Route, Switch} from 'react-router-dom';
import {useDispatch, useSelector} from 'react-redux';
import {getChatsAction} from '../../store/chats';
import {healthAction} from '../../store/health';
import {Poll} from '../../util/poll';
import {ChatsList} from '../../components/ChatsList';
import {Chat} from './Chat';
import {ErrorMessage} from '../../components/ErrorMessage';
import './ChatsPage.scss';
function ChatsPage() {
const dispatch = useDispatch();
let {path} = useRouteMatch();
// const health = useSelector(state => state.health);
const chats = useSelector(state => state.chats.chats);
const account = useSelector(state => state.account);
const getErrorMessage = useSelector(state => state.chats.error || null);
// Server health up/down status
useEffect(() => {
dispatch(healthAction());
}, [dispatch]);
// Bootstrap with Account Data, then get initial Chats data
useEffect(() => {
let poll;
(async () => {
// Get initial all Chat data
dispatch(getChatsAction({isAllData:true}));
// Poll Chat List - just list updates (let each Chat handle their full updates)
const callback:any = () => { return dispatch(getChatsAction({isAllData:false})); }
// Delay 4s, rare event of new Chat
poll = new Poll({callback, delay: 4000});
})();
return () => { poll.stop(); }
}, [dispatch]);
return (
<section className="chats-page-container">
<ErrorMessage message={getErrorMessage}/>
{/* Interesting - if a link in a hidden text, it is focussable - WARNING */}
{/* <h2 className="sr-only">ChatsPage TODO (<Link to={`/home`}>home</Link>)</h2> */}
<h2 className="sr-only">ChatsPage</h2>
{/* <div>Connection to API: {process.env.REACT_APP_API_URL} is {health.status} with account {account.username}</div> */}
<div className="chats-page">
<ChatsList chats={chats} />
<Switch>
<Route path={`${path}/:chatId`}>
{account.id &&
<Chat account={account} chats={chats} />
}
</Route>
</Switch>
</div>
</section>
)
}
export default ChatsPage;
|
#!/bin/bash
# Vars and respective defaults
# ${VAR:=default} assigns the default only when VAR is unset or empty, so a
# value exported by the caller always wins.
export ACTION=${ACTION:="delete"}
export NAMESPACE=${NAMESPACE:="openshift-etcd"}
export LABEL_SELECTOR=${LABEL_SELECTOR:="''"}
export RUNS=${RUNS:=1}
export DELETE_COUNT=${DELETE_COUNT:=1}
export SLEEP=${SLEEP:=15}
export SCENARIO_TYPE=${SCENARIO_TYPE:=namespace_scenarios}
# NOTE(review): the leading "- " makes the default render as a YAML list item
# when substituted into the scenario template — confirm against the consumer.
export SCENARIO_FILE=${SCENARIO_FILE:=- scenarios/namespace_scenario.yaml}
export SCENARIO_POST_ACTION=${SCENARIO_POST_ACTION:=""}
|
<filename>demo-dubbo/dubbo-provider/src/main/java/com/xkcoding/dubbo/provider/info/SpringBootDemoDubboProviderApplication.java
package com.xkcoding.dubbo.provider.info;
import com.alibaba.dubbo.spring.boot.annotation.EnableDubboConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * <p>
 * Application launcher (entry point).
 * </p>
 *
 * @author yangkai.shen
 * @date Created in 2018-12-25 16:49
 */
@EnableDubboConfiguration
@SpringBootApplication
public class SpringBootDemoDubboProviderApplication {
    public static void main(String[] args) {
        SpringApplication.run(SpringBootDemoDubboProviderApplication.class, args);
    }
}
|
<gh_stars>1-10
module Chartr
  include ActiveSupport
  require "chart"
  # Load every *_chart.rb implementation under charts/. This runs purely for
  # the `require` side effect, so iterate with `each` instead of building a
  # throwaway array with `map`.
  Dir["#{File.dirname(__FILE__)}/charts/**"].each do |chart|
    require chart if chart =~ /.*?_chart.rb$/
  end
end
|
package com.honyum.elevatorMan.net;
import com.honyum.elevatorMan.data.ContartInfo;
import com.honyum.elevatorMan.data.SignInfo;
import com.honyum.elevatorMan.net.base.Response;
import java.util.List;
/** Response wrapper whose payload is a list of sign-in records. */
public class SignInfoResponse extends Response {
    public List<SignInfo> getBody() {
        return body;
    }
    public void setBody(List<SignInfo> body) {
        this.body = body;
    }
    // Payload parsed from the JSON response.
    private List<SignInfo> body;
    /**
     * Build an instance from its JSON representation.
     * @param json raw JSON string
     * @return the parsed response
     */
    public static SignInfoResponse getSignInfoResponse(String json) {
        return (SignInfoResponse) parseFromJson(SignInfoResponse.class, json);
    }
}
|
package com.epul.oeuvre.domains;
import javax.persistence.*;
import java.util.Collection;
import java.util.Objects;
/**
 * JPA entity for a work offered for sale ("oeuvre en vente").
 * Uses property (getter) access: column/association annotations sit on getters.
 */
@Entity
@Table(name = "oeuvrevente", schema = "baseoeuvre", catalog = "")
public class EntityOeuvrevente {
    private Integer idOeuvrevente;    // surrogate primary key
    private String titreOeuvrevente;  // title
    private String etatOeuvrevente;   // single-character state flag
    private Double prixOeuvrevente;   // sale price
    //private Integer idProprietaire;
    private EntityProprietaire proprietaireByIdProprietaire;           // owning proprietor (many-to-one)
    private Collection<EntityReservation> reservationsByIdOeuvrevente; // reservations on this work (one-to-many)
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    @Column(name = "id_oeuvrevente", nullable = false)
    public Integer getIdOeuvrevente() {
        return idOeuvrevente;
    }
    // Overload kept for callers passing a primitive int; auto-boxes into the field.
    public void setIdOeuvrevente(int idOeuvrevente) {
        this.idOeuvrevente = idOeuvrevente;
    }
    public void setIdOeuvrevente(Integer idOeuvrevente) {
        this.idOeuvrevente = idOeuvrevente;
    }
    @Basic
    @Column(name = "titre_oeuvrevente", nullable = false, length = 200)
    public String getTitreOeuvrevente() {
        return titreOeuvrevente;
    }
    public void setTitreOeuvrevente(String titreOeuvrevente) {
        this.titreOeuvrevente = titreOeuvrevente;
    }
    @Basic
    @Column(name = "etat_oeuvrevente", nullable = false, length = 1)
    public String getEtatOeuvrevente() {
        return etatOeuvrevente;
    }
    public void setEtatOeuvrevente(String etatOeuvrevente) {
        this.etatOeuvrevente = etatOeuvrevente;
    }
    @Basic
    @Column(name = "prix_oeuvrevente", nullable = false, precision = 0)
    public Double getPrixOeuvrevente() {
        return prixOeuvrevente;
    }
    // Primitive overload, mirrors the id setter pair above.
    public void setPrixOeuvrevente(double prixOeuvrevente) {
        this.prixOeuvrevente = prixOeuvrevente;
    }
    public void setPrixOeuvrevente(Double prixOeuvrevente) {
        this.prixOeuvrevente = prixOeuvrevente;
    }
    /*@Basic
    @Column(name = "id_proprietaire", insertable =false, updatable=false, nullable = true)
    public Integer getIdProprietaire() {
        return idProprietaire;
    }
    public void setIdProprietaire(Integer idProprietaire) {
        this.idProprietaire = idProprietaire;
    }*/
    // Equality/hash use only the scalar columns; the proprietor and
    // reservation associations are deliberately excluded.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        EntityOeuvrevente that = (EntityOeuvrevente) o;
        return Objects.equals(idOeuvrevente, that.idOeuvrevente) &&
                Objects.equals(titreOeuvrevente, that.titreOeuvrevente) &&
                Objects.equals(etatOeuvrevente, that.etatOeuvrevente) &&
                Objects.equals(prixOeuvrevente, that.prixOeuvrevente);
    }
    @Override
    public int hashCode() {
        return Objects.hash(idOeuvrevente, titreOeuvrevente, etatOeuvrevente, prixOeuvrevente);
    }
    @ManyToOne
    @JoinColumn(name = "id_proprietaire", referencedColumnName = "id_proprietaire")
    public EntityProprietaire getProprietaireByIdProprietaire() {
        return proprietaireByIdProprietaire;
    }
    public void setProprietaireByIdProprietaire(EntityProprietaire proprietaireByIdProprietaire) {
        this.proprietaireByIdProprietaire = proprietaireByIdProprietaire;
    }
    // Inverse side of EntityReservation.oeuvreventeByIdOeuvrevente.
    @OneToMany(mappedBy = "oeuvreventeByIdOeuvrevente")
    public Collection<EntityReservation> getReservationsByIdOeuvrevente() {
        return reservationsByIdOeuvrevente;
    }
    public void setReservationsByIdOeuvrevente(Collection<EntityReservation> reservationsByIdOeuvrevente) {
        this.reservationsByIdOeuvrevente = reservationsByIdOeuvrevente;
    }
}
|
<filename>vi/.vim/bundle/pencil/app/pencil-core/common/collectionSettingEditor.js
/**
 * Editor model for a shape collection's configurable properties.
 * Walks the collection's declared property groups, keeping only properties
 * for which a type editor is registered; groups left empty are dropped.
 */
function CollectionSettingEditor(collection) {
    this.collection = collection;
    var definedGroups = collection.propertyGroups;
    this.properties = {};
    var strippedGroups = [];
    // NOTE(review): for..in is kept as-is — propertyGroups/properties may be
    // plain objects rather than arrays; confirm before converting to for..of.
    for (var i in definedGroups) {
        var group = definedGroups[i];
        var strippedGroup = new PropertyGroup();
        for (var j in group.properties) {
            var property = group.properties[j];
            var editor = TypeEditorRegistry.getTypeEditor(property.type);
            if (editor) {
                strippedGroup.properties.push(property);
                this.properties[property.name] = property.value;
            }
        }
        // Keep only groups that retain at least one editable property.
        if (strippedGroup.properties.length > 0) {
            strippedGroup.name = group.name;
            strippedGroups.push(strippedGroup);
        }
    }
    this.groups = strippedGroups;
}
// Current value of a named property, read from the live collection.
CollectionSettingEditor.prototype.getPropertyValue = function (name) {
    return this.collection.properties[name].value;
};
/**
 * Persist a new value for a named property and update the in-memory copy.
 * The value is round-tripped through its string form so the stored literal
 * matches what the property type will deserialize to.
 */
CollectionSettingEditor.prototype.setPropertyValue = function (name, value) {
    var prop = this.collection.properties[name];
    if (!prop) return;
    // Resolve the value's constructor via the global scope so the literal
    // can be re-parsed with Type.fromString below.
    var Type = window[value.constructor.name];
    if (!Type) return;
    var literal = value.toString();
    // BUGFIX: the original declared `var name`, shadowing the `name`
    // parameter; use a distinct local for the config key instead.
    var configName = ShapeDefCollectionParser.getCollectionPropertyConfigName(this.collection.id, name);
    Config.set(configName, literal);
    prop.value = Type.fromString(literal);
};
// Title shown for the object being edited.
CollectionSettingEditor.prototype.getTargetObjectName = function () {
    return this.collection.displayName;
};
// Always use the simplified single-dialog editing mode.
CollectionSettingEditor.prototype.usingQuickMode = function () {
    return true;
};
// Open the XUL property dialog bound to this editor instance.
CollectionSettingEditor.prototype.open = function () {
    this.propertyWindow = window.openDialog("chrome://pencil/content/propertyDialog.xul", "propertyEditor" + Util.getInstanceToken(), "chrome,dialog,alwaysRaised,dependent", this);
};
// Late-init hook: populate the dialog once it is shown.
CollectionSettingEditor.prototype.onDialogShown = function () {
    this.propertyWindow.setup();
};
|
<!DOCTYPE html>
<html>
<head>
    <title>Form Demo</title>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
    <script>
        $(function() {
            // BUGFIX: the button previously defaulted to type=submit inside the
            // form, so the browser submitted/reloaded the page and aborted the
            // AJAX request. Mark it type=button and prevent default submission.
            $('#submitForm').click(function(e) {
                e.preventDefault();
                var data = {
                    'name': $('#name').val(),
                    'email': $('#email').val(),
                };
                $.ajax({
                    type: 'POST',
                    url: '/submitForm',
                    data: JSON.stringify(data),
                    contentType: 'application/json',
                    success: function (response) {
                        alert(response);
                    },
                    error: function(err) {
                        alert(err);
                    }
                });
            });
        });
    </script>
</head>
<body>
    <form>
        <input type="text" id="name" placeholder="Name" />
        <input type="email" id="email" placeholder="Email" />
        <button type="button" id="submitForm">Submit</button>
    </form>
</body>
</html>
|
/* ElementPool.cpp */
//----------------------------------------------------------------------------------------
//
// Project: CCore 2.00
//
// Tag: Fundamental Mini
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2015 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/inc/ElementPool.h>
#include <CCore/inc/MemBase.h>
#include <CCore/inc/Exception.h>
namespace CCore {

/* class MemPool */

// Allocates a fresh raw block of alloc_len bytes, placement-constructs a
// Node header at its start, links it into the block list, and returns the
// block's base place.
Place<void> MemPool::allocBlock(ulen alloc_len)
{
    Place<void> ptr=PlaceAt(MemAlloc(alloc_len));
    list.ins(new(ptr) Node);
    return ptr;
}

// Switches to a new current block. The unused tail of the previous current
// block is returned to the allocator via MemShrink before switching.
void MemPool::newBlock()
{
    Place<void> new_block=allocBlock(block_len);
    if( avail ) MemShrink(block,block_len-avail);
    block=new_block;
    cur=new_block+Delta;     // skip the Node header at the block start
    avail=block_len-Delta;
}

// block_len_ is rounded down to the allocation alignment; it must exceed the
// Node header size (Delta) or construction fails via the CCore exception
// mechanism (Printf on Exception).
MemPool::MemPool(ulen block_len_,ulen mem_cap_)
 : block(0),
   cur(0),
   avail(0),
   mem_cap(mem_cap_)
{
    block_len=AlignDown(block_len_);
    if( block_len<=Delta )
    {
        Printf(Exception,"CCore::MemPool::MemPool(block_len=#;,...) : too short",block_len_);
    }
}

// Frees every block still linked in the list.
MemPool::~MemPool()
{
    while( Node *node=list.del() ) MemFree(node);
}

// Bump allocation of len bytes (aligned up; zero rounds to one alignment
// unit). Large requests relative to the current block get a dedicated block;
// otherwise a new current block is started when the current one is too full.
// mem_cap is the remaining budget and is decremented on every success.
Place<void> MemPool::alloc(ulen len)
{
    if( len>MaxLen ) GuardNoMem(len);
    len=Align(len);
    if( !len ) len=MaxAlign;
    if( len>mem_cap ) GuardNoMem(len);
    if( avail<len )
    {
        // A dedicated block avoids wasting a half-full current block on one
        // oversized request.
        if( avail>block_len/2 || len+Delta>block_len )
        {
            auto ret=allocBlock(len+Delta)+Delta;
            mem_cap-=len;
            return ret;
        }
        newBlock();
    }
    avail-=len;
    mem_cap-=len;
    return cur+=len;   // returns the place *after* the bump — CCore convention
}

// Gives the unused tail of the current block back to the allocator; further
// alloc() calls will start a new block.
void MemPool::shrink_extra()
{
    if( avail )
    {
        MemShrink(block,block_len-avail);
        avail=0;
    }
}

// Frees all blocks and resets the pool to its empty state.
void MemPool::erase()
{
    while( Node *node=list.del() ) MemFree(node);
    block=Place<void>(0);
    cur=Place<void>(0);
    avail=0;
}

/* class ElementPool */

// Copies the string's characters into pool storage and returns a StrLen
// referring to the pooled copy.
StrLen ElementPool::dup(StrLen str)
{
    char *ptr=pool.alloc(str.len);
    str.copyTo(ptr);
    return StrLen(ptr,str.len);
}

} // namespace CCore
|
#!/bin/bash
# Train a Proxy-Anchor-loss image-retrieval model (GoogLeNet + CGD head,
# 1024-d embedding) on the CUB dataset. --gpu-id -1 runs on CPU.
# NOTE(review): remaining flag semantics (warm, bn-freeze, gd_config) are
# defined by train.py — confirm there before changing them.
python train.py --gpu-id -1 \
                --loss Proxy_Anchor \
                --model googlenet_cgd \
                --embedding-size 1024 \
                --batch-size 120 \
                --lr 1e-4 \
                --dataset cub \
                --warm 5 \
                --bn-freeze 1 \
                --lr-decay-step 5 \
                --gd_config SMG \
                --experiment_name cse_googlenet_cgd_1024
|
<reponame>raulrozza/Gametask_Mobile<filename>src/modules/chooseGame/view/components/GameImage/index.tsx
import React from 'react';
import { Image, ImageStyle, StyleProp } from 'react-native';
interface GameImageProps {
url?: string;
style?: StyleProp<ImageStyle>;
}
const GameImage: React.FC<GameImageProps> = ({ url, style }) => {
return (
<Image
style={style}
source={
url
? {
uri: url,
}
: require('assets/img/games/placeholder.png')
}
/>
);
};
export default GameImage;
|
<gh_stars>0
package com.twu.Actions;
import com.twu.Helpers.InputReader;
import com.twu.Helpers.Messages;
import com.twu.Helpers.Printer;
import com.twu.biblioteca.BibliotecaManager;
/** Console command that checks a movie out of the library. */
public class MovieCheckOut extends Command {
    public MovieCheckOut(String name, Integer id, Printer printer, InputReader inputReader, BibliotecaManager bibliotecaManager) {
        super(name, id, printer, inputReader, bibliotecaManager);
    }
    // Prompt for a movie reference and delegate the checkout to the manager.
    public void executeAction(){
        bibliotecaManager.manageMovieCheckOut(getItemReference());
    }
    // Show the selection prompt (in blue) and return the user's raw input.
    public String getItemReference() {
        printer.printWithColor(Messages.SELECT_MESSAGE, "BLUE");
        return inputReader.getInput();
    }
}
|
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail

# Build the nginx-operator binary for linux/amd64 into tmp/_output/bin.

# Use POSIX `command -v` rather than the non-standard `which`.
if ! command -v go > /dev/null; then
    echo "golang needs to be installed"
    exit 1
fi

# Quote path expansions: $(pwd) may contain spaces.
BIN_DIR="$(pwd)/tmp/_output/bin"
mkdir -p "${BIN_DIR}"
PROJECT_NAME="nginx-operator"
REPO_PATH="github.com/tsuru/nginx-operator"
BUILD_PATH="${REPO_PATH}/cmd/${PROJECT_NAME}"
echo "building ${PROJECT_NAME}..."
GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -o "${BIN_DIR}/${PROJECT_NAME}" "${BUILD_PATH}"
|
// AMD chat-client socket wrapper: exposes imperative send/join/register
// helpers and fans incoming socket events out through `signal` dispatchers.
// NOTE(review): `rooms`, `users`, `signal`, `coverError` and `errorModal`
// are not defined in this module — presumably globals supplied elsewhere;
// confirm before refactoring.
define(['io'], function(io) {
    return function() {
        var self = this;
        var socket = io();
        // NOTE(review): no-op expression statement — declares nothing.
        this.users;
        // Send a chat message; always reports success to the caller.
        this.send = function(msg) {
            socket.emit('msg', msg);
            return true;
        };
        this.getRooms = function() {
            return rooms.getData();
        };
        this.getUsers = function() {
            return users;
        };
        this.joinRoom = function(id, name) {
            socket.emit('joinRoom', JSON.stringify({id: id, name: name}));
        };
        this.requestRoomList = function() {
            socket.emit('getRooms', 'list');
        };
        this.requestUserList = function() {
            socket.emit('getUsers', 'list');
        };
        this.registerUser = function(username) {
            socket.emit('register', {name: username});
        };
        // NOTE(review): name typo ("unregiser") kept — callers may depend on it.
        this.unregiserUser = function() {
            socket.emit('unregister', 'unregister');
        };
        this.setDescription = function(description) {
            socket.emit('description', description);
        };
        // --- Incoming socket events are re-dispatched via `signal` below ---
        socket.on('msg', function(msg) {
            console.log('Mesage received: ' + msg);
            signal.msgReceived.dispatch(msg);
        });
        socket.on('userdata', function(rcvData) {
            signal.userUpdated.dispatch(JSON.parse(rcvData));
            self.requestRoomList();
        });
        socket.on('userlist', function(data) {
            try {
                var users = JSON.parse(data);
                signal.userlistReceived.dispatch(users);
                console.log('Userlist updated.');
            }
            catch(e) {
                console.log('There was an error during userlist fetch');
            }
        });
        socket.on('roomlist', function(data) {
            try {
                rooms.updateList(JSON.parse(data));
                signal.roomlistReceived.dispatch();
                console.log('Roomlist updated.');
            }
            catch(e) {
                console.log('There was an error during roomlist fetch');
            }
        });
        socket.on('joinedroom', function(roomData) {
            try {
                signal.roomJoined.dispatch(JSON.parse(roomData));
                self.requestUserList();
                console.log('Room data received.');
            }
            catch(e) {
                console.log('There was an error during roomlist fetch');
            }
        });
        socket.on('userJoined', function(userData) {
            try {
                var data = JSON.parse(userData);
                signal.userJoinedRoom.dispatch(data);
                console.log('User ' + data.name + ' joined the conversation.');
            }
            // NOTE(review): parse errors are deliberately swallowed here.
            catch(e) {
            }
        });
        socket.on('userLeft', function(userData) {
            try {
                var data = JSON.parse(userData);
                signal.userLeftRoom.dispatch(data);
                console.log('User ' + data.name + ' left the conversation.');
            }
            catch(e) {
            }
        });
        socket.on('reconnect_error', function() {
            console.log('Connection error');
            coverError('Connection error');
        });
        socket.on('reconnect', function() {
            console.log('Reconnected');
            errorModal.dispose();
        });
    };
});
|
<filename>__tests__/splitwise.test.js
const { OAuth2 } = require('oauth');
const Splitwise = require('../src');
// Mock the oauth library so no real HTTP/OAuth traffic happens in tests.
jest.mock('oauth');
describe('Splitwise', () => {
  // Reset the mocked constructor's call/instance records between tests.
  beforeEach(() => {
    OAuth2.mockClear();
  });
  test('creates splitwise compatible parameters', () => {
    expect(OAuth2).not.toHaveBeenCalled();
    Splitwise({
      consumerKey: 'your key here',
      consumerSecret: 'your secret here',
    });
    const mockOAuthInstance = OAuth2.mock.instances[0];
    const mockGetOAuthAccessToken = mockOAuthInstance.getOAuthAccessToken;
    expect(OAuth2).toHaveBeenCalledTimes(1);
    // Without an accessToken option, the client fetches one at construction.
    expect(mockGetOAuthAccessToken).toHaveBeenCalledTimes(1);
  });
  test('calls the provided logger', () => {
    const mockLogger = jest.fn();
    // ref: https://github.com/facebook/jest/issues/6329
    const mockWrapper = (...args) => mockLogger(args);
    Splitwise({
      consumerKey: 'your key here',
      consumerSecret: 'your secret here',
      logger: mockWrapper,
    });
    expect(mockLogger).toHaveBeenCalledTimes(1);
  });
  test('throws without a consumer key or secret', () => {
    expect(() => Splitwise()).toThrow();
    expect(() => Splitwise({})).toThrow();
    expect(() => Splitwise({ consumerKey: 'foo' })).toThrow();
    expect(() => Splitwise({ consumerSecret: 'foo' })).toThrow();
  });
  test('does not fetch access token if provided', () => {
    Splitwise({
      consumerKey: 'your key here',
      consumerSecret: 'your secret here',
      accessToken: 'foo',
    });
    const mockOAuthInstance = OAuth2.mock.instances[0];
    const mockGetOAuthAccessToken = mockOAuthInstance.getOAuthAccessToken;
    expect(mockGetOAuthAccessToken).not.toHaveBeenCalled();
  });
  test('createDebt wraps createExpense', async () => {
    expect(OAuth2).not.toHaveBeenCalled();
    const sw = Splitwise({
      consumerKey: 'your key here',
      consumerSecret: 'your secret here',
      accessToken: 'foo',
    });
    const mockOAuthInstance = OAuth2.mock.instances[0];
    // eslint-disable-next-line no-underscore-dangle
    const mockOauthRequest = mockOAuthInstance.get;
    // Stub HTTP GET to immediately succeed with an empty JSON array.
    mockOauthRequest.mockImplementation((_, __, cb) => cb(null, '[]'));
    expect(mockOauthRequest).not.toHaveBeenCalled();
    await sw.getGroups();
    expect(OAuth2).toHaveBeenCalledTimes(1);
    expect(mockOauthRequest).toHaveBeenCalledTimes(1);
  });
});
|
#include <stdio.h>
/*
 * Returns nonzero when `number` is divisible by 3, zero otherwise.
 *
 * BUGFIX: the original used the "subtract twice the last digit" reduction but
 * let the working value go negative, so the loop exited early and many
 * multiples of 3 (e.g. 3, 9, 12, 15) were reported as not divisible.
 * This version uses the repeated digit-sum rule (n is congruent to its digit
 * sum mod 3) and also handles negative inputs correctly.
 */
int isDivisibleBy3(int number) {
    /* Work on the absolute value so negative inputs behave correctly. */
    if (number < 0) number = -number;
    /* Reduce to a single digit via repeated digit sums. */
    while (number >= 10) {
        int sum = 0;
        while (number > 0) {
            sum += number % 10;
            number /= 10;
        }
        number = sum;
    }
    /* Single-digit multiples of 3: 0, 3, 6, 9. */
    return number == 0 || number == 3 || number == 6 || number == 9;
}
int main(void)
{
    const int n = 9;
    /* Choose the message based on the divisibility test, then print it. */
    const char *fmt = isDivisibleBy3(n)
        ? "%d is divisible by 3"
        : "%d is not divisible by 3";
    printf(fmt, n);
    return 0;
}
|
<filename>gulpfile.js
const gulp = require('gulp'),
tape = require('gulp-tape'),
tap_colorize = require('tap-colorize'),
nodemon = require('gulp-nodemon'),
sequence = require('gulp-sequence');
const rollup = require('rollup'),
buble = require('rollup-plugin-buble'),
commonjs = require('rollup-plugin-commonjs'),
node_resolve = require('rollup-plugin-node-resolve');
// Rollup cache reused across rebuilds to speed up incremental bundling.
let cache = null;
gulp.task('bundle', rollupBundle);
// Run the tape specs with colorized TAP output.
gulp.task('test', () =>
  gulp.src('spec/*.js').pipe(tape({ reporter: tap_colorize() })));
// 'build' = bundle then test, strictly in that order (gulp 3 + gulp-sequence).
gulp.task('build', sequence('bundle', 'test'));
// 'dev' builds once, then watches sources/specs and re-runs 'build' on change.
gulp.task('dev', ['build'], () =>
  nodemon({
    script: 'sandbox.js',
    watch: ['sandbox.js', 'spec/*.js', 'src/**/*.js'],
    tasks: ['build']
  }));
// NOTE(review): appears unused — no task references dev(); confirm before removal.
function dev() { return sequence('bundle', 'test'); }
// Bundle src/index.js to ES-module and UMD outputs, treating immutable as an
// external/global dependency.
function rollupBundle() {
  return rollup.rollup({
    entry: 'src/index.js',
    external: ['immutable'],
    globals: { immutable: 'immutable' },
    cache: cache,
    plugins: [
      buble(),
      node_resolve({
        module: true,
        jsnext: true,
        main: true,
        browser: true
      }),
      commonjs({
        include: 'node_modules/**',
        namedExports: { immutable: ['Record', 'fromJS', 'Map', 'List'] }
      })
    ]
  }).then((bundle) => {
    // Keep the bundle around so the next rollup() call is incremental.
    cache = bundle;
    console.log('Bundling complete; writing to dist/');
    let es = bundle.write({
      dest: 'dist/bundle.es2015.js',
      format: 'es',
      // exports: 'named',
      moduleName: 'immuTree',
      sourceMap: true
    });
    let umd = bundle.write({
      dest: 'dist/bundle.umd.js',
      format: 'umd',
      // exports: 'named',
      globals: { immutable: 'immutable' },
      moduleName: 'immuTree',
      sourceMap: true
    });
    return Promise.all([es, umd]);
  }).then((bundles) => {
    console.log('Writing complete!');
    return bundles;
  }).catch((err) => {
    // Errors are logged and swallowed so watch mode keeps running.
    console.log(err.message, err.stack);
    return null;
  });
}
|
#include <iostream>
#include <cstdlib>
#include <ctime>
int main() {
    // Seed for random number generation
    srand(static_cast<unsigned int>(time(0)));
    // Generate random number between 1 and 100
    const int secret = (rand() % 100) + 1;
    int guess = 0;
    int attempts = 0;

    // Keep prompting until the user hits the secret number.
    while (true) {
        std::cout << "Enter your guess (between 1 and 100): ";
        std::cin >> guess;
        ++attempts;
        if (guess < secret) {
            std::cout << "Too low! Try again." << std::endl;
        } else if (guess > secret) {
            std::cout << "Too high! Try again." << std::endl;
        } else {
            std::cout << "Congratulations! You guessed the correct number in " << attempts << " attempts." << std::endl;
            break;
        }
    }
    return 0;
}
|
#!/bin/bash
# Render the cerberus config from its template and launch cerberus.
# -e: stop on first failure; -x: trace commands for CI logs.
set -ex
# Source env.sh to read all the vars
source /root/main_env.sh
source /root/env.sh
source /root/common_run.sh
# Pre-flight validation provided by common_run.sh.
checks
# Substitute config with environment vars defined
envsubst < /root/cerberus/config/cerberus.yaml.template > /root/cerberus/config/config.yaml
# Run cerberus
cd /root/cerberus
python3 start_cerberus.py --config=config/config.yaml
|
def optimize_function(f, learning_rate, iterations):
    """Run plain gradient descent and return the final weight vector.

    Weights start uniformly at random with one entry per row of ``f``
    (``f.shape[0]``); each step moves against the gradient estimate from
    ``estimate_gradient``.
    """
    weights = np.random.rand(f.shape[0])
    step = 0
    while step < iterations:
        gradient = estimate_gradient(f, weights)
        weights = weights - learning_rate * gradient
        step += 1
    return weights
|
#include "EditLexer.h"
#include "EditStyleX.h"
static KEYWORDLIST Keywords_SQL = {{
//++Autogenerated -- start of section automatically generated
"abort abs absent absolute access accessible according account acos action active ada add admin after against aggregate "
"algorithm alias all allocate also alter always "
"analyse analyze and ansi_defaults ansi_null_dflt_off ansi_null_dflt_on ansi_nulls ansi_padding ansi_warnings any "
"application are arithabort arithignore array_agg array_max_cardinality "
"as asc ascii asensitive asin assembly assertion assignment asymmetric at atan atomic attach attribute attributes "
"audit authorization authors auto_increment autoextend_size autoincrement availability avg avg_row_length "
"backup backward base64 before begin begin_frame begin_partition bernoulli between binding binlog bit_length "
"block blocked body bom both breadth break broker browse btree buckets bulk by "
"cache caching call called cardinality cascade cascaded case cast catalog catalog_name ceil ceiling certificate "
"chain chaining change changed channel char_length "
"character_length character_set_catalog character_set_name character_set_schema characteristics characters charset "
"check checked checkpoint checksum cipher "
"class class_origin classification classifier client clone close cluster clustered coalesce cobol code code_units "
"collate collation collation_catalog collation_name collation_schema collect collection "
"column column_format column_name columns columnstore "
"command_function command_function_code comment comments commit committed "
"compact completion component compressed compression compute concat_null_yields_null concurrent concurrently "
"condition condition_identifier condition_number conditional configuration conflict connect connection connection_name "
"consistent constraint constraint_catalog constraint_name constraint_schema constraints constructor constructors "
"contains containstable content context context_info continue contract contributors control "
"conversation conversion convert copy corr corresponding cos cosh cost count covar_pop covar_samp cpu "
"create credential cross cryptographic csv cube cume_dist current current_catalog current_collation "
"current_date current_default_transform_group current_path current_role current_row current_schema "
"current_time current_timestamp current_transform_group_for_type current_user cursor cursor_close_on_commit cursor_name "
"cycle "
"data database databases datafile datalink datefirst dateformat datetime_interval_code datetime_interval_precision "
"day_hour day_microsecond day_minute day_second db dbcc deadlock_priority deallocate decfloat declare "
"default default_auth defaults deferrable deferred define defined definer definition degree "
"delay_key_write delayed delete delimiter delimiters dense_rank deny depends depth deref derived des_key_file desc "
"describe describe_catalog describe_name "
"describe_procedure_specific_catalog describe_procedure_specific_name describe_procedure_specific_schema describe_schema "
"description descriptor destroy destructor detach deterministic "
"diagnostics dialog dictionary directory disable discard disconnect disk dispatch distinct distinctrow distributed div "
"dlnewcopy dlpreviouscopy "
"dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver "
"dlvalue do do_domain_ids document domain drop dual dump dumpfile duplicate "
"dynamic dynamic_function dynamic_function_code "
"each element else elseif elsif empty enable enclosed encoding encrypted encryption "
"end end-exec end_frame end_partition endpoint ends enforced engine engines equals errlvl error errors escape escaped "
"event events every except exception exchange exclude excluding exclusive exec execute exists exit "
"exp expansion expire explain export expression extended extension extent_size external extract "
"fail failed_login_attempts false family fast faults fetch fields file file_block_size filelistonly fillfactor filter "
"final finish finish_catalog finish_name "
"finish_procedure_specific_catalog finish_procedure_specific_name finish_procedure_specific_schema finish_schema "
"fips_flagger first first_value flag floor flush fmtonly "
"following follows for force forceplan foreign format fortran forward found "
"frac_second frame_row free freetext freetexttable freeze from fs fulfill fulfill_catalog fulfill_name "
"fulfill_procedure_specific_catalog fulfill_procedure_specific_name fulfill_procedure_specific_schema fulfill_schema "
"full fulltext fulltexttable function functions fusion "
"general generated get get_format get_master_public_key get_transmission_status glob global go goto governor "
"grant granted grants greatest group group_replication grouping groups "
"hadr handle handler has_pass_through_columns has_pass_thru_cols hash having header headeronly help hex "
"hierarchy high_priority histogram history hold holdlock host hosts hour_microsecond hour_minute hour_second "
"id identified identity identity_insert identitycol if ignore ignore_domain_ids ignore_server_ids ilike "
"immediate immediately immutable implementation implicit implicit_transactions import in inactive "
"include including increment indent index indexed indexes indicator infile inherit inherits "
"initial initial_size initialize initially inline inner innobase innodb inout input "
"insensitive insert insert_method install instance instantiable instead integrity intersect intersection into invoker "
"io io_after_gtids io_before_gtids io_thread ipc is is_prunable isnull isolation issuer iterate "
"join "
"json_array json_arrayagg json_exists json_object json_objectagg json_query json_table json_table_primitive json_value "
"keep key key_block_size key_member key_type keys kill "
"label labelonly lag language last last_value lateral lead leading leakproof least leave leaves left length less level "
"library like like_regex limit linear lineno lines link list listagg listen ln load "
"local localtime localtimestamp location locator lock lock_timeout locked locks log log10 logfile logged login logs loop "
"low_priority lower "
"map mapping master master_auto_position master_bind master_compression_algorithms master_connect_retry master_delay "
"master_heartbeat_period master_host master_log_file master_log_pos master_password master_port master_public_key_path "
"master_retry_count master_server_id master_ssl "
"master_ssl_ca master_ssl_capath master_ssl_cert master_ssl_cipher master_ssl_crl master_ssl_crlpath master_ssl_key "
"master_ssl_verify_server_cert master_tls_version master_user master_zstd_compression_level "
"match match_number match_recognize matched matches materialized max max_connections_per_hour max_queries_per_hour "
"max_rows max_size max_statement_time max_updates_per_hour max_user_connections maxvalue "
"measures medium member memory merge message message_length message_octet_length message_text method "
"microsecond middleint migrate min min_rows minute_microsecond minute_second minvalue "
"mod mode modifies modify module more move mumps mutex mysql_errno "
"name names namespace natural ndb ndbcluster nested nesting network_namespace never new next nfc nfd nfkc nfkd nil no "
"no_wait no_write_to_binlog nocase nocheck nocount nodegroup noexec nonblocking nonclustered none normalize normalized "
"not nothing notification notify notnull nowait nth_value ntile null nullable nullif nulls numeric_roundabort "
"occurrences_regex octet_length octets of off offset offsets oids oj old old_password omit on one one_shot only "
"open opendatasource openquery openrowset openxml operation operator "
"optimize optimizer_costs option optional optionally options or order ordering ordinality organization others "
"out outer outfile output over overflow overlaps overlay overriding owned owner "
"pack_keys package pad page parallel parameter parameter_mode parameter_name parameter_ordinal_position "
"parameter_specific_catalog parameter_specific_name parameter_specific_schema parameters "
"parse_gcol_expr parseonly parser partial partition partitioning partitions "
"pascal pass passing passthrough password password_lock_time past pattern "
"per percent percent_rank percentile_cont percentile_disc period permission permute persist persist_only phase pivot "
"placing plan plans pli plugin plugin_dir plugins policy pool port portion position position_regex postfix power pragma "
"precedes preceding prefix preorder prepare prepared preserve prev "
"primary print prior priority private private_parameters private_params_s privilege_checks_user privileges "
"proc procedural procedure procedures process processlist profile profiles program property provider proxy prune ptf "
"public publication purge "
"quarter query query_governor_cost_limit queue quick quote quoted_identifier quotes "
"raise raiserror random range rank read read_only read_write reads readtext reassign rebuild "
"receive recheck reconfigure recover recovery recursive redo redo_buffer_size redofile redundant "
"ref_system_id reference references referencing refresh "
"regexp regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex "
"relative relay relay_log_file relay_log_pos relay_thread relaylog release reload remote remote_proc_transactions remove "
"rename reorganize repair repeat repeatable replace replica replicas replicate_do_db replicate_do_table "
"replicate_ignore_db replicate_ignore_table replicate_rewrite_db replicate_wild_do_table replicate_wild_ignore_table "
"replication require require_row_format requiring reset resignal resource respect restart restore restrict result resume "
"ret_only_pass_thru return returned_cardinality returned_length returned_octet_length returned_sqlstate returning "
"returns returns_only_pass_through reuse reverse revert revoke rewindonly right rlike role rollback rollup rotate "
"route routine routine_catalog routine_name routine_schema routines "
"row_count row_format row_number rowcount rowguidcol rows rowtype rtree rtrim rule running "
"save savepoint "
"scalar scale schedule schema schema_name schemas scheme scope scope_catalog scope_name scope_schema scoped scroll "
"search second_microsecond secondary secondary_engine secondary_engine_attribute secondary_load secondary_unload section "
"security securityaudit seek select selective self "
"semantickeyphrasetable semanticsimilaritydetailstable semanticsimilaritytable send sensitive sensitivity separator "
"sequence sequences serializable server server_name service session session_user set setof sets setuser "
"share show showplan_all showplan_text showplan_xml shutdown signal signature similar simple sin sinh size skip "
"slave slow snapshot socket some soname sounds source space spatial specific specific_name specification specifictype "
"sql sql_after_gtids sql_after_mts_gaps sql_before_gtids sql_big_result sql_buffer_result sql_cache sql_calc_found_rows "
"sql_no_cache sql_small_result sql_thread sql_tsi_day sql_tsi_frac_second sql_tsi_hour sql_tsi_minute sql_tsi_month "
"sql_tsi_quarter sql_tsi_second sql_tsi_week sql_tsi_year sqlca sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt "
"srid ssl stable stacked standalone start start_catalog start_name "
"start_procedure_specific_catalog start_procedure_specific_name start_procedure_specific_schema start_schema starting "
"starts state statement static statistics stats_auto_recalc stats_persistent stats_sample_pages status "
"stddev_pop stddev_samp stdin stdout stop stoplist storage stored straight_join stream strict string strip structure "
"style subclass_origin subject submultiset subpartition subpartitions subscription subset substring substring_regex "
"succeeds sum super support suspend swaps switches symmetric synonym sysid system system_time system_user "
"table table_checksum table_name table_semantics tables tablesample tablespace tan tanh "
"temp template temporary temptable terminate terminated textsize than then thread_priority through "
"ties timer timestampadd timestampdiff tls to token top top_level_count trailing tran "
"transaction transaction_active transactions_committed transactions_rolled_back transform transforms "
"translate translate_regex translation treat "
"trigger trigger_catalog trigger_name trigger_schema triggers trim trim_array true truncate trusted try_convert tsequal "
"type types "
"uescape unbounded uncommitted unconditional undefined under undo undo_buffer_size undofile unencrypted "
"unicode uninstall union unique unknown unlink unlisten unlock unlogged unmatched unnamed unnest unpivot until untyped "
"update updatetext upgrade upper uri usage use use_frm "
"user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema user_resources "
"using utc_date utc_time utc_timestamp utf16 utf32 utf8 "
"vacuum valid validate validation validator value value_of values var_pop var_samp variable variables variadic vcpu "
"verbose verifyonly version versioning view views virtual visible volatile "
"wait waitfor warnings week weight_string when whenever where while whitespace width_bucket window with within without "
"work workload wrapper write writetext "
"x509 xa xact_abort xid xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument "
"xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize "
"xmltable xmltext xmlvalidate xor "
"year_month yes zerofill zone "
, // 1 data types
"anydata anydataset anytype array "
"bfile bigint bigserial binary binary_double binary_float bit blob bool boolean box byte bytea "
"char character cidr circle clob date datetime datetime2 datetimeoffset day dec decimal double enum "
"fixed float float4 float8 geography geomcollection geometry geometrycollection hierarchyid hour "
"image inet int int1 int2 int3 int4 int8 integer interval json jsonb large line linestring long longblob longtext lseg "
"macaddr macaddr8 mediumblob mediumint mediumtext minute money month multilinestring multipoint multipolygon multiset "
"national native nchar nclob ntext number numeric nvarchar nvarchar2 object path pg_lsn point polygon precision "
"real ref row rowid rowversion "
"sdo_geometry sdo_georaster sdo_topo_geometry second serial serial2 serial4 serial8 signed "
"smalldatetime smallint smallmoney smallserial sql_variant "
"text "
"time timestamp timestamptz timetz timezone_abbr timezone_hour timezone_minute timezone_region tinyblob tinyint tinytext "
"tsquery tsvector txid_snapshot "
"uniqueidentifier unsigned urowid uuid varbinary varbit varchar varchar2 varcharacter varying xml year "
, // 2 functions
"adddate( addtime( aes_decrypt( aes_encrypt( any_value( area( asbinary( astext( aswkb( aswkt( "
"asymmetric_decrypt( asymmetric_derive( asymmetric_encrypt( asymmetric_sign( asymmetric_verify( atan2( "
"benchmark( bin( bin_to_uuid( binlog_gtid_pos( bit_and( bit_count( bit_or( bit_xor( boundary( buffer( "
"can_access_column( can_access_database( can_access_table( can_access_view( centroid( changes( coercibility( "
"column_add( column_check( column_create( column_delete( column_exists( column_get( column_json( column_list( compress( "
"concat( concat_ws( connection_id( conv( convert_tz( convexhull( cot( "
"crc32( create_asymmetric_priv_key( create_asymmetric_pub_key( create_dh_parameters( create_digest( crosses( "
"curdate( curtime( "
"date_add( date_format( date_sub( datediff( dayname( dayofmonth( dayofweek( dayofyear( "
"decode( decode_histogram( degrees( des_decrypt( des_encrypt( dimension( disjoint( distance( "
"elt( encode( encrypt( envelope( export_set( exteriorring( extractvalue( "
"field( find_in_set( found_rows( from_base64( from_days( from_unixtime( "
"geomcollfromtext( geomcollfromwkb( "
"geometrycollectionfromtext( geometrycollectionfromwkb( geometryfromtext( geometryfromwkb( geometryn( geometrytype( "
"geomfromtext( geomfromwkb( get_dd_column_privileges( get_dd_create_options( get_dd_index_sub_part_length( get_lock( "
"glength( group_concat( gtid_subset( gtid_subtract( "
"icu_version( ifnull( iif( inet6_aton( inet6_ntoa( inet_aton( inet_ntoa( instr( interiorringn( "
"internal_auto_increment( internal_avg_row_length( internal_check_time( internal_checksum( "
"internal_data_free( internal_data_length( internal_dd_char_length( "
"internal_get_comment_or_error( internal_get_view_warning_or_error( "
"internal_index_column_cardinality( internal_index_length( internal_keys_disabled( internal_max_data_length( "
"internal_table_rows( internal_update_time( intersects( "
"is_free_lock( is_ipv4( is_ipv4_compat( is_ipv4_mapped( is_ipv6( is_used_lock( is_uuid( is_visible_dd_object( isclosed( "
"isempty( isring( issimple( "
"json_append( json_array_append( json_array_insert( json_array_length( json_compact( json_contains( json_contains_path( "
"json_depth( json_detailed( json_each( json_extract( json_group_array( json_group_object( json_insert( json_keys( "
"json_length( json_loose( json_merge( json_merge_patch( json_merge_preserve( json_patch( json_pretty( json_quote( "
"json_remove( json_replace( json_search( json_set( json_storage_free( json_storage_size( json_tree( json_type( "
"json_unquote( json_valid( julianday( "
"last_day( last_insert_id( last_insert_rowid( lastval( lcase( "
"likelihood( likely( linefromtext( linefromwkb( linestringfromtext( linestringfromwkb( "
"load_extension( load_file( locate( log2( lpad( ltrim( "
"make_set( makedate( maketime( master_gtid_wait( master_pos_wait( mbrcontains( mbrcoveredby( mbrcovers( mbrdisjoint( "
"mbrequal( mbrequals( mbrintersects( mbroverlaps( mbrtouches( mbrwithin( md5( median( mid( mlinefromtext( mlinefromwkb( "
"monthname( mpointfromtext( mpointfromwkb( mpolyfromtext( mpolyfromwkb( multilinestringfromtext( multilinestringfromwkb( "
"multipointfromtext( multipointfromwkb( multipolygonfromtext( multipolygonfromwkb( "
"name_const( nextval( now( numgeometries( numinteriorrings( numpoints( oct( ord( "
"period_add( period_diff( pi( "
"pointfromtext( pointfromwkb( pointn( pointonsurface( polyfromtext( polyfromwkb( polygonfromtext( polygonfromwkb( pow( "
"printf( "
"radians( rand( random_bytes( randomblob( "
"regexp_instr( regexp_like( regexp_replace( regexp_substr( release_all_locks( release_lock( roles_graphml( round( rpad( "
"sec_to_time( setval( sha( sha1( sha2( sign( sleep( soundex( "
"spider_bg_direct_sql( spider_copy_tables( spider_direct_sql( spider_flush_table_mon_cache( sql_thread_wait_after_gtids( "
"sqlite_compileoption_get( sqlite_compileoption_used( sqlite_offset( sqlite_source_id( sqlite_version( "
"st_area( st_asbinary( st_asgeojson( st_astext( st_aswkb( st_aswkt( st_boundary( st_buffer( st_buffer_strategy( "
"st_centroid( st_contains( st_convexhull( st_crosses( "
"st_difference( st_dimension( st_disjoint( st_distance( st_distance_sphere( "
"st_endpoint( st_envelope( st_equals( st_exteriorring( st_geohash( "
"st_geomcollfromtext( st_geomcollfromtxt( st_geomcollfromwkb( "
"st_geometrycollectionfromtext( st_geometrycollectionfromwkb( st_geometryfromtext( st_geometryfromwkb( st_geometryn( "
"st_geometrytype( st_geomfromgeojson( st_geomfromtext( st_geomfromwkb( "
"st_interiorringn( st_intersection( st_intersects( st_isclosed( st_isempty( st_isring( st_issimple( st_isvalid( "
"st_latfromgeohash( st_latitude( st_length( "
"st_linefromtext( st_linefromwkb( st_linestringfromtext( st_linestringfromwkb( st_longfromgeohash( st_longitude( "
"st_makeenvelope( st_mlinefromtext( st_mlinefromwkb( "
"st_mpointfromtext( st_mpointfromwkb( st_mpolyfromtext( st_mpolyfromwkb( "
"st_multilinestringfromtext( st_multilinestringfromwkb( "
"st_multipointfromtext( st_multipointfromwkb( st_multipolygonfromtext( st_multipolygonfromwkb( "
"st_numgeometries( st_numinteriorring( st_numinteriorrings( st_numpoints( st_overlaps( "
"st_pointfromgeohash( st_pointfromtext( st_pointfromwkb( st_pointn( st_pointonsurface( "
"st_polyfromtext( st_polyfromwkb( st_polygonfromtext( st_polygonfromwkb( st_relate( "
"st_simplify( st_srid( st_startpoint( st_swapxy( st_symdifference( st_touches( st_transform( st_union( st_validate( "
"st_within( st_x( st_y( startpoint( statement_digest( statement_digest_text( std( stddev( str_to_date( strcmp( strftime( "
"subdate( substr( substring_index( subtime( sysdate( "
"time_format( time_to_sec( timediff( to_base64( to_days( to_seconds( total( total_changes( touches( typeof( "
"ucase( uncompress( uncompressed_length( unhex( unix_timestamp( unlikely( updatexml( uuid_short( uuid_to_bin( "
"validate_password_strength( variance( "
"wait_for_executed_gtid_set( wait_until_sql_thread_after_gtids( weekday( weekofyear( yearweek( zeroblob( "
, // 3 upper case keywords
"ABORT ABS ABSENT ABSOLUTE ACCESS ACCESSIBLE ACCORDING ACCOUNT ACOS ACTION ACTIVE ADA ADD ADMIN AFTER AGAINST AGGREGATE "
"ALGORITHM ALIAS ALL ALLOCATE ALSO ALTER ALWAYS "
"ANALYSE ANALYZE AND ANSI_DEFAULTS ANSI_NULLS ANSI_NULL_DFLT_OFF ANSI_NULL_DFLT_ON ANSI_PADDING ANSI_WARNINGS ANY "
"APPLICATION ARE ARITHABORT ARITHIGNORE ARRAY_AGG ARRAY_MAX_CARDINALITY "
"AS ASC ASCII ASENSITIVE ASIN ASSEMBLY ASSERTION ASSIGNMENT ASYMMETRIC AT ATAN ATOMIC ATTACH ATTRIBUTE ATTRIBUTES "
"AUDIT AUTHORIZATION AUTHORS AUTOEXTEND_SIZE AUTOINCREMENT AUTO_INCREMENT AVAILABILITY AVG AVG_ROW_LENGTH "
"BACKUP BACKWARD BASE64 BEFORE BEGIN BEGIN_FRAME BEGIN_PARTITION BERNOULLI BETWEEN BINDING BINLOG BIT_LENGTH "
"BLOCK BLOCKED BODY BOM BOTH BREADTH BREAK BROKER BROWSE BTREE BUCKETS BULK BY "
"CACHE CACHING CALL CALLED CARDINALITY CASCADE CASCADED CASE CAST CATALOG CATALOG_NAME CEIL CEILING CERTIFICATE "
"CHAIN CHAINING CHANGE CHANGED CHANNEL "
"CHARACTERISTICS CHARACTERS CHARACTER_LENGTH CHARACTER_SET_CATALOG CHARACTER_SET_NAME CHARACTER_SET_SCHEMA CHARSET "
"CHAR_LENGTH CHECK CHECKED CHECKPOINT CHECKSUM CIPHER "
"CLASS CLASSIFICATION CLASSIFIER CLASS_ORIGIN CLIENT CLONE CLOSE CLUSTER CLUSTERED COALESCE COBOL CODE CODE_UNITS "
"COLLATE COLLATION COLLATION_CATALOG COLLATION_NAME COLLATION_SCHEMA COLLECT COLLECTION "
"COLUMN COLUMNS COLUMNSTORE COLUMN_FORMAT COLUMN_NAME "
"COMMAND_FUNCTION COMMAND_FUNCTION_CODE COMMENT COMMENTS COMMIT COMMITTED "
"COMPACT COMPLETION COMPONENT COMPRESSED COMPRESSION COMPUTE CONCAT_NULL_YIELDS_NULL CONCURRENT CONCURRENTLY "
"CONDITION CONDITIONAL CONDITION_IDENTIFIER CONDITION_NUMBER CONFIGURATION CONFLICT CONNECT CONNECTION CONNECTION_NAME "
"CONSISTENT CONSTRAINT CONSTRAINTS CONSTRAINT_CATALOG CONSTRAINT_NAME CONSTRAINT_SCHEMA CONSTRUCTOR CONSTRUCTORS "
"CONTAINS CONTAINSTABLE CONTENT CONTEXT CONTEXT_INFO CONTINUE CONTRACT CONTRIBUTORS CONTROL "
"CONVERSATION CONVERSION CONVERT COPY CORR CORRESPONDING COS COSH COST COUNT COVAR_POP COVAR_SAMP CPU "
"CREATE CREDENTIAL CROSS CRYPTOGRAPHIC CSV CUBE CUME_DIST CURRENT CURRENT_CATALOG CURRENT_COLLATION "
"CURRENT_DATE CURRENT_DEFAULT_TRANSFORM_GROUP CURRENT_PATH CURRENT_ROLE CURRENT_ROW CURRENT_SCHEMA "
"CURRENT_TIME CURRENT_TIMESTAMP CURRENT_TRANSFORM_GROUP_FOR_TYPE CURRENT_USER CURSOR CURSOR_CLOSE_ON_COMMIT CURSOR_NAME "
"CYCLE "
"DATA DATABASE DATABASES DATAFILE DATALINK DATEFIRST DATEFORMAT DATETIME_INTERVAL_CODE DATETIME_INTERVAL_PRECISION "
"DAY_HOUR DAY_MICROSECOND DAY_MINUTE DAY_SECOND DB DBCC DEADLOCK_PRIORITY DEALLOCATE DECFLOAT DECLARE "
"DEFAULT DEFAULTS DEFAULT_AUTH DEFERRABLE DEFERRED DEFINE DEFINED DEFINER DEFINITION DEGREE "
"DELAYED DELAY_KEY_WRITE DELETE DELIMITER DELIMITERS DENSE_RANK DENY DEPENDS DEPTH DEREF DERIVED DESC DESCRIBE "
"DESCRIBE_CATALOG DESCRIBE_NAME "
"DESCRIBE_PROCEDURE_SPECIFIC_CATALOG DESCRIBE_PROCEDURE_SPECIFIC_NAME DESCRIBE_PROCEDURE_SPECIFIC_SCHEMA DESCRIBE_SCHEMA "
"DESCRIPTION DESCRIPTOR DESTROY DESTRUCTOR DES_KEY_FILE DETACH DETERMINISTIC "
"DIAGNOSTICS DIALOG DICTIONARY DIRECTORY DISABLE DISCARD DISCONNECT DISK DISPATCH DISTINCT DISTINCTROW DISTRIBUTED DIV "
"DLNEWCOPY DLPREVIOUSCOPY "
"DLURLCOMPLETE DLURLCOMPLETEONLY DLURLCOMPLETEWRITE DLURLPATH DLURLPATHONLY DLURLPATHWRITE DLURLSCHEME DLURLSERVER "
"DLVALUE DO DOCUMENT DOMAIN DO_DOMAIN_IDS DROP DUAL DUMP DUMPFILE DUPLICATE "
"DYNAMIC DYNAMIC_FUNCTION DYNAMIC_FUNCTION_CODE "
"EACH ELEMENT ELSE ELSEIF ELSIF EMPTY ENABLE ENCLOSED ENCODING ENCRYPTED ENCRYPTION "
"END END-EXEC ENDPOINT ENDS END_FRAME END_PARTITION ENFORCED ENGINE ENGINES EQUALS ERRLVL ERROR ERRORS ESCAPE ESCAPED "
"EVENT EVENTS EVERY EXCEPT EXCEPTION EXCHANGE EXCLUDE EXCLUDING EXCLUSIVE EXEC EXECUTE EXISTS EXIT "
"EXP EXPANSION EXPIRE EXPLAIN EXPORT EXPRESSION EXTENDED EXTENSION EXTENT_SIZE EXTERNAL EXTRACT "
"FAIL FAILED_LOGIN_ATTEMPTS FALSE FAMILY FAST FAULTS FETCH FIELDS FILE FILELISTONLY FILE_BLOCK_SIZE FILLFACTOR FILTER "
"FINAL FINISH FINISH_CATALOG FINISH_NAME "
"FINISH_PROCEDURE_SPECIFIC_CATALOG FINISH_PROCEDURE_SPECIFIC_NAME FINISH_PROCEDURE_SPECIFIC_SCHEMA FINISH_SCHEMA "
"FIPS_FLAGGER FIRST FIRST_VALUE FLAG FLOOR FLUSH FMTONLY "
"FOLLOWING FOLLOWS FOR FORCE FORCEPLAN FOREIGN FORMAT FORTRAN FORWARD FOUND "
"FRAC_SECOND FRAME_ROW FREE FREETEXT FREETEXTTABLE FREEZE FROM FS FULFILL FULFILL_CATALOG FULFILL_NAME "
"FULFILL_PROCEDURE_SPECIFIC_CATALOG FULFILL_PROCEDURE_SPECIFIC_NAME FULFILL_PROCEDURE_SPECIFIC_SCHEMA FULFILL_SCHEMA "
"FULL FULLTEXT FULLTEXTTABLE FUNCTION FUNCTIONS FUSION "
"GENERAL GENERATED GET GET_FORMAT GET_MASTER_PUBLIC_KEY GET_TRANSMISSION_STATUS GLOB GLOBAL GO GOTO GOVERNOR "
"GRANT GRANTED GRANTS GREATEST GROUP GROUPING GROUPS GROUP_REPLICATION "
"HADR HANDLE HANDLER HASH HAS_PASS_THROUGH_COLUMNS HAS_PASS_THRU_COLS HAVING HEADER HEADERONLY HELP HEX "
"HIERARCHY HIGH_PRIORITY HISTOGRAM HISTORY HOLD HOLDLOCK HOST HOSTS HOUR_MICROSECOND HOUR_MINUTE HOUR_SECOND "
"ID IDENTIFIED IDENTITY IDENTITYCOL IDENTITY_INSERT IF IGNORE IGNORE_DOMAIN_IDS IGNORE_SERVER_IDS ILIKE "
"IMMEDIATE IMMEDIATELY IMMUTABLE IMPLEMENTATION IMPLICIT IMPLICIT_TRANSACTIONS IMPORT IN INACTIVE "
"INCLUDE INCLUDING INCREMENT INDENT INDEX INDEXED INDEXES INDICATOR INFILE INHERIT INHERITS "
"INITIAL INITIALIZE INITIALLY INITIAL_SIZE INLINE INNER INNOBASE INNODB INOUT INPUT "
"INSENSITIVE INSERT INSERT_METHOD INSTALL INSTANCE INSTANTIABLE INSTEAD INTEGRITY INTERSECT INTERSECTION INTO INVOKER "
"IO IO_AFTER_GTIDS IO_BEFORE_GTIDS IO_THREAD IPC IS ISNULL ISOLATION ISSUER IS_PRUNABLE ITERATE "
"JOIN "
"JSON_ARRAY JSON_ARRAYAGG JSON_EXISTS JSON_OBJECT JSON_OBJECTAGG JSON_QUERY JSON_TABLE JSON_TABLE_PRIMITIVE JSON_VALUE "
"KEEP KEY KEYS KEY_BLOCK_SIZE KEY_MEMBER KEY_TYPE KILL "
"LABEL LABELONLY LAG LANGUAGE LAST LAST_VALUE LATERAL LEAD LEADING LEAKPROOF LEAST LEAVE LEAVES LEFT LENGTH LESS LEVEL "
"LIBRARY LIKE LIKE_REGEX LIMIT LINEAR LINENO LINES LINK LIST LISTAGG LISTEN LN LOAD "
"LOCAL LOCALTIME LOCALTIMESTAMP LOCATION LOCATOR LOCK LOCKED LOCKS LOCK_TIMEOUT LOG LOG10 LOGFILE LOGGED LOGIN LOGS LOOP "
"LOWER LOW_PRIORITY "
"MAP MAPPING MASTER MASTER_AUTO_POSITION MASTER_BIND MASTER_COMPRESSION_ALGORITHMS MASTER_CONNECT_RETRY MASTER_DELAY "
"MASTER_HEARTBEAT_PERIOD MASTER_HOST MASTER_LOG_FILE MASTER_LOG_POS MASTER_PASSWORD MASTER_PORT MASTER_PUBLIC_KEY_PATH "
"MASTER_RETRY_COUNT MASTER_SERVER_ID MASTER_SSL "
"MASTER_SSL_CA MASTER_SSL_CAPATH MASTER_SSL_CERT MASTER_SSL_CIPHER MASTER_SSL_CRL MASTER_SSL_CRLPATH MASTER_SSL_KEY "
"MASTER_SSL_VERIFY_SERVER_CERT MASTER_TLS_VERSION MASTER_USER MASTER_ZSTD_COMPRESSION_LEVEL "
"MATCH MATCHED MATCHES MATCH_NUMBER MATCH_RECOGNIZE MATERIALIZED MAX MAXVALUE MAX_CONNECTIONS_PER_HOUR "
"MAX_QUERIES_PER_HOUR MAX_ROWS MAX_SIZE MAX_STATEMENT_TIME MAX_UPDATES_PER_HOUR MAX_USER_CONNECTIONS "
"MEASURES MEDIUM MEMBER MEMORY MERGE MESSAGE MESSAGE_LENGTH MESSAGE_OCTET_LENGTH MESSAGE_TEXT METHOD "
"MICROSECOND MIDDLEINT MIGRATE MIN MINUTE_MICROSECOND MINUTE_SECOND MINVALUE MIN_ROWS "
"MOD MODE MODIFIES MODIFY MODULE MORE MOVE MUMPS MUTEX MYSQL_ERRNO "
"NAME NAMES NAMESPACE NATURAL NDB NDBCLUSTER NESTED NESTING NETWORK_NAMESPACE NEVER NEW NEXT NFC NFD NFKC NFKD NIL NO "
"NOCASE NOCHECK NOCOUNT NODEGROUP NOEXEC NONBLOCKING NONCLUSTERED NONE NORMALIZE NORMALIZED "
"NOT NOTHING NOTIFICATION NOTIFY NOTNULL NOWAIT NO_WAIT NO_WRITE_TO_BINLOG NTH_VALUE NTILE "
"NULL NULLABLE NULLIF NULLS NUMERIC_ROUNDABORT "
"OCCURRENCES_REGEX OCTETS OCTET_LENGTH OF OFF OFFSET OFFSETS OIDS OJ OLD OLD_PASSWORD OMIT ON ONE ONE_SHOT ONLY "
"OPEN OPENDATASOURCE OPENQUERY OPENROWSET OPENXML OPERATION OPERATOR "
"OPTIMIZE OPTIMIZER_COSTS OPTION OPTIONAL OPTIONALLY OPTIONS OR ORDER ORDERING ORDINALITY ORGANIZATION OTHERS "
"OUT OUTER OUTFILE OUTPUT OVER OVERFLOW OVERLAPS OVERLAY OVERRIDING OWNED OWNER Options "
"PACKAGE PACK_KEYS PAD PAGE PARALLEL PARAMETER PARAMETERS PARAMETER_MODE PARAMETER_NAME PARAMETER_ORDINAL_POSITION "
"PARAMETER_SPECIFIC_CATALOG PARAMETER_SPECIFIC_NAME PARAMETER_SPECIFIC_SCHEMA PARSEONLY PARSER PARSE_GCOL_EXPR "
"PARTIAL PARTITION PARTITIONING PARTITIONS PASCAL PASS PASSING PASSTHROUGH PASSWORD PASSWORD_LOCK_TIME PAST PATTERN "
"PER PERCENT PERCENTILE_CONT PERCENTILE_DISC PERCENT_RANK PERIOD PERMISSION PERMUTE PERSIST PERSIST_ONLY PHASE PIVOT "
"PLACING PLAN PLANS PLI PLUGIN PLUGINS PLUGIN_DIR POLICY POOL PORT PORTION POSITION POSITION_REGEX POSTFIX POWER PRAGMA "
"PRECEDES PRECEDING PREFIX PREORDER PREPARE PREPARED PRESERVE PREV "
"PRIMARY PRINT PRIOR PRIORITY PRIVATE PRIVATE_PARAMETERS PRIVATE_PARAMS_S PRIVILEGES PRIVILEGE_CHECKS_USER "
"PROC PROCEDURAL PROCEDURE PROCEDURES PROCESS PROCESSLIST PROFILE PROFILES PROGRAM PROPERTY PROVIDER PROXY PRUNE PTF "
"PUBLIC PUBLICATION PURGE "
"QUARTER QUERY QUERY_GOVERNOR_COST_LIMIT QUEUE QUICK QUOTE QUOTED_IDENTIFIER QUOTES "
"RAISE RAISERROR RANDOM RANGE RANK READ READS READTEXT READ_ONLY READ_WRITE REASSIGN REBUILD "
"RECEIVE RECHECK RECONFIGURE RECOVER RECOVERY RECURSIVE REDO REDOFILE REDO_BUFFER_SIZE REDUNDANT "
"REFERENCE REFERENCES REFERENCING REFRESH REF_SYSTEM_ID "
"REGEXP REGR_AVGX REGR_AVGY REGR_COUNT REGR_INTERCEPT REGR_R2 REGR_SLOPE REGR_SXX REGR_SXY REGR_SYY REINDEX "
"RELATIVE RELAY RELAYLOG RELAY_LOG_FILE RELAY_LOG_POS RELAY_THREAD RELEASE RELOAD REMOTE REMOTE_PROC_TRANSACTIONS REMOVE "
"RENAME REORGANIZE REPAIR REPEAT REPEATABLE REPLACE REPLICA REPLICAS REPLICATE_DO_DB REPLICATE_DO_TABLE "
"REPLICATE_IGNORE_DB REPLICATE_IGNORE_TABLE REPLICATE_REWRITE_DB REPLICATE_WILD_DO_TABLE REPLICATE_WILD_IGNORE_TABLE "
"REPLICATION REQUIRE REQUIRE_ROW_FORMAT REQUIRING RESET RESIGNAL RESOURCE RESPECT RESTART RESTORE RESTRICT RESULT RESUME "
"RETURN RETURNED_CARDINALITY RETURNED_LENGTH RETURNED_OCTET_LENGTH RETURNED_SQLSTATE RETURNING "
"RETURNS RETURNS_ONLY_PASS_THROUGH RET_ONLY_PASS_THRU REUSE REVERSE REVERT REVOKE REWINDONLY RIGHT RLIKE "
"ROLE ROLLBACK ROLLUP ROTATE ROUTE ROUTINE ROUTINES ROUTINE_CATALOG ROUTINE_NAME ROUTINE_SCHEMA "
"ROWCOUNT ROWGUIDCOL ROWS ROWTYPE ROW_COUNT ROW_FORMAT ROW_NUMBER RTREE RTRIM RULE RUNNING "
"SAVE SAVEPOINT "
"SCALAR SCALE SCHEDULE SCHEMA SCHEMAS SCHEMA_NAME SCHEME SCOPE SCOPED SCOPE_CATALOG SCOPE_NAME SCOPE_SCHEMA SCROLL "
"SEARCH SECONDARY SECONDARY_ENGINE SECONDARY_ENGINE_ATTRIBUTE SECONDARY_LOAD SECONDARY_UNLOAD SECOND_MICROSECOND SECTION "
"SECURITY SECURITYAUDIT SEEK SELECT SELECTIVE SELF "
"SEMANTICKEYPHRASETABLE SEMANTICSIMILARITYDETAILSTABLE SEMANTICSIMILARITYTABLE SEND SENSITIVE SENSITIVITY SEPARATOR "
"SEQUENCE SEQUENCES SERIALIZABLE SERVER SERVER_NAME SERVICE SESSION SESSION_USER SET SETOF SETS SETUSER "
"SHARE SHOW SHOWPLAN_ALL SHOWPLAN_TEXT SHOWPLAN_XML SHUTDOWN SIGNAL SIGNATURE SIMILAR SIMPLE SIN SINH SIZE SKIP "
"SLAVE SLOW SNAPSHOT SOCKET SOME SONAME SOUNDS SOURCE SPACE SPATIAL SPECIFIC SPECIFICATION SPECIFICTYPE SPECIFIC_NAME "
"SQL SQLCA SQLCODE SQLERROR SQLEXCEPTION SQLSTATE SQLWARNING SQL_AFTER_GTIDS SQL_AFTER_MTS_GAPS "
"SQL_BEFORE_GTIDS SQL_BIG_RESULT SQL_BUFFER_RESULT SQL_CACHE SQL_CALC_FOUND_ROWS SQL_NO_CACHE SQL_SMALL_RESULT "
"SQL_THREAD SQL_TSI_DAY SQL_TSI_FRAC_SECOND SQL_TSI_HOUR SQL_TSI_MINUTE SQL_TSI_MONTH SQL_TSI_QUARTER SQL_TSI_SECOND "
"SQL_TSI_WEEK SQL_TSI_YEAR SQRT SRID SSL STABLE STACKED STANDALONE START STARTING STARTS START_CATALOG START_NAME "
"START_PROCEDURE_SPECIFIC_CATALOG START_PROCEDURE_SPECIFIC_NAME START_PROCEDURE_SPECIFIC_SCHEMA START_SCHEMA "
"STATE STATEMENT STATIC STATISTICS STATS_AUTO_RECALC STATS_PERSISTENT STATS_SAMPLE_PAGES STATUS "
"STDDEV_POP STDDEV_SAMP STDIN STDOUT STOP STOPLIST STORAGE STORED STRAIGHT_JOIN STREAM STRICT STRING STRIP STRUCTURE "
"STYLE SUBCLASS_ORIGIN SUBJECT SUBMULTISET SUBPARTITION SUBPARTITIONS SUBSCRIPTION SUBSET SUBSTRING SUBSTRING_REGEX "
"SUCCEEDS SUM SUPER SUPPORT SUSPEND SWAPS SWITCHES SYMMETRIC SYNONYM SYSID SYSTEM SYSTEM_TIME SYSTEM_USER "
"TABLE TABLES TABLESAMPLE TABLESPACE TABLE_CHECKSUM TABLE_NAME TABLE_SEMANTICS TAN TANH "
"TEMP TEMPLATE TEMPORARY TEMPTABLE TERMINATE TERMINATED TEXTSIZE THAN THEN THREAD_PRIORITY THROUGH "
"TIES TIMER TIMESTAMPADD TIMESTAMPDIFF TLS TO TOKEN TOP TOP_LEVEL_COUNT TRAILING TRAN "
"TRANSACTION TRANSACTIONS_COMMITTED TRANSACTIONS_ROLLED_BACK TRANSACTION_ACTIVE TRANSFORM TRANSFORMS "
"TRANSLATE TRANSLATE_REGEX TRANSLATION TREAT "
"TRIGGER TRIGGERS TRIGGER_CATALOG TRIGGER_NAME TRIGGER_SCHEMA TRIM TRIM_ARRAY TRUE TRUNCATE TRUSTED TRY_CONVERT TSEQUAL "
"TYPE TYPES "
"UESCAPE UNBOUNDED UNCOMMITTED UNCONDITIONAL UNDEFINED UNDER UNDO UNDOFILE UNDO_BUFFER_SIZE UNENCRYPTED "
"UNICODE UNINSTALL UNION UNIQUE UNKNOWN UNLINK UNLISTEN UNLOCK UNLOGGED UNMATCHED UNNAMED UNNEST UNPIVOT UNTIL UNTYPED "
"UPDATE UPDATETEXT UPGRADE UPPER URI USAGE USE "
"USER USER_DEFINED_TYPE_CATALOG USER_DEFINED_TYPE_CODE USER_DEFINED_TYPE_NAME USER_DEFINED_TYPE_SCHEMA USER_RESOURCES "
"USE_FRM USING UTC_DATE UTC_TIME UTC_TIMESTAMP UTF16 UTF32 UTF8 "
"VACUUM VALID VALIDATE VALIDATION VALIDATOR VALUE VALUES VALUE_OF VARIABLE VARIABLES VARIADIC VAR_POP VAR_SAMP VCPU "
"VERBOSE VERIFYONLY VERSION VERSIONING VIEW VIEWS VIRTUAL VISIBLE VOLATILE "
"WAIT WAITFOR WARNINGS WEEK WEIGHT_STRING WHEN WHENEVER WHERE WHILE WHITESPACE WIDTH_BUCKET WINDOW WITH WITHIN WITHOUT "
"WORK WORKLOAD WRAPPER WRITE WRITETEXT "
"X509 XA XACT_ABORT XID XMLAGG XMLATTRIBUTES XMLBINARY XMLCAST XMLCOMMENT XMLCONCAT XMLDECLARATION XMLDOCUMENT "
"XMLELEMENT XMLEXISTS XMLFOREST XMLITERATE XMLNAMESPACES XMLPARSE XMLPI XMLQUERY XMLROOT XMLSCHEMA XMLSERIALIZE "
"XMLTABLE XMLTEXT XMLVALIDATE XOR "
"YEAR_MONTH YES ZEROFILL ZONE "
, // 4 upper case data types
"ANYDATA ANYDATASET ANYTYPE ARRAY "
"BFILE BIGINT BIGSERIAL BINARY BINARY_DOUBLE BINARY_FLOAT BIT BLOB BOOL BOOLEAN BOX BYTE BYTEA "
"CHAR CHARACTER CIDR CIRCLE CLOB DATE DATETIME DATETIME2 DATETIMEOFFSET DAY DEC DECIMAL DOUBLE ENUM "
"FIXED FLOAT FLOAT4 FLOAT8 GEOGRAPHY GEOMCOLLECTION GEOMETRY GEOMETRYCOLLECTION HIERARCHYID HOUR "
"IMAGE INET INT INT1 INT2 INT3 INT4 INT8 INTEGER INTERVAL JSON JSONB LARGE LINE LINESTRING LONG LONGBLOB LONGTEXT LSEG "
"MACADDR MACADDR8 MEDIUMBLOB MEDIUMINT MEDIUMTEXT MINUTE MONEY MONTH MULTILINESTRING MULTIPOINT MULTIPOLYGON MULTISET "
"NATIONAL NATIVE NCHAR NCLOB NTEXT NUMBER NUMERIC NVARCHAR NVARCHAR2 OBJECT PATH PG_LSN POINT POLYGON PRECISION "
"REAL REF ROW ROWID ROWVERSION "
"SDO_GEOMETRY SDO_GEORASTER SDO_TOPO_GEOMETRY SECOND SERIAL SERIAL2 SERIAL4 SERIAL8 SIGNED "
"SMALLDATETIME SMALLINT SMALLMONEY SMALLSERIAL SQL_VARIANT "
"TEXT "
"TIME TIMESTAMP TIMESTAMPTZ TIMETZ TIMEZONE_ABBR TIMEZONE_HOUR TIMEZONE_MINUTE TIMEZONE_REGION TINYBLOB TINYINT TINYTEXT "
"TSQUERY TSVECTOR TXID_SNAPSHOT "
"UNIQUEIDENTIFIER UNSIGNED UROWID UUID VARBINARY VARBIT VARCHAR VARCHAR2 VARCHARACTER VARYING XML YEAR "
, // 5 upper case functions
"ABS( ACOS( ADDDATE( ADDTIME( AES_DECRYPT( AES_ENCRYPT( ANALYSE( ANY_VALUE( "
"ASCII( ASIN( ASYMMETRIC_DECRYPT( ASYMMETRIC_DERIVE( ASYMMETRIC_ENCRYPT( ASYMMETRIC_SIGN( ASYMMETRIC_VERIFY( "
"ATAN( ATAN2( AVG( Area( AsBinary( AsText( AsWKB( AsWKT( "
"BENCHMARK( BIN( BINLOG_GTID_POS( BIN_TO_UUID( BIT_AND( BIT_COUNT( BIT_LENGTH( BIT_OR( BIT_XOR( Boundary( Buffer( "
"CAN_ACCESS_COLUMN( CAN_ACCESS_DATABASE( CAN_ACCESS_TABLE( CAN_ACCESS_VIEW( CAST( CEIL( CEILING( "
"CHANGES( CHAR( CHARACTER_LENGTH( CHARSET( CHAR_LENGTH( COALESCE( COERCIBILITY( "
"COLLATION( COLUMN_ADD( COLUMN_CHECK( COLUMN_CREATE( COLUMN_DELETE( COLUMN_EXISTS( COLUMN_GET( COLUMN_JSON( COLUMN_LIST( "
"COMPRESS( CONCAT( CONCAT_WS( CONNECTION_ID( CONV( CONVERT( CONVERT_TZ( COS( COT( COUNT( "
"CRC32( CREATE_ASYMMETRIC_PRIV_KEY( CREATE_ASYMMETRIC_PUB_KEY( CREATE_DH_PARAMETERS( CREATE_DIGEST( "
"CUME_DIST( CURDATE( CURRENT_DATE( CURRENT_ROLE( CURRENT_TIME( CURRENT_TIMESTAMP( CURRENT_USER( CURTIME( Centroid( "
"Contains( ConvexHull( Crosses( "
"DATABASE( DATE( DATEDIFF( DATETIME( DATE_ADD( DATE_FORMAT( DATE_SUB( DAY( DAYNAME( DAYOFMONTH( DAYOFWEEK( DAYOFYEAR( "
"DECODE( DECODE_HISTOGRAM( DEFAULT( DEGREES( DENSE_RANK( DES_DECRYPT( DES_ENCRYPT( Dimension( Disjoint( Distance( "
"ELT( ENCODE( ENCRYPT( EXP( EXPORT_SET( EXTRACT( EndPoint( Envelope( Equals( ExteriorRing( ExtractValue( "
"FIELD( FIND_IN_SET( FIRST_VALUE( FLOOR( FORMAT( FOUND_ROWS( FROM_BASE64( FROM_DAYS( FROM_UNIXTIME( "
"GET_DD_COLUMN_PRIVILEGES( GET_DD_CREATE_OPTIONS( GET_DD_INDEX_SUB_PART_LENGTH( GET_FORMAT( GET_LOCK( GLOB( GLength( "
"GREATEST( GROUPING( GROUP_CONCAT( GTID_SUBSET( GTID_SUBTRACT( GeomCollFromText( GeomCollFromWKB( GeomCollection( "
"GeomFromText( GeomFromWKB( GeometryCollection( GeometryCollectionFromText( GeometryCollectionFromWKB( "
"GeometryFromText( GeometryFromWKB( GeometryN( GeometryType( "
"HEX( HOUR( "
"ICU_VERSION( IF( IFNULL( IIF( INET6_ATON( INET6_NTOA( INET_ATON( INET_NTOA( INSERT( INSTR( "
"INTERNAL_AUTO_INCREMENT( INTERNAL_AVG_ROW_LENGTH( INTERNAL_CHECKSUM( INTERNAL_CHECK_TIME( "
"INTERNAL_DATA_FREE( INTERNAL_DATA_LENGTH( INTERNAL_DD_CHAR_LENGTH( "
"INTERNAL_GET_COMMENT_OR_ERROR( INTERNAL_GET_VIEW_WARNING_OR_ERROR( "
"INTERNAL_INDEX_COLUMN_CARDINALITY( INTERNAL_INDEX_LENGTH( INTERNAL_KEYS_DISABLED( INTERNAL_MAX_DATA_LENGTH( "
"INTERNAL_TABLE_ROWS( INTERNAL_UPDATE_TIME( INTERVAL( "
"ISNULL( IS_FREE_LOCK( IS_IPV4( IS_IPV4_COMPAT( IS_IPV4_MAPPED( IS_IPV6( IS_USED_LOCK( IS_UUID( IS_VISIBLE_DD_OBJECT( "
"InteriorRingN( Intersects( IsClosed( IsEmpty( IsRing( IsSimple( "
"JSON( JSON_APPEND( JSON_ARRAY( JSON_ARRAYAGG( JSON_ARRAY_APPEND( JSON_ARRAY_INSERT( JSON_ARRAY_LENGTH( "
"JSON_COMPACT( JSON_CONTAINS( JSON_CONTAINS_PATH( JSON_DEPTH( JSON_DETAILED( JSON_EACH( JSON_EXISTS( JSON_EXTRACT( "
"JSON_GROUP_ARRAY( JSON_GROUP_OBJECT( JSON_INSERT( JSON_KEYS( JSON_LENGTH( JSON_LOOSE( "
"JSON_MERGE( JSON_MERGE_PATCH( JSON_MERGE_PRESERVE( JSON_OBJECT( JSON_OBJECTAGG( JSON_PATCH( JSON_PRETTY( "
"JSON_QUERY( JSON_QUOTE( JSON_REMOVE( JSON_REPLACE( JSON_SEARCH( JSON_SET( JSON_STORAGE_FREE( JSON_STORAGE_SIZE( "
"JSON_TABLE( JSON_TREE( JSON_TYPE( JSON_UNQUOTE( JSON_VALID( JSON_VALUE( JULIANDAY( "
"LAG( LASTVAL( LAST_DAY( LAST_INSERT_ID( LAST_INSERT_ROWID( LAST_VALUE( LCASE( LEAD( LEAST( LEFT( LENGTH( "
"LIKE( LIKELIHOOD( LIKELY( LN( LOAD_EXTENSION( LOAD_FILE( LOCALTIME( LOCALTIMESTAMP( LOCATE( LOG( LOG10( LOG2( LOWER( "
"LPAD( LTRIM( LineFromText( LineFromWKB( LineString( LineStringFromText( LineStringFromWKB( "
"MAKEDATE( MAKETIME( MAKE_SET( MASTER_GTID_WAIT( MASTER_POS_WAIT( MAX( MBRContains( MBRCoveredBy( MBRCovers( "
"MBRDisjoint( MBREqual( MBREquals( MBRIntersects( MBROverlaps( MBRTouches( MBRWithin( MD5( MEDIAN( "
"MICROSECOND( MID( MIN( MINUTE( MLineFromText( MLineFromWKB( MOD( MONTH( MONTHNAME( "
"MPointFromText( MPointFromWKB( MPolyFromText( MPolyFromWKB( "
"MultiLineString( MultiLineStringFromText( MultiLineStringFromWKB( "
"MultiPoint( MultiPointFromText( MultiPointFromWKB( MultiPolygon( MultiPolygonFromText( MultiPolygonFromWKB( "
"NAME_CONST( NEXTVAL( NOW( NTH_VALUE( NTILE( NULLIF( NumGeometries( NumInteriorRings( NumPoints( "
"OCT( OCTET_LENGTH( OLD_PASSWORD( ORD( Overlaps( "
"PASSWORD( PERCENTILE_CONT( PERCENTILE_DISC( PERCENT_RANK( PERIOD_ADD( PERIOD_DIFF( PI( POSITION( POW( POWER( PRINTF( "
"Point( PointFromText( PointFromWKB( PointN( PointOnSurface( "
"PolyFromText( PolyFromWKB( Polygon( PolygonFromText( PolygonFromWKB( "
"QUARTER( QUOTE( "
"RADIANS( RAND( RANDOM( RANDOMBLOB( RANDOM_BYTES( RANK( "
"REGEXP_INSTR( REGEXP_LIKE( REGEXP_REPLACE( REGEXP_SUBSTR( RELEASE_ALL_LOCKS( RELEASE_LOCK( REPEAT( REPLACE( REVERSE( "
"RIGHT( ROLES_GRAPHML( ROUND( ROW_COUNT( ROW_NUMBER( RPAD( RTRIM( "
"SCHEMA( SECOND( SEC_TO_TIME( SESSION_USER( SETVAL( SHA( SHA1( SHA2( SIGN( SIN( SLEEP( SOUNDEX( "
"SPACE( SPIDER_BG_DIRECT_SQL( SPIDER_COPY_TABLES( SPIDER_DIRECT_SQL( SPIDER_FLUSH_TABLE_MON_CACHE( "
"SQLITE_COMPILEOPTION_GET( SQLITE_COMPILEOPTION_USED( SQLITE_OFFSET( SQLITE_SOURCE_ID( SQLITE_VERSION( "
"SQL_THREAD_WAIT_AFTER_GTIDS( SQRT( SRID( STATEMENT_DIGEST( STATEMENT_DIGEST_TEXT( STD( STDDEV( STDDEV_POP( STDDEV_SAMP( "
"STRCMP( STRFTIME( STR_TO_DATE( ST_Area( ST_AsBinary( ST_AsGeoJSON( ST_AsText( ST_AsWKB( ST_AsWKT( "
"ST_Boundary( ST_Buffer( ST_Buffer_Strategy( ST_Centroid( ST_Contains( ST_ConvexHull( ST_Crosses( "
"ST_Difference( ST_Dimension( ST_Disjoint( ST_Distance( ST_Distance_Sphere( "
"ST_EndPoint( ST_Envelope( ST_Equals( ST_ExteriorRing( ST_GeoHash( "
"ST_GeomCollFromText( ST_GeomCollFromTxt( ST_GeomCollFromWKB( ST_GeomFromGeoJSON( ST_GeomFromText( ST_GeomFromWKB( "
"ST_GeometryCollectionFromText( ST_GeometryCollectionFromWKB( ST_GeometryFromText( ST_GeometryFromWKB( ST_GeometryN( "
"ST_GeometryType( "
"ST_InteriorRingN( ST_Intersection( ST_Intersects( ST_IsClosed( ST_IsEmpty( ST_IsRing( ST_IsSimple( ST_IsValid( "
"ST_LatFromGeoHash( ST_Latitude( ST_Length( "
"ST_LineFromText( ST_LineFromWKB( ST_LineStringFromText( ST_LineStringFromWKB( ST_LongFromGeoHash( ST_Longitude( "
"ST_MLineFromText( ST_MLineFromWKB( ST_MPointFromText( ST_MPointFromWKB( ST_MPolyFromText( ST_MPolyFromWKB( "
"ST_MakeEnvelope( ST_MultiLineStringFromText( ST_MultiLineStringFromWKB( "
"ST_MultiPointFromText( ST_MultiPointFromWKB( ST_MultiPolygonFromText( ST_MultiPolygonFromWKB( "
"ST_NumGeometries( ST_NumInteriorRing( ST_NumInteriorRings( ST_NumPoints( ST_Overlaps( "
"ST_PointFromGeoHash( ST_PointFromText( ST_PointFromWKB( ST_PointN( ST_PointOnSurface( "
"ST_PolyFromText( ST_PolyFromWKB( ST_PolygonFromText( ST_PolygonFromWKB( ST_Relate( "
"ST_SRID( ST_Simplify( ST_StartPoint( ST_SwapXY( ST_SymDifference( ST_Touches( ST_Transform( ST_Union( ST_Validate( "
"ST_Within( ST_X( ST_Y( SUBDATE( SUBSTR( SUBSTRING( SUBSTRING_INDEX( SUBTIME( SUM( SYSDATE( SYSTEM_USER( StartPoint( "
"TAN( TIME( TIMEDIFF( TIMESTAMP( TIMESTAMPADD( TIMESTAMPDIFF( TIME_FORMAT( TIME_TO_SEC( "
"TOTAL( TOTAL_CHANGES( TO_BASE64( TO_DAYS( TO_SECONDS( TRIM( TRUNCATE( TYPEOF( Touches( "
"UCASE( UNCOMPRESS( UNCOMPRESSED_LENGTH( UNHEX( UNICODE( UNIX_TIMESTAMP( UNLIKELY( UPPER( USER( "
"UTC_DATE( UTC_TIME( UTC_TIMESTAMP( UUID( UUID_SHORT( UUID_TO_BIN( UpdateXML( "
"VALIDATE_PASSWORD_STRENGTH( VALUE( VALUES( VARIANCE( VAR_POP( VAR_SAMP( VERSION( "
"WAIT_FOR_EXECUTED_GTID_SET( WAIT_UNTIL_SQL_THREAD_AFTER_GTIDS( WEEK( WEEKDAY( WEEKOFYEAR( WEIGHT_STRING( Within( "
"YEAR( YEARWEEK( ZEROBLOB( abs( acos( analyse( ascii( asin( atan( avg( bit_length( "
"cast( ceil( ceiling( char( char_length( character_length( charset( coalesce( collation( contains( convert( cos( count( "
"cume_dist( current_date( current_role( current_time( current_timestamp( current_user( "
"database( date( datetime( day( default( dense_rank( endpoint( equals( exp( extract( first_value( floor( format( "
"geomcollection( geometrycollection( get_format( glob( greatest( grouping( hex( hour( if( insert( interval( isnull( "
"json( json_array( json_arrayagg( json_exists( json_object( json_objectagg( json_query( json_table( json_value( "
"lag( last_value( lead( least( left( length( like( linestring( ln( localtime( localtimestamp( log( log10( lower( "
"max( microsecond( min( minute( mod( month( multilinestring( multipoint( multipolygon( nth_value( ntile( nullif( "
"octet_length( old_password( overlaps( "
"password( percent_rank( percentile_cont( percentile_disc( point( polygon( position( power( quarter( quote( "
"random( rank( repeat( replace( reverse( right( row_count( row_number( rtrim( "
"schema( second( session_user( sin( space( sqrt( srid( stddev_pop( stddev_samp( substring( sum( system_user( "
"tan( time( timestamp( timestampadd( timestampdiff( trim( truncate( "
"unicode( upper( user( utc_date( utc_time( utc_timestamp( uuid( value( values( var_pop( var_samp( version( "
"week( weight_string( within( year( "
, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
//--Autogenerated -- end of section automatically generated
}};
// Style table for the SQL lexer: maps Scintilla SQL lexical states
// (SCE_SQL_*) to display names and default "attributes; fore:#RRGGBB" specs.
static EDITSTYLE Styles_SQL[] = {
	EDITSTYLE_DEFAULT,
	// Reserved words (keyword list) and data-type keywords.
	{ SCE_SQL_WORD, NP2StyleX_Keyword, L"bold; fore:#FF8040" },
	{ SCE_SQL_WORD2, NP2StyleX_TypeKeyword, L"bold; fore:#1E90FF" },
	// Built-in function names (the "upper case functions" list above).
	{ SCE_SQL_USER1, NP2StyleX_BasicFunction, L"fore:#FF0080" },
	// All comment flavors share one style slot.
	{ MULTI_STYLE(SCE_SQL_COMMENT, SCE_SQL_COMMENTLINE, SCE_SQL_COMMENTLINEDOC, 0), NP2StyleX_Comment, L"fore:#608060" },
	{ MULTI_STYLE(SCE_SQL_STRING, SCE_SQL_CHARACTER, 0, 0), NP2StyleX_String, L"fore:#008000" },
	{ SCE_SQL_ESCAPECHAR, NP2StyleX_EscapeSequence, L"fore:#0080C0" },
	{ SCE_SQL_NUMBER, NP2StyleX_Number, L"fore:#FF0000" },
	// Hex blobs (x'..') and bit-field literals (b'..').
	{ MULTI_STYLE(SCE_SQL_HEX, SCE_SQL_HEX2, 0, 0), NP2StyleX_BlobHex, L"fore:#C08000" },
	{ MULTI_STYLE(SCE_SQL_BIT, SCE_SQL_BIT2, 0, 0), NP2StyleX_BitField, L"fore:#C08000" },
	{ SCE_SQL_VARIABLE, NP2StyleX_Variable, L"fore:#9E4D2A" },
	{ MULTI_STYLE(SCE_SQL_OPERATOR, SCE_SQL_QOPERATOR, 0, 0), NP2StyleX_Operator, L"fore:#B000B0" },
};
// Lexer registration for "SQL Query": Scintilla lexer id, internal id,
// default associated file extensions, keyword lists and the style table above.
EDITLEXER lexSQL = {
	SCLEX_SQL, NP2LEX_SQL,
	EDITLEXER_HOLE(L"SQL Query", Styles_SQL),
	// Default file extensions (semicolon separated).
	L"sql; mysql; hsql",
	&Keywords_SQL,
	Styles_SQL
};
|
// Development webpack configuration.
const commonPaths = require("./common-paths");
const webpack = require("webpack");

// NOTE(review): devConfig is populated but never referenced or exported below —
// looks like dead code or a half-finished env merge; confirm before removing
// (require("../dev.env") may be relied on for side effects).
let devConfig = {};
devConfig.prod = require("../dev.env");

const config = {
  mode: "development",
  output: {
    // One source map per emitted chunk.
    sourceMapFilename: "[name].js.map",
  },
  // Fast rebuilds with readable original source in dev tools.
  devtool: "eval-source-map",
  devServer: {
    // NOTE(review): `contentBase` is a webpack-dev-server v3 option
    // (renamed to `static` in v4) — confirm which dev-server version is used.
    contentBase: commonPaths.outputPath,
  },
  module: {
    rules: [
      {
        // Sass/SCSS: compile, resolve CSS imports, then inject via <style>.
        test: /\.s[ac]ss$/i,
        use: ["style-loader", "css-loader", "sass-loader"],
      },
      {
        // Images are emitted as separate files (webpack 5 asset modules).
        test: /\.(png|svg|jpg|jpeg|gif)$/i,
        type: "asset/resource",
      },
    ],
  },
  plugins: [
    // Shim the Node `process` global for browser code that expects it.
    new webpack.ProvidePlugin({
      process: "process/browser",
    }),
  ],
};

module.exports = config;
|
#!/usr/bin/env bash
# Travis CI helper for building and publishing ckan-cloud-operator images.
# Usage: <script> install|script|deploy
# Pattern note: "! cmd && echo msg && exit 1" aborts with a message when cmd fails.

# Tag images with the release tag when building a tagged commit, else the commit SHA.
TAG="${TRAVIS_TAG:-${TRAVIS_COMMIT}}"
if [ "${1}" == "install" ]; then
  # Pre-pull the latest images so the build step can use them as layer caches.
  ! docker pull viderum/ckan-cloud-operator:latest && echo Failed to pull image && exit 1
  ! docker pull viderum/ckan-cloud-operator:jnlp-latest && echo Failed to pull jnlp image && exit 1
  echo Great Success! && exit 0
elif [ "${1}" == "script" ]; then
  # Build the main image and the Jenkins-JNLP variant, reusing the pulled caches.
  ! docker build --build-arg "CKAN_CLOUD_OPERATOR_IMAGE_TAG=${TAG}" --cache-from viderum/ckan-cloud-operator:latest -t ckan-cloud-operator . && echo Failed to build image && exit 1
  ! docker build --build-arg "CKAN_CLOUD_OPERATOR_IMAGE_TAG=${TAG}" --cache-from viderum/ckan-cloud-operator:jnlp-latest -t ckan-cloud-operator-jnlp -f Dockerfile.jenkins-jnlp . && echo Failed to build jnlp image && exit 1
  echo Great Success! && exit 0
elif [ "${1}" == "deploy" ]; then
  # Push the commit/tag-specific images.
  docker tag ckan-cloud-operator "viderum/ckan-cloud-operator:${TAG}" &&\
  echo && echo "viderum/ckan-cloud-operator:${TAG}" && echo &&\
  docker push "viderum/ckan-cloud-operator:${TAG}"
  [ "$?" != "0" ] && echo Failed to tag and push && exit 1
  docker tag ckan-cloud-operator-jnlp "viderum/ckan-cloud-operator:jnlp-${TAG}" &&\
  echo && echo "viderum/ckan-cloud-operator:jnlp-${TAG}" && echo &&\
  docker push "viderum/ckan-cloud-operator:jnlp-${TAG}"
  [ "$?" != "0" ] && echo Failed to tag and push jnlp image && exit 1
  if [ "${TRAVIS_BRANCH}" == "master" ]; then
    # On master also advance the floating 'latest' / 'jnlp-latest' tags.
    docker tag ckan-cloud-operator viderum/ckan-cloud-operator:latest &&\
    echo && echo viderum/ckan-cloud-operator:latest && echo &&\
    docker push viderum/ckan-cloud-operator:latest
    [ "$?" != "0" ] && echo Failed to tag and push latest image && exit 1
    docker tag ckan-cloud-operator-jnlp viderum/ckan-cloud-operator:jnlp-latest &&\
    echo && echo viderum/ckan-cloud-operator:jnlp-latest && echo &&\
    docker push viderum/ckan-cloud-operator:jnlp-latest
    [ "$?" != "0" ] && echo Failed to tag and push jnlp latest image && exit 1
  fi
  if [ "${TRAVIS_TAG}" != "" ]; then
    # Tagged release: trigger the Jenkins job that deploys the jnlp image.
    export DEPLOY_JNLP_IMAGE="viderum/ckan-cloud-operator:jnlp-${TAG}"
    echo "Running Jenkins deploy jnlp job (JNLP_IMAGE=${DEPLOY_JNLP_IMAGE})"
    # curl writes the response body to stderr and prints only the HTTP status code.
    STATUS_CODE=$(curl -X POST "${JENKINS_JNLP_DEPLOY_URL}" --user "${JENKINS_USER}:${JENKINS_TOKEN}" --data "JNLP_IMAGE=${DEPLOY_JNLP_IMAGE}" -s -o /dev/stderr -w "%{http_code}")
    echo "jenkins jnlp deploy job status code: ${STATUS_CODE}"
    [ "${STATUS_CODE}" != "200" ] && [ "${STATUS_CODE}" != "201" ] && echo Deploy failed && exit 1
  fi
  echo Great Success! && exit 0
else
  echo invalid arguments && exit 1
fi
|
<reponame>suryanshsoni/iwt<filename>web/config.js
// Application module; ngRoute provides client-side routing.
var sohagApp = angular.module('SohagApp', ['ngRoute']);

// Route table configuration.
sohagApp.config(function ($routeProvider, $locationProvider) {
    console.log('in config ');
    /* HTML5 mode / hashbang support is intentionally left disabled:
       $locationProvider.html5Mode(true);
       $locationProvider.hashPrefix("!"); // hashbang URLs (SEO)
    */
    $routeProvider.when('/', {
        templateUrl: 'templates/homepage.html',
        controller: 'HomeController as homectrl'
    });
});
|
/*
 * freeCodeCamp — Positive and Negative Lookahead:
 * https://www.freecodecamp.org/learn/javascript-algorithms-and-data-structures/regular-expressions/positive-and-negative-lookahead
 *
 * Match passwords that are longer than 5 characters, do not begin with a
 * digit, and contain two consecutive digits. Expected results:
 *   match:    "bana12", "abc123", "astr1on11aut"
 *   no match: "astronaut", "banan1", "1234", "8pass99", "12abcde"
 */
let sampleWord = "astronaut";

// ^\D          first character must be a non-digit
// (?=\w{5})    at least 5 more word characters follow => total length > 5
// (?=\w*\d{2}) two consecutive digits appear somewhere ahead
let pwRegex = /^\D(?=\w{5})(?=\w*\d{2})/;

let result = pwRegex.test(sampleWord);
|
<reponame>cnlcnn/wallpaper
package com.example.cnlcnn.utils;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/*
 * Project: WallPaper
 * Author:  LiChuang
 * Created: 2017/5/6
 * Description: wraps scanning of a local folder for images; used to collect
 * downloaded pictures and locally added pictures.
 */
public class ScanLocalPicture {
//根据自己的需求读取SDCard中的资源图片的路径
private String picturePath;
public ScanLocalPicture(String path) {
this.picturePath = path;
}
/**
* 从SD卡中获取资源图片的路径,存放在list数组中
*/
public List<String> getPicturePathFromSD() {
/* 设定目前所在路径 */
List<String> it = new ArrayList<String>();
File mFile = new File(picturePath);
//该目录下所有文件目录,将所有文件存入ArrayList中 */
File[] files = mFile.listFiles();
// Log.d("localPicturefiles", files.length+"");
for (int i = 0; i < files.length; i++) {
File file = files[i];
if (checkIsImageFile(file.getPath()))
it.add(file.getPath());
}
return it;
}
/**
* 判断是否相应的图片格式
*/
private boolean checkIsImageFile(String fName) {
boolean isImageFormat;
/* 取得扩展名 */
String end = fName
.substring(fName.lastIndexOf(".") + 1, fName.length())
.toLowerCase();
/* 按扩展名的类型决定MimeType */
if (end.equals("jpg") || end.equals("gif") || end.equals("png")
|| end.equals("jpeg") || end.equals("bmp")) {
isImageFormat = true;
} else {
isImageFormat = false;
}
return isImageFormat;
}
}
|
<reponame>Xianghar/concrete5
/* jshint unused:vars, undef:true, node:true */
module.exports = function(grunt, config, parameters, kind, setSkipped, done) {
var path = require('path'), exec = require('child_process').exec;
var fixPathSeparator;
if (path.sep === '/') {
fixPathSeparator = function(x) { return x; };
} else {
fixPathSeparator = function(x) {
return x.replace(/\//g, path.sep);
};
}
var files = [], key, value, key2;
if ((kind === 'css' || kind === 'all') && config && config.less && config.less.release && config.less.release.files) {
for (key in config.less.release.files) {
if (config.less.release.files.hasOwnProperty(key)) {
if (typeof key === 'string' && key.indexOf('<%= DIR_BASE %>/') === 0) {
files.push(fixPathSeparator(key.substr('<%= DIR_BASE %>/'.length)));
}
}
}
}
if ((kind === 'js' || kind === 'all') && config && config.uglify) {
for (key in config.uglify) {
if (config.uglify.hasOwnProperty(key) && typeof key === 'string' && key.length > '_release'.length && key.substr(-'_release'.length) === '_release') {
value = config.uglify[key];
if (value.files) {
for (key2 in value.files) {
if (value.files.hasOwnProperty(key2)) {
if (typeof key2 === 'string' && key2.indexOf('<%= DIR_BASE %>/') === 0) {
files.push(fixPathSeparator(key2.substr('<%= DIR_BASE %>/'.length)));
}
}
}
}
}
}
}
if (files.length === 0) {
process.stderr.write('No files found\n');
done(false);
return;
}
console.log(path.join(__dirname, '..', '..'));
process.stdout.write(setSkipped ? ('Telling git to NOT CONSIDER built assets (' + kind + ')... ') : ('Telling git to CONSIDER built assets (' + kind + ')... '));
var cmd = 'git update-index ' + (setSkipped ? '--assume-unchanged' : '--no-assume-unchanged') + ' ' + files.join(' ');
exec(
cmd,
{
cwd: path.join(__dirname, '..', '..')
},
function(error, stdout, stderr) {
if(error) {
process.stderr.write(stderr || error);
}
else {
process.stdout.write('ok');
}
process.stdout.write('\n');
done(!error);
}
);
};
|
<gh_stars>0
'use strict';

// Pre-compiled patterns shared by every validator below.
var alphanumericREGEXP = /^[a-zA-Z0-9\s]+$/;
var multiSpaceREGEXP = /\s{2,}/;
var numericREGEXP = /^[0-9]+$/;

app.service('validationService', function() {
    var self = this;

    /** Required: true when the (stringified) value is non-empty. */
    this.required = function(x) {
        var y = (typeof x != 'string') ? x.toString() : x;
        return y.length > 0;
    };

    /** Alphanumeric: letters, digits and whitespace only. */
    this.alphanumeric = function(x) {
        return alphanumericREGEXP.test(x);
    };

    /** Multi-space: true when two or more consecutive spaces occur. */
    this.multispace = function(x) {
        return multiSpaceREGEXP.test(x);
    };

    /** Numeric: digits only. */
    this.numeric = function(x) {
        return numericREGEXP.test(x);
    };
});
|
package org.javers.repository.mongo;
import static org.javers.common.string.Strings.isNonEmpty;
import com.mongodb.client.MongoDatabase;
import org.javers.core.AbstractContainerBuilder;
import org.javers.repository.mongo.pico.JaversMongoModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Fluent builder for MongoRepository instances. Collects optional settings
 * (snapshot collection name, cache size, dialect, database) and wires them
 * into the container in {@link #build()}.
 */
public class MongoRepositoryBuilder extends AbstractContainerBuilder {
    private static final Logger logger = LoggerFactory.getLogger(MongoRepositoryBuilder.class);

    // All settings are optional; null means "not configured" (defaults are
    // presumably applied downstream — confirm in MongoRepositoryConfiguration).
    private String snapshotCollectionName;
    private Integer cacheSize;
    private MongoDialect dialect;
    private MongoDatabase mongoDatabase;

    public MongoRepositoryBuilder() {
    }

    /** Entry point: {@code mongoRepository().with...().build()}. */
    public static MongoRepositoryBuilder mongoRepository() {
        return new MongoRepositoryBuilder();
    }

    /** Sets the snapshot collection name; blank/null values are silently ignored. */
    public MongoRepositoryBuilder withSnapshotCollectionName(String snapshotCollectionName) {
        if (isNonEmpty(snapshotCollectionName)) {
            this.snapshotCollectionName = snapshotCollectionName;
        }
        return this;
    }

    public MongoRepositoryBuilder withCacheSize(int cacheSize) {
        this.cacheSize = cacheSize;
        return this;
    }

    public MongoRepositoryBuilder withDialect(MongoDialect dialect) {
        this.dialect = dialect;
        return this;
    }

    public MongoRepositoryBuilder withMongoDatabase(MongoDatabase mongoDatabase) {
        this.mongoDatabase = mongoDatabase;
        return this;
    }

    /**
     * Boots the container, registers the configuration, the database and the
     * Javers Mongo module, then resolves the repository from the container.
     * Order matters: bootContainer() must run before components are added.
     */
    public MongoRepository build() {
        logger.info("starting MongoRepository...");
        logger.info(" snapshotCollection name: {}", snapshotCollectionName);
        logger.info(" cacheSize : {}", cacheSize);
        logger.info(" dialect : {}", dialect);
        bootContainer();
        MongoRepositoryConfiguration config = new MongoRepositoryConfiguration(snapshotCollectionName,
                cacheSize, dialect);
        addComponent(config);
        addComponent(mongoDatabase);
        addModule(new JaversMongoModule());
        return getContainerComponent(MongoRepository.class);
    }

    /**
     * For testing only
     */
    @Override
    protected <T> T getContainerComponent(Class<T> ofClass) {
        return super.getContainerComponent(ofClass);
    }
}
|
<filename>AndroidNanoDegreeProjectCapstone/Phase2/app/src/main/java/io/github/marcelbraghetto/dailydeviations/features/application/MainApp.java<gh_stars>0
package io.github.marcelbraghetto.dailydeviations.features.application;
import android.app.Activity;
import android.app.Application;
import android.support.annotation.NonNull;
import android.webkit.WebView;
import javax.inject.Inject;
import io.github.marcelbraghetto.dailydeviations.features.application.logic.MainAppLogic;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.dagger.AppComponent;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.dagger.AppDaggerModule;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.dagger.DaggerAppComponent;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.utils.BasicActivityLifecycleCallbacks;
/**
* Created by <NAME> on 24/02/16.
*/
/** Application entry point: builds the Dagger graph and tracks app lifecycle. */
public class MainApp extends Application {
    // Static self reference so the Dagger component is reachable from anywhere.
    private static MainApp sSelf;
    // Application-scoped Dagger component, built once in onCreate().
    private AppComponent mAppComponent;

    @Inject MainAppLogic mLogic;

    @Override
    public void onCreate() {
        super.onCreate();
        sSelf = this;
        // Build the dependency graph before injecting into this class itself.
        mAppComponent = buildDaggerComponent();
        getDagger().inject(this);
        mLogic.begin();
        // Forward activity start/stop events to the app logic (presumably for
        // foreground/background tracking — confirm in MainAppLogic).
        registerActivityLifecycleCallbacks(new BasicActivityLifecycleCallbacks() {
            @Override
            public void onActivityStarted(Activity activity) {
                mLogic.activityStarted();
            }

            @Override
            public void onActivityStopped(Activity activity) {
                mLogic.activityStopped();
            }
        });
        warmupWebView();
    }

    /**
     * From Lollipop onward, constructing a WebView is very expensive the first time
     * so we are going to warm it up here before the user might try to open something
     * needing a web view and be presented with a jarring main thread freeze.
     */
    private void warmupWebView() {
        new WebView(getApplicationContext());
    }

    // Protected so subclasses can substitute a different component
    // (presumably for tests — confirm against test sources).
    @NonNull
    protected AppComponent buildDaggerComponent() {
        return DaggerAppComponent
                .builder()
                .appDaggerModule(new AppDaggerModule(this))
                .build();
    }

    /** Global accessor for the application's Dagger component. */
    @NonNull
    public static AppComponent getDagger() {
        return sSelf.mAppComponent;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.brooklyn.camp.rest.resource;
import io.brooklyn.camp.CampPlatform;
import io.brooklyn.camp.rest.util.CampRestContext;
import io.brooklyn.camp.rest.util.DtoFactory;
import io.brooklyn.camp.rest.util.WebResourceUtils;
import io.brooklyn.camp.spi.AbstractResource;
import io.brooklyn.camp.spi.collection.ResourceLookup;
import javax.servlet.ServletContext;
import javax.ws.rs.core.Context;
/** Base class for CAMP REST resources: provides the shared REST context. */
public abstract class AbstractCampRestResource {

    // can be injected by jersey when not injected manually
    // (seems there is no way to make this optional so note it _must_ be injected; if needed
    // see notes on workarounds for test frameworks in original AbstractBrooklynRestResource)
    @Context ServletContext servletContext;

    // Lazily created on first use; creation is guarded by 'synchronized' on context().
    private CampRestContext campRestContext;

    /** Returns the CAMP REST context for this servlet, creating it on first call. */
    public synchronized CampRestContext context() {
        if (campRestContext!=null) return campRestContext;
        campRestContext = new CampRestContext(servletContext);
        return campRestContext;
    }

    /** Shorthand for the CAMP platform held by the context. */
    public CampPlatform camp() { return context().camp(); }

    /** Shorthand for the DTO factory held by the context. */
    public DtoFactory dto() { return context().dto(); }

    /**
     * Looks up an element by id, converting a missing element into a
     * not-found web exception (via WebResourceUtils) instead of returning null.
     */
    public static <T extends AbstractResource> T lookup(ResourceLookup<T> list, String id) {
        T result = list.get(id);
        if (result==null)
            throw WebResourceUtils.notFound("No such element: %s", id);
        return result;
    }
}
|
<gh_stars>10-100
@org.springframework.lang.NonNullApi
package org.moduliths.moments;
|
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.proxy;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlTransient;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.CloudProvider;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.VisibleScope;
import org.dasein.cloud.compute.Architecture;
import org.dasein.cloud.compute.Platform;
import org.dasein.cloud.compute.VirtualMachine;
import org.dasein.cloud.compute.VirtualMachineLifecycle;
import org.dasein.cloud.compute.VmState;
import org.dasein.cloud.compute.VmStatus;
import org.dasein.cloud.compute.Volume;
import org.dasein.cloud.network.RawAddress;
import com.infinities.skyport.distributed.DistributedAtomicLong;
public class VirtualMachineProxy extends VirtualMachine {
@XmlTransient
private volatile VirtualMachine virtualMachine;
private String configName;
private String configId;
private String distributedKey;
private final DistributedAtomicLong isLocked;
	/**
	 * Wraps a dasein VirtualMachine so state reads can be masked while a
	 * distributed lock is held (see getCurrentState()).
	 *
	 * @param virtualMachine the delegate that all getters forward to
	 * @param configName     name of the owning configuration (presumably a skyport
	 *                       config — confirm against callers)
	 * @param configId       id of that configuration
	 * @param distributedKey key identifying this VM in the distributed store
	 * @param isLocked       distributed flag; value 1 means an operation is in flight
	 */
	public VirtualMachineProxy(VirtualMachine virtualMachine, String configName, String configId, String distributedKey,
			DistributedAtomicLong isLocked) {
		super();
		this.virtualMachine = virtualMachine;
		this.configName = configName;
		this.configId = configId;
		this.setDistributedKey(distributedKey);
		this.isLocked = isLocked;
	}
public VirtualMachine getVirtualMachine() {
return virtualMachine;
}
public void setVirtualMachine(VirtualMachine virtualMachine) {
this.virtualMachine = virtualMachine;
}
@Override
public String getRootPassword() {
return getVirtualMachine().getRootPassword();
}
@Override
public String getRootPassword(long timeoutInMilliseconds) throws InterruptedException {
return getVirtualMachine().getRootPassword(timeoutInMilliseconds);
}
@Override
public String fetchPassword() {
return getVirtualMachine().fetchPassword();
}
@Override
public String toString() {
return getVirtualMachine().toString();
}
@Override
public String getAffinityGroupId() {
return getVirtualMachine().getAffinityGroupId();
}
@Override
public Architecture getArchitecture() {
return getVirtualMachine().getArchitecture();
}
@Override
public boolean isClonable() {
return getVirtualMachine().isClonable();
}
@Override
public long getCreationTimestamp() {
return getVirtualMachine().getCreationTimestamp();
}
	@Override
	public VmState getCurrentState() {
		// While the lock flag is set (value == 1) report PENDING so callers see
		// the VM as busy; compareAndSet(1, 1) is effectively a read-only test of
		// the flag (it only "sets" the value it already holds).
		return isLocked.compareAndSet(1, 1) ? VmState.PENDING : getVirtualMachine().getCurrentState();
	}
@Override
public String getDescription() {
return getVirtualMachine().getDescription();
}
@Override
public boolean isImagable() {
return getVirtualMachine().isImagable();
}
@Override
public long getLastBootTimestamp() {
return getVirtualMachine().getLastBootTimestamp();
}
@Override
public long getLastPauseTimestamp() {
return getVirtualMachine().getLastPauseTimestamp();
}
@Override
public String getName() {
return getVirtualMachine().getName();
}
@Override
public boolean isPausable() {
return getVirtualMachine().isPausable();
}
@Override
public boolean isPersistent() {
return getVirtualMachine().isPersistent();
}
@Override
public Platform getPlatform() {
return getVirtualMachine().getPlatform();
}
@Override
public String getPrivateDnsAddress() {
return getVirtualMachine().getPrivateDnsAddress();
}
@Override
public @Nonnull RawAddress[] getPrivateAddresses() {
return getVirtualMachine().getPrivateAddresses();
}
@Override
public String getProviderAssignedIpAddressId() {
return getVirtualMachine().getProviderAssignedIpAddressId();
}
@Override
public String getProviderDataCenterId() {
return getVirtualMachine().getProviderDataCenterId();
}
@Override
public String getProviderMachineImageId() {
return getVirtualMachine().getProviderMachineImageId();
}
@Override
public String getProviderOwnerId() {
return getVirtualMachine().getProviderOwnerId();
}
@Override
public String getProviderRegionId() {
return getVirtualMachine().getProviderRegionId();
}
@Override
public String getProviderVirtualMachineId() {
return getVirtualMachine().getProviderVirtualMachineId();
}
@Override
public String getPublicDnsAddress() {
return getVirtualMachine().getPublicDnsAddress();
}
@Override
public @Nonnull RawAddress[] getPublicAddresses() {
return getVirtualMachine().getPublicAddresses();
}
    /**
     * Returns the informal group association of the underlying virtual machine.
     * The group name ties this VM to other virtual machines in the system; the
     * underlying cloud may interpret the association in any number of ways.
     *
     * @return the virtual machine group association, or null when the VM is not
     *         associated with a group
     */
    @Override
    public @Nullable String getVirtualMachineGroup() {
        return getVirtualMachine().getVirtualMachineGroup();
    }
    // --- Delegation section (continued): straight pass-throughs to the wrapped
    // VirtualMachine.
    @Override
    public boolean isRebootable() {
        return getVirtualMachine().isRebootable();
    }
    @Override
    public String getRootUser() {
        return getVirtualMachine().getRootUser();
    }
    @Override
    public String getStateReasonMessage() {
        return getVirtualMachine().getStateReasonMessage();
    }
    @Override
    public long getTerminationTimestamp() {
        return getVirtualMachine().getTerminationTimestamp();
    }
    @Override
    public Callable<String> getPasswordCallback() {
        return getVirtualMachine().getPasswordCallback();
    }
    @Override
    public String getProductId() {
        return getVirtualMachine().getProductId();
    }
    @Override
    public String[] getLabels() {
        return getVirtualMachine().getLabels();
    }
    @Override
    public Object getTag(String tag) {
        // Resolved through getTags() so a single delegation point defines the
        // tag map used by both lookups.
        return getTags().get(tag);
    }
    @Override
    public synchronized @Nonnull Map<String, String> getTags() {
        return getVirtualMachine().getTags();
    }
    @Override
    public String getProviderSubnetId() {
        return getVirtualMachine().getProviderSubnetId();
    }
    @Override
    public String getProviderVlanId() {
        return getVirtualMachine().getProviderVlanId();
    }
    @Override
    public String getProviderKeypairId() {
        return getVirtualMachine().getProviderKeypairId();
    }
    @Override
    public String[] getProviderFirewallIds() {
        return getVirtualMachine().getProviderFirewallIds();
    }
    @Override
    public String[] getProviderNetworkInterfaceIds() {
        return getVirtualMachine().getProviderNetworkInterfaceIds();
    }
    @Override
    public @Nullable String getProviderKernelImageId() {
        return getVirtualMachine().getProviderKernelImageId();
    }
    @Override
    public @Nullable String getProviderRamdiskImageId() {
        return getVirtualMachine().getProviderRamdiskImageId();
    }
    @Override
    public @Nonnull String[] getProviderShellKeyIds() {
        return getVirtualMachine().getProviderShellKeyIds();
    }
    @Override
    public @Nonnull String[] getProviderVolumeIds(@Nonnull CloudProvider provider) throws CloudException, InternalException {
        return getVirtualMachine().getProviderVolumeIds(provider);
    }
    @Override
    public @Nullable Volume[] getVolumes() {
        return getVirtualMachine().getVolumes();
    }
    @Override
    public boolean isIoOptimized() {
        return getVirtualMachine().isIoOptimized();
    }
    @Override
    public boolean isIpForwardingAllowed() {
        return getVirtualMachine().isIpForwardingAllowed();
    }
    @Override
    public String getProviderRoleId() {
        return getVirtualMachine().getProviderRoleId();
    }
    @Override
    public VmStatus getProviderVmStatus() {
        return getVirtualMachine().getProviderVmStatus();
    }
    @Override
    public VmStatus getProviderHostStatus() {
        return getVirtualMachine().getProviderHostStatus();
    }
    @Override
    public VisibleScope getVisibleScope() {
        return getVirtualMachine().getVisibleScope();
    }
    @Override
    public VirtualMachineLifecycle getLifecycle() {
        return getVirtualMachine().getLifecycle();
    }
    @Override
    public String getSpotRequestId() {
        return getVirtualMachine().getSpotRequestId();
    }
    @Override
    public String getResourcePoolId() {
        return getVirtualMachine().getResourcePoolId();
    }
    @Override
    public String getClientRequestToken() {
        return getVirtualMachine().getClientRequestToken();
    }
    @Override
    public void setTag(String key, String value) {
        // The only mutating delegate in this section: writes through to the
        // wrapped VM's tag map.
        getVirtualMachine().setTag(key, value);
    }
    // Local (non-delegated) accessors for the proxy's own configuration state.
    public String getConfigName() {
        return configName;
    }
    public void setConfigName(String configName) {
        this.configName = configName;
    }
    public String getConfigId() {
        return configId;
    }
    public void setConfigId(String configId) {
        this.configId = configId;
    }
    // isLocked is an atomic 0/1 flag: 1 = locked, 0 = unlocked. All three
    // methods below use compareAndSet so check-and-update is a single atomic
    // step; none of them can be observed in a half-updated state.
    @XmlTransient
    @Transient
    public boolean lock() {
        // Acquire: succeeds only on the 0 -> 1 transition.
        return isLocked.compareAndSet(0, 1);
    }
    @XmlTransient
    @Transient
    public boolean unlock() {
        // Release: succeeds only on the 1 -> 0 transition.
        return isLocked.compareAndSet(1, 0);
    }
    @XmlTransient
    @Transient
    public boolean isLocked() {
        // compareAndSet(1, 1) is an atomic "is the value 1?" read; it never
        // changes the value.
        return isLocked.compareAndSet(1, 1);
    }
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((configId == null) ? 0 : configId.hashCode());
result = prime * result + ((virtualMachine == null) ? 0 : virtualMachine.hashCode());
return result;
}
    @Override
    public boolean equals(Object obj) {
        // Value equality over configId and virtualMachine.
        if (this == obj)
            return true;
        // NOTE(review): if the superclass does not override equals(), this call
        // is Object's identity comparison, which rejects every distinct
        // instance and makes the field comparisons below unreachable (and would
        // be inconsistent with hashCode(), which ignores superclass state).
        // Confirm the superclass provides a value-based equals().
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        VirtualMachineProxy other = (VirtualMachineProxy) obj;
        if (configId == null) {
            if (other.configId != null)
                return false;
        } else if (!configId.equals(other.configId))
            return false;
        if (virtualMachine == null) {
            if (other.virtualMachine != null)
                return false;
        } else if (!virtualMachine.equals(other.virtualMachine))
            return false;
        return true;
    }
    // Accessors for the proxy's distributed-cache key (local state, not
    // delegated to the wrapped VM).
    public String getDistributedKey() {
        return distributedKey;
    }
    public void setDistributedKey(String distributedKey) {
        this.distributedKey = distributedKey;
    }
}
|
<filename>src/SonosPlayer.tsx
import {
BooleanCharacteristic,
Component,
NumberCharacteristic,
PlatformAccessory,
PlatformAccessoryConfiguration,
Service,
useContext,
useHomebridgeApi,
useLogger,
} from "@credding/homebridge-jsx";
import { Categories } from "homebridge";
import { SonosApiContext } from "./SonosApiContext";
import { PlaybackState } from "./SonosApi";
/** Props for accessories addressed by a single Sonos player or group id. */
type SonosSpeakerProps = {
  id: string;
  name: string;
};
/** Props for the favorite player: a favorite to load onto a target group. */
type SonosFavSpeakerProps = {
  favId: string;
  name: string;
  groupId: string;
};
/**
 * HomeKit accessory exposing a Sonos favorite on a group as a "lightbulb":
 * On = favorite loaded and group playing; Brightness = group volume.
 */
export const SonosFavoritePlayer = ({
  favId,
  name,
  groupId,
}: SonosFavSpeakerProps): Component<PlatformAccessoryConfiguration> => {
  const { hap } = useHomebridgeApi();
  const sonosApi = useContext(SonosApiContext);
  const logger = useLogger();
  // "On" is true only when this favorite is loaded AND the group is playing.
  const getPlaying = async (): Promise<boolean> => {
    const ps = await sonosApi.getGroupPlaybackStatus(groupId);
    logger.debug("Got playback status %s", JSON.stringify(ps, null, 2));
    // Strict equality ('===') instead of the original loose '=='.
    return (
      sonosApi.isFavoriteLoaded(favId) &&
      ps.playbackState === PlaybackState.Playing
    );
  };
  const setPlaying = async (value: boolean): Promise<void> => {
    if (value) {
      // Desired state is "playing": resume when the favorite is already
      // loaded, otherwise load it onto the group first.
      if (sonosApi.isFavoriteLoaded(favId)) {
        await sonosApi.groupPlay(groupId);
      } else {
        await sonosApi.playFavorite(favId, groupId);
      }
    } else {
      // Desired state is "paused".
      await sonosApi.groupPause(groupId);
    }
  };
  // Group volume is surfaced through the lightbulb Brightness characteristic.
  const getVolume = async (): Promise<number> => {
    const { volume } = await sonosApi.getGroupVolume(groupId);
    return volume;
  };
  const setVolume = async (value: number): Promise<void> => {
    await sonosApi.setGroupVolume(groupId, value);
  };
  return (
    <PlatformAccessory
      name={name}
      uuid={hap.uuid.generate(favId + groupId)}
      category={Categories.SPEAKER}
    >
      <Service type={hap.Service.Lightbulb}>
        <BooleanCharacteristic
          type={hap.Characteristic.On}
          onGet={getPlaying}
          onSet={setPlaying}
        ></BooleanCharacteristic>
        <NumberCharacteristic
          type={hap.Characteristic.Brightness}
          onGet={getVolume}
          onSet={setVolume}
        ></NumberCharacteristic>
      </Service>
    </PlatformAccessory>
  );
};
/**
 * HomeKit accessory exposing a Sonos group as a "lightbulb":
 * On = group playing; Brightness = group volume.
 */
export const SonosGroupPlayer = ({
  id,
  name,
}: SonosSpeakerProps): Component<PlatformAccessoryConfiguration> => {
  const { hap } = useHomebridgeApi();
  const sonosApi = useContext(SonosApiContext);
  const logger = useLogger();
  const isPlaying = async (): Promise<boolean> => {
    const ps = await sonosApi.getGroupPlaybackStatus(id);
    logger.debug("Got playback status %s", JSON.stringify(ps, null, 2));
    // Strict equality ('===') instead of the original loose '=='.
    return ps.playbackState === PlaybackState.Playing;
  };
  const setPlaying = async (value: boolean): Promise<void> => {
    if (value) {
      // Desired state is "playing".
      await sonosApi.groupPlay(id);
    } else {
      // Desired state is "paused".
      await sonosApi.groupPause(id);
    }
  };
  // Group volume is surfaced through the lightbulb Brightness characteristic.
  const getVolume = async (): Promise<number> => {
    const { volume } = await sonosApi.getGroupVolume(id);
    return volume;
  };
  const setVolume = async (value: number): Promise<void> => {
    await sonosApi.setGroupVolume(id, value);
  };
  return (
    <PlatformAccessory
      name={name}
      uuid={hap.uuid.generate(id)}
      category={Categories.SPEAKER}
    >
      <Service type={hap.Service.Lightbulb}>
        <BooleanCharacteristic
          type={hap.Characteristic.On}
          onGet={isPlaying}
          onSet={setPlaying}
        ></BooleanCharacteristic>
        <NumberCharacteristic
          type={hap.Characteristic.Brightness}
          onGet={getVolume}
          onSet={setVolume}
        ></NumberCharacteristic>
      </Service>
    </PlatformAccessory>
  );
};
// HomeKit accessory exposing a single Sonos player as a "lightbulb":
// On maps to the player's mute flag, Brightness to the player volume.
export const SonosPlayer = ({
  id,
  name,
}: SonosSpeakerProps): Component<PlatformAccessoryConfiguration> => {
  const { hap } = useHomebridgeApi();
  const sonosApi = useContext(SonosApiContext);
  // NOTE(review): "On" returns the *muted* flag directly, i.e. the switch
  // reads as on while the player is muted — confirm this mapping (rather than
  // !muted) is intentional.
  const getMuted = async () => {
    const { muted } = await sonosApi.getPlayerVolume(id);
    return muted;
  };
  const setMuted = async (value: boolean) => {
    await sonosApi.setPlayerMute(id, value);
  };
  const getVolume = async () => {
    const { volume } = await sonosApi.getPlayerVolume(id);
    return volume;
  };
  const setVolume = async (value: number) => {
    await sonosApi.setPlayerVolume(id, value);
  };
  return (
    <PlatformAccessory
      name={name}
      uuid={hap.uuid.generate(id)}
      category={Categories.SPEAKER}
    >
      <Service type={hap.Service.Lightbulb}>
        <BooleanCharacteristic
          type={hap.Characteristic.On}
          onGet={getMuted}
          onSet={setMuted}
        ></BooleanCharacteristic>
        <NumberCharacteristic
          type={hap.Characteristic.Brightness}
          onGet={getVolume}
          onSet={setVolume}
        ></NumberCharacteristic>
      </Service>
    </PlatformAccessory>
  );
};
|
import { ERowStatus } from '../../typings/enums'
import { IGridRowMeta } from '../../typings/interfaces'
import { IRowOperationFactory } from '../../typings/interfaces/grid-row-operation-factory.interface'
import { GridImplementationFactory } from '../../typings/interfaces/implementations/grid-implementation.factory'
import { TPrimaryKey } from '../../typings/types'
import { Operation } from '../operation.abstract'
export class SetRowMeta extends Operation {

  constructor(factory: IRowOperationFactory) { super(factory.gridController) }

  /**
   * Merges the given partial meta into the row's meta record (creating one
   * when the row has none), maintains the dirty-row index and emits a
   * MetadataChangedEvent for the row.
   *
   * A string 'status' is resolved to its ERowStatus enum value, and
   * 'metadata' entries are merged into the existing metadata map instead of
   * replacing it. The caller's input object is never mutated (the original
   * implementation wrote status into and deleted metadata from 'input').
   */
  public run(rowKey: TPrimaryKey, input: Partial<Pick<IGridRowMeta, 'metadata' | 'rowKey' | 'status' | 'separators'>>): void {
    const rowMeta: IGridRowMeta = this.rowOperations.GetRowMeta.run(rowKey) ?? GridImplementationFactory.gridRowMeta({ rowKey })
    // Work on a shallow copy so the caller's object is left untouched.
    const patch: typeof input = { ...input }
    const status = patch.status as ERowStatus | keyof typeof ERowStatus
    // Accept the status either as an enum value or as its string key.
    if (typeof status === 'string') patch.status = ERowStatus[status]
    if (patch.metadata) {
      // Merge entries into the existing metadata map rather than overwriting it.
      patch.metadata.items.forEach(x => rowMeta.metadata.set(x.key, x.value))
      delete patch.metadata
    }
    Object.assign(rowMeta, patch)
    // Keep the dirty-row index in sync so edits can be enumerated cheaply.
    if (rowMeta.isDirty) this.rowOperations.dirtyRowsMap.set(rowKey, rowMeta)
    else if (this.rowOperations.dirtyRowsMap.has(rowKey)) this.rowOperations.dirtyRowsMap.delete(rowKey)
    this.dataSource.rowMeta.set(rowKey, rowMeta)
    this.gridEvents.MetadataChangedEvent.emit({ rowKey })
  }
}
|
/**
 * The core structure of a QnA item
 */
export interface QnABaseItem {
    /**
     * Time when the item was first created
     */
    createdDate: Date;
    /**
     * Unique identifier of a QnA item
     */
    id: number;
    /**
     * Status flags of the item (see QnAItemStatus)
     */
    status: QnAItemStatus;
    /**
     * Text description of the QnA item
     */
    text: string;
    /**
     * Time when the item was edited/updated
     */
    updatedDate: Date;
    /**
     * User details for the item.
     */
    user: UserIdentityRef;
}
/**
 * Identity reference with name and guid
 */
export interface UserIdentityRef {
    /**
     * User display name
     */
    displayName: string;
    /**
     * User VSID (identity GUID)
     */
    id: string;
}
// Values are powers of two so they can be combined as bit flags.
export enum QnAItemStatus {
    None = 0,
    /**
     * The Deleted flag is for soft deleting an item
     */
    Deleted = 1,
    /**
     * The UserEditable flag indicates whether the item is editable by the logged in user.
     */
    UserEditable = 2,
    /**
     * The PublisherCreated flag indicates whether the item has been created by extension publisher.
     */
    PublisherCreated = 4
}
/**
 * An answer within a QnA thread; carries no fields beyond the base QnA item.
 */
export interface Response extends QnABaseItem {
}
/**
 * The structure of the question / thread
 */
export interface Question extends QnABaseItem {
    /**
     * List of answers for the question / thread
     */
    responses: Response[];
}
/**
 * One page of QnA threads plus a continuation flag.
 */
export interface QuestionsResult {
    /**
     * Flag indicating if there are more QnA threads to be shown (for paging)
     */
    hasMoreQuestions: boolean;
    /**
     * List of the QnA threads
     */
    questions: Question[];
}
|
#pragma once
#include "stdafx.h"
namespace memory
{
static uintptr_t find_signature(const char* module, const char* pattern_, const char* mask) {
const auto compare = [](const uint8_t * data, const uint8_t * pattern, const char* mask_) {
for (; *mask_; ++mask_, ++data, ++pattern)
if (*mask_ == 'x' && *data != *pattern)
return false;
return (*mask_) == 0;
};
MODULEINFO module_info = {};
GetModuleInformation(GetCurrentProcess(), GetModuleHandleA(module), &module_info, sizeof MODULEINFO);
auto module_start = uintptr_t(module_info.lpBaseOfDll);
const uint8_t* pattern = reinterpret_cast<const uint8_t*>(pattern_);
for (size_t i = 0; i < module_info.SizeOfImage; i++)
if (compare(reinterpret_cast<uint8_t*>(module_start + i), pattern, mask))
return module_start + i;
return 0;
}
}
|
import tensorflow as tf
from tensorflow.keras.layers import Dense
# Create the model
# Three 128-unit ReLU hidden layers (Glorot-normal init) with a single
# sigmoid output for binary classification.
model = tf.keras.Sequential([
    Dense(128, kernel_initializer='glorot_normal', activation='relu'),
    Dense(128, kernel_initializer='glorot_normal', activation='relu'),
    Dense(128, kernel_initializer='glorot_normal', activation='relu'),
    Dense(1, activation='sigmoid')
])
# Compile the model
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])
# Optimize the model using checkpoint
# NOTE(review): monitor='val_accuracy' requires validation data, but fit()
# below is given none, so no val_accuracy metric will exist. Also,
# save_best_only is not set (defaults to False), so weights are written every
# epoch regardless of the monitored metric — confirm intent.
checkpoint_filepath = 'checkpoint.hdf5'
model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath=checkpoint_filepath,
    save_weights_only=True,
    monitor='val_accuracy',
    mode='max',
    verbose=1
)
# Train the model
# NOTE(review): x_train / y_train are assumed to be defined before this
# script runs (they are not defined in this file).
model.fit(x_train, y_train, epochs=100, callbacks=[model_checkpoint_callback])
|
package gov.cms.bfd.pipeline.rda.grpc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import gov.cms.bfd.pipeline.rda.grpc.source.GrpcRdaSource;
import gov.cms.bfd.pipeline.sharedutils.IdHasher;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import org.junit.jupiter.api.Test;
public class RdaLoadOptionsTest {
  /**
   * Serializes a fully-populated {@link RdaLoadOptions} to an in-memory byte
   * stream, reads it back, and verifies that the round-tripped object equals
   * the original.
   */
  @Test
  public void configIsSerializable() throws Exception {
    final RdaLoadOptions original =
        new RdaLoadOptions(
            AbstractRdaLoadJob.Config.builder()
                .runInterval(Duration.ofDays(12))
                .batchSize(9832)
                .build(),
            GrpcRdaSource.Config.builder()
                .serverType(GrpcRdaSource.Config.ServerType.Remote)
                .host("localhost")
                .port(5432)
                .maxIdle(Duration.ofMinutes(59))
                .authenticationToken("<PASSWORD>")
                .build(),
            new RdaServerJob.Config(),
            new IdHasher.Config(1000, "nottherealpepper".getBytes(StandardCharsets.UTF_8)));
    // Write the options into an in-memory buffer...
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (ObjectOutputStream objectOut = new ObjectOutputStream(buffer)) {
      objectOut.writeObject(original);
    }
    // ...then deserialize from that same buffer.
    final RdaLoadOptions roundTripped;
    try (ObjectInputStream objectIn =
        new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
      roundTripped = (RdaLoadOptions) objectIn.readObject();
    }
    assertEquals(original, roundTripped);
  }
}
|
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import QDir, QFile, QFileInfo, QIODevice, QUrl
from PyQt5.QtWidgets import (QApplication, QDialog, QDialogButtonBox,
QHBoxLayout, QLabel, QLineEdit, QMessageBox, QProgressDialog,
QPushButton, QVBoxLayout)
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest
class HttpWindow(QDialog):
    """Dialog that downloads a file over HTTP via QNetworkAccessManager.

    Presents a URL field, a Download button and a progress dialog; handles
    redirects, SSL-error prompts and HTTP authentication.
    """
    def __init__(self, parent=None):
        """Build the UI widgets and wire up all signal/slot connections."""
        super(HttpWindow, self).__init__(parent)
        self.url = QUrl()
        self.qnam = QNetworkAccessManager()
        self.reply = None
        self.outFile = None
        self.httpGetId = 0
        self.httpRequestAborted = False
        self.urlLineEdit = QLineEdit('https://qt-project.org')
        urlLabel = QLabel("&URL:")
        urlLabel.setBuddy(self.urlLineEdit)
        self.statusLabel = QLabel(
                "Please enter the URL of a file you want to download.")
        self.statusLabel.setWordWrap(True)
        self.downloadButton = QPushButton("Download")
        self.downloadButton.setDefault(True)
        self.quitButton = QPushButton("Quit")
        self.quitButton.setAutoDefault(False)
        buttonBox = QDialogButtonBox()
        buttonBox.addButton(self.downloadButton, QDialogButtonBox.ActionRole)
        buttonBox.addButton(self.quitButton, QDialogButtonBox.RejectRole)
        self.progressDialog = QProgressDialog(self)
        self.urlLineEdit.textChanged.connect(self.enableDownloadButton)
        self.qnam.authenticationRequired.connect(
                self.slotAuthenticationRequired)
        self.qnam.sslErrors.connect(self.sslErrors)
        self.progressDialog.canceled.connect(self.cancelDownload)
        self.downloadButton.clicked.connect(self.downloadFile)
        self.quitButton.clicked.connect(self.close)
        topLayout = QHBoxLayout()
        topLayout.addWidget(urlLabel)
        topLayout.addWidget(self.urlLineEdit)
        mainLayout = QVBoxLayout()
        mainLayout.addLayout(topLayout)
        mainLayout.addWidget(self.statusLabel)
        mainLayout.addWidget(buttonBox)
        self.setLayout(mainLayout)
        self.setWindowTitle("HTTP")
        self.urlLineEdit.setFocus()
    def startRequest(self, url):
        """Issue the GET request for url and connect the reply's signals."""
        self.reply = self.qnam.get(QNetworkRequest(url))
        self.reply.finished.connect(self.httpFinished)
        self.reply.readyRead.connect(self.httpReadyRead)
        self.reply.downloadProgress.connect(self.updateDataReadProgress)
    def downloadFile(self):
        """Validate the URL / target file name and start the download."""
        self.url = QUrl(self.urlLineEdit.text())
        fileInfo = QFileInfo(self.url.path())
        fileName = fileInfo.fileName()
        if not fileName:
            # URLs with no path component default to index.html.
            fileName = 'index.html'
        if QFile.exists(fileName):
            ret = QMessageBox.question(self, "HTTP",
                    "There already exists a file called %s in the current "
                    "directory. Overwrite?" % fileName,
                    QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
            if ret == QMessageBox.No:
                return
            QFile.remove(fileName)
        self.outFile = QFile(fileName)
        if not self.outFile.open(QIODevice.WriteOnly):
            QMessageBox.information(self, "HTTP",
                    "Unable to save the file %s: %s." % (fileName, self.outFile.errorString()))
            self.outFile = None
            return
        self.progressDialog.setWindowTitle("HTTP")
        self.progressDialog.setLabelText("Downloading %s." % fileName)
        self.downloadButton.setEnabled(False)
        self.httpRequestAborted = False
        self.startRequest(self.url)
    def cancelDownload(self):
        """Abort the in-flight request (triggered by the progress dialog)."""
        self.statusLabel.setText("Download canceled.")
        self.httpRequestAborted = True
        self.reply.abort()
        self.downloadButton.setEnabled(True)
    def httpFinished(self):
        """Handle completion: abort cleanup, errors, redirects or success."""
        if self.httpRequestAborted:
            # User canceled: discard the partially written file and the reply.
            if self.outFile is not None:
                self.outFile.close()
                self.outFile.remove()
                self.outFile = None
            self.reply.deleteLater()
            self.reply = None
            self.progressDialog.hide()
            return
        self.progressDialog.hide()
        self.outFile.flush()
        self.outFile.close()
        redirectionTarget = self.reply.attribute(QNetworkRequest.RedirectionTargetAttribute)
        if self.reply.error():
            self.outFile.remove()
            QMessageBox.information(self, "HTTP",
                    "Download failed: %s." % self.reply.errorString())
            self.downloadButton.setEnabled(True)
        elif redirectionTarget is not None:
            newUrl = self.url.resolved(redirectionTarget.toUrl())
            ret = QMessageBox.question(self, "HTTP",
                    "Redirect to %s?" % newUrl.toString(),
                    QMessageBox.Yes | QMessageBox.No)
            if ret == QMessageBox.Yes:
                self.url = newUrl
                self.reply.deleteLater()
                self.reply = None
                # Reopen and truncate the file before following the redirect.
                self.outFile.open(QIODevice.WriteOnly)
                self.outFile.resize(0)
                self.startRequest(self.url)
                return
        else:
            fileName = QFileInfo(QUrl(self.urlLineEdit.text()).path()).fileName()
            self.statusLabel.setText("Downloaded %s to %s." % (fileName, QDir.currentPath()))
            self.downloadButton.setEnabled(True)
        self.reply.deleteLater()
        self.reply = None
        self.outFile = None
    def httpReadyRead(self):
        """Stream received bytes straight to the output file as they arrive."""
        # Writing incrementally avoids buffering the whole download in memory.
        if self.outFile is not None:
            self.outFile.write(self.reply.readAll())
    def updateDataReadProgress(self, bytesRead, totalBytes):
        """Reflect download progress in the progress dialog."""
        if self.httpRequestAborted:
            return
        self.progressDialog.setMaximum(totalBytes)
        self.progressDialog.setValue(bytesRead)
    def enableDownloadButton(self):
        """Enable the Download button only while the URL field is non-empty."""
        self.downloadButton.setEnabled(self.urlLineEdit.text() != '')
    def slotAuthenticationRequired(self, authenticator):
        """Prompt for credentials with the authenticationdialog.ui form."""
        import os
        from PyQt5 import uic
        ui = os.path.join(os.path.dirname(__file__), 'authenticationdialog.ui')
        dlg = uic.loadUi(ui)
        dlg.adjustSize()
        dlg.siteDescription.setText("%s at %s" % (authenticator.realm(), self.url.host()))
        dlg.userEdit.setText(self.url.userName())
        dlg.passwordEdit.setText(self.url.password())
        if dlg.exec_() == QDialog.Accepted:
            authenticator.setUser(dlg.userEdit.text())
            authenticator.setPassword(dlg.passwordEdit.text())
    def sslErrors(self, reply, errors):
        """Ask the user whether to ignore the reported SSL errors."""
        errorString = ", ".join([str(error.errorString()) for error in errors])
        ret = QMessageBox.warning(self, "HTTP Example",
                "One or more SSL errors has occurred: %s" % errorString,
                QMessageBox.Ignore | QMessageBox.Abort)
        if ret == QMessageBox.Ignore:
            self.reply.ignoreSslErrors()
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    httpWin = HttpWindow()
    httpWin.show()
    # Run the application's event loop rather than the dialog's own exec_():
    # the original called httpWin.exec_(), leaving the QApplication created
    # above unused and tying the process exit code to the dialog result.
    sys.exit(app.exec_())
|
#!/usr/bin/env bash
# Bootstraps a Chainlink node on an EC2 host: configures the AWS CLI,
# installs Docker, generates credentials and launches the Chainlink container.
# NOTE: ${...} placeholders such as ${region} and ${env_vars} are substituted
# by the provisioning template before this script runs, not by the shell.
# Set the root directory
chainlink_dir=/root/.chainlink
# -p so re-running the script is idempotent (plain mkdir fails when the
# directory already exists).
mkdir -p "$chainlink_dir"
# AWS CLI Configuration
mkdir -p /root/.aws/
printf "[profile host]\nrole_arn = ${host_role}\nsource_profile = default\n\n[default]\nregion=${region}\noutput=json" >> /root/.aws/config
# Install Docker
yum update -y
yum install -y docker
service docker start
usermod -a -G docker ec2-user
# Create the config file from env vars (quoted heredoc: the template has
# already expanded ${env_vars}; the shell must not touch its contents)
touch "$chainlink_dir/config.env"
cat << 'EOF' > "$chainlink_dir/config.env"
${env_vars}
EOF
# Generate random passwords (no punctuation so they are shell-safe)
keystore_pw=$(aws secretsmanager get-random-password \
  --password-length 20 \
  --require-each-included-type \
  --query 'RandomPassword' \
  --exclude-punctuation \
  --output text)
api_pw=$(aws secretsmanager get-random-password \
  --password-length 20 \
  --require-each-included-type \
  --query 'RandomPassword' \
  --exclude-punctuation \
  --output text)
# Chainlink's API credentials file is email on line 1, password on line 2.
echo "$keystore_pw" >> "$chainlink_dir/keystore_pw"
echo "${login_email}" >> "$chainlink_dir/api_pw"
echo "$api_pw" >> "$chainlink_dir/api_pw"
# Run Chainlink
docker run -d \
  --restart=always \
  --name=chainlink \
  --log-driver=awslogs \
  --log-opt awslogs-region=${region} \
  --log-opt awslogs-group=${log_group} \
  --net=host \
  --env-file="$chainlink_dir/config.env" \
  -v "$chainlink_dir:$chainlink_dir" \
  smartcontract/chainlink:${image_tag} \
  node \
  -a "$chainlink_dir/api_pw" \
  -p "$chainlink_dir/keystore_pw" \
  || EXIT_CODE=$?
|
<filename>generators/angular/index.js
'use strict';
var fs = require('fs');
var path = require('path');
var util = require('util');
var wiredep = require('wiredep');
var yeoman = require('yeoman-generator');
var chalk = require('chalk');
var yosay = require('yosay');
var lodash = require('underscore.string');
var html = require("html-wiring");
module.exports = yeoman.Base.extend({
  // Captures options handed down from the parent generator and points the
  // template root at the shared templates directory.
  constructor: function (args, options) {
    yeoman.Base.apply(this, arguments);
    this.scriptAppName = this.options.scriptAppName;
    this.appPath = this.options.appPath;
    this.appname = this.options.appname;
    this.pkg = require('../../package.json');
    this.sourceRoot(path.join(__dirname, '../templates/common'));
  },
  // Yeoman "initializing" phase: greet the user unless suppressed.
  initializing: function () {
    if (!this.options['skip-welcome-message']) {
      this.log(chalk.yellow('Welcome to the angularjs part of the generator. Just answer some questions\n'));
    }
  },
  // Copies static project files (dotfiles, test dir, app skeleton) into the
  // '<appname>-frontend' destination directory.
  copyFiles: function () {
    // ###############################################
    var join = path.join;
    this.destinationRoot(join(this.destinationRoot(), this.appname + '-frontend'));
    this.sourceRoot(join(__dirname, './templates/common/root'));
    this.copy('.editorconfig');
    this.copy('.gitattributes');
    if (!this.env.options.coffee) {
      this.copy('.jscsrc');
    }
    this.copy('.jshintrc');
    this.copy('.yo-rc.json');
    // Template is named 'gitignore' without the leading dot (presumably so it
    // survives packaging — confirm) and is renamed on copy.
    this.copy('gitignore', '.gitignore');
    this.directory('test');
    this.sourceRoot(join(__dirname, './templates/common'));
    var appPath = this.options.appPath;
    var copy = function (dest) {
      this.copy(join('app', dest), join(appPath, dest));
    }.bind(this);
    copy('404.html');
    copy('favicon.ico');
    copy('robots.txt');
    copy('views/main.html');
    this.directory(join('app', 'images'), join(appPath, 'images'));
    // ###############################################
  },
  // Prompts for the AngularJS version to install (stored in this.ngVer).
  askForVersion: function () {
    var cb = this.async();
    this.prompt([{
      type: 'string',
      name: 'ngVer',
      message: 'Which version of angular do you want to use?',
      default: '1.5.9'
    }]).then((props) => {
      this.ngVer = props.ngVer;
      cb();
    });
  },
  // Prompts for the task runner; this.gulp selects Gulp over Grunt.
  askForGulp: function () {
    var cb = this.async();
    this.prompt([{
      type: 'confirm',
      name: 'gulp',
      message: 'Would you like to use Gulp (experimental) instead of Grunt?',
      default: false
    }]).then((props) => {
      this.gulp = props.gulp;
      cb();
    });
  },
  // Prompts for the stylesheet pipeline: plain Sass with Gulp, or
  // Sass-with-Compass with Grunt (the 'when' guards make them exclusive).
  askForStyles: function () {
    var gulp = this.gulp;
    var cb = this.async();
    this.prompt([{
      type: 'confirm',
      name: 'sass',
      message: 'Would you like to use Sass?',
      default: true,
      when: function () {
        return gulp;
      }
    }, {
      type: 'confirm',
      name: 'compass',
      message: 'Would you like to use Sass (with Compass)?',
      default: true,
      when: function () {
        return !gulp;
      }
    }]).then((props) => {
      this.sass = props.sass;
      this.compass = props.compass;
      cb();
    });
  },
  // Prompts for Bootstrap; the Sass flavor is only offered on the
  // Grunt + Compass path.
  askForBootstrap: function () {
    var compass = this.compass;
    var gulp = this.gulp;
    var cb = this.async();
    this.prompt([{
      type: 'confirm',
      name: 'bootstrap',
      message: 'Would you like to include Bootstrap?',
      default: true
    }, {
      type: 'confirm',
      name: 'compassBootstrap',
      message: 'Would you like to use the Sass version of Bootstrap?',
      default: true,
      when: function (props) {
        return !gulp && (props.bootstrap && compass);
      }
    }]).then((props) => {
      this.bootstrap = props.bootstrap;
      this.compassBootstrap = props.compassBootstrap;
      cb();
    });
  },
  // Prompts for the optional angular-* modules, records a boolean flag per
  // module on 'this', and builds the dependency list injected into the app
  // module definition (this.env.options.angularDeps).
  askForModules: function () {
    var cb = this.async();
    var prompts = [{
      type: 'checkbox',
      name: 'modules',
      message: 'Which modules would you like to include?',
      choices: [
        {
          value: 'animateModule',
          name: 'angular-animate.js',
          checked: true
        }, {
          value: 'ariaModule',
          name: 'angular-aria.js',
          checked: false
        }, {
          value: 'cookiesModule',
          name: 'angular-cookies.js',
          checked: true
        }, {
          value: 'resourceModule',
          name: 'angular-resource.js',
          checked: true
        }, {
          value: 'messagesModule',
          name: 'angular-messages.js',
          checked: false
        }, {
          value: 'routeModule',
          name: 'angular-route.js',
          checked: true
        }, {
          value: 'sanitizeModule',
          name: 'angular-sanitize.js',
          checked: true
        }, {
          value: 'touchModule',
          name: 'angular-touch.js',
          checked: true
        }
      ]
    }];
    this.prompt(prompts).then((props) => {
      var hasMod = function (mod) {
        return props.modules.indexOf(mod) !== -1;
      };
      this.animateModule = hasMod('animateModule');
      this.ariaModule = hasMod('ariaModule');
      this.cookiesModule = hasMod('cookiesModule');
      this.messagesModule = hasMod('messagesModule');
      this.resourceModule = hasMod('resourceModule');
      this.routeModule = hasMod('routeModule');
      this.sanitizeModule = hasMod('sanitizeModule');
      this.touchModule = hasMod('touchModule');
      var angMods = [];
      if (this.animateModule) {
        angMods.push("'ngAnimate'");
      }
      if (this.ariaModule) {
        angMods.push("'ngAria'");
      }
      if (this.cookiesModule) {
        angMods.push("'ngCookies'");
      }
      if (this.messagesModule) {
        angMods.push("'ngMessages'");
      }
      if (this.resourceModule) {
        angMods.push("'ngResource'");
      }
      if (this.routeModule) {
        angMods.push("'ngRoute'");
        // ngRoute also switches downstream templates to route-based scaffolding.
        this.env.options.ngRoute = true;
      }
      if (this.sanitizeModule) {
        angMods.push("'ngSanitize'");
      }
      if (this.touchModule) {
        angMods.push("'ngTouch'");
      }
      if (angMods.length) {
        this.env.options.angularDeps = '\n    ' + angMods.join(',\n    ') + '\n  ';
      }
      cb();
    });
  },
  // Renders app/index.html through EJS with 'this' as the template context
  // and keeps the result in this.indexFile for the later steps.
  readIndex: function () {
    this.engine = require('ejs').render;
    this.ngRoute = this.env.options.ngRoute;
    this.indexFile = this.engine(this.read('app/index.html'), this);
  },
  // Copies the main stylesheet, as .scss when Sass/Compass was chosen,
  // otherwise as plain .css.
  bootstrapFiles: function () {
    var sass = this.compass || this.sass;
    var cssFile = 'styles/main.' + (sass ? 's' : '') + 'css';
    this.copy(
      path.join('app', cssFile),
      path.join(this.appPath, cssFile)
    );
  },
  // Appends the app's script tags to the rendered index.html (html-wiring
  // inserts a usemin-style build block for scripts/scripts.js).
  appJs: function () {
    this.indexFile = html.appendFiles({
      html: this.indexFile,
      fileType: 'js',
      optimizedPath: 'scripts/scripts.js',
      sourceFileList: ['scripts/app.js', 'scripts/controllers/main.js'],
      searchPath: ['.tmp', this.appPath]
    });
  },
createIndexHtml: function () {
this.indexFile = this.indexFile.replace(/'/g, "'");
this.write(path.join(this.appPath, 'index.html'), this.indexFile);
},
  // Renders the package-level templates (bower.json, package.json, task
  // runner file, optional tsd.json, README) into the project root.
  packageFiles: function () {
    var join = path.join;
    this.lodash = lodash;
    this.coffee = this.env.options.coffee;
    this.typescript = this.env.options.typescript;
    this.template('root/_bower.json', 'bower.json');
    this.template('root/_bowerrc', '.bowerrc');
    this.template('root/_package.json', 'package.json');
    if (this.gulp) {
      this.template('root/_gulpfile.js', 'gulpfile.js');
    } else {
      this.template('root/_Gruntfile.js', 'Gruntfile.js');
    }
    if (this.typescript) {
      this.template('root/_tsd.json', 'tsd.json');
    }
    this.template('root/README.md', 'README.md');
    this.sourceRoot(join(__dirname, './templates/javascript/'));
  },
  // Runs '<taskRunner> wiredep' to inject bower dependencies, or prints the
  // manual instructions when installs are being skipped. (Private helper —
  // leading underscore keeps it out of the yeoman run loop.)
  _injectDependencies: function () {
    var taskRunner = this.gulp ? 'gulp' : 'grunt';
    if (this.options['skip-install']) {
      this.log(
        'After running `npm install & bower install`, inject your front end dependencies' +
        '\ninto your source code by running:' +
        '\n' +
        '\n' + chalk.yellow.bold(taskRunner + ' wiredep')
      );
    } else {
      this.spawnCommand(taskRunner, ['wiredep']);
    }
  },
  // Runs npm/bower install inside the frontend directory, restoring the
  // original working directory afterwards.
  install: function () {
    var cwd = process.cwd();
    if (!this.options['skip-install']) {
      var fedir = process.cwd() + '/' + this.appname + '-frontend';
      process.chdir(fedir);
      this.installDependencies({
        callback: () => { process.chdir(cwd)}
      });
    }
  }
});
//var Generator = module.exports = function Generator(args, options) {
//
// this.hookFor('angular:common', {
// args: args
// });
//
// this.hookFor('angular:main', {
// args: args
// });
//
// this.hookFor('angular:controller', {
// args: args
// });
//
// this.on('end', function () {
// var jsExt = this.options.coffee ? 'coffee' : 'js';
//
// var bowerComments = [
// 'bower:js',
// 'endbower'
// ];
// if (this.options.coffee) {
// bowerComments.push('bower:coffee');
// bowerComments.push('endbower');
// }
//
// this.invoke('karma:app', {
// options: {
// 'skip-install': this.options['skip-install'],
// 'base-path': '../',
// 'coffee': this.options.coffee,
// 'travis': true,
// 'files-comments': bowerComments.join(','),
// 'app-files': 'app/scripts/**/*.' + jsExt,
// 'test-files': [
// 'test/mock/**/*.' + jsExt,
// 'test/spec/**/*.' + jsExt
// ].join(','),
// 'bower-components-path': 'bower_components'
// }
// });
//
// this.installDependencies({
// skipInstall: this.options['skip-install'],
// skipMessage: this.options['skip-message'],
// callback: this._injectDependencies.bind(this)
// });
//
// if (this.env.options.ngRoute) {
// this.invoke('angular:route', {
// args: ['about']
// });
// }
// });
//
// this.pkg = require('../../package.json');
// this.sourceRoot(path.join(__dirname, '../templates/common'));
//};
|
#!/bin/bash
# The intent of this script is upload produced performance results to BenchView in a CI context.
# There is no support for running this script in a dev environment.
# --- Required environment -----------------------------------------------------
# All inputs arrive via environment variables set by the CI job; fail fast
# (non-zero exit) when any is missing.
if [ -z "$perfWorkingDirectory" ]; then
echo EnvVar perfWorkingDirectory should be set; exiting...
exit 1
fi
if [ -z "$configuration" ]; then
echo EnvVar configuration should be set; exiting...
exit 1
fi
if [ -z "$architecture" ]; then
echo EnvVar architecture should be set; exiting...
exit 1
fi
if [ -z "$OS" ]; then
echo EnvVar OS should be set; exiting...
exit 1
fi
# runType must be 'private' (needs BenchviewCommitName) or 'rolling'
# (needs GIT_COMMIT); anything else is rejected.
if [ "$runType" = "private" ]; then
if [ -z "$BenchviewCommitName" ]; then
echo EnvVar BenchviewCommitName should be set; exiting...
exit 1
fi
else
if [ "$runType" = "rolling" ]; then
if [ -z "$GIT_COMMIT" ]; then
echo EnvVar GIT_COMMIT should be set; exiting...
exit 1
fi
else
echo EnvVar runType should be set; exiting...
exit 1
fi
fi
if [ -z "$GIT_BRANCH" ]; then
echo EnvVar GIT_BRANCH should be set; exiting...
exit 1
fi
if [ ! -d "$perfWorkingDirectory" ]; then
echo "$perfWorkingDirectory" does not exist; exiting...
exit 1
fi
# --- Fetch the BenchView JSON tooling ----------------------------------------
# Always start from a clean copy of the package.
rm -r -f "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat" > /dev/null 2>&1
if [ ! -d "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat" ]; then
# curl "http://benchviewtestfeed.azurewebsites.net/nuget/FindPackagesById()?id='Microsoft.BenchView.JSONFormat'" | grep "content type" | sed "$ s/.*src=\"\([^\"]*\)\".*/\1/;tx;d;:x" | xargs curl -o $perfWorkingDirectory/benchview.zip http://benchviewtestfeed.azurewebsites.net/api/v2/package/microsoft.benchview.jsonformat/0.1.0-pre001
curl -o "$perfWorkingDirectory/benchview.zip" http://benchviewtestfeed.azurewebsites.net/api/v2/package/microsoft.benchview.jsonformat/0.1.0-pre024
unzip -q -o "$perfWorkingDirectory/benchview.zip" -d "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat"
fi
# nuget install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory "$perfWorkingDirectory" -Prerelease -ExcludeVersion || { echo Failed to install Microsoft.BenchView.JSONFormat NuPkg && exit 1 ; }
# Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView
if [[ "$GIT_BRANCH" == "origin/"* ]]
then
GIT_BRANCH_WITHOUT_ORIGIN=${GIT_BRANCH:7}
else
GIT_BRANCH_WITHOUT_ORIGIN=$GIT_BRANCH
fi
# --- Build the submission name/metadata --------------------------------------
timeStamp=`date --utc '+%Y-%m-%dT%H:%M:%SZ'`
benchViewName="SDK perf $OS $architecture $configuration $runType $GIT_BRANCH_WITHOUT_ORIGIN"
if [[ "$runType" == "private" ]]
then
benchViewName="$benchViewName $BenchviewCommitName"
fi
if [[ "$runType" == "rolling" ]]
then
benchViewName="$benchViewName $GIT_COMMIT"
fi
echo BenchViewName: "$benchViewName"
# NOTE(review): the python3.5 interpreter version is pinned throughout;
# the CI image must provide it.
echo Creating: "$perfWorkingDirectory/submission-metadata.json"
python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py" --name "$benchViewName" --user-email "dotnet-bot@microsoft.com" \
-o "$perfWorkingDirectory/submission-metadata.json" || { echo Failed to create: "$perfWorkingDirectory/submission-metadata.json" && exit 1 ; }
echo Creating: "$perfWorkingDirectory/build.json"
python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/build.py" git --branch "$GIT_BRANCH_WITHOUT_ORIGIN" --type "$runType" --source-timestamp "$timeStamp" \
--repository "https://github.com/dotnet/sdk" -o "$perfWorkingDirectory/build.json" || { echo Failed to create: "$perfWorkingDirectory/build.json" && exit 1 ; }
echo Creating: "$perfWorkingDirectory/machinedata.json"
python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/machinedata.py" \
-o "$perfWorkingDirectory/machinedata.json" || { echo Failed to create: "$perfWorkingDirectory/machinedata.json" && exit 1 ; }
# --- Convert every result XML in the working dir into one measurement file ----
echo Creating: "$perfWorkingDirectory/measurement.json"
find "$perfWorkingDirectory" -maxdepth 1 -type f -name "*.xml" -exec echo Processing: "{}" \;
find "$perfWorkingDirectory" -maxdepth 1 -type f -name "*.xml" -exec python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/measurement.py" xunitscenario "{}" --better desc --drop-first-value --append \
-o "$perfWorkingDirectory/measurement.json" \; || { echo Failed to create: "$perfWorkingDirectory/measurement.json" && exit 1 ; }
# --- Assemble and upload the final submission --------------------------------
echo Creating: "$perfWorkingDirectory/submission.json"
python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/submission.py" "$perfWorkingDirectory/measurement.json" \
--build "$perfWorkingDirectory/build.json" \
--machine-data "$perfWorkingDirectory/machinedata.json" \
--metadata "$perfWorkingDirectory/submission-metadata.json" \
--group "SDK Perf Tests" \
--type "$runType" \
--config-name "$configuration" \
--config Configuration "$configuration" \
--config OS "$OS" \
--architecture "$architecture" \
--machinepool "perfsnake" \
-o "$perfWorkingDirectory/submission.json" || { echo Failed to create: "$perfWorkingDirectory/submission.json" && exit 1 ; }
echo Uploading: "$perfWorkingDirectory/submission.json"
python3.5 "$perfWorkingDirectory/Microsoft.BenchView.JSONFormat/tools/upload.py" "$perfWorkingDirectory/submission.json" --container coreclr || { echo Failed to upload: "$perfWorkingDirectory/submission.json" && exit 1 ; }
exit 0
|
<reponame>premss79/zignaly-webapp<filename>src/components/Forms/ProviderSettingsForm/ToggleTextarea/index.js
export { default } from "./ToggleTextarea.js";
|
<?php
/**
 * Insert a new contact row into the `contacts` table.
 *
 * Uses a prepared statement so that $name / $email (untrusted user input)
 * cannot inject SQL. Echoes a success or error message; dies if the
 * database connection itself cannot be established.
 *
 * @param string $name  contact name
 * @param string $email contact email address
 */
function addContact($name, $email) {
    $servername = "localhost";
    $username = "DBUSERNAME";
    $password = "DBPASSWORD";
    $dbname = "DATABASENAME";

    // Create connection
    $conn = new mysqli($servername, $username, $password, $dbname);

    // Check connection
    if ($conn->connect_error) {
        die("Connection failed: " . $conn->connect_error);
    }

    // Parameterized INSERT: the original interpolated $name/$email directly
    // into the SQL string, which allowed SQL injection.
    $stmt = $conn->prepare("INSERT INTO contacts (name, email) VALUES (?, ?)");
    if ($stmt === false) {
        echo "Error: " . $conn->error;
        $conn->close();
        return;
    }
    $stmt->bind_param("ss", $name, $email);

    if ($stmt->execute()) {
        echo "New contact added successfully";
    } else {
        echo "Error: " . $stmt->error;
    }

    $stmt->close();
    $conn->close();
}
?>
|
import testing
from testing import divert_nexus,restore_nexus
from testing import failed,FailedTest
from testing import value_eq,object_eq,text_eq
def clear_all_sims():
    """Test-isolation helper: reset Gamess class state and remove all
    simulation objects registered with the testing framework."""
    from gamess import Gamess
    # ericfmt is class-level state; clear it so later tests start clean.
    Gamess.ericfmt = None
    testing.clear_all_sims()
#end def clear_all_sims
def get_gamess_sim(type='rhf'):
    """Build and return a Gamess simulation object for unit tests.

    type: which reference input to use; only 'rhf' is currently supported,
          anything else fails the test.
    """
    from machines import job
    from gamess import Gamess,generate_gamess,GamessInput
    from test_gamess_input import get_files
    sim = None
    # ericfmt must be set (even to '') for Gamess objects to be created.
    Gamess.ericfmt = ''
    files = get_files()
    if type=='rhf':
        gi_input = GamessInput(files['rhf.inp'])
        sim = generate_gamess(
            identifier = 'rhf',
            path = 'rhf',
            job = job(machine='ws1',cores=1),
            input = gi_input,
            )
    else:
        failed()
    #end if
    assert(sim is not None)
    assert(isinstance(sim,Gamess))
    return sim
#end def get_gamess_sim
def test_import():
    """Smoke test: the gamess module imports and exposes its entry points."""
    from gamess import Gamess,generate_gamess
#end def test_import
def test_minimal_init():
    """A Gamess simulation can be created from only a job specification."""
    from machines import job
    from gamess import Gamess,generate_gamess
    Gamess.ericfmt = ''
    sim = generate_gamess(
        job = job(machine='ws1',cores=1),
        )
    assert(isinstance(sim,Gamess))
    clear_all_sims()
#end def test_minimal_init
def test_check_result():
    """check_result: 'orbitals' is the only supported result for Gamess;
    unknown result names report False."""
    tpath = testing.setup_unit_test_output_directory('gamess_simulation','test_check_result')
    sim = get_gamess_sim('rhf')
    assert(not sim.check_result('unknown',None))
    assert(sim.check_result('orbitals',None))
    clear_all_sims()
#end def test_check_result
def test_get_result():
    """get_result: unknown result names raise NexusError; 'orbitals'
    returns the expected descriptor for the rhf test simulation."""
    import os
    from generic import NexusError,obj
    from nexus_base import nexus_core
    tpath = testing.setup_unit_test_output_directory('gamess_simulation','test_get_result',divert=True)
    nexus_core.runs = ''
    sim = get_gamess_sim('rhf')
    assert(sim.locdir.rstrip('/')==os.path.join(tpath,'rhf').rstrip('/'))
    sim.create_directories()
    sim.write_inputs()
    if not os.path.exists(sim.imresdir):
        os.makedirs(sim.imresdir)
    #end if
    # get_result loads a saved analyzer image, so one must exist first.
    analyzer = sim.analyzer_type(sim)
    analyzer.save(os.path.join(sim.imresdir,sim.analyzer_image))
    # Unsupported result names must raise NexusError (and nothing else).
    try:
        sim.get_result('unknown',None)
        raise FailedTest
    except NexusError:
        None
    except FailedTest:
        failed()
    except Exception as e:
        failed(str(e))
    #end try
    result = sim.get_result('orbitals',None)
    result_ref = obj(
        location = 'rhf/rhf.out',
        mos = 0,
        norbitals = 0,
        outfile = 'rhf/rhf.out',
        scftyp = 'rohf',
        vec = None,
        )
    # Strip the absolute test path so the comparison is location independent.
    result.location = result.location.replace(tpath,'').lstrip('/')
    result.outfile = result.outfile.replace(tpath,'').lstrip('/')
    assert(object_eq(result,result_ref))
    clear_all_sims()
    restore_nexus()
#end def test_get_result
def test_incorporate_result():
    """incorporate_result: 'orbitals' injects $VEC data and MOREAD guess
    settings into the simulation input; unknown names raise NexusError."""
    import os
    from generic import NexusError,obj
    tpath = testing.setup_unit_test_output_directory('gamess_simulation','test_incorporate_result')
    sim = get_gamess_sim('rhf')
    try:
        sim.incorporate_result('unknown',None,None)
        raise FailedTest
    except NexusError:
        None
    except FailedTest:
        failed()
    except Exception as e:
        failed(str(e))
    #end try
    result = obj(
        vec = 'vec text',
        norbitals = 10,
        )
    input = sim.input
    # The input starts without any orbital guess information.
    assert('vec' not in input)
    assert('norb' not in input.guess)
    assert('prtmo' not in input.guess)
    sim.incorporate_result('orbitals',result,None)
    # Orbitals arrive as a $VEC block plus MOREAD guess settings.
    assert(input.vec.text=='vec text')
    assert(input.guess.guess=='moread')
    assert(input.guess.norb==10)
    assert(input.guess.prtmo==True)
    clear_all_sims()
#end def test_incorporate_result
def test_check_sim_status():
    """check_sim_status: raises IOError while the output file is missing,
    then marks the simulation finished once GAMESS reports normal
    termination in its output file."""
    import os
    from generic import NexusError,obj
    from nexus_base import nexus_core
    tpath = testing.setup_unit_test_output_directory('gamess_simulation','test_check_sim_status',divert=True)
    nexus_core.runs = ''
    sim = get_gamess_sim('rhf')
    assert(sim.locdir.rstrip('/')==os.path.join(tpath,'rhf').rstrip('/'))
    assert(not sim.finished)
    assert(not sim.failed)
    # Without an output file present, checking status must raise IOError.
    try:
        sim.check_sim_status()
        raise FailedTest
    except IOError:
        None
    except Exception as e:
        failed(str(e))
    #end try
    sim.create_directories()
    outfile = os.path.join(sim.locdir,sim.outfile)
    outfile_text = 'EXECUTION OF GAMESS TERMINATED NORMALLY'
    # Use context managers so the handles are closed deterministically
    # (the original leaked the read handle opened for the sanity check).
    with open(outfile,'w') as out:
        out.write(outfile_text)
    #end with
    with open(outfile,'r') as f:
        assert(outfile_text in f.read())
    #end with
    sim.check_sim_status()
    assert(sim.finished)
    assert(not sim.failed)
    clear_all_sims()
    restore_nexus()
#end def test_check_sim_status
|
# Interactive step: create/edit _ex3.txt in nano.
nano _ex3.txt
# Remove the execute bit for user, group and others; log the result.
chmod ugo-x _ex3.txt
ls -l _ex3.txt >> ex3.txt
# Strip all permissions from the group, then grant user/others full access.
chmod g-rwx _ex3.txt
chmod uo=rwx _ex3.txt
ls -l _ex3.txt >> ex3.txt
# Copy the user's permission bits onto the group.
chmod g=u _ex3.txt
ls -l _ex3.txt >> ex3.txt
|
# simplecov
# Start code-coverage collection; this must run before the application
# code under test is required, or its lines will not be tracked.
require 'simplecov'
SimpleCov.start
|
#!/bin/bash -f
# Vivado (TM) v2016.4 (64-bit)
#
# Filename : c_addsub_0.sh
# Simulator : Aldec Riviera-PRO Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Apr 14 17:40:09 -0400 2017
# IP Build 1755317 on Mon Jan 23 20:30:07 MST 2017
#
# usage: c_addsub_0.sh [-help]
# usage: c_addsub_0.sh [-lib_map_path]
# usage: c_addsub_0.sh [-noclean_files]
# usage: c_addsub_0.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'c_addsub_0.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
# ********************************************************************************************************
# Script info
echo -e "c_addsub_0.sh - Script generated by export_simulation (Vivado v2016.4 (64-bit)-id)\n"
# Main steps
# Entry point: validate CLI arguments, prepare the library mapping,
# then compile and simulate the design.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  simulate
}
# RUN_STEP: <compile>
compile()
{
  # Compile design files; mirror all output into compile.log.
  source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
  # Run the Riviera-PRO batch simulator with the generated do file.
  runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
# Dispatch on the first CLI switch: copy the library mapping, reset the
# run directory, or keep previous data.
setup()
{
  case $1 in
    "-lib_map_path" )
      # -lib_map_path requires a directory argument.
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./c_addsub_0.sh -help\" for more information)\n"
        exit 1
      fi
     copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # Default: use the baked-in library path.
     copy_setup_file $2
  esac
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy library.cfg file
# Copies the simulator library mapping (library.cfg) from the compiled
# simulation-library directory ($1, optional) into the run directory.
# Falls back to the path baked in at export time when $1 is empty.
copy_setup_file()
{
  file="library.cfg"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="C:/Users/Darius/FC/FC.cache/compile_simlib/riviera"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    # Quote the path: the default lib_map_path (a Windows user directory)
    # and user-supplied paths may contain spaces; the unquoted form would
    # word-split and copy the wrong thing or fail.
    cp "$src_file" .
  fi
}
# Delete generated data from the previous run
reset_run()
{
  # Simulator artifacts produced by a prior compile/elaborate/simulate cycle.
  files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work riviera)
  for file in "${files_to_remove[@]}"; do
    if [[ -e $file ]]; then
      rm -rf $file
    fi
  done
}
# Check command line arguments
# $1 = number of script arguments, $2 = first argument (may be empty).
check_args()
{
  # A single argument must be one of the known switches.
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./c_addsub_0.sh -help\" for more information)\n"
    exit 1
  fi
  # -help/-h prints usage and exits (usage calls exit).
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Prints the supported switches and exits non-zero.
usage()
{
  msg="Usage: c_addsub_0.sh [-help]\n\
Usage: c_addsub_0.sh [-lib_map_path]\n\
Usage: c_addsub_0.sh [-reset_run]\n\
Usage: c_addsub_0.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
run $1 $2
|
# Abort on any command failure or use of an unset variable.
set -eo nounset
cd /sources
# Fetch the libgudev 231 tarball only when it is not already cached.
test -f libgudev-231.tar.xz || \
wget --no-check-certificate \
http://ftp.gnome.org/pub/gnome/sources/libgudev/231/libgudev-231.tar.xz
# Unpack into a fresh source tree.
rm -rf libgudev-231
tar xf libgudev-231.tar.xz
pushd libgudev-231
# umockdev is disabled to avoid a test-only build dependency.
./configure --prefix=/usr --disable-umockdev &&
make
make install
popd
# Remove the build tree after installation.
rm -rf libgudev-231
|
import * as zjson from "../../zjson"
import {ZngClass} from "../ts_types"
/**
 * Wraps a zjson primitive value (always transported as a string, or null
 * when unset) and offers typed conversions to Date, float and integer.
 */
export class Primitive implements ZngClass<null | string> {
  constructor(readonly type: zjson.Primitive, readonly value: string | null) {}

  /** True when the underlying value is present (not null). */
  isSet() {
    return this.value !== null
  }

  /** String form; unset values render as the empty string. */
  toString() {
    if (this.value === null) return ""
    return this.value
  }

  getType() {
    return this.type
  }

  getValue() {
    return this.value
  }

  /**
   * Interpret the value as seconds since the epoch and return a Date.
   * Throws unless the type is "time" and the value is set.
   */
  toDate() {
    if (this.type != "time" || this.value === null) {
      throw new Error(`Cannot make type: ${this.type} into a Date`)
    }
    return new Date(Number(this.value) * 1000)
  }

  /**
   * Parse the value as a float. Only "interval" values are convertible;
   * an unset value converts to 0.
   */
  toFloat() {
    if (this.type != "interval") {
      throw new Error(`Cannot make type: ${this.type} into a Float`)
    }
    return this.value === null ? 0 : parseFloat(this.value)
  }

  /** Parse the value as an integer; throws when unset or non-numeric. */
  toInt() {
    if (this.value === null) throw new Error("value is unset")
    const parsed = parseInt(this.value)
    if (Number.isNaN(parsed)) {
      throw new Error(`Cannot make type: ${this.type} into an Integer`)
    }
    return parsed
  }

  /** Plain-object form suitable for JSON serialization. */
  serialize() {
    const {type, value} = this
    return {type: type as zjson.Type, value}
  }
}
|
import os
def count_images(count_images_in_folder):
    """Count .jpg files in a folder and its immediate sub-folders.

    Only the top level and one level of sub-folders are scanned (not a full
    recursive walk).

    Returns:
        (total_images, num_classes): the .jpg count and the number of
        sub-folders (treated as classes).
    """
    path, dirs, files = next(os.walk(count_images_in_folder))
    num_classes = len(dirs)
    # .jpg files sitting directly in the root folder.
    total = sum(1 for name in files if name.endswith('.jpg'))
    # .jpg files one level down, inside each class sub-folder.
    for folder in dirs:
        _, _, children = next(os.walk(os.path.join(count_images_in_folder, folder)))
        total += sum(1 for name in children if name.endswith('.jpg'))
    return total, num_classes
|
npm install serverless-dotenv-plugin --save
|
import React from "react"
import { PortraitWrapper } from "./portrait.styles"
import Image from "../image/image"
const Portrait = () => {
return (
<PortraitWrapper>
<Image />
</PortraitWrapper>
)
}
export default Portrait
|
#! /usr/bin/env bash
# Ugly query-replace to add the Entrypoint css and navigation to
# Haddock-generated HTML.
# usage: <script> FILE  -- edits FILE in place (sed -i).
sed -i \
-e 's@<link href="ocean.css" rel="stylesheet" type="text/css" title="Ocean" />@\n<link href="/static/css/ibm-plex.css" rel="stylesheet" />\n<link rel="stylesheet" type="text/css" href="/static/css/tachyons.min.v4.11.1.css" />\n<link rel="stylesheet" type="text/css" href="/static/css/style.css" />\n<link href="/static/css/haddock.css" rel="stylesheet" type="text/css" title="Ocean" />\n@' \
-e 's@</head><body>@</head>\n<body class="ibm-plex-sans">\n<div class="ph3 mt2 mb4 f4 center main-mw">\n<ul class="list flex ma0 pa0 mt4 ep-nav">\n<li class="mr4"><a href="/design-system/">Home</a>\n<li class="mr4"><a href="/design-system/components/">Components</a>\n<li class="mr4"><a href="/design-system/storybook/">Storybook</a>\n<li class="mr4"><a href="/design-system/nix.html">Documentation</a>\n<li class="mr4"><a href="/design-system/haddock/">Haddock</a>\n</ul>\n</div>\n@'\
-e 's@<div id="package-header">@<div id="package-header" class="ph3 mt2 mb4 f4 center main-mw">@' \
-e 's@<div id="content">@<div id="content" class="ph3 mt2 mb4 f4 center main-mw">@' \
"$1"
# Fix: $1 is now quoted so file names containing spaces are passed to sed
# as a single argument instead of being word-split.
|
#!/bin/sh
# Put the Linaro ARM cross-toolchain first on PATH for this build.
export PATH=$PWD/toolchain/gcc-linaro-5.1-2015.08-x86_64_arm-linux-gnueabi/bin:$PATH
# Build inject.c as a position-independent shared object, then strip symbols.
arm-linux-gnueabi-gcc -Wall -shared -g -fPIC -o inject.o inject.c && arm-linux-gnueabi-strip inject.o
|
<filename>lc1110_delete_nodes_and_return_forest.py
"""Leetcode 1110. Delete Nodes And Return Forest
Medium
URL: https://leetcode.com/problems/delete-nodes-and-return-forest/
Given the root of a binary tree, each node in the tree has a distinct value.
After deleting all nodes with a value in to_delete, we are left with a forest
(a disjoint union of trees).
Return the roots of the trees in the remaining forest. You may return the
result in any order.
Example 1:
1 1
/ \ /
2 3 => 2
/ \ / \ /
4 5 6 7 4 6 7
Input: root = [1,2,3,4,5,6,7], to_delete = [3,5]
Output: [[1,2,null,4],[6],[7]]
Constraints:
- The number of nodes in the given tree is at most 1000.
- Each node has a distinct value between 1 and 1000.
- to_delete.length <= 1000
- to_delete contains distinct values between 1 and 1000.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node holding an integer value and two child links."""

    def __init__(self, val):
        self.val = val
        self.left = self.right = None
class SolutionHasParentPreorderRecur(object):
    """Collect forest roots after deletions via recursive preorder traversal.

    A node becomes a forest root exactly when it survives deletion and has
    no surviving parent at the moment it is visited.
    """

    def _preorderRecur(self, root, has_parent, result, to_delete_set):
        """Return the pruned subtree rooted at ``root`` (None if deleted)."""
        if root is None:
            return None
        deleted = root.val in to_delete_set
        if not deleted and not has_parent:
            # Surviving node with no surviving parent: it roots a forest tree.
            result.append(root)
        # Children have a surviving parent only if the current node survives.
        root.left = self._preorderRecur(root.left, not deleted, result, to_delete_set)
        root.right = self._preorderRecur(root.right, not deleted, result, to_delete_set)
        # Returning None severs a deleted node from its parent.
        return None if deleted else root
    #end def _preorderRecur

    def delNodes(self, root, to_delete):
        """
        :type root: TreeNode
        :type to_delete: List[int]
        :rtype: List[TreeNode]

        Time complexity: O(n).
        Space complexity: O(n).
        """
        # Use a set for O(1) membership checks during traversal.
        result = []
        self._preorderRecur(root, False, result, set(to_delete))
        return result
def main():
    """Build the example tree from the problem statement, delete nodes 3
    and 5, and print the values of the resulting forest roots."""
    # Input: root = [1,2,3,4,5,6,7], to_delete = [3,5]
    # Output: [[1,2,null,4],[6],[7]] => [1, 6, 7]
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    root.left.left = TreeNode(4)
    root.left.right = TreeNode(5)
    root.right.left = TreeNode(6)
    root.right.right = TreeNode(7)
    to_delete = [3, 5]
    forest_roots = SolutionHasParentPreorderRecur().delNodes(root, to_delete)
    # Fix: the original used the Python 2 `print [...]` statement, which is
    # a SyntaxError on Python 3; use the print() function instead.
    print([r.val for r in forest_roots])


if __name__ == '__main__':
    main()
|
import yaml
def parse_configuration_file(file_path):
    """Parse a YAML service catalog into {service: {endpoint_id: publicURL}}.

    The file is expected to contain a top-level 'services' list whose
    entries are single-key mappings of the form
    {service_name: [{'id': ..., 'publicURL': ...}, ...]} -- TODO confirm
    against the real catalog schema.
    """
    with open(file_path, 'r') as file:
        config_data = yaml.safe_load(file)
    services = config_data.get('services', [])
    parsed_data = {}
    for service in services:
        # Each list entry is assumed to be a one-key mapping; only the
        # first key is used.
        service_name = list(service.keys())[0]
        endpoints = service[service_name]
        parsed_data[service_name] = {endpoint['id']: endpoint['publicURL'] for endpoint in endpoints}
    return parsed_data
def get_public_urls(service_name):
    """Return every publicURL registered for *service_name*.

    Reads the module-level `parsed_data` populated at import time; unknown
    services yield an empty list.
    """
    if service_name in parsed_data:
        return list(parsed_data[service_name].values())
    else:
        return []
# Load the catalog once at import time; get_public_urls() reads this global.
config_file_path = 'services.yaml'
parsed_data = parse_configuration_file(config_file_path)
print(get_public_urls('compute')) # Output: ['https://mycloud.org:13074/v2.1/6d2847d6a6414308a67644eefc7b98c7']
print(get_public_urls('placement')) # Output: ['https://mycloud.org:13778/placement']
print(get_public_urls('storage')) # Output: []
|
<gh_stars>1-10
# Recorded temperatures.
T = [ -10, -8, 0, 1, 2, 5, -2, -4 ]
# After sorting ascending, the minimum is first and the maximum is last.
T.sort()
print("A menor temperatura registrada foi de: %d" % T[0] + "\nSendo a maior: %d" % T[len(T) - 1])
|
/**
* Copyright 2014 isandlaTech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.psem2m.isolates.ui.admin.panels;
import java.awt.Font;
import javax.swing.JTable;
import org.psem2m.isolates.base.IIsolateLoggerSvc;
import org.psem2m.isolates.ui.admin.api.CJPanel;
import org.psem2m.isolates.ui.admin.api.EUiAdminFont;
/**
 * Base class for Swing panels that render entities of type T as rows of a
 * JTable. Subclasses define how an entity becomes row data, how rows are
 * keyed, and how the underlying table model is maintained.
 *
 * @author ogattaz
 *
 * @param <T>
 *            the entity type backing each table row
 */
public abstract class CJPanelTable<T> extends CJPanel {

    private static final long serialVersionUID = 7749683299927125353L;

    /**
     * Default constructor (no logger available).
     */
    public CJPanelTable() {
        super();
    }

    /**
     * @param aLogger
     *            the isolate logger used for diagnostics
     */
    public CJPanelTable(final IIsolateLoggerSvc aLogger) {
        super(aLogger);
    }

    /**
     * Return true if the row built with the entity must be added
     *
     * @param aEntity
     *            the source entity
     * @param wDataRow
     *            the row to add
     * @return true if the row must be added
     */
    abstract boolean acceptRow(final T aEntity, final String[] wDataRow);

    /**
     * Add a new row built with the passed entity
     *
     * @param aEntity
     *            the source entity
     * @return true if the row was added
     */
    abstract boolean addRow(final T aEntity);

    /**
     * Add a new set of row built with the passed entities
     *
     * @param aEntities
     *            the table of source entities
     */
    abstract void addRows(final T[] aEntities);

    /**
     * Adjusts the table row height to fit the font of the given admin
     * font choice.
     *
     * @param aUiAdminFont
     *            the selected UI admin font
     */
    void adjustTableRowHeight(final EUiAdminFont aUiAdminFont) {
        final Font wNewFont = setTableFont(aUiAdminFont);
        getTable().setRowHeight(calcRowHeight(wNewFont));
    }

    /**
     * Builds the cell values of the row representing the entity.
     *
     * @param aEntity
     *            the source entity
     * @return the row data
     */
    abstract String[] buildRowData(final T aEntity);

    /**
     * Builds the unique key identifying the entity's row.
     *
     * @param aEntity
     *            the source entity
     * @return the row key
     */
    abstract String buildRowKey(final T aEntity);

    /**
     * Builds the detail text shown for the entity.
     *
     * @param aEntity
     *            the source entity
     * @return the text infos
     */
    abstract String buildTextInfos(final T aEntity);

    /**
     * @param aFont
     *            the font to measure
     * @return the font height in pixels
     */
    int calcFontHeight(final Font aFont) {
        return getGraphics().getFontMetrics(aFont).getHeight();
    }

    /**
     * @param aFont
     *            the font to measure
     * @return the row height: font height plus a 2 pixel margin
     */
    int calcRowHeight(final Font aFont) {
        return calcFontHeight(aFont) + 2;
    }

    /** @return the JTable managed by this panel */
    abstract JTable getTable();

    /**
     * Removes every row from the table.
     */
    abstract void removeAllRows();

    /**
     * Removes the row representing the entity.
     *
     * @param aEntity
     *            the source entity
     */
    abstract void removeRow(final T aEntity);

    /**
     * Creates or replaces the row representing the entity.
     *
     * @param aEntity
     *            the source entity
     */
    abstract void setRow(final T aEntity);

    /**
     * Replaces the table content with rows for the given entities.
     *
     * @param aEntity
     *            the source entities
     */
    abstract void setRows(final T[] aEntity);

    /**
     * Sets the font of the table
     *
     * @param aFont
     *            the selected UI admin font
     * @return the new Font
     */
    abstract Font setTableFont(final EUiAdminFont aFont);
}
|
<reponame>reshmakh/medplum
import { Resource } from '@medplum/core';
/**
 * Convenience wrapper: compiles `expression` and evaluates it against
 * `resource`, returning every matched value (possibly an empty array).
 */
export function evalFhirPath(resource: Resource, expression: string): any[] {
  return new FhirPath(expression).eval(resource);
}
/**
 * Minimal FHIRPath-like evaluator. An expression is a union of
 * alternatives separated by ' | ', each alternative being a dot-separated
 * chain of tokens. A token may be a property name, a resource type guard
 * (matched against `resourceType`), or a numeric array index.
 */
export class FhirPath {
  private readonly original: string;
  private readonly components: string[][];

  constructor(str: string) {
    this.original = str;
    const expressions = str.split(' | ');
    this.components = new Array(expressions.length);
    for (let i = 0; i < expressions.length; i++) {
      this.components[i] = expressions[i].split('.');
    }
  }

  /** Evaluates every alternative against the resource; flattens results. */
  eval(resource: Resource): any[] {
    const result: any[] = [];
    for (const component of this.components) {
      result.push(...this.evalExpression(resource, component));
    }
    return result.flat();
  }

  /** Like eval(), but returns only the first match (or undefined). */
  evalFirst(resource: Resource) {
    const values = this.eval(resource);
    return values.length === 0 ? undefined : values[0];
  }

  private evalExpression(resource: Resource, expression: string[]): any[] {
    let curr = [resource];
    for (const token of expression) {
      const next: any[] = [];
      for (const value of curr) {
        this.evalToken(value, token, next);
      }
      curr = next;
    }
    return curr;
  }

  private evalToken(value: any, token: string, next: any[]): void {
    if (!value) {
      return;
    }
    if (Array.isArray(value)) {
      // Fix: tokens are strings, so the original Number.isInteger(token)
      // was always false and numeric indexes (e.g. 'name.0') never worked.
      // Detect numeric tokens with a digit-only test instead.
      if (/^\d+$/.test(token)) {
        next.push(value[parseInt(token, 10)]);
      } else {
        for (const child of value) {
          this.evalToken(child, token, next);
        }
      }
    } else if (typeof value === 'object') {
      if (value.resourceType === token) {
        next.push(value);
      } else {
        next.push(value[token]);
      }
    }
  }
}
|
pub(crate) const REMOTE_ATTR_PARAM_NAME: &str = "remote";
pub(crate) const CREATE_ISSUE_LINK: &str = "https://github.com/myelin-ai/mockiato/issues/new";

/// Builds a "create new issue" URL that pre-fills the `remote` query
/// parameter with the given attribute name.
fn generate_new_issue_url(remote_attr_name: &str) -> String {
    [
        CREATE_ISSUE_LINK,
        "?",
        REMOTE_ATTR_PARAM_NAME,
        "=",
        remote_attr_name,
    ]
    .concat()
}
|
import datetime as dt
def first_sunday_of_august(year: int) -> dt.date:
    """Return the date of the first Sunday of August in *year*."""
    august_first = dt.date(year, 8, 1)
    # weekday(): Monday == 0 ... Sunday == 6, so this is the offset to the
    # next Sunday (0 when August 1st already is a Sunday).
    days_until_sunday = 6 - august_first.weekday()
    return august_first + dt.timedelta(days=days_until_sunday)


def next_festa_major(date: dt.date) -> dt.date:
    """Return the festa major (first Sunday of August) following *date*,
    always looking at the year after *date*'s year first.

    NOTE(review): since next year's festa major is always later than any
    *date* in the current year, the fallback branch appears unreachable;
    it is kept to preserve the original behavior.
    """
    upcoming = first_sunday_of_august(date.year + 1)
    if date < upcoming:
        return upcoming
    return first_sunday_of_august(date.year + 2)
|
<reponame>CodingMankk/16-BRVAHDemo
package com.oztaking.www.a16_brvahdemo.MyLoadMoreDemo;
import java.util.List;
/***********************************************
 * File:        RequestCallBackLoadMore
 * Author:      OzTaking
 * Purpose:     callback interface for data fetched by the load-more request
 * Created:
 * Modified:
 * Notes:
 ***********************************************/
public interface RequestCallBackLoadMore {

    /**
     * Called when the load-more request succeeds.
     *
     * @param data the freshly loaded items to append
     */
    public void onSuccess(List<LoadMoreItem> data);

    /**
     * Called when the load-more request fails.
     *
     * @param e the exception describing the failure
     */
    public void onFailed(Exception e);
}
|
package de.bitbrain.braingdx.tmx;
import de.bitbrain.braingdx.event.GameEventRouter;
import de.bitbrain.braingdx.world.GameObject;
/**
 * Extracts event-routing metadata (stickiness, trigger mode, producer id)
 * from the custom attributes of Tiled map {@link GameObject}s.
 */
public class TiledMapInfoExtractor implements GameEventRouter.GameEventInfoExtractor {

   /** @return the object's STICKY attribute, defaulting to {@code false}. */
   @Override
   public boolean isSticky(GameObject object) {
      return object.getOrSetAttribute(Constants.STICKY, false);
   }

   /** @return the TRIGGER_ENTERONLY attribute, defaulting to {@code false}. */
   @Override
   public boolean isTriggerOnEnter(GameObject object) {
      return object.getOrSetAttribute(Constants.TRIGGER_ENTERONLY, false);
   }

   /** @return the PRODUCER attribute, or {@code null} when unset. */
   @Override
   public String getProducer(GameObject object) {
      return object.getOrSetAttribute(Constants.PRODUCER, null);
   }
}
|
'use strict';
/**
* Module dependencies.
*/
var path = require('path'),
mongoose = require('mongoose'),
File = mongoose.model('File'),
errorHandler = require(path.resolve('./modules/core/server/controllers/errors.server.controller')),
multer = require('multer'),
config = require(path.resolve('./config/config'));
/**
* Create a File
*/
exports.create = function(req, res) {
var user = req.user;
var message = null;
var upload = multer(config.uploads.fileUpload).single('uploadFile');
if (user) {
upload(req, res, function(uploadError) {
if (uploadError) {
return res.status(400).send({
message: 'Error occurred while uploading File'
});
} else {
var file = new File();
file.user = req.user;
file.filepath = config.uploads.fileUpload.dest + req.file.filename;
file.filename = req.file.originalname;
file.filetype = req.file.mimetype;
file.title = req.body.title || 'Auto Title';
file.content = req.body.content || 'Auto Content';
file.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.json(file);
}
});
}
});
} else {
console.log('else user');
res.status(400).send({
message: 'User is not signed in'
});
}
};
/**
 * Show the current file
 *
 * The document was already loaded and authorized by the fileByID
 * middleware; just return it as JSON.
 */
exports.read = function(req, res) {
  res.json(req.file);
};
/**
 * Update a file
 *
 * Only title and content are user-editable; storage metadata is immutable.
 * Responds 400 with a validation message on save failure.
 */
exports.update = function(req, res) {
  var file = req.file;

  file.title = req.body.title;
  file.content = req.body.content;

  file.save(function(err) {
    if (err) {
      return res.status(400).send({
        message: errorHandler.getErrorMessage(err)
      });
    } else {
      res.json(file);
    }
  });
};
/**
 * Delete an file
 *
 * Removes the document loaded by fileByID and echoes it back on success.
 * NOTE(review): only the database record is removed; the file on disk at
 * file.filepath is apparently left behind — confirm whether that is
 * intentional.
 */
exports.delete = function(req, res) {
  var file = req.file;

  file.remove(function(err) {
    if (err) {
      return res.status(400).send({
        message: errorHandler.getErrorMessage(err)
      });
    } else {
      res.json(file);
    }
  });
};
/**
* List of files
*/
exports.list = function(req, res) {
console.log(req.user);
File.find({
user: req.user._id
}).sort('-created').populate('user', 'displayName').exec(function(err, files) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.json(files);
}
});
};
/**
 * file middleware
 *
 * Express param middleware: validates the id, loads the File document onto
 * req.file, and enforces that the requester owns it (403 otherwise).
 */
exports.fileByID = function(req, res, next, id) {

  if (!mongoose.Types.ObjectId.isValid(id)) {
    return res.status(400).send({
      message: 'File is invalid'
    });
  }

  File.findById(id).populate('user', 'displayName').exec(function(err, file) {
    if (err) {
      return next(err);
    } else if (!file) {
      return res.status(404).send({
        message: 'No file with that identifier has been found'
      });
    }
    // Only the owner may access the file.
    if (req.user && file.user.id !== req.user.id) {
      return res.status(403).json({
        message: 'User is not authorized'
      });
    }
    req.file = file;
    next();
  });
};
|
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# This file is part of the xPack distribution.
# (https://xpack.github.io)
# Copyright (c) 2020 Liviu Ionescu.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose is hereby granted, under the terms of the MIT license.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Safety settings (see https://gist.github.com/ilg-ul/383869cbb01f61a51c4d).
if [[ ! -z ${DEBUG} ]]
then
set ${DEBUG} # Activate the expand mode if DEBUG is anything but empty.
else
DEBUG=""
fi
set -o errexit # Exit if command failed.
set -o pipefail # Exit if pipe failed.
set -o nounset # Exit if variable not set.
# Remove the initial space and instead use '\n'.
IFS=$'\n\t'
# -----------------------------------------------------------------------------
# Identify the script location, to reach, for example, the helper scripts.
script_path="$0"
if [[ "${script_path}" != /* ]]
then
# Make relative path absolute.
script_path="$(pwd)/$0"
fi
script_name="$(basename "${script_path}")"
script_folder_path="$(dirname "${script_path}")"
script_folder_name="$(basename "${script_folder_path}")"
# =============================================================================
helper_folder_path="$(dirname $(dirname "${script_folder_path}"))/scripts/helper"
source "${helper_folder_path}/test-functions-source.sh"
source "${script_folder_path}/common-functions-source.sh"
# -----------------------------------------------------------------------------
detect_architecture
app_lc_name="openocd"
prepare_env "$(dirname $(dirname "${script_folder_path}"))"
# If present, --32 must be the first option.
is_32_bit=""
# Fix: the original `[ $# -gt 0 -a "$1" == "--32" ]` expanded $1 even when
# no arguments were given, which aborts under `set -o nounset`. Use a
# default expansion and split the test so $1 is only compared when present.
if [ $# -gt 0 ] && [ "${1:-}" == "--32" ]
then
  is_32_bit="y"
  shift
fi

# -----------------------------------------------------------------------------

# Dispatch to the matching docker test helper, forwarding the remaining
# arguments intact ("$@" preserves argument boundaries).
if [ "${is_32_bit}" == "y" ]
then
  docker_run_test_32 "$@"
else
  docker_run_test "$@"
fi
# -----------------------------------------------------------------------------
|
import { ASTRoot as ConstraintAST, Node as ConstraintNode } from '../trees/constraint';
import {
Node as ExprNode,
TypedBinaryOpNode,
TypedFunctionInvocationNode,
TypedNode as TypedExprNode,
TypedNumberNode,
TypedStringLiteralNode,
TypedVariableNode,
VariableNode as ExprVariableNode,
} from '../trees/expression';
import { ASTRoot, TypedASTRoot, TypedNode as TypedTextNode, VariableNode as TextVariableNode } from '../trees/text';
import { default as TypeMap, InferredType, UsageLocation } from '../type_map';
/**
 * Record one type usage in the type map for the variable referenced by a
 * single constraint node.
 */
function addConstraintTypeUsageForNode(
  typeMap: TypeMap,
  node: ConstraintNode,
  ast: ConstraintAST,
  text: ASTRoot,
): void {
  const location = {
    constraints: ast,
    text: text,
  };

  const recordUsage = (variable: string, type: 'unknown' | 'gender' | 'enum' | 'number') => {
    typeMap.addTypeUsage(variable, type, {
      location,
      node,
      nodeType: 'constraint',
      type,
    });
  };

  if (node.op === '!') {
    // Negation gives no type information about its operand — record unknown.
    recordUsage(node.operand.name, 'unknown');
  } else {
    // A comparison: the RHS value's type constrains the LHS variable.
    recordUsage(node.lhs.name, node.rhs.type);
  }
}
/** Record type usages for every constraint in the constraint AST. */
export function inferConstraintTypes(typeMap: TypeMap, ast: ConstraintAST, textAST: ASTRoot) {
  ast.nodes.forEach((constraint) => addConstraintTypeUsageForNode(typeMap, constraint, ast, textAST));
}
/**
 * Record a type usage for a variable appearing inside an expression and
 * return the type the map has now inferred for that variable.
 * Throws for types that an expression usage can never legitimately carry.
 */
function addExprTypeInfo(
  typeMap: TypeMap,
  variable: ExprVariableNode,
  type: InferredType,
  location: UsageLocation,
): InferredType {
  if (type === 'error' || type === 'gender' || type === 'enum') {
    throw new Error('Invalid expression type usage type: ' + type);
  }
  // The guard above rules the other members of InferredType out; the
  // assertion narrows to the subset the usage record accepts.
  const usageType = type as 'unknown' | 'number-or-string' | 'number' | 'string';
  return typeMap.addTypeUsage(variable.name, usageType, {
    location,
    node: variable,
    nodeType: 'expression',
    type: usageType,
  });
}
/**
 * Walk an expression tree, recording a type usage for every variable it
 * references, and return the inferred type of the whole expression.
 *
 * @param resultType Optional type the surrounding context expects; used to
 *                   further constrain variables (e.g. operands of '-').
 */
function inferExprType(
  typeMap: TypeMap,
  node: ExprNode,
  location: UsageLocation,
  resultType?: InferredType,
): InferredType {
  // Captured before narrowing so the default branch can report the raw value.
  const exprType = node.exprNodeType;
  switch (node.exprNodeType) {
    case 'unary_minus':
      // Negation is only defined on numbers.
      inferExprType(typeMap, node.op, location, 'number');
      return 'number';
    case 'variable': {
      const inferred = addExprTypeInfo(typeMap, node, resultType != null ? resultType : 'unknown', location);
      // gender/enum variables behave as strings inside expressions.
      if (inferred === 'gender' || inferred === 'enum') {
        return 'string';
      }
      return inferred;
    }
    case 'number':
      return 'number';
    case 'string_literal':
      return 'string';
    case 'function_invocation': {
      // Braces keep these lexical declarations scoped to this case.
      const parameterTypes = typeMap.functionParameterTypes(node.name);
      const knownTypes = parameterTypes.kind === 'known' ? parameterTypes.types : [];
      node.parameters.forEach((param, idx) => {
        // Infer each argument against the declared parameter type, if known.
        inferExprType(typeMap, param, location, idx < knownTypes.length ? knownTypes[idx] : undefined);
      });
      typeMap.addFunction(node.name);
      // Function return types are not tracked, so the result is unknown.
      return 'unknown';
    }
    case 'binary_op': {
      const binopType = node.binaryOp;
      switch (node.binaryOp) {
        case 'plus': {
          // '+' may be numeric addition or string concatenation. Unless the
          // context demands a number, the operands may be either. (The former
          // `|| resultType == null` clause was dead: null is already !== 'number'.)
          if (resultType !== 'number') {
            resultType = 'number-or-string';
          }
          const lhs = inferExprType(typeMap, node.lhs, location, resultType);
          const rhs = inferExprType(typeMap, node.rhs, location, resultType);
          // Matching operand types determine the sum's type; otherwise it
          // could be either.
          return lhs === rhs ? lhs : 'number-or-string';
        }
        case 'minus':
        case 'divide':
        case 'multiply':
          // Purely numeric operators.
          inferExprType(typeMap, node.lhs, location, 'number');
          inferExprType(typeMap, node.rhs, location, 'number');
          return 'number';
        default:
          throw new Error('Unknown binary operator: ' + binopType);
      }
    }
    default:
      throw new Error('Unknown expression type: ' + exprType);
  }
}
/**
 * Infer and record type usages for every variable referenced anywhere in an
 * expression tree. Thin public wrapper around inferExprType with no expected
 * result type (the expression's own type is discarded here).
 */
export function inferExpressionTypes(typeMap: TypeMap, node: ExprNode, location: UsageLocation) {
  inferExprType(typeMap, node, location);
}
/**
 * Record type usages for every variable and expression appearing in a text
 * AST, optionally attributing them to the constraint AST they belong to.
 */
export function inferTextTypes(typeMap: TypeMap, textAST: ASTRoot, constraintAST?: ConstraintAST) {
  const location = {
    constraints: constraintAST,
    text: textAST,
  };
  for (const node of textAST.nodes) {
    if (node.textNodeType === 'variable') {
      // A bare text interpolation accepts either numbers or strings.
      typeMap.addTypeUsage(node.value, 'number-or-string', {
        location,
        node,
        nodeType: 'text',
        type: 'number-or-string',
      });
    } else if (node.textNodeType === 'expr') {
      inferExpressionTypes(typeMap, node.value, location);
    }
    // Literal nodes carry no variables — nothing to infer.
  }
}
/**
 * Convert an untyped expression node into its typed counterpart, looking up
 * variable types in the type map filled by the inference phase.
 *
 * Throws if a variable was never seen by inference, or if the node kind is
 * unrecognized.
 */
function makeTypedExpressionNode(node: ExprNode, typeMap: TypeMap): TypedExprNode {
  // Captured before narrowing so the final else can report the raw value.
  const exprNodeType = node.exprNodeType;
  if (node.exprNodeType === 'string_literal') {
    return {
      exprNodeType: 'string_literal',
      exprType: 'string',
      isConstant: true,
      pos: node.pos,
      typed: true,
      value: node.value,
    } as TypedStringLiteralNode;
  } else if (node.exprNodeType === 'number') {
    return {
      exprNodeType: 'number',
      exprType: 'number',
      isConstant: true,
      pos: node.pos,
      typed: true,
      value: node.value,
    } as TypedNumberNode;
  } else if (node.exprNodeType === 'variable') {
    if (!typeMap.hasInfoForType(node.name)) {
      // Fix: the two fragments previously concatenated to "...type map.Are you sure...".
      throw new Error(
        `Type for variable ${node.name} not found in type map. ` +
          'Are you sure you ran the type inference phase first?',
      );
    }
    let type = typeMap.getVariableType(node.name);
    if (type === 'gender' || type === 'enum') {
      // Expressions don't deal with these type of variables.
      // This might need changing if the functions
      // get support for declaring types as well.
      type = 'string';
    }
    return {
      exprNodeType: 'variable',
      exprType: type,
      isConstant: false,
      name: node.name,
      pos: node.pos,
      typed: true,
    } as TypedVariableNode;
  } else if (node.exprNodeType === 'unary_minus') {
    const typedOp = makeTypedExpressionNode(node.op, typeMap);
    // Negation preserves the operand's type and constness.
    return {
      exprNodeType: 'unary_minus',
      exprType: typedOp.exprType,
      isConstant: typedOp.isConstant,
      op: typedOp,
      pos: node.pos,
      typed: true,
    };
  } else if (node.exprNodeType === 'binary_op') {
    const typedLhs = makeTypedExpressionNode(node.lhs, typeMap);
    const lhsType = typedLhs.exprType;
    const typedRhs = makeTypedExpressionNode(node.rhs, typeMap);
    const rhsType = typedRhs.exprType;

    const makeBinaryResult = (
      op: 'plus' | 'minus' | 'divide' | 'multiply',
      type: InferredType,
    ): TypedBinaryOpNode => {
      return {
        binaryOp: op,
        exprNodeType: 'binary_op',
        exprType: type,
        isConstant: typedLhs.isConstant && typedRhs.isConstant,
        lhs: typedLhs,
        pos: node.pos,
        rhs: typedRhs,
        typed: true,
      };
    };

    const binopType = node.binaryOp;
    switch (node.binaryOp) {
      case 'plus': {
        // '+' yields a number only when both sides are numbers, a string when
        // both sides are strings, and number-or-string otherwise.
        const bothNumbers = lhsType === 'number' && rhsType === 'number';
        let type: InferredType = 'number-or-string';
        if (bothNumbers) {
          type = 'number';
        } else if (lhsType === 'string' && rhsType === 'string') {
          type = 'string';
        }
        return makeBinaryResult('plus', type);
      }
      case 'minus':
      case 'divide':
      case 'multiply': {
        // The arithmetic-only operators always yield numbers.
        return makeBinaryResult(node.binaryOp, 'number');
      }
      default:
        throw new Error('Unknown binary operator: ' + binopType);
    }
  } else if (node.exprNodeType === 'function_invocation') {
    // For simplicity's sake. We state that functions
    // may only return strings. This should be *ok* as
    // any potential calculations can be done inside the function.
    // And in any case in general they should be used
    // to return some kind of escaped markup.
    const parameters: TypedExprNode[] = node.parameters.map((n) => makeTypedExpressionNode(n, typeMap));

    return {
      exprNodeType: 'function_invocation',
      exprType: 'string',
      isConstant: false,
      name: node.name,
      parameters: parameters,
      pos: node.pos,
      typed: true,
    } as TypedFunctionInvocationNode;
  } else {
    throw new Error('Unknown expression type: ' + exprNodeType);
  }
}
/**
 * Produce a typed expression tree from an untyped one.
 *
 * The type map must already be frozen (inference complete) and free of type
 * errors; otherwise this throws before any conversion happens.
 */
export function makeTypedExpressionTree(typeMap: TypeMap, node: ExprNode): TypedExprNode {
  if (!typeMap.isFrozen()) {
    throw new Error('Type map passed must be frozen. Use TypeMap.freeze()');
  }

  if (typeMap.hasTypeErrors()) {
    typeMap.throwTypeErrors();
  }

  return makeTypedExpressionNode(node, typeMap);
}
/**
 * Convert a text AST into its typed form, resolving every variable and
 * embedded expression against the (frozen, error-free) type map.
 */
export function makeTypedExpressionList(typeMap: TypeMap, ast: ASTRoot): TypedASTRoot {
  if (!typeMap.isFrozen()) {
    throw new Error('Type map passed must be frozen. Use TypeMap.freeze()');
  }
  if (typeMap.hasTypeErrors()) {
    typeMap.throwTypeErrors();
  }

  const nodes: TypedTextNode[] = ast.nodes.map((node): TypedTextNode => {
    // Captured before narrowing so the default branch can report the raw value.
    const textType = node.textNodeType;
    switch (node.textNodeType) {
      case 'literal':
        return {
          pos: node.pos,
          textNodeType: 'literal',
          textType: 'string',
          typed: true,
          value: node.value,
        };
      case 'variable':
        return {
          pos: node.pos,
          textNodeType: 'variable',
          textType: typeMap.getVariableType(node.value),
          typed: true,
          value: node.value,
        };
      case 'expr': {
        // I don't know why typescript doesn't get that this is an expression.
        // but this workaround does the trick.
        const exprRes = makeTypedExpressionTree(typeMap, (node.value as any) as ExprNode);
        return {
          pos: node.pos,
          textNodeType: 'expr',
          textType: exprRes.exprType,
          typed: true,
          value: exprRes,
        };
      }
      default:
        throw new Error('Unknown text node type: ' + textType);
    }
  });

  return {
    input: ast.input,
    nodes,
  };
}
|
<reponame>chenshun00/datasource-starter
package io.github.chenshun00.multi.datasource.transactional.support;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.NamedThreadLocal;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.lang.Nullable;
import org.springframework.transaction.support.ResourceHolder;
import org.springframework.transaction.support.ResourceTransactionManager;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.util.Assert;
import java.util.*;
/**
* @author <EMAIL>
* @since 2021/6/7 9:53 上午
*/
public class MyTransactionSynchronizationManager {

    private static final Log logger = LogFactory.getLog(MyTransactionSynchronizationManager.class);

    // All state below is keyed first by the current thread (ThreadLocal) and
    // then by a datasource name, so several independent transactions — one per
    // datasource — can be active on the same thread at the same time.

    private static final ThreadLocal<Map<String, Map<Object, Object>>> resources = new NamedThreadLocal<Map<String, Map<Object, Object>>>("Transactional resources") {
        @Override
        protected Map<String, Map<Object, Object>> initialValue() {
            return new HashMap<>();
        }
    };

    private static final ThreadLocal<Map<String, Set<TransactionSynchronization>>> synchronizations = new NamedThreadLocal<Map<String, Set<TransactionSynchronization>>>("Transaction synchronizations") {
        @Override
        protected Map<String, Set<TransactionSynchronization>> initialValue() {
            return new HashMap<>();
        }
    };

    private static final ThreadLocal<Map<String, String>> currentTransactionName = new NamedThreadLocal<Map<String, String>>("Current transaction name") {
        @Override
        protected Map<String, String> initialValue() {
            return new HashMap<>();
        }
    };

    private static final ThreadLocal<Map<String, Boolean>> currentTransactionReadOnly = new NamedThreadLocal<Map<String, Boolean>>("Current transaction read-only status") {
        @Override
        protected Map<String, Boolean> initialValue() {
            return new HashMap<>();
        }
    };

    private static final ThreadLocal<Map<String, Integer>> currentTransactionIsolationLevel = new NamedThreadLocal<Map<String, Integer>>("Current transaction isolation level") {
        @Override
        protected Map<String, Integer> initialValue() {
            return new HashMap<>();
        }
    };

    private static final ThreadLocal<Map<String, Boolean>> actualTransactionActive = new NamedThreadLocal<Map<String, Boolean>>("Actual transaction active") {
        @Override
        protected Map<String, Boolean> initialValue() {
            return new HashMap<>();
        }
    };

    //-------------------------------------------------------------------------
    // Management of transaction-associated resource handles
    //-------------------------------------------------------------------------

    /**
     * Return all resources that are bound to the current thread.
     * <p>Mainly for debugging purposes. Resource managers should always invoke
     * {@code hasResource} for a specific resource key that they are interested in.
     *
     * @return a Map with resource keys (usually the resource factory) and resource
     * values (usually the active resource object), or an empty Map if there are
     * currently no resources bound
     * @see #hasResource
     */
    public static Map<Object, Object> getResourceMap(String datasource) {
        Map<Object, Object> map = resources.get().get(datasource);
        return (map != null ? Collections.unmodifiableMap(map) : Collections.emptyMap());
    }

    /**
     * Check if there is a resource for the given key bound to the current thread.
     *
     * @param key the key to check (usually the resource factory)
     * @return if there is a value bound to the current thread
     * @see ResourceTransactionManager#getResourceFactory()
     */
    public static boolean hasResource(String datasource, Object key) {
        Object actualKey = MyTransactionSynchronizationUtils.unwrapResourceIfNecessary(key);
        Object value = doGetResource(datasource, actualKey);
        return (value != null);
    }

    /**
     * Retrieve a resource for the given key that is bound to the current thread.
     *
     * @param key the key to check (usually the resource factory)
     * @return a value bound to the current thread (usually the active
     * resource object), or {@code null} if none
     * @see ResourceTransactionManager#getResourceFactory()
     */
    @Nullable
    public static Object getResource(String datasource, Object key) {
        Object actualKey = MyTransactionSynchronizationUtils.unwrapResourceIfNecessary(key);
        Object value = doGetResource(datasource, actualKey);
        if (value != null && logger.isTraceEnabled()) {
            logger.trace("Retrieved value [" + value + "] for key [" + actualKey + "] bound to thread [" +
                    Thread.currentThread().getName() + "]");
        }
        return value;
    }

    /**
     * Actually check the value of the resource that is bound for the given key.
     */
    @Nullable
    private static Object doGetResource(String datasource, Object actualKey) {
        Map<Object, Object> map = resources.get().get(datasource);
        if (map == null) {
            return null;
        }
        Object value = map.get(actualKey);
        // Transparently remove ResourceHolder that was marked as void...
        if (value instanceof ResourceHolder && ((ResourceHolder) value).isVoid()) {
            map.remove(actualKey);
            // Remove only this datasource's (now empty) map. Removing the whole
            // ThreadLocal here would discard the resources of every OTHER
            // datasource bound to this thread.
            if (map.isEmpty()) {
                resources.get().remove(datasource);
            }
            value = null;
        }
        return value;
    }

    /**
     * Bind the given resource for the given key to the current thread.
     *
     * @param key the key to bind the value to (usually the resource factory)
     * @param value the value to bind (usually the active resource object)
     * @throws IllegalStateException if there is already a value bound to the thread
     * @see ResourceTransactionManager#getResourceFactory()
     */
    public static void bindResource(String datasource, Object key, Object value) throws IllegalStateException {
        Object actualKey = MyTransactionSynchronizationUtils.unwrapResourceIfNecessary(key);
        Assert.notNull(value, "Value must not be null");
        // Create the per-datasource map on first use.
        Map<Object, Object> map = resources.get().computeIfAbsent(datasource, k -> new HashMap<>());
        Object oldValue = map.put(actualKey, value);
        // Transparently suppress a ResourceHolder that was marked as void...
        if (oldValue instanceof ResourceHolder && ((ResourceHolder) oldValue).isVoid()) {
            oldValue = null;
        }
        if (oldValue != null) {
            throw new IllegalStateException("Already value [" + oldValue + "] for key [" +
                    actualKey + "] bound to thread [" + Thread.currentThread().getName() + "]");
        }
        if (logger.isTraceEnabled()) {
            logger.trace("Bound value [" + value + "] for key [" + actualKey + "] to thread [" +
                    Thread.currentThread().getName() + "]");
        }
    }

    /**
     * Unbind a resource for the given key from the current thread.
     *
     * @param key the key to unbind (usually the resource factory)
     * @return the previously bound value (usually the active resource object)
     * @throws IllegalStateException if there is no value bound to the thread
     * @see ResourceTransactionManager#getResourceFactory()
     */
    public static Object unbindResource(String datasource, Object key) throws IllegalStateException {
        Object actualKey = MyTransactionSynchronizationUtils.unwrapResourceIfNecessary(key);
        Object value = doUnbindResource(datasource, actualKey);
        if (value == null) {
            throw new IllegalStateException(
                    "No value for key [" + actualKey + "] bound to thread [" + Thread.currentThread().getName() + "]");
        }
        return value;
    }

    /**
     * Unbind a resource for the given key from the current thread.
     *
     * @param key the key to unbind (usually the resource factory)
     * @return the previously bound value, or {@code null} if none bound
     */
    @Nullable
    public static Object unbindResourceIfPossible(String datasource, Object key) {
        Object actualKey = MyTransactionSynchronizationUtils.unwrapResourceIfNecessary(key);
        return doUnbindResource(datasource, actualKey);
    }

    /**
     * Actually remove the value of the resource that is bound for the given key.
     */
    @Nullable
    private static Object doUnbindResource(String datasource, Object actualKey) {
        Map<Object, Object> map = resources.get().get(datasource);
        if (map == null) {
            return null;
        }
        Object value = map.remove(actualKey);
        // Remove this datasource's map once it is empty. (The previous code
        // tested the OUTER per-thread map and cleared the entire ThreadLocal,
        // which would have dropped other datasources' bindings as well.)
        if (map.isEmpty()) {
            resources.get().remove(datasource);
        }
        // Transparently suppress a ResourceHolder that was marked as void...
        if (value instanceof ResourceHolder && ((ResourceHolder) value).isVoid()) {
            value = null;
        }
        if (value != null && logger.isTraceEnabled()) {
            logger.trace("Removed value [" + value + "] for key [" + actualKey + "] from thread [" +
                    Thread.currentThread().getName() + "]");
        }
        return value;
    }

    //-------------------------------------------------------------------------
    // Management of transaction synchronizations
    //-------------------------------------------------------------------------

    /**
     * Return if transaction synchronization is active for the current thread.
     * Can be called before register to avoid unnecessary instance creation.
     *
     * @see #registerSynchronization
     */
    public static boolean isSynchronizationActive(String datasource) {
        return (synchronizations.get().get(datasource) != null);
    }

    /**
     * Activate transaction synchronization for the current thread.
     * Called by a transaction manager on transaction begin.
     *
     * @throws IllegalStateException if synchronization is already active
     */
    public static void initSynchronization(String datasource) throws IllegalStateException {
        if (isSynchronizationActive(datasource)) {
            throw new IllegalStateException("Cannot activate transaction synchronization - already active");
        }
        logger.trace("Initializing transaction synchronization");
        synchronizations.get().put(datasource, new LinkedHashSet<>());
    }

    /**
     * Register a new transaction synchronization for the current thread.
     * Typically called by resource management code.
     * <p>Note that synchronizations can implement the
     * {@link org.springframework.core.Ordered} interface.
     * They will be executed in an order according to their order value (if any).
     *
     * @param synchronization the synchronization object to register
     * @throws IllegalStateException if transaction synchronization is not active
     * @see org.springframework.core.Ordered
     */
    public static void registerSynchronization(String datasource, TransactionSynchronization synchronization)
            throws IllegalStateException {
        Assert.notNull(synchronization, "TransactionSynchronization must not be null");
        Set<TransactionSynchronization> synchs = synchronizations.get().get(datasource);
        if (synchs == null) {
            throw new IllegalStateException("Transaction synchronization is not active");
        }
        synchs.add(synchronization);
    }

    /**
     * Return an unmodifiable snapshot list of all registered synchronizations
     * for the current thread.
     *
     * @return unmodifiable List of TransactionSynchronization instances
     * @throws IllegalStateException if synchronization is not active
     * @see TransactionSynchronization
     */
    public static List<TransactionSynchronization> getSynchronizations(String datasource) throws IllegalStateException {
        Set<TransactionSynchronization> synchs = synchronizations.get().get(datasource);
        if (synchs == null) {
            throw new IllegalStateException("Transaction synchronization is not active");
        }
        // Return unmodifiable snapshot, to avoid ConcurrentModificationExceptions
        // while iterating and invoking synchronization callbacks that in turn
        // might register further synchronizations.
        if (synchs.isEmpty()) {
            return Collections.emptyList();
        } else {
            // Sort lazily here, not in registerSynchronization.
            List<TransactionSynchronization> sortedSynchs = new ArrayList<>(synchs);
            AnnotationAwareOrderComparator.sort(sortedSynchs);
            return Collections.unmodifiableList(sortedSynchs);
        }
    }

    /**
     * Deactivate transaction synchronization for the current thread.
     * Called by the transaction manager on transaction cleanup.
     *
     * @throws IllegalStateException if synchronization is not active
     */
    public static void clearSynchronization(String datasource) throws IllegalStateException {
        if (!isSynchronizationActive(datasource)) {
            throw new IllegalStateException("Cannot deactivate transaction synchronization - not active");
        }
        logger.trace("Clearing transaction synchronization");
        // Remove the entry rather than just clearing the set: with a mere
        // clear(), isSynchronizationActive() stayed true and a subsequent
        // initSynchronization() for the next transaction on this thread would
        // have thrown "already active".
        synchronizations.get().remove(datasource);
    }

    //-------------------------------------------------------------------------
    // Exposure of transaction characteristics
    //-------------------------------------------------------------------------

    /**
     * Expose the name of the current transaction, if any.
     * Called by the transaction manager on transaction begin and on cleanup.
     *
     * @param name the name of the transaction, or {@code null} to reset it
     * @see org.springframework.transaction.TransactionDefinition#getName()
     */
    public static void setCurrentTransactionName(String datasource, @Nullable String name) {
        currentTransactionName.get().put(datasource, name);
    }

    /**
     * Return the name of the current transaction, or {@code null} if none set.
     * To be called by resource management code for optimizations per use case,
     * for example to optimize fetch strategies for specific named transactions.
     *
     * @see org.springframework.transaction.TransactionDefinition#getName()
     */
    @Nullable
    public static String getCurrentTransactionName(String datasource) {
        return currentTransactionName.get().get(datasource);
    }

    /**
     * Expose a read-only flag for the current transaction.
     * Called by the transaction manager on transaction begin and on cleanup.
     *
     * @param readOnly {@code true} to mark the current transaction
     * as read-only; {@code false} to reset such a read-only marker
     * @see org.springframework.transaction.TransactionDefinition#isReadOnly()
     */
    public static void setCurrentTransactionReadOnly(String datasource, boolean readOnly) {
        // A null value resets the marker; isCurrentTransactionReadOnly tests for non-null.
        currentTransactionReadOnly.get().put(datasource, readOnly ? Boolean.TRUE : null);
    }

    /**
     * Return whether the current transaction is marked as read-only.
     * To be called by resource management code when preparing a newly
     * created resource (for example, a Hibernate Session).
     * <p>Note that transaction synchronizations receive the read-only flag
     * as argument for the {@code beforeCommit} callback, to be able
     * to suppress change detection on commit. The present method is meant
     * to be used for earlier read-only checks, for example to set the
     * flush mode of a Hibernate Session to "FlushMode.NEVER" upfront.
     *
     * @see org.springframework.transaction.TransactionDefinition#isReadOnly()
     * @see TransactionSynchronization#beforeCommit(boolean)
     */
    public static boolean isCurrentTransactionReadOnly(String datasource) {
        return (currentTransactionReadOnly.get().get(datasource) != null);
    }

    /**
     * Expose an isolation level for the current transaction.
     * Called by the transaction manager on transaction begin and on cleanup.
     *
     * @param isolationLevel the isolation level to expose, according to the
     * JDBC Connection constants (equivalent to the corresponding Spring
     * TransactionDefinition constants), or {@code null} to reset it
     * @see java.sql.Connection#TRANSACTION_READ_UNCOMMITTED
     * @see java.sql.Connection#TRANSACTION_READ_COMMITTED
     * @see java.sql.Connection#TRANSACTION_REPEATABLE_READ
     * @see java.sql.Connection#TRANSACTION_SERIALIZABLE
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_READ_UNCOMMITTED
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_READ_COMMITTED
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_REPEATABLE_READ
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_SERIALIZABLE
     * @see org.springframework.transaction.TransactionDefinition#getIsolationLevel()
     */
    public static void setCurrentTransactionIsolationLevel(String datasource, @Nullable Integer isolationLevel) {
        currentTransactionIsolationLevel.get().put(datasource, isolationLevel);
    }

    /**
     * Return the isolation level for the current transaction, if any.
     * To be called by resource management code when preparing a newly
     * created resource (for example, a JDBC Connection).
     *
     * @return the currently exposed isolation level, according to the
     * JDBC Connection constants (equivalent to the corresponding Spring
     * TransactionDefinition constants), or {@code null} if none
     * @see java.sql.Connection#TRANSACTION_READ_UNCOMMITTED
     * @see java.sql.Connection#TRANSACTION_READ_COMMITTED
     * @see java.sql.Connection#TRANSACTION_REPEATABLE_READ
     * @see java.sql.Connection#TRANSACTION_SERIALIZABLE
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_READ_UNCOMMITTED
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_READ_COMMITTED
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_REPEATABLE_READ
     * @see org.springframework.transaction.TransactionDefinition#ISOLATION_SERIALIZABLE
     * @see org.springframework.transaction.TransactionDefinition#getIsolationLevel()
     */
    @Nullable
    public static Integer getCurrentTransactionIsolationLevel(String datasource) {
        return currentTransactionIsolationLevel.get().get(datasource);
    }

    /**
     * Expose whether there currently is an actual transaction active.
     * Called by the transaction manager on transaction begin and on cleanup.
     *
     * @param active {@code true} to mark the current thread as being associated
     * with an actual transaction; {@code false} to reset that marker
     */
    public static void setActualTransactionActive(String datasource, boolean active) {
        // A null value resets the marker; isActualTransactionActive tests for non-null.
        actualTransactionActive.get().put(datasource, active ? Boolean.TRUE : null);
    }

    /**
     * Return whether there currently is an actual transaction active.
     * This indicates whether the current thread is associated with an actual
     * transaction rather than just with active transaction synchronization.
     * <p>To be called by resource management code that wants to discriminate
     * between active transaction synchronization (with or without backing
     * resource transaction; also on PROPAGATION_SUPPORTS) and an actual
     * transaction being active (with backing resource transaction;
     * on PROPAGATION_REQUIRED, PROPAGATION_REQUIRES_NEW, etc).
     */
    public static boolean isActualTransactionActive(String datasource) {
        return actualTransactionActive.get().get(datasource) != null;
    }

    /**
     * Clear the entire transaction synchronization state for the current thread:
     * registered synchronizations as well as the various transaction characteristics.
     *
     * @see #setCurrentTransactionName
     * @see #setCurrentTransactionReadOnly
     * @see #setCurrentTransactionIsolationLevel
     * @see #setActualTransactionActive
     */
    public static void clear(String datasource) {
        synchronizations.get().remove(datasource);
        currentTransactionName.get().remove(datasource);
        currentTransactionReadOnly.get().remove(datasource);
        currentTransactionIsolationLevel.get().remove(datasource);
        actualTransactionActive.get().remove(datasource);
    }

    /**
     * Clear the state for ALL datasources on the current thread by discarding
     * the thread-local maps entirely.
     */
    public static void clear() {
        synchronizations.remove();
        currentTransactionName.remove();
        currentTransactionReadOnly.remove();
        currentTransactionIsolationLevel.remove();
        actualTransactionActive.remove();
    }
}
|
//Google Maps
// Module-level state: coordinates and ASN, filled in by CreateTable() before
// initMap() is called.
var mlat = 0;
var mlon = 0;
var asn = null;

// Render the map centered on (mlat, mlon) with a marker titled with the ASN.
function initMap() {
    var location = {
        lat: mlat,
        lng: mlon
    };
    var map = new google.maps.Map(document.getElementById('map'), {
        // Fix: MapTypeId has no 'map' member (valid values are ROADMAP,
        // SATELLITE, HYBRID, TERRAIN); the intended base layer is ROADMAP.
        mapTypeId: google.maps.MapTypeId.ROADMAP,
        zoom: 13,
        center: location,
        disableDefaultUI: true
    });
    var marker = new google.maps.Marker({
        map: map,
        draggable: false,
        animation: google.maps.Animation.DROP,
        position: location,
        title: asn
    });
    // Toggle a bounce animation each time the marker is clicked.
    marker.addListener('click', function toggleBounce() {
        marker.getAnimation() !== null ? marker.setAnimation(null) : marker.setAnimation(google.maps.Animation.BOUNCE);
    });
}
// Uppercase the first letter of every whitespace-separated word in str.
function CamelCase(str) {
    var upperFirst = function (match) {
        return match.toUpperCase();
    };
    return str.replace(/(?:^|\s)\w/g, upperFirst);
}
// Fetch geo data from ip.nf for the IP typed into #ip, or for the caller's
// own IP when the field is empty, then hand the JSON to CreateTable.
function GetGeoData() {
    $("#wait").addClass("is-active");
    var ip = $("#ip").val();
    var uri = "https://ip.nf/";
    if (ip == null || ip == "") {
        uri += "me.json";
    } else {
        uri = uri + ip + ".json";
    }
    $.ajax({
        url: uri,
        dataType: 'json',
        type: 'get',
        success: CreateTable
    })
}
// Populate the result table from the ip.nf JSON payload, refresh the map,
// and hide the spinner; on missing data, show an error snackbar instead.
function CreateTable(data) {
    var info = data.ip;
    if (info.ip !== null) {
        // Map of table-cell selectors to the payload field shown in each.
        var fields = {
            "#aip": info.ip,
            "#aasn": info.asn,
            "#ahostname": info.hostname,
            "#alat": info.latitude,
            "#alon": info.longitude,
            "#acity": info.city,
            "#acountry": info.country,
            "#acountrycode": info.country_code,
            "#azipcode": info.post_code
        };
        for (var selector in fields) {
            $(selector).html("<b>" + fields[selector] + "</b>");
        }
        // Stash coordinates/ASN for initMap().
        mlat = info.latitude;
        mlon = info.longitude;
        asn = info.asn;
        // NOTE(review): "auto" is not a valid CSS display value — confirm the
        // intended value (likely "block"); kept as-is to preserve behavior.
        $("#in").css("display", "auto");
        $("#in").css("height", "auto");
        $("#hero").show();
        initMap();
        $("#wait").removeClass("is-active");
    } else {
        var toast = document.querySelector("#message");
        $("#wait").removeClass("is-active");
        toast.MaterialSnackbar.showSnackbar({
            message: CamelCase("oops...! Something went wrong")
        });
    }
}
// Show a snackbar warning when the browser reports it has gone offline.
function isOffline() {
    document.querySelector("#warning").MaterialSnackbar.showSnackbar({
        message: "You are offline!"
    });
}
// Wire up UI event handlers once the DOM is ready.
$(document).ready(function () {
    window.addEventListener('offline', isOffline, false);
    $("#locate").on("click", GetGeoData);
    var dialog = document.querySelector("dialog");
    // Browsers without native <dialog> support need the polyfill.
    if (!dialog.showModal) {
        dialogPolyfill.registerDialog(dialog);
    }
    $("#help").on("click", function () {
        dialog.showModal();
    });
    $(".close").on("click", function () {
        dialog.close();
    });
});
//Service Worker
// Register the service worker (enables offline support) where available.
if ('serviceWorker' in navigator) {
    navigator.serviceWorker.register('./sw.js').then(function (reg) {
        // Fix: log message previously read "segistered".
        console.log('Service Worker registered successfully', reg);
    }).catch(function (err) {
        console.warn('Error whilst registering Service Worker', err);
    });
}
|
<gh_stars>0
package jdbc0625;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
public class Test02_SelectCount {
    public static void main(String[] args) {
        /*
         * Print the total row count of the sungjuk table.
         */
        String url="jdbc:oracle:thin:@localhost:1521:xe";
        String user="java0514";
        String password="<PASSWORD>";
        String driver="oracle.jdbc.driver.OracleDriver";
        // Resources that must be released after use.
        Connection con=null;
        PreparedStatement pstmt=null;
        ResultSet rs=null;
        try{
            // 2) Load the JDBC driver class.
            Class.forName(driver);
            // 3) Connect to the Oracle DB server.
            con=DriverManager.getConnection(url, user, password);
            System.out.println("오라클DB 서버 연결 성공!!");
            //--------------------- DB connected ------------------------------------------
            StringBuilder sql=new StringBuilder();
            sql.append(" SELECT COUNT(*) AS cnt "); // alias the count column so it can be read by name
            sql.append(" FROM sungjuk ");
            pstmt=con.prepareStatement(sql.toString());
            rs=pstmt.executeQuery();
            if(rs.next()){// does the cursor point at a row?
                System.out.println("자료있음");
                do{
                    System.out.println("전체 행갯수:"+rs.getInt("cnt"));// read by column alias
                    System.out.println("전체 행갯수:"+rs.getInt(1));// read by column index (first column)
                }while(rs.next()); // the do-block runs again while next() is true
            }else{
                System.out.println("자료없음");
            }//if
            /*
             * Expected output:
             * 전체 행갯수:10
             * 전체 행갯수:10
             */
        }catch(Exception e){
            System.out.println("실패!"+e);
        }finally{
            // Release resources (order matters!):
            // close in REVERSE order of opening.
            try{
                if(rs!=null){rs.close();}
            }catch(Exception e){}
            try{
                if(pstmt!=null){pstmt.close();}
            }catch(Exception e){}
            try{
                if(con!=null){con.close();}
            }catch(Exception e){}
        }//try
    }//main
}//class
|
#!/bin/bash
#set -x
# Print command-line help to stderr and exit non-zero.
usage() {
    echo "Usage: $0 [options] <script.sh> [listOfMachines.txt ...]" 1>&2
    echo "" 1>&2
    echo "Options:" 1>&2
    echo "  -U, --user        Use a user other than root" 1>&2
    echo "  -q, --quick       Quick mode, don't ask questions" 1>&2
    echo "  -s, --ssh         Force use of SSH copy mode" 1>&2
    echo "  -l, --logdir      Directory to write log files to" 1>&2
    echo "  -u, --url         Base of URL to get scripts from" 1>&2
    echo "  -w, --webdir      Base of directory to copy scripts, should match URL" 1>&2
    echo "  -c, --counter     File to use as a counter" 1>&2
    echo "  -f, --localfirst  Do local machine first" 1>&2
    echo "  -L, --locallast   Do local machine last" 1>&2
    #echo "  -c, --confirm-close Confirm closing run windows"
    echo "" 1>&2
    echo "If url and webdir are specified, wget will be used for copying scripts," 1>&2
    echo "otherwise SSH will be used to copy." 1>&2
    echo "" 1>&2
    # BUGFIX: previously read "may be stored set in".
    echo "Options that may be set in ~/.domany.conf:" 1>&2
    echo "  USER, QUICK, FORCESSH, LOGDIR, URL, WEBDIR, COUNTER, LOCAL" 1>&2
    exit 1
}
# Ping every host in $workMachinesFile and optionally exclude the dead ones.
# Writes fping output to $fpingMachinesFile(.up/.down) and may rewrite
# $workMachinesFile in place.
pingMachines () {
    echo "About to ping $(cat $workMachinesFile | wc -l) machine(s), please wait."
    # fping the whole list (stripping any user@ prefixes) to show the ones
    # that are up/down when we start.
    perl -p -e 's/(^|\s+)[^\s]+@/$1/' $workMachinesFile | fping > $fpingMachinesFile
    err=$?
    cat $fpingMachinesFile | grep 'is alive$' | cut -f 1 -d ' ' | sort > $fpingMachinesFile.up
    cat $fpingMachinesFile | grep 'is unreachable$' | cut -f 1 -d ' ' | sort > $fpingMachinesFile.down
    if [ -s "$fpingMachinesFile.up" ]; then
        echo
        echo "Machines up:"
        cat $fpingMachinesFile.up
    fi
    if [ -s "$fpingMachinesFile.down" ]; then
        echo
        echo "Machines down:"
        cat $fpingMachinesFile.down
        downCount=$(wc -l $fpingMachinesFile.down | cut -f 1 -d ' ')
        if [ $err -ne 0 ]; then
            if [ ! "$QUICK" ]; then
                echo
                read -p "$downCount machine(s) down, do you want to exclude them? [Y/n/r/^c] " answer
                if [ $? -ne 0 ]; then
                    exit 1
                fi
                if [ "$answer" = "y" -o "$answer" = "Y" -o -z "$answer" ]; then
                    cat $fpingMachinesFile.up > $workMachinesFile
                fi
                # BUGFIX: was "$ansrwer" (typo), so answering "R" never re-pinged.
                if [ "$answer" = "r" -o "$answer" = "R" ]; then
                    pingMachines
                fi
            else
                echo
                echo "Excluding $downCount down machine(s)."
                cat $fpingMachinesFile.up > $workMachinesFile
            fi
        fi
    fi
}
#set -x
# Parse command-line options.
# BUGFIX: the --long list must be comma-separated with a trailing ':' on each
# option that takes an argument; it was a single malformed string
# ("user:quick,...,logdir:url:webdir:counter:") that broke long-option parsing.
TEMP=$(getopt --options U:qsl:u:w:c:fL --long user:,quick,localfirst,locallast,ssh,logdir:,url:,webdir:,counter: -n $0 -- "$@")
if [ $? != 0 ]; then
    usage
fi
eval set -- "$TEMP"

# Defaults; ~/.domany.conf may override any of these.
URL=
WEBDIR=
QUICK=
LOGDIR=./log
FORCESSH=
USER=root
COUNTER=
LOCAL=
test -e ~/.domany.conf && source ~/.domany.conf
# Consume the normalized option list produced by getopt.
while true ; do
    case "$1" in
        -U|--user)
            USER=$2
            shift 2   # BUGFIX: takes an argument, must shift twice
            ;;
        -q|--quick)
            QUICK=1
            shift
            ;;
        -s|--ssh)
            FORCESSH=1
            shift
            ;;
        -l|--logdir)  # BUGFIX: advertised in usage() but was never handled
            LOGDIR=$2
            shift 2
            ;;
        -u|--url)
            URL=$2
            shift 2
            ;;
        -w|--webdir)
            WEBDIR=$2
            shift 2
            ;;
        -c|--counter)
            COUNTER=$2
            shift 2
            ;;
        -f|--localfirst)
            LOCAL=first
            shift
            ;;
        -L|--locallast)
            LOCAL=last
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            usage
            ;;
    esac
done
# --ssh forces SCP copy mode by clearing any configured URL/webdir pair.
if [ "$FORCESSH" ]; then
    URL=
    WEBDIR=
fi
# wget copy mode needs both the URL and the matching local web directory.
if [ "$WEBDIR" -o "$URL" ]; then
    if [ ! "$WEBDIR" -o ! "$URL" ]; then
        echo "ERROR: You must specify webdir with url" 1>&2
        usage
    fi
fi
# First positional argument is the script to run on every machine.
SCRIPTFILE=$1
shift
if [ -z "$SCRIPTFILE" ]; then
    usage
fi
scriptBaseName=$(basename $SCRIPTFILE)
# Per-run scratch directory holding machine lists, logs and launch scripts.
workDir=$(mktemp -d /tmp/$scriptBaseName-XXXXXX)
workMachinesFile=$workDir/machines.txt
doneMachinesFile=$workDir/doneMachines.txt
downMachinesFile=$workDir/downMachines.txt
errorMachinesFile=$workDir/errorMachines.txt
fpingMachinesFile=$workDir/fpingMachines.txt
doneLogFile=$workDir/done.txt
touch $doneMachinesFile
touch $downMachinesFile
touch $errorMachinesFile
touch ${workMachinesFile}.sort
# Build an editable host list from any files given on the command line and
# let the user tweak it interactively before anything runs.
cat > $workMachinesFile.edit <<TEMPLATEEOF
# Edit this file to change the hosts that the script will run on.
# Anything after #'s is ignored.
# Script: $SCRIPTFILE
TEMPLATEEOF
if [ "$*" ]; then
    cat $* > ${workMachinesFile}.sort
fi
sort -n ${workMachinesFile}.sort | uniq >> $workMachinesFile.edit
sensible-editor $workMachinesFile.edit
# Strip comments and blank lines to get the final machine list.
cat $workMachinesFile.edit | sed -e 's/\s*\#.*//' | egrep -v '^[[:space:]]*$' > $workMachinesFile
pingMachines
if [ ! -s "$workMachinesFile" ]; then
    echo "Nothing to do."
    exit 0
fi
# Optionally move the local machine to the front/back of the run order.
if [ "$LOCAL" ]; then
    echo "LOCAL: $LOCAL"
    if grep -iq $(hostname -f) $workMachinesFile ; then
        touch $workMachinesFile.local
        if [ "$LOCAL" = "first" ]; then
            hostname -f >> $workMachinesFile.local
        fi
        grep -v $(hostname -f) $workMachinesFile >> $workMachinesFile.local
        if [ "$LOCAL" = "last" ]; then
            hostname -f >> $workMachinesFile.local
        fi
        # BUGFIX: only replace the list when the .local file was actually
        # built; this cp previously ran even when the local host was not in
        # the list, failing on a nonexistent file.
        cp $workMachinesFile.local $workMachinesFile
    fi
fi
echo
if [ "$COUNTER" ]; then
    # Atomically bump the shared run counter and name the log dir after it.
    echo "Locking counter..."
    lockfile -1 -r -1 ${COUNTER}.lock
    if [ $? -ne 0 ]; then
        echo "ERROR: Unable to lock $COUNTER" 1>&2
        exit 1
    fi
    set -e
    touch ${COUNTER}
    counterVal=$(cat $COUNTER);
    counterVal=$(($counterVal + 1))
    echo $counterVal > $COUNTER
    rm -f ${COUNTER}.lock
    LOGDIR=$LOGDIR/${counterVal}_$(date +%F-%T)_${scriptBaseName}
    mkdir -p $LOGDIR
    set +e
    echo "Beginning launch loop, counter=$counterVal."
else
    echo "Beginning launch loop."
fi
startTime=$(date +"%F %T %Z")
if [ "$URL" ]; then
    # wget mode: stage the script under a unique name in the web directory so
    # target machines can fetch it over HTTP.
    tempScriptFileBaseName=$(basename $(mktemp -u -t $scriptBaseName-XXXXXX));
    tempScriptFile=/tmp/$tempScriptFileBaseName
    tempWebScriptFile=$WEBDIR/$tempScriptFileBaseName
    URL=$URL/$tempScriptFileBaseName
    rsync $SCRIPTFILE $tempWebScriptFile
fi
cat $workMachinesFile
# Launch one terminal per machine, each running the script over ssh.
for machine in $(cat $workMachinesFile); do
    # Entries may be "user@host"; fall back to the global $USER otherwise.
    user=${machine%@*}
    if [[ -z "$user" ]] || [[ "$user" = "$machine" ]]; then
        user=$USER
    fi
    machine=${machine#*@}
    if [ "$counterVal" ]; then
        scriptLogFile=$LOGDIR/$(date +%F-%T)_${machine}.log
    else
        scriptLogFile=$LOGDIR/$(date +%F-%T)_${scriptBaseName}_${machine}.log
    fi
    if fping -q $machine; then
        if [ ! "$QUICK" ]; then
            read -p "Press enter to do $machine, ! to do all, ^C to stop: " line || exit 1
            if [[ "$line" = "!" ]]; then
                QUICK=yes
            fi
        else
            echo "Doing $user@$machine."
        fi
        # Generate a per-machine launcher; the \$ escapes defer evaluation to
        # when the launcher itself runs.
        launchScript=$(mktemp $workDir/launchScript-XXXXXX);
        if [ "$URL" ]; then
            # wget mode: remote machine downloads the staged script over HTTP.
            cat > $launchScript <<LAUNCHSCRIPTEOF
#!/bin/bash
ssh -o "StrictHostKeyChecking no" -t $user@$machine "wget $URL -O $tempScriptFile && chmod +x $tempScriptFile && nice $tempScriptFile; err=\\\$?; rm $tempScriptFile; exit \\\$err"
err=\$?
echo -en "\\033]0;Done $machine\\007\a"
if [ \$err -ne 0 ]; then
echo "$machine # WWW Script returned error code: \$err" >> $errorMachinesFile
read -p "ERROR: Press enter to close"
else
echo $machine >> $doneMachinesFile;
fi
LAUNCHSCRIPTEOF
        else
            # SSH mode: copy the script with scp, then run it remotely.
            tempScriptFile=$(mktemp -u -t $scriptBaseName-XXXXXX);
            cat > $launchScript <<LAUNCHSCRIPTEOF
#!/bin/bash
echo Copying $SCRIPTFILE to $user@$machine:$tempScriptFile
scp $SCRIPTFILE $user@$machine:$tempScriptFile
if [ \$? -ne 0 ]; then
echo "$machine # Unable to copy script" >> $errorMachinesFile
else
echo "Running $tempScriptFile on $machine"
ssh -o "StrictHostKeyChecking no" -t $user@$machine "chmod +x $tempScriptFile; nice $tempScriptFile; err=\\\$?; rm $tempScriptFile; exit \\\$err"
err=\$?
echo -en "\\033]0;Done $machine\\007\a"
if [ \$err -ne 0 ]; then
echo "$machine - Script returned error code: \$err" >> $errorMachinesFile
read -p "ERROR: Press enter to close"
else
echo $machine >> $doneMachinesFile;
#read -p "OK: Press enter to close"
fi
fi
LAUNCHSCRIPTEOF
        fi
        chmod +x $launchScript
        #echo "Doing $machine..."
        # Run the launcher in its own terminal, logging output via script(1).
        mate-terminal \
            --sm-disable \
            --disable-factory \
            --title "Doing $machine" \
            --execute script -f $scriptLogFile -c $launchScript &
        sleep 1
    else
        echo "Skipping machine $machine since it isn't answering pings."
        echo $machine >> $downMachinesFile
    fi
done
echo
echo "Done launching commands, waiting for completion..."
sleep 1
wait
endTime=$(date +"%F %T %Z")

# Assemble the run summary.
if [ "$counterVal" ]; then
    echo "Counter: $counterVal" >> $doneLogFile
fi
echo "Script: $scriptBaseName" >> $doneLogFile
echo "Start:  $startTime" >> $doneLogFile
echo "End:    $endTime" >> $doneLogFile
if [ -s "$doneMachinesFile" ]; then
    echo >> $doneLogFile
    echo "The following machines were done successfully (maybe):" >> $doneLogFile
    sort -n < $doneMachinesFile >> $doneLogFile
fi
if [ -s "$errorMachinesFile" ]; then
    echo >> $doneLogFile
    echo "The following machines had errors:" >> $doneLogFile
    sort -n < $errorMachinesFile >> $doneLogFile
fi
if [ -s "$downMachinesFile" ]; then
    echo >> $doneLogFile
    echo "The following machines were skipped because they didn't answer pings:" >> $doneLogFile
    sort -n < $downMachinesFile >> $doneLogFile
fi
echo
cat $doneLogFile
if [ "$counterVal" ]; then
    mv $doneLogFile $LOGDIR/done.log
else
    mv $doneLogFile $LOGDIR/$(date +%F-%T)_${scriptBaseName}_done.log
fi
# Clean up the staged script in wget mode.
if [ "$tempWebScriptFile" ]; then
    # A host:path spec means the web dir lives on a remote machine.
    if echo $tempWebScriptFile | grep -q :; then
        # BUGFIX: delimiter was the literal word "cut"; host:path must be
        # split on ':' so the remote copy can be removed over ssh.
        tempWebHost=$(echo $tempWebScriptFile | cut -f 1 -d :)
        tempWebFile=$(echo $tempWebScriptFile | cut -f 2- -d :)
        ssh $tempWebHost rm $tempWebFile
    else
        rm $tempWebScriptFile
    fi
fi
rm -rf $workDir
|
/**
 * A customer must prepay exactly when they have no credit limit.
 *
 * @param creditLimit - The customer's credit limit (0 means no credit).
 * @returns `true` when the credit limit is 0, otherwise `false`.
 */
export const determinePrepay = (creditLimit: number): boolean => creditLimit === 0;
|
/// Payload decoded from a keyed container (e.g. a JSON object) with two fields.
struct Inner: Decodable {
    let property1: String
    let property2: Int
    /// Explicit decoder initializer; equivalent to the synthesized Decodable
    /// conformance, kept as an extension point for custom decoding logic.
    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.property1 = try container.decode(String.self, forKey: .property1)
        self.property2 = try container.decode(Int.self, forKey: .property2)
    }
    /// Maps properties to container keys (raw values default to the case names).
    private enum CodingKeys: String, CodingKey {
        case property1
        case property2
    }
}
|
#!/bin/bash
# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Is meant to be run in the build directory.
# Resolve the directory containing this script so it can be run from anywhere.
readonly SCRIPT_ROOT="$(cd $(dirname ${BASH_SOURCE[0]} ) && pwd)"
readonly DATA_DIR="$SCRIPT_ROOT/test_data/extract_zircon_constants"
echo Testing extract-zircon-constants.py
# --dry-run validates generation against the checked-in test data without
# writing any output files.
exec python "$DATA_DIR/extract-zircon-constants.py" \
    --dry-run \
    --errors "$DATA_DIR/errors.h" \
    --rights "$DATA_DIR/rights.h" \
    --types "$DATA_DIR/types.h" \
    --dartfmt "$DATA_DIR/dartfmt" \
    --dart-constants "$DATA_DIR/constants.dart"
|
package pl.edu.agh.panda5.stages;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.scenes.scene2d.Stage;
import pl.edu.agh.panda5.Panda5;
import pl.edu.agh.panda5.utils.ScoreSerializer;
import java.util.List;
import java.util.stream.Collectors;
/** Menu screen stage: routes input to itself and formats persisted scores. */
public class MenuStage extends Stage {

    private Panda5 game;
    private ScoreSerializer serializer;

    public MenuStage(Panda5 game) {
        this.game = game;
        serializer = game.getSerializer();
        setUpInputProcessor();
        //setUpMenuButtons(textures.get("StartButton"),textures.get("ScoreButton"),textures.get("ExitButton"));
    }

    /** Returns the best persisted scores (up to 8) formatted for display. */
    public List<String> showTopTen() {
        List<String> formatted = new java.util.ArrayList<>();
        for (String record : serializer.getTopScores(8)) {
            formatted.add(record.replace(";", " Score-> "));
        }
        return formatted;
    }

    /** Makes this stage the receiver of all input events. */
    private void setUpInputProcessor() {
        Gdx.input.setInputProcessor(this);
    }
}
|
<reponame>Arwaabdelrahem/RestaurantApi-nestjs<gh_stars>0
import { Injectable, UnauthorizedException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { InjectModel } from '@nestjs/mongoose';
import { PassportStrategy } from '@nestjs/passport';
import { Model } from 'mongoose';
import { ExtractJwt, Strategy } from 'passport-jwt';
import { User } from './auth.model';
import { JwtPayload } from './jwt-payload.interface';
@Injectable()
export class JwtStrategy extends PassportStrategy(Strategy) {
  constructor(
    @InjectModel('User') private readonly User: Model<User>,
    private configService: ConfigService,
  ) {
    // Verify tokens signed with JWT_KEY, extracted from the
    // "Authorization: Bearer <token>" request header.
    super({
      secretOrKey: configService.get('JWT_KEY'),
      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
    });
  }

  // Called by Passport after the token signature has been verified; the
  // returned user is attached to the request. Rejects with 401 when the
  // token's email no longer maps to an existing account.
  async validate(payload: JwtPayload) {
    const { email } = payload;
    const user = await this.User.findOne({ email });
    if (!user) throw new UnauthorizedException('Invalid email or password');
    return user;
  }
}
|
#!/bin/bash
echo "Building momenton-server service docker image"
cd momenton-server
sudo docker build . -t golra03/momentonserver:latest -t golra03/momentonserver:v1.1
# BUGFIX: message said "momenton-client" but this pushes the server image.
echo "pushing momenton-server service docker image to the Docker hub"
sudo docker push golra03/momentonserver:latest
sudo docker push golra03/momentonserver:v1.1
|
#!/bin/sh
# Grid Engine job directives: run in the submit dir, 1 GPU, 24h wall time.
#$ -cwd
#$ -l s_gpu=1
#$ -l h_rt=24:00:00
# ==============================================================================
# Copyright (c) 2020, Yamagishi Laboratory, National Institute of Informatics
# Author: Erica Cooper (ecooper@nii.ac.jp)
# All rights reserved.
# ==============================================================================
# Set up the cluster environment: CUDA stack, conda env and Python paths.
. /etc/profile.d/modules.sh
module load intel cuda/9.0.176 nccl/2.2.13 cudnn/7.3
export PATH="/home/7/18IA1182/miniconda3/bin:$PATH"
source activate tacotron2
export PYTHONPATH=/home/7/18IA1182/external:/home/7/18IA1182/external/tacotron2:/home/7/18IA1182/external/self_attention_tacotron:/home/7/18IA1182/external/multi_speaker_tacotron:$PYTHONPATH
export TF_FORCE_GPU_ALLOW_GROWTH=true
cd /home/7/18IA1182/external/self_attention_tacotron
# Launch training with all hyperparameters inline; warm-starts from a Nancy
# phone-based checkpoint, excluding speaker-specific/decoder variables.
# NOTE(review): the command appears to be split across a line break inside the
# vars_to_warm_start regex ("^dense/ / bias|...") — confirm it is a single line
# in the original script before running.
python train.py --source-data-root=/gs/hs0/tgh-19IAA/ecooper/data/vctk0.91-preprocessed-phone/source --target-data-root=/gs/hs0/tgh-19IAA/ecooper/data/vctk_hpf_sv56_preprocess_30db/target --checkpoint-dir=/gs/hs0/tgh-19IAA/ecooper/experiments/test-code/checkpoint --selected-list-dir=/home/7/18IA1182/external/multi_speaker_tacotron/selected_lists/vctk_selected_lists_F --hparams=tacotron_model="DualSourceSelfAttentionTacotronModel",encoder="SelfAttentionCBHGEncoder",decoder="DualSourceTransformerDecoder",initial_learning_rate=0.00005,decay_learning_rate=False,cbhg_out_units=512,use_accent_type=False,embedding_dim=512,encoder_prenet_out_units=[512,512],encoder_prenet_drop_rate=0.5,projection1_out_channels=512,projection2_out_channels=512,self_attention_out_units=64,self_attention_encoder_out_units=64,decoder_prenet_out_units=[256,256],decoder_out_units=1024,attention_out_units=128,attention1_out_units=128,attention2_out_units=64,decoder_self_attention_num_hop=2,decoder_self_attention_out_units=1024,outputs_per_step=2,max_iters=500,attention=forward,attention2=additive,cumulative_weights=False,attention_kernel=31,attention_filters=32,use_zoneout_at_encoder=True,decoder_version="v2",num_symbols=256,eval_throttle_secs=600,eval_start_delay_secs=120,num_evaluation_steps=200,keep_checkpoint_max=200,use_l2_regularization=True,l2_regularization_weight=1e-7,use_postnet_v2=True,batch_size=48,dataset="vctk.dataset.DatasetSource",save_checkpoints_steps=1683,target_file_extension="target.tfrecord",use_external_speaker_embedding=True,embedding_file="/home/7/18IA1182/external/multi_speaker_tacotron/speaker_embeddings/vctk-x-vector.txt",speaker_embedding_dim='200',speaker_embedding_projection_out_dim=64,speaker_embedd_to_decoder=True,num_speakers=372,speaker_embedding_offset=5,source='phone',warm_start=True,ckpt_to_initialize_from=/gs/hs0/tgh-19IAA/ecooper/experiments/nancy-phone,vars_to_warm_start=["^((?!multi_speaker|decoder_pre_net_wrapper/pre_net/dense/kernel|^dense/kernel|^dense/
bias|dual_source_transformer_decoder/memory_layer/kernel|dual_source_transformer_decoder/memory_layer_1/kernel|dual_source_transformer_decoder/decoder/decoder_rnn_v2/multi_rnn_cell/).)*$"],logfile=/gs/hs0/tgh-19IAA/ecooper/experiments/test-code/train.log --hparam-json-file=/gs/hs0/tgh-19IAA/ecooper/data/vctk_hpf_sv56_preprocess_30db/target/hparams.json
|
#!/bin/bash
# Refresh the multicast-socket listing every 2 seconds.
watch -n2 rl_list_mcast_sockets.sh
|
#!/bin/bash
# Execute clusterGenes command, suppress error output
clusterGenes 2> /dev/null
# Check the exit status of the command
# NOTE(review): assumes 255 is clusterGenes' expected status here — confirm.
if [[ "$?" == 255 ]]; then
    # Perform specific action if exit status is 255
    echo "ClusterGenes command exited with status 255"
    # Add your specific action here
fi
|
package Find_Pivot_Index;
public class Solution {
    /**
     * Returns the leftmost index where the sum of elements strictly to the
     * left equals the sum strictly to the right, or -1 when no such index
     * exists. Single pass after computing the total: left == right exactly
     * when 2 * leftSum == total - nums[i].
     */
    public int pivotIndex(int[] nums) {
        if (nums == null || nums.length == 0) {
            return -1;
        }
        int total = 0;
        for (int value : nums) {
            total += value;
        }
        int leftSum = 0;
        for (int i = 0; i < nums.length; i++) {
            if (2 * leftSum == total - nums[i]) {
                return i;
            }
            leftSum += nums[i];
        }
        return -1;
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.pivotIndex(new int[]{1,7,3,6,5,6}));      // 3
        System.out.println(s.pivotIndex(new int[]{1,2,3}));            // -1
        System.out.println(s.pivotIndex(new int[]{-1,-1,-1,-1,-1,-1})); // -1 (no index balances)
    }
}
|
<gh_stars>0
/*
This file is part of cpp-ethereum.
cpp-ethereum is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
cpp-ethereum is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
/** @file QContractDefinition.h
* @author Yann <EMAIL>
* @date 2014
*/
#pragma once
#include <QObject>
#include <QQmlListProperty>
#include <libsolidity/AST.h>
#include "QFunctionDefinition.h"
#include "QBasicNodeDefinition.h"
namespace dev
{
namespace mix
{
/// QML-facing wrapper around a solidity ContractDefinition: exposes the
/// contract's functions, constructor and events as Qt properties.
class QContractDefinition: public QBasicNodeDefinition
{
    Q_OBJECT
    Q_PROPERTY(QQmlListProperty<dev::mix::QFunctionDefinition> functions READ functions CONSTANT)
    Q_PROPERTY(dev::mix::QFunctionDefinition* constructor READ constructor CONSTANT)
    Q_PROPERTY(QQmlListProperty<dev::mix::QFunctionDefinition> events READ events CONSTANT)

public:
    QContractDefinition(QObject* _parent, solidity::ContractDefinition const* _contract);
    /// Get all the functions of the contract.
    QQmlListProperty<QFunctionDefinition> functions() const { return QQmlListProperty<QFunctionDefinition>(const_cast<QContractDefinition*>(this), const_cast<QContractDefinition*>(this)->m_functions); }
    /// Get the constructor of the contract.
    QFunctionDefinition* constructor() const { return m_constructor; }
    /// Get all the functions of the contract.
    QList<QFunctionDefinition*> const& functionsList() const { return m_functions; }
    /// Find function by hash, returns nullptr if not found
    QFunctionDefinition const* getFunction(dev::FixedHash<4> _hash) const;
    /// Get events
    QQmlListProperty<QFunctionDefinition> events() const { return QQmlListProperty<QFunctionDefinition>(const_cast<QContractDefinition*>(this), const_cast<QContractDefinition*>(this)->m_events); }
    /// Get events
    QList<QFunctionDefinition*> const& eventsList() const { return m_events; }

private:
    QList<QFunctionDefinition*> m_functions;  ///< All functions declared by the contract.
    QFunctionDefinition* m_constructor;       ///< The contract's constructor.
    QList<QFunctionDefinition*> m_events;     ///< All events declared by the contract.
};
}
}
|
import cv2
from pathlib import Path
class FaceDetector:
    """Detects faces in images and saves each detection as a resized file.

    NOTE(review): the detection step is still a placeholder that treats the
    whole image as the "detected face" — confirm before relying on the counts.
    """

    def __init__(self):
        # Running total of faces saved across all process_images() calls.
        self.faces_detected = 0

    def process_images(self, image_files: list, output_file: str) -> int:
        """
        Process a list of images, detect faces, and save the detected faces as separate files.

        Args:
        - image_files: A list of file paths to the input images.
        - output_file: The base file name for the output images; the input's
          index and original suffix are appended.

        Returns:
        - int: The total number of faces detected and saved.
        """
        for idx, filename in enumerate(image_files):
            # BUGFIX: the try/except previously wrapped the whole loop, so one
            # unreadable image aborted processing of every later image.
            try:
                image = cv2.imread(filename)  # None when the path is unreadable
                # Perform face detection on the image using a face detection algorithm
                # Assume the detected face is stored in the variable 'detected_face'
                detected_face = image  # Placeholder for the detected face
                if detected_face is not None:
                    resized_image = cv2.resize(detected_face, (100, 100))  # Resize the detected face
                    cv2.imwrite(str(output_file) + str(idx) + Path(filename).suffix, resized_image)  # Save the detected face
                    self.faces_detected += 1  # Increment the count of detected faces
            except Exception as e:
                print('Failed to extract from image: {}. Reason: {}'.format(filename, e))
        return self.faces_detected
|
<reponame>IBMDecisionOptimization/OPL-jdbc-data-source<filename>src/main/java/com/ibm/opl/customdatasource/JdbcWriter.java
package com.ibm.opl.customdatasource;
import ilog.concert.IloException;
import ilog.concert.IloTuple;
import ilog.opl.IloOplElement;
import ilog.opl.IloOplElementDefinition;
import ilog.opl.IloOplElementDefinitionType.Type;
import ilog.opl.IloOplModel;
import ilog.opl.IloOplModelDefinition;
import ilog.opl.IloOplTupleSchemaDefinition;
import ilog.opl_core.cppimpl.IloTupleSchema;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
import com.ibm.opl.customdatasource.JdbcConfiguration.OutputParameters;
import com.ibm.opl.customdatasource.sql.NamedParametersPreparedStatement;
/**
* The class to write data using JDBC.
*
*/
public class JdbcWriter {
    /** Rows buffered per JDBC batch insert (0 would mean one execute per row). */
    private static long DEFAULT_BATCH_SIZE = 10000;
    /** Monotonic counter used to generate unique writer names. */
    private static long _num = 0;
    private String _name;
    private JdbcConfiguration _configuration;
    private IloOplModelDefinition _def;
    private IloOplModel _model;
    private long _batch_size;

    /** Returns a process-unique writer name ("writer1", "writer2", ...). */
    private static String _nextName() {
        synchronized (JdbcWriter.class) {
            _num ++;
            return "writer" + _num;
        }
    }

    /**
     * Convenience method to write the output of a model to a database.
     *
     * @param config The database connection configuration.
     * @param model The OPL model.
     */
    public static void writeOutput(JdbcConfiguration config, IloOplModel model) {
        IloOplModelDefinition definition = model.getModelDefinition();
        JdbcWriter writer = new JdbcWriter(config, definition, model);
        writer.customWrite();
    }

    public JdbcWriter(String name, JdbcConfiguration configuration, IloOplModelDefinition def, IloOplModel model) {
        _name = name;
        _configuration = configuration;
        _def = def;
        _model = model;
        _batch_size = DEFAULT_BATCH_SIZE;
    }

    public JdbcWriter(JdbcConfiguration configuration, IloOplModelDefinition def, IloOplModel model) {
        this(JdbcWriter._nextName(), configuration, def, model);
    }

    public JdbcWriter(JdbcConfiguration configuration, IloOplModel model) {
        this(configuration, model.getModelDefinition(), model);
    }

    public String getName() {
        return _name;
    }

    /** Writes every configured output element to its table, timing the run. */
    public void customWrite() {
        long startTime = System.currentTimeMillis();
        System.out.println("Writing elements to database");
        for(JdbcConfiguration.OutputParameters op: _configuration.getOutputParameters()) {
            System.out.println("Writing " + op.name);
            customWrite(op.name, op);
        }
        long endTime = System.currentTimeMillis();
        System.out.println("Done (" + (endTime - startTime)/1000.0 + " s)");
    }

    static final String CREATE_QUERY = "CREATE TABLE %(";

    /** Builds a CREATE TABLE statement whose columns mirror the tuple schema. */
    String createTableQuery(IloTupleSchema schema, String table) {
        String query = CREATE_QUERY.replace("%", table);
        IloOplElementDefinition elementDefinition = _def.getElementDefinition(schema.getName());
        IloOplTupleSchemaDefinition tupleSchema = elementDefinition.asTupleSchema();
        for (int i = 0; i < schema.getSize(); i++) {
            String columnName = schema.getColumnName(i);
            Type type = tupleSchema.getComponent(i).getElementDefinitionType();
            query += columnName;
            query += " ";
            // Map OPL component types to SQL column types.
            if (type == Type.INTEGER)
                query += "INT";
            else if (type == Type.FLOAT)
                query += "FLOAT";
            else if (type == Type.STRING)
                query += "VARCHAR(30)";
            if (i < (schema.getSize() - 1))
                query += ", ";
        }
        query += ")";
        return query;
    }

    static final String INSERT_QUERY = "INSERT INTO %(";

    /** Returns "?,?,...,?" with the given number of placeholders. */
    String getPlaceholderString(int size) {
        StringBuffer b = new StringBuffer();
        for (int i=0; i < size-1; i++)
            b.append("?,");
        b.append("?");
        return b.toString();
    }

    /** Builds an INSERT statement with one '?' placeholder per tuple column. */
    String getInsertQuery(IloTupleSchema schema, String table) {
        String query = INSERT_QUERY.replace("%", table);
        for (int i = 0; i < schema.getSize(); i++) {
            String columnName = schema.getColumnName(i);
            query += columnName;
            if (i < (schema.getSize() - 1))
                query += ", ";
        }
        query += ") VALUES(" + getPlaceholderString(schema.getSize()) + ")";
        return query;
    }

    /**
     * ValuesUpdater update the values in a PreparedStatement with the contents of the specified IloTuple.
     *
     */
    public static interface ValuesUpdater {
        /**
         * Update the parameters in a PreparedStatement with the values of the specified tuple.
         * @param tuple
         * @throws SQLException
         */
        void updateValues(IloTuple tuple) throws SQLException;
    }

    /** No-op updater used when there is no tuple set to iterate. */
    public static class NullValuesUpdater implements ValuesUpdater {
        public void updateValues(IloTuple tuple) throws SQLException {
            // do nothing
        }
    }

    /**
     * A ValuesUpdater updating values by name.
     *
     */
    public static class NamedValuesUpdater implements ValuesUpdater {
        String[] _names = null;
        Type[] _types = null;
        NamedParametersPreparedStatement _stmt;

        NamedValuesUpdater(IloTupleSchema schema, IloOplTupleSchemaDefinition tupleSchemaDef,
                NamedParametersPreparedStatement stmt) {
            // Cache component names/types once; reused for every row.
            _names = new String[schema.getSize()];
            _types = new Type[schema.getSize()];
            for (int i=0; i < schema.getSize(); i++) {
                _names[i] = tupleSchemaDef.getComponent(i).getName();
                _types[i] = tupleSchemaDef.getComponent(i).getElementDefinitionType();
            }
            _stmt = stmt;
        }

        public void updateValues(IloTuple tuple) throws SQLException {
            final NamedParametersPreparedStatement stmt = _stmt;
            for (int i=0; i < _names.length; i++) {
                final Type columnType = _types[i];
                final String name = _names[i];
                if (columnType == Type.INTEGER)
                    stmt.setInt(name, tuple.getIntValue(i));
                else if (columnType == Type.FLOAT)
                    stmt.setDouble(name, tuple.getNumValue(i));
                else if (columnType == Type.STRING)
                    stmt.setString(name, tuple.getStringValue(i));
            }
        }
    }

    /**
     * A ValuesUpdater updating values by index.
     *
     */
    public static class IndexedValuesUpdater implements ValuesUpdater{
        Type[] _types = null;
        PreparedStatement _stmt;
        int _max;

        IndexedValuesUpdater(IloTupleSchema schema, IloOplTupleSchemaDefinition tupleSchemaDef,
                PreparedStatement stmt) {
            _types = new Type[schema.getSize()];
            for (int i=0; i < schema.getSize(); i++) {
                _types[i] = tupleSchemaDef.getComponent(i).getElementDefinitionType();
            }
            _stmt = stmt;
            try {
                _max = stmt.getParameterMetaData().getParameterCount();
            } catch (SQLException e) {
                // Driver could not report the parameter count; effectively no cap.
                _max = 9999999;
            }
        }

        public void updateValues(IloTuple tuple) throws SQLException {
            PreparedStatement stmt = _stmt;
            // JDBC parameter indices are 1-based, tuple columns 0-based.
            for (int i=0; i < _types.length && i < this._max; i++) {
                int columnIndex = i + 1;
                Type columnType = _types[i];
                if (columnType == Type.INTEGER)
                    stmt.setInt(columnIndex, tuple.getIntValue(i));
                else if (columnType == Type.FLOAT)
                    stmt.setDouble(columnIndex, tuple.getNumValue(i));
                else if (columnType == Type.STRING)
                    stmt.setString(columnIndex, tuple.getStringValue(i));
            }
        }
    }

    static final String DROP_QUERY = "DROP TABLE %";

    /**
     * Writes a model element to database.
     *
     * Optionally drops/creates the target table, then inserts all tuples in
     * batches of _batch_size inside a single transaction (rolled back on
     * SQLException).
     *
     * @param name The model element name.
     * @param op The output parameters (target table, create/insert overrides).
     */
    void customWrite(String name, OutputParameters op) {
        String table = op.outputTable;
        IloOplElement elt = _model.hasElement(name) ? _model.getElement(name) : null;
        ilog.opl_core.cppimpl.IloTupleSet tupleSet = (elt != null) ? (ilog.opl_core.cppimpl.IloTupleSet) elt.asTupleSet() : null;
        IloTupleSchema schema = (tupleSet != null) ? tupleSet.getSchema_cpp() : null;
        try (Connection conn = DriverManager.getConnection(_configuration.getUrl(), _configuration.getUser(),
                _configuration.getPassword())) {
            try (Statement stmt = conn.createStatement()) {
                String sql;
                // drop existing table if exists
                if (op.autodrop) {
                    DatabaseMetaData dbm = conn.getMetaData();
                    boolean exists = false;
                    try (ResultSet rs = dbm.getTables(null, null, table, null)) {
                        exists = rs.next();
                    }
                    if (exists) {
                        sql = DROP_QUERY.replaceFirst("%", table);
                        try (Statement drop = conn.createStatement()) {
                            drop.executeUpdate(sql);
                        }
                    }
                }
                // create table using tuple fields
                // first create query
                sql = null;
                if (op.outputTable != null && op.createStatement == null) {
                    sql = createTableQuery(schema, table);
                } else if (op.createStatement != null) {
                    sql = op.createStatement;
                }
                if (sql != null) {
                    stmt.execute(sql);
                }
            }
            NamedParametersPreparedStatement np_stmt = null;
            try {
                IloOplTupleSchemaDefinition tupleSchemaDef = null;
                if (schema != null) {
                    IloOplElementDefinition tupleDef = _def.getElementDefinition(schema.getName());
                    tupleSchemaDef = tupleDef.asTupleSchema();
                    // NOTE(review): columnType is computed but never read below;
                    // the updaters re-derive types themselves — confirm and remove.
                    final Type[] columnType = new Type[schema.getSize()];
                    for (int i = 0; i < columnType.length; ++i)
                        columnType[i] = tupleSchemaDef.getComponent(i).getElementDefinitionType();
                }
                String psql = null;
                if (op.outputTable != null && op.insertStatement == null) {
                    psql = getInsertQuery(schema, table);
                } else {
                    psql = op.insertStatement;
                }
                np_stmt = new NamedParametersPreparedStatement(conn, psql);
                conn.setAutoCommit(false); // begin transaction
                // The helper to updater a statement given a tuple
                ValuesUpdater updater = null;
                if (tupleSet == null) {
                    updater = new NullValuesUpdater();
                } else if (np_stmt.hasNamedParameters()) {
                    updater = new NamedValuesUpdater(schema, tupleSchemaDef, np_stmt);
                } else {
                    // the named parameters prepared statement did not parse any named parameters
                    // assume this is then regular prepared statement, and use the statement instead.
                    updater = new IndexedValuesUpdater(schema, tupleSchemaDef, np_stmt.getStatement());
                }
                // the insert loop
                long icount = 1;
                if (tupleSet != null) {
                    for (java.util.Iterator it1 = tupleSet.iterator(); it1.hasNext();) {
                        IloTuple tuple = (IloTuple) it1.next();
                        updater.updateValues(tuple);
                        if (_batch_size == 0) {
                            np_stmt.executeUpdate(); // no batch
                        }
                        else {
                            np_stmt.addBatch();
                            // Flush a full batch every _batch_size rows.
                            if ((icount % _batch_size) == 0) {
                                np_stmt.executeBatch();
                            }
                        }
                        icount ++;
                    }
                } else {
                    np_stmt.executeUpdate();
                }
                // flush batches if any
                if (_batch_size != 0) {
                    np_stmt.executeBatch();
                }
                conn.commit();
            } catch (SQLException e) {
                conn.rollback();
                throw e;
            } finally {
                if (np_stmt != null)
                    np_stmt.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
|
# Accumulate the list total with an explicit loop, then report it.
values = [1, 2, 3, 4, 5]
total = 0
for value in values:
    total += value
print("Sum of all elements in given list:", total)
|
#!/bin/bash
# Copyright (c) Stanford University, The Regents of the University of
# California, and others.
#
# All Rights Reserved.
#
# See Copyright-SimVascular.txt for additional details.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
export SV_EXTERNALS_VERSION_NUMBER=2019.06
export EXTERNALS_SV_ARCH_DIR=x64
# Detect the Linux distribution/release to select matching prebuilt externals.
osid=$(lsb_release -si)
osrel=$(lsb_release -sr)
osver=$(lsb_release -sc)
case "$osid" in
    'Ubuntu')
        export EXTERNALS_SV_OS_DIR=ubuntu
        export EXTERNALS_SV_OS_LONG_NAME_DIR=ubuntu
        export EXTERNALS_SV_COMPILER_SHORT_NAME=gnu
        # Ubuntu releases are distinguished by codename.
        case "$osver" in
            'bionic')
                export SV_EXTERN_LINUX_VERSION=ubuntu_18
                export EXTERNALS_SV_FULL_OS_VER_NO=18.04
                export EXTERNALS_SV_COMPILER_VER_NO=7.3
                export EXTERNALS_BUILD_DATE=2019.06.13
                export EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE=release
                ;;
            'xenial')
                export SV_EXTERN_LINUX_VERSION=ubuntu_16
                export EXTERNALS_SV_FULL_OS_VER_NO=16.04
                export EXTERNALS_SV_COMPILER_VER_NO=5.4
                export EXTERNALS_BUILD_DATE=2019.06.13
                export EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE=release
                ;;
            'trusty')
                export SV_EXTERN_LINUX_VERSION=ubuntu_14
                export EXTERNALS_SV_FULL_OS_VER_NO=14.04
                export EXTERNALS_SV_COMPILER_VER_NO=4.8
                echo "Error - Ubuntu 14 no longer supported!"
                exit
                ;;
            *)
                echo "Error!"
                exit
                ;;
        esac
        ;;
    'CentOS')
        export EXTERNALS_SV_OS_DIR=centos
        export EXTERNALS_SV_OS_LONG_NAME_DIR=centos
        export EXTERNALS_SV_COMPILER_SHORT_NAME=gnu
        # CentOS releases are distinguished by major version number.
        case "$osrel" in
            7*)
                export SV_EXTERN_LINUX_VERSION=centos_7
                export EXTERNALS_SV_COMPILER_VER_NO=6.3
                export EXTERNALS_SV_FULL_OS_VER_NO=7.6
                export EXTERNALS_BUILD_DATE=2019.06.13
                export EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE=release
                ;;
            6*)
                export SV_EXTERN_LINUX_VERSION=centos_6
                export EXTERNALS_SV_FULL_OS_VER_NO=6.9
                export EXTERNALS_SV_COMPILER_VER_NO=5.3
                echo "Error - CentOS 6 2018.05 not supported!"
                exit
                ;;
            *)
                echo "Error!"
                exit
                ;;
        esac
        ;;
    *)
        echo "Error!"
        exit
        ;;
esac
export EXTERNALS_TOP=`pwd`/ext
export EXTERNALS_PARENT_URL=http://simvascular.stanford.edu/downloads/public/simvascular/externals/$SV_EXTERNALS_VERSION_NUMBER/linux/$EXTERNALS_SV_OS_LONG_NAME_DIR/$EXTERNALS_SV_FULL_OS_VER_NO/$EXTERNALS_SV_COMPILER_SHORT_NAME/$EXTERNALS_SV_COMPILER_VER_NO/$EXTERNALS_SV_ARCH_DIR/$EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE/$EXTERNALS_BUILD_DATE
export EXTERNALS_TAR_FILE_PREFIX=$EXTERNALS_SV_OS_LONG_NAME_DIR.$EXTERNALS_SV_FULL_OS_VER_NO.$EXTERNALS_SV_COMPILER_SHORT_NAME.$EXTERNALS_SV_COMPILER_VER_NO.$EXTERNALS_SV_ARCH_DIR.$EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE.$EXTERNALS_BUILD_DATE
# default is windows, so override with gnu compilers and linux
echo "CLUSTER=x64_linux" > cluster_overrides.mk
echo "CXX_COMPILER_VERSION=gcc" >> cluster_overrides.mk
echo "FORTRAN_COMPILER_VERSION=gfortran" >> cluster_overrides.mk
echo "SV_COMPILER=gnu" >> cluster_overrides.mk
echo "SV_COMPILER_VERSION=$EXTERNALS_SV_COMPILER_VER_NO" >> cluster_overrides.mk
echo "SV_EXTERNALS_VERSION_NUMBER=$SV_EXTERNALS_VERSION_NUMBER" >> global_overrides.mk
echo "OPEN_SOFTWARE_BINARIES_TOPLEVEL=$EXTERNALS_TOP/bin/$EXTERNALS_SV_COMPILER_SHORT_NAME/$EXTERNALS_SV_COMPILER_VER_NO/$EXTERNALS_SV_ARCH_DIR/$EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE" > global_overrides.mk
mkdir -p $EXTERNALS_TOP
chmod -R a+rwx $EXTERNALS_TOP
mkdir -p $EXTERNALS_TOP/tarfiles
mkdir -p $EXTERNALS_TOP/bin/$EXTERNALS_SV_COMPILER_SHORT_NAME/$EXTERNALS_SV_COMPILER_VER_NO/$EXTERNALS_SV_ARCH_DIR/$EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE
pushd $EXTERNALS_TOP/tarfiles
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.freetype.2.6.3.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.gdcm.2.6.3.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.hdf5.1.10.1.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.itk.4.13.2.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.mitk.2018.04.2.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.mmg.5.3.9.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.opencascade.7.3.0.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.python.3.5.5.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.qt.5.11.3.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.tcltk.8.6.4.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.tinyxml2.6.2.0.tar.gz
wget $EXTERNALS_PARENT_URL/$EXTERNALS_TAR_FILE_PREFIX.vtk.8.1.1.tar.gz
popd
pushd $EXTERNALS_TOP/bin/$EXTERNALS_SV_COMPILER_SHORT_NAME/$EXTERNALS_SV_COMPILER_VER_NO/$EXTERNALS_SV_ARCH_DIR/$EXTERNALS_SV_LOWERCASE_CMAKE_BUILD_TYPE
for i in $EXTERNALS_TOP/tarfiles/$EXTERNALS_TAR_FILE_PREFIX.*.tar.gz; do
echo "untar $i"
tar xzf $i
done
popd
|
#!/bin/bash
# Color codes for bash output
BLUE='\e[36m'
GREEN='\e[32m'
RED='\e[31m'
YELLOW='\e[33m'
CLEAR='\e[39m'
# Handle MacOS being incapable of tr, grep, and others
export LC_ALL=C
#----DEFAULTS----#
# Generate a 5-digit random cluster identifier for resource tagging purposes
RANDOM_IDENTIFIER=$(head /dev/urandom | LC_CTYPE=C tr -dc a-z0-9 | head -c 5 ; echo '')
# Ensure USER has a value
if [ -z "$JENKINS_HOME" ]; then
USER=${USER:-"unknown"}
else
USER=${USER:-"jenkins"}
fi
SHORTNAME=$(echo $USER | head -c 8)
# Generate a default resource name
RESOURCE_NAME="$SHORTNAME-$RANDOM_IDENTIFIER"
NAME_SUFFIX="gcp"
# Default to eastus
GCLOUD_CREDS_FILE=${GCLOUD_CREDS_FILE:-"$HOME/.gcp/osServiceAccount.json"}
GCLOUD_REGION=${GCLOUD_REGION:-"us-east1"}
GCLOUD_NODE_COUNT=${GCLOUD_NODE_COUNT:-"3"}
GCLOUD_CLUSTER_CHANNEL=${GCLOUD_CLUSTER_CHANNEL:-"regular"}
GCLOUD_CLUSTER_VERSION=${GCLOUD_CLUSTER_VERSION:-""}
#----VALIDATE ENV VARS----#
# Validate that we have all required env vars and exit with a failure if any are missing
missing=0
if [ -z "$GCLOUD_CREDS_FILE" ]; then
printf "${RED}GCLOUD_CREDS_FILE env var not set. flagging for exit.${CLEAR}\n"
missing=1
fi
if [ -z "$GCLOUD_PROJECT_ID" ]; then
printf "${RED}GCLOUD_PROJECT_ID env var not set. flagging for exit.${CLEAR}\n"
missing=1
fi
if [ "$missing" -ne 0 ]; then
exit $missing
fi
if [ ! -z "$CLUSTER_NAME" ]; then
RESOURCE_NAME="$CLUSTER_NAME-$RANDOM_IDENTIFIER"
printf "${BLUE}Using $RESOURCE_NAME to identify all created resources.${CLEAR}\n"
else
printf "${BLUE}Using $RESOURCE_NAME to identify all created resources.${CLEAR}\n"
fi
#----VERIFY GCLOUD CLI----#
if [ -z "$(which gcloud)" ]; then
printf "${RED}Could not find the gcloud cli, exiting. Try running ./install.sh.${CLEAR}\n"
exit 1
fi
#----LOG IN----#
# Log in and optionally choose a specific subscription
printf "${BLUE}Logging in to the gcloud cli.${CLEAR}\n"
#gcloud auth activate-service-account --key-file ~/.secrets/gc-acm-cicd.json
gcloud auth activate-service-account --key-file $GCLOUD_CREDS_FILE
if [ "$?" -ne 0 ]; then
printf "${RED}gcloud cli login failed, check credentials. Exiting.${CLEAR}\n"
exit 1
fi
printf "${BLUE}Setting the gcloud cli's project id to ${GCLOUD_PROJECT_ID}.${CLEAR}\n"
gcloud config set project ${GCLOUD_PROJECT_ID}
#----CREATE GKE CLUSTER----#
# Check the cluster version before create the clusters
if [ ! -z "${GCLOUD_CLUSTER_VERSION}" ] && [ "${GCLOUD_CLUSTER_VERSION}" != "" ]; then
printf "${BLUE}Verify the customized cluster version ${GCLOUD_CLUSTER_VERSION} in channel ${GCLOUD_CLUSTER_CHANNEL} in region ${GCLOUD_REGION}.${CLEAR}\n"
printf "${YELLOW}"
valid_cluster_version=$(gcloud container get-server-config --flatten="channels" --region=${GCLOUD_REGION} --format="json(channels.validVersions)" --filter="channels.channel=${GCLOUD_CLUSTER_CHANNEL}" | jq -r .[].channels.validVersions[])
if [[ "${valid_cluster_version[@]}" =~ "${GCLOUD_CLUSTER_VERSION}" ]]; then
printf "${BLUE}Success found the GKE cluster version with ${GCLOUD_CLUSTER_VERSION} in channel ${GCLOUD_CLUSTER_CHANNEL} in region ${GCLOUD_REGION}.${CLEAR}\n"
printf "${YELLOW}"
else
printf "${RED}Could not find GKE cluster version with ${GCLOUD_CLUSTER_VERSION} in channel ${GCLOUD_CLUSTER_CHANNEL} in region ${GCLOUD_REGION}.${CLEAR}\n"
exit 1
fi
else
printf "${BLUE}Get the default cluster version from channel ${GCLOUD_CLUSTER_CHANNEL} in region ${GCLOUD_REGION}.${CLEAR}\n"
printf "${YELLOW}"
GCLOUD_CLUSTER_VERSION=$(gcloud container get-server-config --flatten="channels" --region=${GCLOUD_REGION} --format="json(channels.defaultVersion)" --filter="channels.channel=${GCLOUD_CLUSTER_CHANNEL}" | jq .[].channels.defaultVersion -r)
fi
GKE_CLUSTER_NAME="${RESOURCE_NAME}-${NAME_SUFFIX}"
printf "${BLUE}Creating an GKE cluster named ${GKE_CLUSTER_NAME}.${CLEAR}\n"
printf "${YELLOW}"
gcloud container clusters create ${GKE_CLUSTER_NAME} --num-nodes=${GCLOUD_NODE_COUNT} --region="${GCLOUD_REGION}" --release-channel="${GCLOUD_CLUSTER_CHANNEL}" --cluster-version="${GCLOUD_CLUSTER_VERSION}"
if [ "$?" -ne 0 ]; then
printf "${RED}Failed to provision GKE cluster. See error above. Exiting${CLEAR}\n"
exit 1
fi
printf "${GREEN}Successfully provisioned GKE cluster ${GKE_CLUSTER_NAME}.${CLEAR}\n"
#----EXTRACTING KUBECONFIG----#
printf "${BLUE}Getting Kubeconfig for the cluster named ${GKE_CLUSTER_NAME}.${CLEAR}\n"
printf "${YELLOW}"
export KUBECONFIG=$PWD/${GKE_CLUSTER_NAME}.kubeconfig
gcloud container clusters get-credentials ${GKE_CLUSTER_NAME} --region="${GCLOUD_REGION}"
if [ "$?" -ne 0 ]; then
printf "${RED}Failed to get credentials for GKE cluster ${GKE_CLUSTER_NAME}, complaining and continuing${CLEAR}\n"
exit 1
fi
unset KUBECONFIG
#----Make KUBECONFIG that is useable from anywhere ----#
export KUBECONFIG_SAVED=$KUBECONFIG
export KUBECONFIG=$(pwd)/${GKE_CLUSTER_NAME}.kubeconfig
# Check for which base64 command we have available so we can use the right option
echo | base64 -w 0 > /dev/null 2>&1
if [ $? -eq 0 ]; then
# GNU coreutils base64, '-w' supported
BASE64_OPTION=" -w 0"
else
# Openssl base64, no wrapping by default
BASE64_OPTION=" "
fi
echo | kubectl apply -f - &> /dev/null <<EOF
apiVersion: v1
kind: ServiceAccount
metadata:
name: cluster-admin
namespace: kube-system
EOF
echo | kubectl apply -f - &> /dev/null <<EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: kube-system-cluster-admin
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: cluster-admin
subjects:
- kind: ServiceAccount
name: cluster-admin
namespace: kube-system
EOF
sleep 1
#TMP - DEBUG - TODO
cp $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig.orig
cat > "$(pwd)/${GKE_CLUSTER_NAME}.kubeconfig.portable" <<EOF
apiVersion: v1
clusters:
- cluster:
server: $(kubectl config view --minify -o jsonpath='{.clusters[0].cluster.server}')
insecure-skip-tls-verify: true
name: $(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
contexts:
- context:
cluster: $(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
namespace: default
user: kube-system-cluster-admin/$(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
name: kube-system-cluster-admin/$(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
current-context: kube-system-cluster-admin/$(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
kind: Config
preferences: {}
users:
- name: kube-system-cluster-admin/$(kubectl config view --minify -o jsonpath='{.clusters[0].name}')
user:
token: $(kubectl get $(kubectl get secret -n kube-system -o name | grep cluster-admin-token | head -n 1) -n kube-system -o jsonpath={.data.token} | base64 -d ${BASE64_OPTION})
EOF
# take portable kubeconfig and replace original kubeconfig
cp $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig.portable $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig
rm $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig.portable
# Set KUBECONFIG to what it used to be
export KUBECONFIG=$KUBECONFIG_SAVED
printf "${GREEN}You can find your kubeconfig file for this cluster in $(pwd)/${GKE_CLUSTER_NAME}.kubeconfig.\n${CLEAR}"
printf "${CLEAR}"
#-----DUMP STATE FILE----#
cat > $(pwd)/${GKE_CLUSTER_NAME}.json <<EOF
{
"CLUSTER_NAME": "${GKE_CLUSTER_NAME}",
"REGION": "${GCLOUD_REGION}",
"PLATFORM": "GCLOUD"
}
EOF
printf "${GREEN}GKE cluster provision successful. Cluster named ${GKE_CLUSTER_NAME} created. \n"
printf "State file saved for cleanup in $(pwd)/${GKE_CLUSTER_NAME}.json${CLEAR}\n"
|
<reponame>i-a-n/eui
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import refractor from 'refractor';
import visit from 'unist-util-visit';
import { Plugin } from 'unified';
export const FENCED_CLASS = 'remark-prismjs--fenced';
const attacher: Plugin = () => {
return (ast) => visit(ast, 'code', visitor);
function visitor(node: any) {
const { data = {}, lang: language } = node;
if (!language) {
return;
}
node.data = data;
data.hChildren = refractor.highlight(node.value, language);
data.hProperties = {
...data.hProperties,
language,
className: [
'prismjs',
...(data.hProperties?.className || []),
`language-${language}`,
FENCED_CLASS,
],
};
}
};
export default attacher;
|
<gh_stars>0
package weixin.guanjia.core.util;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import weixin.liuliangbao.util.RedisConnectionPoolFactory;
/**
* Created by GuoLiang on 2016/5/11 18:35.
*/
public class RedisUtil {
public static final Logger LOGGER = LoggerFactory.getLogger(RedisUtil.class);
public static byte[] getRedis(byte[] key) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
return jedis.get(key);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
return null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static String getRedis(String key) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
return jedis.get(key);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
return null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static String getRedis(String key, String field, Integer expireTime) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
if (null != expireTime) {
return jedis.hget(key, field);
} else {
return jedis.hget(key, field);
}
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
return null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static void setRedis(String key, String field, String value, Integer expireTime) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
jedis.hset(key, field, value);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static void setRedis(byte[] key, int expreTime, byte[] value) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
jedis.setex(key, expreTime, value);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static void setRedis(String key, int expreTime, String value) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
jedis.setex(key, expreTime, value);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static void setRedis(String key, String value) {
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
jedis.set(key, value);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
public static void del(String key) {
if (StringUtils.isBlank(key)) return;
Jedis jedis = null;
try {
jedis = RedisConnectionPoolFactory.getResource();
jedis.del(key);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
RedisConnectionPoolFactory.returnBrokenResource(jedis);
jedis = null;
} finally {
RedisConnectionPoolFactory.returnResource(jedis);
}
}
}
|
#!/bin/bash
#PBS -N bin_label_and_evaluate
#PBS -l nodes=2:ppn=8
#PBS -l walltime=999999:00:00
export PATH=/home1/jialh/tools/miniconda3/bin:$PATH
sample=$1
workdir=$2
method=$3
assembler=$4
configdir=${workdir}/configs
echo "begin to deal with ${sample}"
if [ -s ${configdir}/${sample}_config.yaml ] && [ ! -s ${workdir}/01binlabel/${sample}/final/${sample}.tsv ]
then
echo "${workdir}/${sample}_config.yaml exists."
mkdir -p ${workdir}/01binlabel
cd ${workdir}/01binlabel
/home1/jialh/tools/miniconda3/bin/snakemake \
-s /home3/ZXMGroup/VIROME/G3compare/pipeline/bin_label_and_evaluate/${method}_Snakefile \
--configfile ${configdir}/${sample}_config.yaml \
--restart-times 0 --keep-going --cores 16 --latency-wait 100 --forcerun
cd ${workdir}
elif [ -s ${workdir}/01binlabel/${sample}/final/${sample}.tsv ]
then
echo "${sample}/final/${sample}.tsv exists."
else
echo "${workdir}/${sample}_config.yaml doesn't exist."
fi
#done
|
package com.seatgeek.placesautocomplete.model;
public final class PlaceGeometry {
public final PlaceLocation location;
public PlaceGeometry(final PlaceLocation location) {
this.location = location;
}
}
|
package cn.st.factory;
import cn.st.dao.UserDao;
import cn.st.dao.impl.UserDaoImpl;
/**
* @description:
* @author: st
* @create: 2021-01-30 17:05
**/
public class StaticFactory {
public static UserDao getUserDao(){
return new UserDaoImpl();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.