text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Builds the project once for every supported CMake build type using the
# armgcc cross toolchain. Each pass starts from a clean CMake state so a
# previous configuration cannot leak cached settings into the next one.

# Remove generated CMake/Make state from the current directory (safe if absent).
clean_cmake_artifacts()
{
    if [ -d "CMakeFiles" ]; then rm -rf CMakeFiles; fi
    if [ -f "Makefile" ]; then rm -f Makefile; fi
    if [ -f "cmake_install.cmake" ]; then rm -f cmake_install.cmake; fi
    if [ -f "CMakeCache.txt" ]; then rm -f CMakeCache.txt; fi
}

TOOLCHAIN_FILE="../../../../../../../tools/cmake_toolchain_files/armgcc.cmake"

# Same configure + build sequence for every build type; the original script
# repeated this block six times verbatim.
for build_type in ddr_debug ddr_release debug release flash_debug flash_release
do
    clean_cmake_artifacts
    cmake -DCMAKE_TOOLCHAIN_FILE="$TOOLCHAIN_FILE" -G "Unix Makefiles" -DCMAKE_BUILD_TYPE="$build_type" .
    make -j
done
|
<reponame>muhsatrio/feedback-fisdas<gh_stars>0
$(document).ready(function() {
    // Page navigation: clicking ".pageN" fades in ".halamanN" and hides the
    // other eight panels. The original file repeated this handler nine times;
    // the loop below is behaviorally identical.
    var PAGE_COUNT = 9;

    // Show panel `n`, hide all the others.
    function showPage(n) {
        for (var i = 1; i <= PAGE_COUNT; i++) {
            var panel = $(".halaman" + i);
            if (i === n) {
                panel.fadeIn();
            } else {
                panel.hide();
            }
        }
    }

    for (var p = 1; p <= PAGE_COUNT; p++) {
        // IIFE captures the loop variable for the click closure (pre-ES6 file,
        // so no `let`).
        (function(n) {
            $(".page" + n).click(function() {
                showPage(n);
            });
        })(p);
    }

    // Gallery filter buttons: each button shows its own group (plain show(),
    // as in the original — no fade) and hides the other three.
    var groups = [
        { button: ".button-2015-cowo", panel: ".cowo-2015" },
        { button: ".button-2015-cewe", panel: ".cewe-2015" },
        { button: ".button-2016-cowo", panel: ".cowo-2016" },
        { button: ".button-2016-cewe", panel: ".cewe-2016" }
    ];
    groups.forEach(function(group) {
        $(group.button).click(function() {
            groups.forEach(function(other) {
                if (other.panel === group.panel) {
                    $(other.panel).show();
                } else {
                    $(other.panel).hide();
                }
            });
        });
    });

    // Logout just navigates to the server-side logout route.
    $(".button-logout").click(function() {
        window.location.href = 'logout';
    });
});
import pytest
def get_cookie_bar_buttons(drv):
    '''
    Locate the two buttons on the cookie bar panel.

    The panel renders the decline button first and the accept button second,
    hence the unpacking order below.

    :param drv: a webdriver
    :return: dict mapping 'accept' and 'decline' to their button webelements
    '''
    decline_btn, accept_btn = drv.find_elements_by_xpath(
        '//div[@class="cookie__bar__buttons"]/button')[:2]
    return {'accept': accept_btn, 'decline': decline_btn}
def get_cookie(drv):
    '''
    Read the value of the cookie-consent cookie.

    :param drv: a webdriver
    :return: the stored value of vue-cookie-accept-decline-cookie-policy-panel
    '''
    cookie_name = 'vue-cookie-accept-decline-cookie-policy-panel'
    cookie = drv.get_cookie(cookie_name)
    return cookie['value']
def test_cookie_bar_is_visible(init_webdriver):
'''
Check the cookie policy panel is visible
:param init_webdriver: pytest fixture, initialize the webdriver and return the driver
:return: None
'''
firefox = init_webdriver
assert firefox.find_element_by_id('cookie-policy-panel').is_displayed()
@pytest.mark.parametrize('action', ['accept', 'decline'])
@pytest.mark.xfail
def test_hide_cookie_bar(init_webdriver, action):
    '''
    Click "I accept!" or "I decline!" and check the cookie policy panel is visible.
    I expect this test to fail (hence the xfail marker): clicking either button
    hides the panel, so the visibility assertion below should not hold.

    :param init_webdriver: pytest fixture, initialize the webdriver and return the driver
    :param action: "accept" or "decline" — selects which button to click
    :return: None
    '''
    firefox = init_webdriver
    get_cookie_bar_buttons(firefox)[action].click()
    assert firefox.find_element_by_id('cookie-policy-panel').is_displayed()
def test_accept_cookie(init_webdriver):
    '''
    Accept cookie test case: click the "I accept!" button on the cookie bar
    and verify the consent cookie records the choice.

    :param init_webdriver: pytest fixture, initialize the webdriver and return the driver
    :return: None
    '''
    browser = init_webdriver
    buttons = get_cookie_bar_buttons(browser)
    buttons['accept'].click()
    assert get_cookie(browser) == 'accept'
@pytest.mark.xfail
def test_decline_cookie(init_webdriver):
    '''
    Decline cookie test case: click the "I decline!" button on the cookie bar.
    It is funny, because I don't want cookies, but the app stores this decision
    in a cookie :). Marked xfail — the stored value apparently does not match
    'decline' in practice.

    :param init_webdriver: pytest fixture, initialize the webdriver and return the driver
    :return: None
    '''
    firefox = init_webdriver
    get_cookie_bar_buttons(firefox)['decline'].click()
    assert get_cookie(firefox) == 'decline'
|
# frozen_string_literal: true
RSpec.describe RuboCop::Cop::Style::MultilineMemoization, :config do
  # Cases that are legal under BOTH EnforcedStyle settings: single-line
  # memoizations, and multiline memoizations already wrapped in a block,
  # conditional, or other construct the cop does not flag.
  shared_examples 'with all enforced styles' do
    context 'with a single line memoization' do
      it 'allows expression on first line' do
        expect_no_offenses('foo ||= bar')
      end

      it 'allows expression on the following line' do
        expect_no_offenses(<<~RUBY)
          foo ||=
          bar
        RUBY
      end
    end

    context 'with a multiline memoization' do
      context 'without a `begin` and `end` block' do
        it 'allows with another block on the first line' do
          expect_no_offenses(<<~RUBY)
            foo ||= bar.each do |b|
            b.baz
            bb.ax
            end
          RUBY
        end

        it 'allows with another block on the following line' do
          expect_no_offenses(<<~RUBY)
            foo ||=
            bar.each do |b|
            b.baz
            b.bax
            end
          RUBY
        end

        it 'allows with a conditional on the first line' do
          expect_no_offenses(<<~RUBY)
            foo ||= if bar
            baz
            else
            bax
            end
          RUBY
        end

        it 'allows with a conditional on the following line' do
          expect_no_offenses(<<~RUBY)
            foo ||=
            if bar
            baz
            else
            bax
            end
          RUBY
        end
      end
    end
  end

  # keyword style: multiline memoizations must use `begin`/`end`;
  # parenthesized forms are offenses and are auto-corrected.
  context 'EnforcedStyle: keyword' do
    let(:cop_config) { { 'EnforcedStyle' => 'keyword' } }

    include_examples 'with all enforced styles'

    context 'with a multiline memoization' do
      context 'without a `begin` and `end` block' do
        context 'when the expression is wrapped in parentheses' do
          it 'registers an offense when expression starts on first line' do
            expect_offense(<<~RUBY)
              foo ||= (
              ^^^^^^^^^ Wrap multiline memoization blocks in `begin` and `end`.
              bar
              baz
              )
            RUBY

            expect_correction(<<~RUBY)
              foo ||= begin
              bar
              baz
              end
            RUBY
          end

          it 'registers an offense when expression starts on following line' do
            expect_offense(<<~RUBY)
              foo ||=
              ^^^^^^^ Wrap multiline memoization blocks in `begin` and `end`.
              (
              bar
              baz
              )
            RUBY

            expect_correction(<<~RUBY)
              foo ||=
              begin
              bar
              baz
              end
            RUBY
          end

          it 'registers an offense with multiline expression' do
            expect_offense(<<~RUBY)
              foo ||= (bar ||
              ^^^^^^^^^^^^^^^ Wrap multiline memoization blocks in `begin` and `end`.
              baz)
            RUBY

            expect_correction(<<~RUBY)
              foo ||= begin
              bar ||
              baz
              end
            RUBY
          end
        end
      end
    end
  end

  # braces style: the inverse — `begin`/`end` memoizations are offenses and
  # are auto-corrected to parentheses.
  context 'EnforcedStyle: braces' do
    let(:cop_config) { { 'EnforcedStyle' => 'braces' } }

    include_examples 'with all enforced styles'

    context 'with a multiline memoization' do
      context 'without braces' do
        context 'when the expression is wrapped in' \
        ' `begin` and `end` keywords' do
          it 'registers an offense for begin...end block on first line' do
            expect_offense(<<~RUBY)
              foo ||= begin
              ^^^^^^^^^^^^^ Wrap multiline memoization blocks in `(` and `)`.
              bar
              baz
              end
            RUBY

            expect_correction(<<~RUBY)
              foo ||= (
              bar
              baz
              )
            RUBY
          end

          it 'registers an offense for begin...end block on following line' do
            expect_offense(<<~RUBY)
              foo ||=
              ^^^^^^^ Wrap multiline memoization blocks in `(` and `)`.
              begin
              bar
              baz
              end
            RUBY

            expect_correction(<<~RUBY)
              foo ||=
              (
              bar
              baz
              )
            RUBY
          end
        end
      end
    end
  end
end
|
<reponame>newnoiseworks/gg
package utils
import (
"crypto/md5"
"encoding/hex"
"fmt"
"io/fs"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"regexp"
"strings"
"text/template"
"github.com/iancoleman/strcase"
"gopkg.in/yaml.v2"
)
// matchFirstCap and matchAllCap implement the classic camelCase -> snake_case
// split; they are used by the "upperSnake" template helper in processTemplate.
var matchFirstCap = regexp.MustCompile("(.)([A-Z][a-z]+)")
var matchAllCap = regexp.MustCompile("([a-z0-9])([A-Z])")
// getData assembles the data map handed to every template: the profile for the
// requested environment under "profile", plus every top-level key/value pair
// found in the YAML files under <resourceDir>/resources/.
func getData(environment string, buildPath string) *map[interface{}]interface{} {
	fp := make(map[interface{}]interface{})
	fp["profile"] = GetProfile(environment).GetProfileAsMap()

	resourceDir := buildPath
	// When environment looks like a relative path ("../..."), rebuild a
	// "../../..." prefix with one "../" per parent component.
	// NOTE(review): the split on "./" and the p == "." test are subtle —
	// confirm the intended result for inputs like "../../foo" before changing.
	if strings.HasPrefix(environment, "..") {
		paths := strings.Split(environment, "./")
		resourceDir = ""
		for _, p := range paths {
			if p == "." {
				resourceDir += "../"
			}
		}
	}

	err := filepath.Walk(fmt.Sprintf("%s/resources/", resourceDir), func(tmpl string, info fs.FileInfo, err error) error {
		// Best-effort: a missing resources directory is logged, not fatal.
		if err != nil {
			log.Printf("no resources directory found in %v. in buildpath %v", resourceDir, buildPath)
			return nil
		}
		name := info.Name()
		if info.IsDir() == false && strings.HasSuffix(name, ".yml") {
			c := make(map[interface{}]interface{})
			// Read errors are logged but Unmarshal then decides: an unreadable
			// file yields empty YAML, a malformed one aborts the run.
			yamlFile, err := ioutil.ReadFile(tmpl)
			if err != nil {
				log.Printf("yamlFile Get err: #%v ", err)
			}
			err = yaml.Unmarshal(yamlFile, &c)
			if err != nil {
				log.Fatalf("Unmarshal err: %v", err)
			}
			// Files walked later override keys from earlier files.
			for k, v := range c {
				fp[k] = v
			}
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}

	return &fp
}
// BuildTemplatesFromPath renders every "*.<templateExtension>" file found under
// <buildPath>/<path>, using the data map produced by getData for the given
// environment. Each matching file is rendered next to itself with the template
// extension stripped (see processTemplate). Any walk error is fatal.
func BuildTemplatesFromPath(path string, environment string, buildPath string, templateExtension string) {
	fp := getData(environment, buildPath)

	// Idiom fix: fmt.Printf instead of fmt.Println(fmt.Sprintf(...)).
	fmt.Printf("build %s config files\n", path)
	fmt.Printf("%s/%s\n", buildPath, path)

	err := filepath.Walk(fmt.Sprintf("%s/%s", buildPath, path), func(tmpl string, info fs.FileInfo, err error) error {
		if err != nil {
			log.Fatal(err)
		}
		name := info.Name()
		// Only render regular files carrying the template extension.
		if !info.IsDir() && strings.HasSuffix(name, "."+templateExtension) {
			processTemplate(tmpl, name, fp, templateExtension)
		}
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}
// processTemplate renders a single template file to the same path minus the
// template extension (e.g. "config.yml.tmpl" -> "config.yml"). The data map fp
// is exposed to the template along with three helpers: md5, upperSnake, camel.
// Any parse/create/execute failure is fatal.
// NOTE(review): the name parameter is unused; kept for interface stability.
func processTemplate(tmpl string, name string, fp *map[interface{}]interface{}, templateExtension string) {
	// Go-idiomatic local name (was final_path).
	finalPath := strings.ReplaceAll(tmpl, "."+templateExtension, "")
	fmt.Printf(" >> build %s >> %s\n", tmpl, finalPath)

	t, err := template.New(path.Base(tmpl)).Funcs(template.FuncMap{
		// md5: hex digest of the input text.
		"md5": func(text string) string {
			hash := md5.Sum([]byte(text))
			return hex.EncodeToString(hash[:])
		},
		// upperSnake: camelCase -> UPPER_SNAKE_CASE via the package regexps.
		"upperSnake": func(text string) string {
			snake := matchFirstCap.ReplaceAllString(text, "${1}_${2}")
			snake = matchAllCap.ReplaceAllString(snake, "${1}_${2}")
			return strings.ToUpper(snake)
		},
		// camel: arbitrary case -> CamelCase.
		"camel": func(text string) string {
			return strcase.ToCamel(text)
		},
	}).ParseFiles(tmpl)
	if err != nil {
		log.Fatal(err)
	}

	f, err := os.Create(finalPath)
	if err != nil {
		log.Fatal("create file: ", err)
	}
	// BUG FIX: the original never closed f, leaking one file handle per
	// template rendered during a walk.
	defer f.Close()

	err = t.Execute(f, fp)
	if err != nil {
		log.Fatal("execute: ", err)
	}
}
|
//Import dipendenze
const autoprefixer = require('gulp-autoprefixer');
const browserSync = require('browser-sync').create();
const concat = require('gulp-concat');
const cleanCSS = require('gulp-clean-css');
const del = require('del');
const gulp = require('gulp');
const rename = require('gulp-rename');
const run = require('gulp-run-command').default;
const sass = require('gulp-sass');
const merge = require('merge2');
const uglify = require('gulp-uglify');
const imagemin = require('gulp-imagemin');
const cache = require('gulp-cache');
const size = require('gulp-size');
const babel = require('gulp-babel');
const yaml = require('gulp-yaml');
const sassVars = require('gulp-sass-vars');
const log = require('fancy-log');
const runSequence = require('gulp4-run-sequence');
const fs = require('fs');
const prompt = require('gulp-prompt');
const webp = require('gulp-webp');
const fileClean = require('gulp-clean');
// List of the paths the gulp tasks below need.
const paths = {
    here: './',
    // Jekyll build output directory.
    _site: {
        root: '_site',
        assets: {
            root: '_site/assets',
            css: '_site/assets/css',
            js: '_site/assets/js',
            fonts: '_site/assets/fonts',
            img: '_site/assets/img',
            json: '_site/assets/json',
        }
    },
    // Blog post sources.
    _posts: {
        root: '_posts'
    },
    // Pre-compilation sources (SASS/CSS/JS).
    _src: {
        root: '_src',
        sass: {
            root: '_src/sass',
            all: '_src/sass/**/*',
            app: '_src/sass/app',
            vendor: '_src/sass/vendor'
        },
        css: {
            root:'_src/css',
            all: '_src/css/**/*',
            vendor: '_src/css/vendor'
        },
        js: {
            root:'_src/js',
            all: '_src/js/**/*',
            app: '_src/js/app',
            vendor: '_src/js/vendor',
            // Bundled into paroparo.min.js: critical vendor scripts first,
            // then optional plugins (order matters for concat).
            critical: ['_src/js/vendor/jquery.min.js', '_src/js/vendor/popper.min.js', '_src/js/vendor/bootstrap.js'],
            optional: ['_src/js/vendor/plugins/*.js', '_src/js/vendor/leap.min.js', '_src/js/app/custom.js'],
            // Compiled individually, not concatenated into the main bundle.
            other: ['_src/js/vendor/plugins/other/*.js']
        }
    },
    // Compiled assets kept alongside the site sources.
    assets: {
        root: 'assets',
        css: {
            root:'assets/css',
            all: 'assets/css/**/*'
        },
        js: {
            root:'assets/js',
            all: 'assets/js/**/*'
        },
        img: {
            root: 'assets/img',
            all: ['assets/img/**/*.png', 'assets/img/**/*.jpg'],
            svg: 'assets/img/**/*.svg'
        },
        fonts: {
            root: 'assets/fonts',
            all: 'assets/fonts/**/*'
        },
        html: {
            root: ['_layouts', '_includes'],
            all: ['_layouts/**/*.html', '_includes/**/*.html']
        },
        json: {
            root: 'assets/json',
            all: 'assets/json/**/*'
        }
    }
};
// Task that deletes the generated _site folder.
// BUG FIX: the original invoked run(...)() and then called callback()
// immediately, signalling completion before `jekyll clean` had finished.
// Returning the promise lets gulp wait for the command.
gulp.task('clean:jekyll', function() {
    return run('jekyll clean')();
});

// Converts _config.yml to assets/json/site.json so client-side code and the
// style tasks can read site settings (colors in particular).
// The unused callback parameter was dropped: the task already returns a stream.
gulp.task('build:variables', function() {
    return gulp.src('./_config.yml')
        .pipe(yaml({ safe: true }))
        .pipe(rename({basename: 'site'}))
        .pipe(gulp.dest(paths.assets.json.root));
});
// Shared helper: read the color variables generated into site.json by
// build:variables. The original three style tasks each duplicated this parsing
// inline, and the for-in loop leaked `i` as an implicit global (fixed below).
function readSiteColors() {
    var site = JSON.parse(fs.readFileSync(paths.assets.json.root + "/site.json"));
    var colors = {};
    for (var i in site.colors) {
        colors[Object.keys(site.colors[i])[0]] = Object.values(site.colors[i])[0];
    }
    return colors;
}

// Compiles the loader SASS and minifies it into loader.min.css
// (written to _site and to _includes/loader for inlining).
gulp.task('build:styles:loader', function () {
    return gulp.src(paths._src.sass.app + "/loader.scss")
        .pipe(sassVars(readSiteColors()))
        .pipe(sass({
            onError: browserSync.notify
        }))
        .pipe(cleanCSS())
        .pipe(autoprefixer())
        .pipe(concat("loader.css"))
        .pipe(rename({suffix: '.min'}))
        .pipe(browserSync.stream())
        .pipe(size({title: "build:styles:loader"}))
        .pipe(gulp.dest(paths._site.assets.css))
        .pipe(gulp.dest("_includes/loader"));
});

// Compiles the main SASS, merges the vendor CSS (Leaflet, highlight, ...) and
// minifies everything into paroparo.min.css.
gulp.task('build:styles:paroparo', function () {
    return merge(
        gulp.src(paths._src.sass.app + "/paroparo.scss")
            .pipe(sassVars(readSiteColors()))
            .pipe(sass({
                includePaths: [paths._src.sass.app],
                onError: browserSync.notify
            })),
        gulp.src(paths._src.css.vendor + "/*.css")
    )
        .pipe(cleanCSS())
        .pipe(autoprefixer())
        .pipe(concat("paroparo.css"))
        .pipe(rename({suffix: '.min'}))
        .pipe(browserSync.stream())
        .pipe(size({title: "build:styles:paroparo"}))
        .pipe(gulp.dest(paths._site.assets.css))
        .pipe(gulp.dest(paths.assets.css.root));
});

// Dark-theme variant of the main stylesheet -> paroparo-dark.min.css.
gulp.task('build:styles:paroparo-dark', function () {
    return gulp.src(paths._src.sass.app + "/paroparo-dark.scss")
        .pipe(sassVars(readSiteColors()))
        .pipe(sass(
            {onError: browserSync.notify}
        ))
        .pipe(cleanCSS())
        .pipe(autoprefixer())
        .pipe(concat("paroparo-dark.css"))
        .pipe(rename({suffix: '.min'}))
        .pipe(browserSync.stream())
        .pipe(size({title: "build:styles:paroparo-dark"}))
        .pipe(gulp.dest(paths._site.assets.css))
        .pipe(gulp.dest(paths.assets.css.root));
});

// Aggregate style task.
// NOTE(review): runSequence runs these four in parallel, yet the three style
// tasks read the site.json that build:variables writes — this pre-existing
// ordering assumption is preserved as-is; confirm before relying on a cold build.
gulp.task('build:styles', function(callback) {runSequence(['build:variables', 'build:styles:loader', 'build:styles:paroparo', 'build:styles:paroparo-dark'], callback)});
// Task that transpiles and bundles the JS files (critical vendor scripts +
// optional plugins, in that order) into the minified paroparo.min.js.
gulp.task('build:scripts:paroparo', function() {
    return gulp.src(paths._src.js.critical.concat(paths._src.js.optional))
    .pipe(babel({
        presets: [["@babel/preset-env", { modules: false }]],
        compact: false }))
    .pipe(concat('paroparo.js'))
    .pipe(rename({suffix: '.min'}))
    .pipe(cache(uglify()))
    .pipe(browserSync.reload({stream: true}))
    .pipe(size({title: "build:scripts:paroparo"}))
    .pipe(gulp.dest(paths._site.assets.js))
    .pipe(gulp.dest(paths.assets.js.root));
});
// Task that transpiles the JS files that are not always needed
// (e.g. leaflet, highlight); these stay as individual files, no concat.
gulp.task('build:scripts:other', function() {
    return gulp.src(paths._src.js.other)
    .pipe(babel({
        presets: [["@babel/preset-env", { modules: false }]],
        compact: false }))
    .pipe(cache(uglify()))
    .pipe(browserSync.reload({stream: true}))
    .pipe(size({title: "build:scripts:other"}))
    .pipe(gulp.dest(paths._site.assets.js))
    .pipe(gulp.dest(paths.assets.js.root));
});
// Task that builds the theme-switch script into switch.min.js.
gulp.task('build:scripts:switch', function() {
    return gulp.src(paths._src.js.app + '/switch.js' )
    .pipe(babel({
        presets: [["@babel/preset-env", { modules: false }]],
        compact: false }))
    .pipe(concat('switch.js'))
    .pipe(rename({suffix: '.min'}))
    .pipe(cache(uglify()))
    .pipe(browserSync.reload({stream: true}))
    .pipe(size({title: "build:scripts:switch"}))
    .pipe(gulp.dest(paths._site.assets.js))
    .pipe(gulp.dest(paths.assets.js.root));
});
// Aggregate task that builds all the JS bundles.
gulp.task('build:scripts', function(callback) {runSequence(['build:scripts:switch', 'build:scripts:paroparo', 'build:scripts:other'], callback)});
// Image optimization task (overwrites originals) and WebP conversion.
gulp.task('build:images', function() {
    return gulp.src(paths.assets.img.all)
        .pipe(fileClean({force: true}))
        .pipe(cache(imagemin({ optimizationLevel:5, progressive: true, interlaced: true })))
        .pipe(webp())
        .pipe(browserSync.reload({stream: true}))
        .pipe(size({title: "build:images"}))
        .pipe(gulp.dest(paths._site.assets.img))
        .pipe(gulp.dest(paths.assets.img.root));
});

// SVG copy task, separate from build:images because imagemin does not optimize
// the divider/decoration SVGs well (overwrites originals).
gulp.task('build:svg', function() {
    return gulp.src(paths.assets.img.svg)
        .pipe(browserSync.reload({stream: true}))
        .pipe(size({title: "build:svg"}))
        .pipe(gulp.dest(paths._site.assets.img))
        .pipe(gulp.dest(paths.assets.img.root));
});

// Full asset pipeline: clean, then styles, scripts, images and svg in order.
gulp.task('build:assets', function(callback) {runSequence('clean:jekyll', 'build:styles', 'build:scripts', 'build:images', 'build:svg', callback)});

// Jekyll build task; produces the _site folder.
// BUG FIX: the original called callback() immediately after launching the
// command, so gulp considered the task done before Jekyll finished.
// Returning the promise makes gulp wait for completion.
gulp.task('build:jekyll', function() {
    return run('jekyll build --config _config.yml --future --trace')();
});
// Watch helper: rebuild with Jekyll, then notify browserSync of the change.
gulp.task('build:jekyll:watch', gulp.series('build:jekyll', function(callback) {
    browserSync.reload({stream: true});
    callback();
}));
// Complete build task (assets + jekyll).
gulp.task('build', function(callback) {runSequence('build:assets', 'build:jekyll', callback)});
// Task that runs the full build and then starts browsersync with watchers.
gulp.task('serve', gulp.series('build', function(callback) {
    browserSync.init({
        server: {
            baseDir: paths._site.root
        },
        ui: {
            port: 3000
        },
        ghostMode: false, // Toggle to mirror clicks, reloads etc (performance)
        logFileChanges: true,
        open: true // Toggle to auto-open page when starting
    });
    //Watch _config.yml
    gulp.watch(['_config.yml'], gulp.series('build:jekyll:watch'));
    // Watch css files and pipe changes to browserSync
    gulp.watch(paths._src.css.all, gulp.series('build:styles'));
    // Watch sass files and pipe changes to browserSync
    gulp.watch(paths._src.sass.all, gulp.series('build:styles'));
    // Watch .js files
    gulp.watch(paths._src.js.all, gulp.series('build:scripts'));
    // Watch image files and pipe changes to browserSync
    gulp.watch(paths.assets.img.all, gulp.series('build:images'));
    //Watch html
    gulp.watch(paths.assets.html.all, gulp.series('build:jekyll:watch'));
    // Watch posts
    gulp.watch(paths._posts.root + '**/*.+(md|markdown|MD)', gulp.series('build:jekyll:watch'));
    // Watch data files
    //gulp.watch('_data/**.*+(yml|yaml|csv|json)', ['build:jekyll:watch']);
    callback();
}));
// Tasks that tag the docker image and publish it to GitHub Packages.
// Prompted tag values, shared between the *:input tasks and build/deploy.
var tag_deploy, tag_build;

gulp.task('docker:deploy:input', function() {
    return gulp.src(paths.here)
        .pipe(prompt.prompt({
            type: 'input',
            name: 'tag',
            default: 'latest',
            message: 'Di quale tag vuoi fare il deploy?'
        }, (res) => {
            tag_deploy = res.tag;
        }))
});

gulp.task('docker:deploy', gulp.series('docker:deploy:input', function deploy() {
    // BUG FIX: the original concatenated '-t paroparo:<tag>"."' with no space,
    // producing an invalid image name instead of passing "." as build context.
    // BUG FIX: `docker tag paroparo` always tagged :latest; tag the image that
    // was actually built. The three commands are now chained so each waits for
    // the previous one instead of racing.
    return run('docker build --pull --rm -f "Dockerfile" -t paroparo:' + tag_deploy + ' "."')()
        .then(run('docker tag paroparo:' + tag_deploy + ' docker.pkg.github.com/iltruma/paroparo/paroparo:' + tag_deploy))
        .then(run('docker push docker.pkg.github.com/iltruma/paroparo/paroparo:' + tag_deploy));
}));

gulp.task('docker:build:input', function() {
    return gulp.src(paths.here)
        .pipe(prompt.prompt({
            type: 'input',
            name: 'tag',
            default: 'latest',
            message: 'Di quale tag vuoi fare il build?'
        }, (res) => {
            tag_build = res.tag;
        }))
});

gulp.task('docker:build', gulp.series('docker:build:input', function build(callback) {
    // Same missing-space fix as docker:deploy; return the promise so gulp
    // waits for the build to finish.
    return run('docker build --pull --rm -f "Dockerfile" -t paroparo:' + tag_build + ' "."')();
}));
<reponame>glipecki/angularjs-demo
/**
 * Static route table consumed by the application bootstrapper.
 * Each entry describes a UI state whose module is lazily loaded via SystemJS.
 */
const loginRoute: any = {
    "stateName": "login",
    "urlPrefix": "/login",
    "type": "systemjs",
    "module": "login"
};

let routing: any[] = [loginRoute];

export default routing;
|
package utils
import (
"encoding/json"
)
// ToString serializes v to its JSON representation.
// It returns the empty string when v cannot be marshalled (e.g. channels,
// funcs or cyclic values), preserving the original silent-failure contract.
func ToString(v interface{}) string {
	bs, err := json.Marshal(v)
	// Idiom fix: `err != nil` instead of the Yoda-style `nil != err`.
	if err != nil {
		return ""
	}
	return string(bs)
}
|
from DHP.Utils.cursor import cursor
from DHP.context import warn, color
from DHP.Utils.logger import logger
def login():
    """Prompt for credentials in a loop until a valid pair is entered.

    Returns the user id (str) on success, or None when the user gives up
    or an error/interrupt ends the loop.
    """
    try:
        while True:
            id1 = input("Enter Your ID: ")
            password = input("Enter Your Password: ")
            # Parameterized query — never interpolate credentials into SQL.
            # SECURITY NOTE(review): the password is compared in plaintext,
            # which implies plaintext storage in UserData; consider hashing.
            QUERY = """SELECT * FROM UserData WHERE user_id=%s AND user_password=%s"""
            cursor.execute(QUERY, (id1, password))
            # Exactly one matching row means the credentials are valid; the
            # redundant `auth` flag from the original was removed.
            if len(cursor.fetchall()) == 1:
                print(color("Successfully Logged in!!"))
                logger.info(f"User {id1} has Logged In!")
                return id1
            warn("You Credentials Are Wrong!\nTRY AGAIN")
            chance = input("Do you want leave login page? [y] [n] : ")
            if chance == "y":
                return None
            print("Invalid option! continuing login process....")
    except Exception as e:
        # BUG FIX: the original passed `e` as a positional logging argument
        # with no %s placeholder ("login loop error \n", e), which raises a
        # formatting error inside logging; it also logged the exception twice.
        logger.exception("login loop error: %s", e)
    except KeyboardInterrupt:
        logger.warning("login - Loop closed due to Keyboard interrupt Error")
    return None
|
<gh_stars>1-10
import Component from 'vue-class-component';
import { Watch } from 'vue-property-decorator';
import RoleVO from '../../../../../../../shared/modules/AccessPolicy/vos/RoleVO';
import AnimationController from '../../../../../../../shared/modules/Animation/AnimationController';
import AnimationModuleVO from '../../../../../../../shared/modules/Animation/vos/AnimationModuleVO';
import DataFilterOption from '../../../../../../../shared/modules/DataRender/vos/DataFilterOption';
import VueComponentBase from '../../../../VueComponentBase';
import { ModuleAnimationReportingVuexGetter } from '../../../store/AnimationReportingVuexStore';
@Component({
    template: require('./reporting_filtres.pug'),
})
export default class VueAnimationReportingFiltresComponent extends VueComponentBase {

    // Vuex getter: currently selected "animation theme" filter options.
    @ModuleAnimationReportingVuexGetter
    private get_filter_anim_theme_active_options: DataFilterOption[];

    // Local models for the two yes/no module filters; pushed to the store on change.
    private tmp_filter_module_termine: DataFilterOption = null;
    private tmp_filter_module_valide: DataFilterOption = null;

    // Propagate the "module termine" selection to the Vuex store.
    @Watch('tmp_filter_module_termine')
    private change_tmp_filter_module_termine() {
        this.$store.commit('AnimationReportingVuexStore/set_filter_module_termine_active_option', this.tmp_filter_module_termine);
    }

    // Propagate the "module valide" selection to the Vuex store.
    @Watch('tmp_filter_module_valide')
    private change_tmp_filter_module_valide() {
        this.$store.commit('AnimationReportingVuexStore/set_filter_module_valide_active_option', this.tmp_filter_module_valide);
    }

    // Translated display label for a role.
    private get_role_name(role: RoleVO): string {
        return this.label(role.translatable_name);
    }

    // True when the module matches the active theme filter, or no filter is set.
    // NOTE(review): the anim_theme_options parameter is never used — the method
    // reads the Vuex getter instead; confirm whether callers expect the
    // parameter to take precedence.
    private condition_by_anim_theme(anim_module: AnimationModuleVO, anim_theme_options: DataFilterOption[]): boolean {
        let res: boolean = true;
        if (this.get_filter_anim_theme_active_options && this.get_filter_anim_theme_active_options.length > 0) {
            return this.get_filter_anim_theme_active_options.find((e) => e.id == anim_module.theme_id) != null;
        }
        return res;
    }

    // Label the multiselect widget shows for a filter option ('' for null).
    private multiselectOptionLabel(filter_item: DataFilterOption): string {
        if ((filter_item == null) || (typeof filter_item == 'undefined')) {
            return '';
        }
        return filter_item.label;
    }

    // Static yes/no options used by both module filters.
    get filter_options(): DataFilterOption[] {
        return [
            new DataFilterOption(DataFilterOption.STATE_SELECTABLE, this.t('YES'), AnimationController.OPTION_YES),
            new DataFilterOption(DataFilterOption.STATE_SELECTABLE, this.t('NO'), AnimationController.OPTION_NO),
        ];
    }
}
<reponame>NegriAndrea/pyxsim
from pyxsim import \
LineSourceModel, PhotonList
from pyxsim.tests.utils import \
BetaModelSource
from yt.units.yt_array import YTQuantity, uconcatenate
import numpy as np
import yt.units as u
from yt.utilities.physical_constants import clight
from numpy.random import RandomState
# Cross-section and dark-matter particle mass used by the synthetic
# line-emission field below (fixed values so the test is reproducible).
cross_section = 500.0e-22*u.cm**3/u.s
m_chi = (10.0*u.GeV).to_equivalent("g", "mass_energy")

def test_line_emission():
    """Statistical check of LineSourceModel photon generation.

    Builds a dark-matter "annihilation line" emission field on a beta-model
    source, generates photons, and verifies that the observed line centre,
    width and photon count match the analytic predictions.
    """
    bms = BetaModelSource()
    ds = bms.ds

    def _dm_emission(field, data):
        # Emission rate per cell ~ cross_section * (number density)^2 * volume.
        return cross_section*(data["dark_matter_density"]/m_chi)**2*data["cell_volume"]
    ds.add_field(("gas","dm_emission"), function=_dm_emission, units="s**-1")

    location = YTQuantity(3.5, "keV")   # rest-frame line energy
    sigma = YTQuantity(1000., "km/s")   # velocity broadening
    sigma_E = (location*sigma/clight).in_units("keV")

    A = YTQuantity(1000., "cm**2")      # effective collecting area
    exp_time = YTQuantity(2.0e5, "s")   # exposure time
    redshift = 0.01

    sphere = ds.sphere("c", (100.,"kpc"))

    # prng=32 fixes the random seed so the statistical asserts are stable.
    line_model = LineSourceModel(location, "dm_emission",
                                 sigma="dark_matter_dispersion", prng=32)

    photons = PhotonList.from_data_source(sphere, redshift, A, exp_time,
                                          line_model)

    D_A = photons.parameters["fid_d_a"]
    # Flux dilution factor for a source at this angular-diameter distance.
    dist_fac = 1.0/(4.*np.pi*D_A*D_A*(1.+redshift)**3)
    dm_E = (sphere["dm_emission"]).sum()

    E = uconcatenate(photons["energy"])
    n_E = len(E)
    # Predicted photon count = emission rate * area * time * dilution.
    n_E_pred = (exp_time*A*dm_E*dist_fac).in_units("dimensionless")

    # Observed (redshifted) line centre and width.
    loc = location/(1.+redshift)
    sig = sigma_E/(1.+redshift)

    # 1.645 is the one-sided 95% normal quantile: mean, variance and photon
    # count must each fall within that tolerance of the predictions.
    assert np.abs(loc-E.mean()) < 1.645*sig/np.sqrt(n_E)
    assert np.abs(E.std()**2-sig*sig) < 1.645*np.sqrt(2*(n_E-1))*sig**2/n_E
    assert np.abs(n_E-n_E_pred) < 1.645*np.sqrt(n_E)
if __name__ == "__main__":
test_line_emission() |
# Activates the jupyter-base conda environment (when the user's miniconda is
# installed) and launches JupyterLab under a headless virtual X display.
if [ -f "/home/srbongers/.miniconda/etc/profile.d/conda.sh" ]; then
# Load conda's shell integration.
. "/home/srbongers/.miniconda/etc/profile.d/conda.sh"
# Activate without modifying the shell prompt.
CONDA_CHANGEPS1=false conda activate jupyter-base
# Point X clients at the virtual display created by xvfb-run below.
export DISPLAY=:1
# xvfb-run provides a framebuffer-only X server so GUI-dependent kernels work.
xvfb-run -n 1 -s "-screen 0 1400x900x24" jupyter lab --no-browser --config=/home/srbongers/.jupyter/jupyter_server_config.py
fi
|
class RobotEnvironment:
    def add_fixture(self, env):
        """Add L-shaped fixture to place block."""
        # Fixture footprint (x, y, z) in environment units; placed at origin.
        fixture_size = (0.1, 0.1, 0.04)
        fixture_urdf = 'insertion/fixture.urdf'
        env.add_object(fixture_urdf, pose=(0, 0, 0), size=fixture_size)
#!/bin/sh
# CocoaPods-style "Embed Frameworks" build phase: copies built frameworks into
# the app bundle, strips architectures the target cannot run, and re-signs the
# result when code signing is enabled. Relies on Xcode build-setting env vars
# (BUILT_PRODUCTS_DIR, TARGET_BUILD_DIR, FRAMEWORKS_FOLDER_PATH, ...).
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies one framework ($1) into the app's Frameworks folder, then strips and
# re-signs it. Tries the full path, then its basename, then a literal path.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Resolve symlinks so rsync copies real content.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Locate the framework's main binary (falls back to a bare dylib layout).
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity (no-op when signing is disabled
# or not required by the build settings).
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures: removes every slice of a fat binary whose arch
# is not listed in VALID_ARCHS, editing the binary in place with lipo.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Debug and Release embed the exact same frameworks; the original duplicated
# the whole list in two separate if-blocks.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Bond/Bond.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Diff/Diff.framework"
  install_framework "$BUILT_PRODUCTS_DIR/OHHTTPStubs/OHHTTPStubs.framework"
  install_framework "$BUILT_PRODUCTS_DIR/ReactiveKit/ReactiveKit.framework"
fi
|
<filename>pecado-dp/pecado-dp-platform/src/main/java/me/batizhao/dp/domain/FormGenerator.java
package me.batizhao.dp.domain;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import lombok.Data;
import java.util.List;
/**
 * Form generator JSON serialization class.
 * Mirrors the "formData" document used by the form-generator tool; the
 * field defaults below are the values serialized when not overridden.
 *
 * @see <a href="https://mrhj.gitee.io/form-generator/">FormGenerator</a>
 *
 * @author batizhao
 * @date 2021/7/9
 */
@Data
@JsonTypeName("formData")
@JsonTypeInfo(include = JsonTypeInfo.As.WRAPPER_OBJECT ,use = JsonTypeInfo.Id.NAME)
public class FormGenerator {
// Form field definitions (element type declared elsewhere in this package).
private List<Fields> fields;
// Vue ref name for the generated form.
private String formRef = "elForm";
// Name of the model object the form binds to.
private String formModel = "formData";
// Control size, e.g. "medium".
private String size = "medium";
// Label placement relative to inputs.
private String labelPosition = "right";
// Label width in pixels.
private Integer labelWidth = 100;
// Name of the validation-rules object.
private String formRules = "rules";
// Grid gutter in pixels.
private Integer gutter = 15;
// Whether the whole form is disabled.
private boolean disabled = false;
// Default column span (of a 24-column grid).
private Integer span = 24;
// Whether form buttons are rendered.
private boolean formBtns = true;
}
|
#!/bin/bash
# SLURM batch script: PTMCMC parameter-estimation run for PPTA DR2
# (parameter set 3_1, ephemeris 1).
#SBATCH --job-name=ppta_ptmcmc_pe_set_3_1_ephem_1
#SBATCH --output=/fred/oz002/bgoncharov/correlated_noise_logs/ppta_ptmcmc_pe_set_3_1_ephem_1_%A_%a.out
#SBATCH --ntasks=4
#SBATCH --time=2-0
#SBATCH --mem-per-cpu=3G
#SBATCH --tmp=4G
#SBATCH --array=0
# Python 2 numpy module required by the analysis pipeline.
module load numpy/1.16.3-python-2.7.14
# Log the tempo2 environment for later debugging of the run.
srun echo $TEMPO2
srun echo $TEMPO2_CLOCK_DIR
# The job-array index is forwarded to run_analysis.py via --num.
srun python /home/bgonchar/correlated_noise_pta_2020/run_analysis.py --prfile "/home/bgonchar/correlated_noise_pta_2020/params/ppta_dr2_ptmcmc_pe_set_3_1_ephem_1_20200916.dat" --num $SLURM_ARRAY_TASK_ID
|
<gh_stars>1-10
import TmForm from './src/form';

/* istanbul ignore next */
// Vue plugin hook: registers the form component globally under its own name.
TmForm.install = function install(Vue) {
  Vue.component(TmForm.name, TmForm);
};

export default TmForm;
|
#!/usr/bin/env bash
# Copyright (c) Contributors to the aswf-docker Project. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Clone, build and install GLFW, then remove the source tree.
# GLFW_VERSION selects a release tag ("latest" builds the default branch);
# ASWF_INSTALL_PREFIX is the CMake install prefix.
set -ex

git clone https://github.com/glfw/glfw.git
cd glfw

# Check out the requested release tag on a local branch unless "latest".
# Variable expansions are quoted so odd tag names cannot word-split or glob.
if [ "$GLFW_VERSION" != "latest" ]; then
    git checkout "tags/${GLFW_VERSION}" -b "${GLFW_VERSION}"
fi

mkdir build
cd build
cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX="${ASWF_INSTALL_PREFIX}" ..
make -j"$(nproc)"
make install

cd ../..
rm -rf glfw
|
-- Names of all employees in the data-analysts department.
SELECT name
FROM employees
WHERE department = 'data_analysts';
package com.algaworks.pedidovenda.service;
import java.io.Serializable;
import javax.inject.Inject;
import com.algaworks.pedidovenda.model.Parcela;
import com.algaworks.pedidovenda.repository.ParcelaDAO;
import com.algaworks.pedidovenda.util.jpa.Transactional;
import com.algaworks.pedidovenda.util.jsf.FacesUtil;
/**
 * Application service for persisting {@link Parcela} (installment) payments.
 */
public class ParcelaService implements Serializable {
private static final long serialVersionUID = 1L;
@Inject
private ParcelaDAO parcelaDAO;
/**
 * Marks the installment as paid (if not already) and persists it,
 * showing a confirmation message when the DAO returns an entity.
 *
 * NOTE(review): if getPaga() returns a null Boolean, unboxing it in the
 * condition below throws a NullPointerException — confirm the entity
 * guarantees a non-null flag.
 */
@Transactional
public Parcela salvar(Parcela parcela) {
// Only flip the flag when the installment is not yet paid.
if(!parcela.getPaga())
parcela.setPaga(true);
parcela = parcelaDAO.salvar(parcela);
if(parcela != null){
// User-facing message (Portuguese): "installment ... was paid".
FacesUtil.InfoMessage("a parcela " + parcela.getDescricao() + " foi paga");
}
return parcela;
}
}
|
<reponame>lauslim12/attendance<filename>web/components/Layout/Head.tsx
import NextHead from 'next/head';
import { memo } from 'react';
/**
 * HTML head for a single Layout: shared meta tags plus a page title built
 * from the given segments.
 *
 * @param title - Optional title segments; empty/whitespace entries dropped.
 * @returns Next's document head component.
 */
const Head = ({ title }: { title?: string[] }) => {
  // Join non-empty trimmed segments with the app name; fall back to it alone.
  const pageTitle = title
    ? [...title.map((x) => x?.trim()).filter((x) => x), 'Attendance'].join(' · ')
    : 'Attendance';

  return (
    <NextHead>
      <meta charSet="UTF-8" />
      <meta name="viewport" content="width=device-width, initial-scale=1.0" />
      <meta name="theme-color" content="#f67e4d" />
      <meta name="msapplication-navbutton-color" content="#f67e4d" />
      <meta name="apple-mobile-web-app-status-bar-style" content="#f67e4d" />
      <title>{pageTitle}</title>
    </NextHead>
  );
};

export default memo(Head);
|
({
    /**
     * Fires the test application event, tagging it with this component's
     * v.logId so phase handlers can identify the source.
     */
    fireEvent: function (cmp, event, helper) {
        // Normalise the (unused) invocation arguments; keep the fallback.
        var args = event.getParam("arguments") || {};
        var sourceId = cmp.get("v.logId");
        var appEvent = $A.getEvt("test:testAppEventPhasesEvent");

        appEvent.setParams({
            sourceId: sourceId,
            actions: []
        });
        $A.logger.info("fire " + sourceId);
        appEvent.fire();
    }
})
import matplotlib.pyplot as plt
def plot_time_series(tdata, incs, xlabel='Time', ylabel='Increments',
                     title='Time Series Analysis'):
    """Plot increment values against time and show the figure.

    Args:
        tdata: Sequence of time points (x axis).
        incs: Sequence of increment values (y axis), same length as tdata.
        xlabel: X-axis label; default preserves the original behaviour.
        ylabel: Y-axis label; default preserves the original behaviour.
        title: Figure title; default preserves the original behaviour.
    """
    plt.plot(tdata, incs)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.title(title)
    # show() blocks until the window is closed on interactive backends.
    plt.show()
#!/bin/bash
# MIT License
# Copyright (c) 2017 Ken Fallon http://kenfallon.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# First-boot provisioning: renames the hostname of the Raspberry Pi to a
# version based on its Ethernet MAC address, provisions AWS IoT credentials
# and installs the Node.js camera application.
device_serial=$( sed 's/://g' /sys/class/net/eth0/address )
sed "s/raspberrypi/${device_serial}/g" -i /etc/hostname /etc/hosts
# generate device key and certificate
cd /etc/aws-iot-fleet-provisioning
pip3 install -r requirements.txt
python3 main.py ${device_serial}
# Continue only when fleet provisioning produced all three credential files.
if [[ -f /etc/aws-iot-fleet-provisioning/certs/certificate.pem && -f /etc/aws-iot-fleet-provisioning/certs/private.key && -f /etc/aws-iot-fleet-provisioning/certs/root.ca.pem ]]; then
# store iot certificates
logger ":-------------- Network is up: --------------"
sudo mkdir -p /home/pi/Desktop/iot
sudo mv /etc/aws-iot-fleet-provisioning/certs/* /home/pi/Desktop/iot
touch /home/pi/Desktop/iot/device.json
# Extract the IoT endpoint host from the provisioning config file.
iot_endpoint=$( sudo sed -n /IOT_ENDPOINT/p /etc/aws-iot-fleet-provisioning/config.ini | cut -d' ' -f 3 )
# NOTE(review): in `sudo echo ... > file` the redirection is performed by the
# invoking shell, NOT by sudo; this only works because the whole script runs
# as root at boot — confirm before reusing elsewhere.
sudo echo "{
\"certs\": {
\"caPath\": \"certs/root.ca.pem\",
\"certPath\": \"certs/certificate.pem\",
\"keyPath\": \"certs/private.key\"
},
\"state\": {
\"photoWidth\": 640,
\"photoHeight\": 480,
\"samplingRate\": 30,
\"endHour\": \"20:00\",
\"beginHour\": \"07:00\"
},
\"clientId\" : \"${device_serial}\",
\"topicSendControlImage\" : \"takePhoto\",
\"topicGetSignedURL\" : \"s3-signed-url\",
\"host\" : \"${iot_endpoint}\"
}" > "/home/pi/Desktop/iot/device.json"
# Write the boot config (camera + audio enabled) to a temp file first.
# NOTE(review): the quotes around "safe" below briefly close and reopen the
# outer double-quoted string; the emitted text is unchanged, but it is fragile.
sudo echo "
# For more options and information see
# http://rpf.io/configtxt
# Some settings may impact device functionality. See link above for details
# uncomment if you get no picture on HDMI for a default "safe" mode
#hdmi_safe=1
# uncomment this if your display has a black border of unused pixels visible
# and your display can output without overscan
#disable_overscan=1
# uncomment the following to adjust overscan. Use positive numbers if console
# goes off screen, and negative if there is too much border
#overscan_left=16
#overscan_right=16
#overscan_top=16
#overscan_bottom=16
# uncomment to force a console size. By default it will be display's size minus
# overscan.
#framebuffer_width=1280
#framebuffer_height=720
# uncomment if hdmi display is not detected and composite is being output
#hdmi_force_hotplug=1
# uncomment to force a specific HDMI mode (this will force VGA)
#hdmi_group=1
#hdmi_mode=1
# uncomment to force a HDMI mode rather than DVI. This can make audio work in
# DMT (computer monitor) modes
#hdmi_drive=2
# uncomment to increase signal to HDMI, if you have interference, blanking, or
# no display
#config_hdmi_boost=4
# uncomment for composite PAL
#sdtv_mode=2
#uncomment to overclock the arm. 700 MHz is the default.
#arm_freq=800
# Uncomment some or all of these to enable the optional hardware interfaces
#dtparam=i2c_arm=on
#dtparam=i2s=on
#dtparam=spi=on
# Uncomment this to enable infrared communication.
#dtoverlay=gpio-ir,gpio_pin=17
#dtoverlay=gpio-ir-tx,gpio_pin=18
# Additional overlays and parameters are documented /boot/overlays/README
# Enable audio (loads snd_bcm2835)
dtparam=audio=on
[pi4]
# Enable DRM VC4 V3D driver on top of the dispmanx display stack
dtoverlay=vc4-fkms-v3d
max_framebuffers=2
[all]
#dtoverlay=vc4-fkms-v3d
start_x=1
gpu_mem=128
" > "/boot/configTemp.txt"
#Clone the repository to run the IOT application
# The repo URL is stashed as a commented key in wpa_supplicant.conf.
githubLink=$( sudo sed -n /GITHUBLINK/p /etc/wpa_supplicant/wpa_supplicant.conf | cut -d' ' -f 4 | sed 's/"//g')
logger "githubLink : ${githubLink}"
cd /home/pi/Desktop
# Retry the clone up to 5 times — the network may not be ready at first boot.
ATTEMPT=0
while [ $ATTEMPT -le 4 ]; do
ATTEMPT=$(( $ATTEMPT + 1 ))
logger "Waiting for git clone (ATTEMPT: $ATTEMPT)..."
if ! git clone $githubLink rpi
then
logger ":---Clone failed:---"
else
logger ":---Clone success:---"
break
fi
sleep 5
done
#Copy the certificates into the cloned repo
sudo mv /boot/configTemp.txt /boot/config.txt
cd /home/pi/Desktop/iot
sudo cp ./* /home/pi/Desktop/rpi/RaspberrypiNodeJSApplication
cd /home/pi/Desktop/rpi/RaspberrypiNodeJSApplication
sudo mkdir certs
npm install
sudo mv ./certificate.pem ./certs/certificate.pem
sudo mv ./private.key ./certs/private.key
sudo mv ./root.ca.pem ./certs/root.ca.pem
#Install node
logger "Installing node ..."
wget https://nodejs.org/dist/v14.16.0/node-v14.16.0-linux-armv7l.tar.xz
tar -xf node-v14.16.0-linux-armv7l.tar.xz
cd ./node-v14.16.0-linux-armv7l
sudo cp -R * /usr/local/
#Set the timezone
# Timezone is also stashed as a commented key in wpa_supplicant.conf.
timezone=$( sudo sed -n /TIMEZONE/p /etc/wpa_supplicant/wpa_supplicant.conf | cut -d' ' -f 4 | sed 's/"//g')
logger "Timezone : ${timezone}"
sudo timedatectl set-timezone $timezone
#Create a RAM partition to which we will write captured images to
sudo mkdir -p /mnt/ramdisk
echo "tmpfs /mnt/ramdisk/ tmpfs nodev,nosuid,size=50M 0 0" | sudo tee -a /etc/fstab
#Run the application when RaspberryPi boots
sudo echo "#!/bin/bash
cd /home/pi/Desktop/rpi/RaspberrypiNodeJSApplication
npm start
" > "/etc/script.sh"
sudo echo "#!/bin/bash
logger \"RCLOCAL START\"
sudo chmod a+x /etc/script.sh
sudo bash /etc/script.sh
" > "/etc/rc.local"
sudo chmod +x /etc/rc.local
#Install the dependencies for the IOT application
cd /home/pi/Desktop/rpi/RaspberrypiNodeJSApplication
npm install
fi
# reboot pi
/sbin/shutdown -r 1 "reboot in 1 minute"
<gh_stars>0
import { Test, TestingModule } from '@nestjs/testing';
import { TechniciansOrdersService } from './technicians-orders.service';
/** Minimal stand-in for the orders service: every technician has no orders. */
class ApiServiceMock {
  getTechnicianOrderByTechnicianId(id: any) {
    // Always an empty order list, regardless of the technician id.
    const orders: any[] = [];
    return orders;
  }
}
describe.only("TechniciansOrdersService", () => {
let techniciansOrdersService: TechniciansOrdersService;
beforeAll(async () => {
const ApiServiceProvider = {
provide: TechniciansOrdersService,
useClass: ApiServiceMock,
}
const module: TestingModule = await Test.createTestingModule({
providers: [
TechniciansOrdersService, ApiServiceProvider
],
}).compile();
techniciansOrdersService = module.get<TechniciansOrdersService>(TechniciansOrdersService);
})
it('should call getTechnicianOrderByTechnicianId method with expected params', async () => {
const id: number = 1;
const getTechnicianOrderSpy = jest.spyOn(techniciansOrdersService, 'getTechnicianOrderByTechnicianId');
techniciansOrdersService.getTechnicianOrderByTechnicianId(id);
expect(getTechnicianOrderSpy).toHaveBeenCalledWith(id);
});
}) |
<gh_stars>100-1000
package com.netflix.raigad.configuration;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Tests for PropertiesConfigSource: loading conf/raigad.properties and
 * updating keys at runtime.
 */
public class TestPropertiesConfigSource
{
@Test
public void readFile()
{
PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties");
configSource.initialize("asgName", "region");
// Quoted values in the file are returned with their quotes intact.
assertEquals("\"/tmp/data\"", configSource.get("Raigad.path.data"));
assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0));
// The file has 5 entries, but one line is malformed (key with no '='), so
// it is filtered out by the empty-string check and only 4 keys load.
// TODO(review): the original comments disagreed on which line/key is the
// malformed one ("Raigad.http.port9002" on line 6 vs "escar.http.port9002"
// on line 2) — confirm against conf/raigad.properties.
assertEquals(4, configSource.size());
}
@Test
public void updateKey()
{
PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties");
configSource.initialize("asgName", "region");
// Same malformed-line filtering as in readFile(): 4 keys loaded.
assertEquals(4, configSource.size());
// Adding a new key grows the source and reads back the set value.
configSource.set("foo", "bar");
assertEquals(5, configSource.size());
assertEquals("bar", configSource.get("foo"));
assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0));
// Overwriting an existing key replaces its value in place.
configSource.set("Raigad.transport.tcp.port", Integer.toString(10));
assertEquals(10, configSource.get("Raigad.transport.tcp.port", 0));
}
}
|
import { ContainerModule } from 'inversify';
import { FCProxyCreator } from './fc-proxy-creator';
import { ProxyCreator } from '@malagu/core/lib/browser';
/** Wires the Function Compute proxy creator into the browser container. */
const fcProxyCreatorModule = new ContainerModule((bind) => {
    // One shared instance, also exposed under the generic ProxyCreator token.
    bind(FCProxyCreator).toSelf().inSingletonScope();
    bind(ProxyCreator).toService(FCProxyCreator);
});

export default fcProxyCreatorModule;
|
/// A single node in a singly linked list.
public class Node {
    var value: Int
    /// Strong reference to the successor node. This must NOT be `weak`:
    /// interior nodes are kept alive only through their predecessor's `next`
    /// (LinkedList retains just `head` and `tail`), so a weak link would let
    /// ARC deallocate every interior node as soon as `tail` moves past it.
    var next: Node?

    init(value: Int, next: Node?) {
        self.value = value
        self.next = next
    }
}
/// Minimal singly linked list of integers with O(1) append.
public class LinkedList {
    private var head: Node?
    private var tail: Node?

    public init() {}

    /// First node, or nil when the list is empty.
    public var first: Node? { return head }

    /// Last node, or nil when the list is empty.
    public var last: Node? { return tail }

    /// True when the list holds no nodes.
    public var isEmpty: Bool { return head == nil }

    /// Appends a new node holding `value` at the end of the list.
    public func append(value: Int) {
        let newNode = Node(value: value, next: nil)
        guard let currentTail = tail else {
            // Empty list: the new node becomes both head and tail.
            head = newNode
            tail = newNode
            return
        }
        currentTail.next = newNode
        tail = newNode
    }
}
# class CreateDestinations < ActiveRecord::Migration[5.0]
# def change
# create_table :destinations do |t|
# t.integer :distance
# t.string :distance_text
# t.integer :duration
# t.string :duration_text
# t.integer :origin_id
# t.string :origin_type
# t.string :origin_address
# t.string :address
# t.float :geo_coords, array: true, default: []
#
# t.timestamps
# end
#
# add_index :destinations, [:origin_type, :origin_id]
# end
# end
|
import React from 'react'
import './square.css'
import X from '../../assets/X.png'
import O from '../../assets/O.png'
const Square = (props) => {
const { value, onClick, classCss } = props
return (
<button className={`square ${classCss}`} onClick={ onClick }>
{value ? (value === 'X' ? <img src={X} alt="Player X"/> : <img src={O} alt="Player O"/> ) : ''}
</button>
)
}
export default Square
|
<filename>vumi/persist/tests/test_txredis_manager.py
"""Tests for vumi.persist.txredis_manager."""
import os
from functools import wraps
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue, Deferred
from twisted.trial.unittest import SkipTest
from vumi.persist.txredis_manager import TxRedisManager
from vumi.tests.helpers import VumiTestCase
def wait(secs):
    """Return a Deferred that fires with None after ``secs`` seconds."""
    deferred = Deferred()
    reactor.callLater(secs, deferred.callback, None)
    return deferred
def skip_fake_redis(func):
    """Decorator that skips the test unless a real Redis is configured.

    A real server is signalled by the ``VUMITEST_REDIS_DB`` environment
    variable; without it the suite runs against a fake in-memory Redis.
    """
    @wraps(func)
    def wrapper(*args, **kw):
        if 'VUMITEST_REDIS_DB' in os.environ:
            return func(*args, **kw)
        # We're using a fake redis, so skip this test.
        raise SkipTest(
            "This test requires a real Redis server. Set VUMITEST_REDIS_DB"
            " to run it.")
    return wrapper
class TestTxRedisManager(VumiTestCase):
    """Tests for TxRedisManager: key prefixing, basic ops, scan, ttl, and
    reconnect behaviour (the latter only against a real Redis)."""

    @inlineCallbacks
    def get_manager(self):
        # Build a manager on fake redis with a test key prefix, purge any
        # leftover keys, and register cleanup to run after the test.
        manager = yield TxRedisManager.from_config({
            'FAKE_REDIS': 'yes',
            'key_prefix': 'redistest',
        })
        self.add_cleanup(self.cleanup_manager, manager)
        yield manager._purge_all()
        returnValue(manager)

    @inlineCallbacks
    def cleanup_manager(self, manager):
        # Purge all test keys, then close the connection.
        yield manager._purge_all()
        yield manager._close()

    @inlineCallbacks
    def test_key_unkey(self):
        # _key() adds exactly one prefix layer and _unkey() strips exactly one.
        manager = yield self.get_manager()
        self.assertEqual('redistest:foo', manager._key('foo'))
        self.assertEqual('foo', manager._unkey('redistest:foo'))
        self.assertEqual('redistest:redistest:foo',
                         manager._key('redistest:foo'))
        self.assertEqual('redistest:foo',
                         manager._unkey('redistest:redistest:foo'))

    @inlineCallbacks
    def test_set_get_keys(self):
        # set() creates and overwrites values; keys() reflects stored keys.
        manager = yield self.get_manager()
        self.assertEqual([], (yield manager.keys()))
        self.assertEqual(None, (yield manager.get('foo')))
        yield manager.set('foo', 'bar')
        self.assertEqual(['foo'], (yield manager.keys()))
        self.assertEqual('bar', (yield manager.get('foo')))
        yield manager.set('foo', 'baz')
        self.assertEqual(['foo'], (yield manager.keys()))
        self.assertEqual('baz', (yield manager.get('foo')))

    @inlineCallbacks
    def test_disconnect_twice(self):
        # Closing an already-closed manager must not raise.
        manager = yield self.get_manager()
        yield manager._close()
        yield manager._close()

    @inlineCallbacks
    def test_scan(self):
        # scan() pages through all keys, terminating with a None cursor.
        manager = yield self.get_manager()
        self.assertEqual([], (yield manager.keys()))
        for i in range(10):
            yield manager.set('key%d' % i, 'value%d' % i)
        all_keys = set()
        cursor = None
        for i in range(20):
            # loop enough times to have gone through all the keys in our test
            # redis instance but not forever so we can assert on the value of
            # cursor if we get stuck.
            cursor, keys = yield manager.scan(cursor)
            all_keys.update(keys)
            if cursor is None:
                break
        self.assertEqual(cursor, None)
        self.assertEqual(all_keys, set(
            'key%d' % i for i in range(10)))

    @inlineCallbacks
    def test_ttl(self):
        # ttl() is None for missing keys and keys without expiry; after
        # setex() it is positive and bounded by the requested expiry.
        manager = yield self.get_manager()
        missing_ttl = yield manager.ttl("missing_key")
        self.assertEqual(missing_ttl, None)
        yield manager.set("key-no-ttl", "value")
        no_ttl = yield manager.ttl("key-no-ttl")
        self.assertEqual(no_ttl, None)
        yield manager.setex("key-ttl", 30, "value")
        ttl = yield manager.ttl("key-ttl")
        self.assertTrue(10 <= ttl <= 30)

    @skip_fake_redis
    @inlineCallbacks
    def test_reconnect_sub_managers(self):
        # Needs a real Redis: checks that a manager and its (nested)
        # sub-managers all recover after the client connection drops.
        manager = yield self.get_manager()
        sub_manager = manager.sub_manager('subredis')
        sub_sub_manager = sub_manager.sub_manager('subsubredis')
        yield manager.set("foo", "1")
        yield sub_manager.set("foo", "2")
        yield sub_sub_manager.set("foo", "3")
        # Our three managers are all connected properly.
        f1 = yield manager.get("foo")
        f2 = yield sub_manager.get("foo")
        f3 = yield sub_sub_manager.get("foo")
        self.assertEqual([f1, f2, f3], ["1", "2", "3"])
        # Kill the connection and wait a few moments for the reconnect.
        yield manager._client.quit()
        yield wait(manager._client.factory.initialDelay + 0.05)
        # Our three managers are all reconnected properly.
        f1 = yield manager.get("foo")
        f2 = yield sub_manager.get("foo")
        f3 = yield sub_sub_manager.get("foo")
        self.assertEqual([f1, f2, f3], ["1", "2", "3"])
|
import nock from 'nock'
import config from '../config'
import HmppsAuthClient from './hmppsAuthClient'
import TokenStore from './tokenStore'
jest.mock('./tokenStore')
// Mocked token store shared by every test below.
const tokenStore = new TokenStore() as jest.Mocked<TokenStore>
const username = 'Bob'
// NOTE(review): '<PASSWORD>' looks like a redaction placeholder left by a
// source scrub, not a meaningful token value; any string works for the tests.
const token = { access_token: '<PASSWORD>', expires_in: 300 }
describe('hmppsAuthClient', () => {
let fakeHmppsAuthApi: nock.Scope
let hmppsAuthClient: HmppsAuthClient
beforeEach(() => {
// Fresh nock scope and client per test, pointed at the configured auth URL.
fakeHmppsAuthApi = nock(config.apis.hmppsAuth.url)
hmppsAuthClient = new HmppsAuthClient(tokenStore)
})
afterEach(() => {
jest.resetAllMocks()
nock.cleanAll()
})
describe('getUser', () => {
it('should return data from api', async () => {
const response = { data: 'data' }
fakeHmppsAuthApi
.get('/api/user/me')
.matchHeader('authorization', `Bearer ${token.access_token}`)
.reply(200, response)
const output = await hmppsAuthClient.getUser(token.access_token)
expect(output).toEqual(response)
})
})
describe('getUserRoles', () => {
// getUserRoles flattens the API's [{ roleCode }] objects into plain strings.
it('should return data from api', async () => {
fakeHmppsAuthApi
.get('/api/user/me/roles')
.matchHeader('authorization', `Bearer ${token.access_token}`)
.reply(200, [{ roleCode: 'role1' }, { roleCode: 'role2' }])
const output = await hmppsAuthClient.getUserRoles(token.access_token)
expect(output).toEqual(['role1', 'role2'])
})
})
describe('getSystemClientToken', () => {
it('should instantiate the redis client', async () => {
tokenStore.getToken.mockResolvedValue(token.access_token)
await hmppsAuthClient.getSystemClientToken(username)
})
it('should return token from redis if one exists', async () => {
tokenStore.getToken.mockResolvedValue(token.access_token)
const output = await hmppsAuthClient.getSystemClientToken(username)
expect(output).toEqual(token.access_token)
})
// On a cache miss the client does a client_credentials grant and caches the
// result for expires_in minus 60 seconds (300 -> 240 below).
it('should return token from HMPPS Auth with username', async () => {
tokenStore.getToken.mockResolvedValue(null)
fakeHmppsAuthApi
.post(`/oauth/token`, 'grant_type=client_credentials&username=Bob')
.basicAuth({ user: config.apis.hmppsAuth.systemClientId, pass: config.apis.hmppsAuth.systemClientSecret })
.matchHeader('Content-Type', 'application/x-www-form-urlencoded')
.reply(200, token)
const output = await hmppsAuthClient.getSystemClientToken(username)
expect(output).toEqual(token.access_token)
expect(tokenStore.setToken).toBeCalledWith('Bob', token.access_token, 240)
})
// Without a username the token is cached under the '%ANONYMOUS%' key.
it('should return token from HMPPS Auth without username', async () => {
tokenStore.getToken.mockResolvedValue(null)
fakeHmppsAuthApi
.post(`/oauth/token`, 'grant_type=client_credentials')
.basicAuth({ user: config.apis.hmppsAuth.systemClientId, pass: config.apis.hmppsAuth.systemClientSecret })
.matchHeader('Content-Type', 'application/x-www-form-urlencoded')
.reply(200, token)
const output = await hmppsAuthClient.getSystemClientToken()
expect(output).toEqual(token.access_token)
expect(tokenStore.setToken).toBeCalledWith('%ANONYMOUS%', token.access_token, 240)
})
})
})
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* (C) Copyright IBM Corp. 2020
*/
package com.ibm.stocator.test;
import java.util.Random;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
public class Runner {
// Numeric lower bound used by the suite. TODO(review): its use is not visible
// in this chunk — confirm against the rest of the class.
private static final int minBound = 10000;
// When true, executeTestSuite() cleans/creates test data before running.
private static boolean dataCreate = true;
// Flat object listing (no pseudo-directories); forced true for swift2d.
private static boolean flatListing = false;
// Optional path to a larger local CSV fixture (args[5]).
private static String csvLocalLargePath = null;
// Path to the local CSV fixture used to build the test dataframe (args[0]).
private static String csvLocalPath = null;
// Whether the client-timeout test (test8) should run (args[4]).
private static boolean isTimeOutTest = true;
// Set when the configured scheme is swift2d; enables extra container cycling.
private static boolean isSwift = false;
/**
 * Entry point. Required args: [0] local CSV path, [1] properties file.
 * Optional: [2] dataCreate flag, [3] flatListing flag, [4] timeout-test
 * flag, [5] larger CSV path. Runs the suite once, then — when
 * object.storage.defaultFS is configured — again with fs.defaultFS set.
 */
public static void main(String[] args) throws Exception {
String propertiesFile = null;
if (args.length > 1) {
csvLocalPath = args[0];
propertiesFile = args[1];
} else {
System.out.println("Parameters required: path to csv file, path to properties file");
System.exit(1);
}
if (args.length > 2) {
dataCreate = Boolean.valueOf(args[2]).booleanValue();
}
if (args.length > 3) {
flatListing = Boolean.valueOf(args[3]).booleanValue();
}
if (args.length > 4) {
isTimeOutTest = Boolean.valueOf(args[4]).booleanValue();
}
if (args.length > 5) {
csvLocalLargePath = args[5];
}
ConfigHandler app = new ConfigHandler(propertiesFile);
NameGenerator nameGenerator = new NameGenerator(app);
// First pass: default filesystem semantics.
SparkSession spark = createSparkSession(app, nameGenerator, null);
long start = System.currentTimeMillis();
executeTestSuite(nameGenerator, spark);
long delta = System.currentTimeMillis() - start;
System.out.println("Total run time: " + delta);
String defaultFS = app.getProperty("object.storage.defaultFS");
if (defaultFS != null) {
// Second pass: rerun the whole suite with fs.defaultFS overridden.
spark = createSparkSession(app, nameGenerator, defaultFS);
nameGenerator.setDefaultFS(true);
start = System.currentTimeMillis();
executeTestSuite(nameGenerator, spark);
delta = System.currentTimeMillis() - start;
System.out.println("Total run time: " + delta);
}
}
/**
 * Builds a local SparkSession and configures its Hadoop settings for the
 * object store named by object.storage.scheme (swift2d, cos-iam, cos, s3a).
 * Also generates a fresh test container. Throws for unknown schemes.
 */
private static SparkSession createSparkSession(ConfigHandler app,
NameGenerator nameGenerator, String defaultFS) throws Exception{
SparkSession spark = SparkSession
.builder()
.appName("Stocator test suite")
.master("local")
.getOrCreate();
if (defaultFS != null) {
spark.sparkContext().hadoopConfiguration().set("fs.defaultFS", defaultFS);
}
String objectStoreIdentifier = app.getProperty("object.storage.scheme");
// --- OpenStack Swift (swift2d): keystone-style auth settings. ---
if (objectStoreIdentifier.equals("swift2d")) {
isSwift = true;
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".impl",
app.getProperty("object.storage.impl"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".auth.url",
app.getProperty("auth.url"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".public",
app.getProperty("public"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".tenant",
app.getProperty("tenant"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".username",
app.getProperty("username"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".auth.method",
app.getProperty("auth.method"));
spark.sparkContext().hadoopConfiguration().set(
"fs." + nameGenerator.getIdentifier() + ".service." + app.getServiceName() + ".password",
app.getProperty("password"));
// Swift only supports flat listing in this suite.
flatListing = true;
nameGenerator.generateNewContainer(false);
// --- IBM COS with IAM credentials (api key or token + service id). ---
} else if (objectStoreIdentifier.equals("cos-iam")) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".impl",
app.getProperty("object.storage.impl"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator." + nameGenerator.getIdentifier() + ".scheme",
app.getProperty("object.storage.scheme"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator.scheme.list", app.getProperty("object.storage.scheme"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator." + nameGenerator.getIdentifier() + ".impl",
app.getProperty("client.impl"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.endpoint",
app.getProperty("endpoint"));
// Prefer an IAM API key; fall back to a pre-issued IAM token.
if(app.getProperty("iam.api.key") != null) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.iam.api.key",
app.getProperty("iam.api.key"));
} else {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.iam.token",
app.getProperty("iam.token"));
}
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.iam.service.id",
app.getProperty("iam.service.id"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.v2.signer.type",
app.getProperty("v2.signer.type"));
System.out.println("Setting default.client.exec.timeout = " + app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.execution.timeout",
app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.request.timeout",
app.getProperty("default.request.timeout"));
if (!flatListing) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".flat.list",
"false");
}
nameGenerator.generateNewContainer();
// --- IBM COS with HMAC credentials (access/secret key). ---
} else if (objectStoreIdentifier.equals("cos")) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".impl",
app.getProperty("object.storage.impl"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator." + nameGenerator.getIdentifier() + ".scheme",
app.getProperty("object.storage.scheme"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator.scheme.list", app.getProperty("object.storage.scheme"));
spark.sparkContext().hadoopConfiguration().set("fs.stocator." + nameGenerator.getIdentifier() + ".impl",
app.getProperty("client.impl"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.endpoint",
app.getProperty("endpoint"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.access.key",
app.getProperty("access.key"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.secret.key",
app.getProperty("secret.key"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".service.v2.signer.type",
app.getProperty("v2.signer.type"));
System.out.println("Setting default.client.exec.timeout = " + app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.execution.timeout",
app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.request.timeout",
app.getProperty("default.request.timeout"));
if (!flatListing) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".flat.list",
"false");
}
nameGenerator.generateNewContainer();
// --- Hadoop S3A connector (SSL disabled for the test endpoint). ---
} else if (objectStoreIdentifier.startsWith("s3a")) {
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".endpoint",
app.getProperty("endpoint"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".access.key",
app.getProperty("access.key"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".secret.key",
app.getProperty("secret.key"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".connection.ssl.enabled",
"false");
System.out.println("Setting default.client.exec.timeout = " + app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.execution.timeout",
app.getProperty("default.client.exec.timeout"));
spark.sparkContext().hadoopConfiguration().set("fs." + nameGenerator.getIdentifier() + ".client.request.timeout",
app.getProperty("default.request.timeout"));
nameGenerator.generateNewContainer();
} else {
throw new Exception("Unknown object store identifier");
}
return spark;
}
/**
 * Runs the full object-store regression battery against the configured
 * SparkSession: loads the flights CSV fixture, optionally cleans old data,
 * then executes tests 1-16 plus the optional large-file pass.
 *
 * @param nameGenerator supplies container/object names for each test run
 * @param spark         session already configured for the target object store
 * @throws Exception    propagated from any individual test
 */
private static void executeTestSuite(NameGenerator nameGenerator,
        SparkSession spark) throws Exception {
    // dataCreate / flatListing / isSwift / isTimeOutTest / csvLocalPath /
    // csvLocalLargePath are static fields of the enclosing harness class.
    TestSuite testSuite = new TestSuite(dataCreate, flatListing);
    System.out.println("*********************************");
    System.out.println("*** Create dataframe from the local CSV file ***");
    Dataset<Row> schemaFlights = testSuite.getFlights(spark, csvLocalPath);
    nameGenerator.generateObjectNames();
    if (dataCreate) {
        // Remove leftovers from a previous run before re-creating data.
        System.out.println("Data cleanup (start) for " + nameGenerator.getContainerPath() + "*");
        System.out.println("*********************************");
        testSuite.deleteData(nameGenerator.getContainerPath(), spark.sparkContext().hadoopConfiguration(), false);
        System.out.println("*********************************");
    }
    testSuite.test1(spark, schemaFlights, nameGenerator.getCsvPath2());
    testSuite.test2(spark, schemaFlights, nameGenerator.getParquetPath(), Constants.PARQUET_TYPE);
    testSuite.test2(spark, schemaFlights, nameGenerator.getJsonPath(), Constants.JSON_TYPE);
    testSuite.test3(spark, schemaFlights, nameGenerator.getCsvPath1());
    testSuite.test4(spark, nameGenerator.getTxtPath());
    testSuite.test8(spark, nameGenerator.getTxtPath(), isTimeOutTest);
    if (isSwift) {
        // Swift needs a fresh "list" container for the listing tests.
        nameGenerator.generateNewContainer("list");
        System.out.println("Data cleanup for " + nameGenerator.getContainerPath() + "*");
        System.out.println("*********************************");
        testSuite.deleteData(nameGenerator.getContainerPath(), spark.sparkContext().hadoopConfiguration(), dataCreate);
        System.out.println("*********************************");
    }
    testSuite.test6(spark, schemaFlights, nameGenerator.getContainerPath(), nameGenerator.getCsvName());
    if (isSwift) {
        nameGenerator.generateNewContainer(false);
        System.out.println("Data cleanup for " + nameGenerator.getContainerPath() + "*");
        System.out.println("*********************************");
        testSuite.deleteData(nameGenerator.getContainerPath(), spark.sparkContext().hadoopConfiguration(), dataCreate);
        System.out.println("*********************************");
    }
    testSuite.test7(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.TEXT_TYPE);
    testSuite.test7(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.JSON_TYPE);
    testSuite.test7(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test71(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.TEXT_TYPE);
    testSuite.test71(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.JSON_TYPE);
    testSuite.test71(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test10(spark, nameGenerator.getDataResPath() + "/dfp");
    testSuite.test11(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test12(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test9(spark, nameGenerator.getDataResPath());
    testSuite.test13(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.CSV_TYPE);
    testSuite.test14(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.JSON_TYPE);
    testSuite.test14(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test15(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.JSON_TYPE);
    testSuite.test15(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    testSuite.test16(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.JSON_TYPE);
    testSuite.test16(spark, schemaFlights, nameGenerator.getContainerPath(), Constants.PARQUET_TYPE);
    if (csvLocalLargePath != null) {
        System.out.println("*********************************");
        System.out.println("Large file test!");
        Dataset<Row> largeSchemaFlights = testSuite.getFlights(spark, csvLocalLargePath);
        if (isSwift) {
            nameGenerator.generateNewContainer(true);
        }
        testSuite.test1(spark, largeSchemaFlights, nameGenerator.getCsvPath2());
        testSuite.test2(spark, largeSchemaFlights, nameGenerator.getParquetPath(), Constants.PARQUET_TYPE);
        testSuite.test2(spark, largeSchemaFlights, nameGenerator.getJsonPath(), Constants.JSON_TYPE);
        System.out.println("***** Repartition to 1");
        // BUG FIX: Dataset.repartition() returns a new Dataset and does not
        // mutate the receiver; the previous code discarded the result, so the
        // two tests below still ran against the original partitioning.
        largeSchemaFlights = largeSchemaFlights.repartition(1);
        if (isSwift) {
            nameGenerator.generateNewContainer(true);
        }
        testSuite.test2(spark, largeSchemaFlights, nameGenerator.getParquetPath(), Constants.PARQUET_TYPE);
        testSuite.test2(spark, largeSchemaFlights, nameGenerator.getJsonPath(), Constants.JSON_TYPE);
    } else {
        System.out.println("*********************************");
        System.out.println("No large file test to be executed");
    }
}
}
|
<reponame>jmini/microprofile-open-api
/**
* Copyright (c) 2017 Contributors to the Eclipse Foundation
* Copyright 2017 SmartBear Software
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eclipse.microprofile.openapi.models;
import java.util.Map;
/**
 * Paths
 * <p>
 * Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the Server Object in order to
 * construct the full URL. The Paths MAY be empty, due to
 * <a href= "https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#security-filtering">security constraints</a>.
 * <p>
 *
 * @see <a href= "https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#pathsObject"> OpenAPI Specification Paths Object</a>
 */
public interface Paths extends Constructible, Extensible, Map<String, PathItem> {
    /**
     * Adds the given path item to this Paths and returns this instance of Paths
     * (fluent-builder style, so calls can be chained).
     *
     * @param name a path name in the format valid for a Paths object. The field name MUST begin with a slash.
     * @param item the path item added to the list of paths
     * @return the current Paths instance
     */
    Paths addPathItem(String name, PathItem item);
}
<filename>src/math/geometry/icurve.ts
import {PlaneTransformation} from './planeTransformation'
import {Point} from './point'
import {PN} from './parallelogramNode'
import {Rectangle} from './rectangle'
// The interface for curves: a parametric curve value(t), defined for
// parameters t in the closed domain [parStart, parEnd].
export interface ICurve {
  // Returns the point on the curve corresponding to parameter t
  value(t: number): Point
  // first derivative at t
  derivative(t: number): Point
  // second derivative
  secondDerivative(t: number): Point
  // third derivative
  thirdDerivative(t: number): Point
  // A tree of ParallelogramNodes covering the curve.
  // This tree is used in curve intersections routines.
  pNodeOverICurve(): PN
  // XY bounding box of the curve
  boundingBox: Rectangle
  // the start of the parameter domain
  parStart: number
  // the end of the parameter domain
  parEnd: number
  // Returns the trim curve between start and end, without wrap
  trim(start: number, end: number): ICurve
  // Returns the trim curve between start and end, with wrap, if supported by the implementing class.
  trimWithWrap(start: number, end: number): ICurve
  // Moves the curve by the delta.
  translate(delta: Point): void
  // Returns the curve with all points scaled from the original by xScale and yScale
  scaleFromOrigin(xScale: number, yScale: number): ICurve
  // this[ParStart]
  start: Point
  // this[ParEnd]
  end: Point
  // this[Reverse[t]]=this[ParEnd+ParStart-t]
  reverse(): ICurve
  // Offsets the curve in the direction of dir
  offsetCurve(offset: number, dir: Point): ICurve
  // return length of the curve segment [start,end]
  lengthPartial(start: number, end: number): number
  // Get the length of the curve
  length: number
  getParameterAtLength(length: number): number
  // Return the transformed curve
  transform(transformation: PlaneTransformation): ICurve
  // Returns the parameter t whose curve point is closest to targetPoint,
  // where t belongs to the closed segment [low,high]
  closestParameterWithinBounds(
    targetPoint: Point,
    low: number,
    high: number,
  ): number
  // closest parameter over the whole domain [parStart, parEnd]
  closestParameter(targetPoint: Point): number
  // clones the curve.
  clone(): ICurve
  // The left derivative at t.
  leftDerivative(t: number): Point
  // the right derivative at t
  rightDerivative(t: number): Point
  // the signed curvature of the segment at t
  curvature(t: number): number
  // the derivative of the curvature at t
  curvatureDerivative(t: number): number
  // the derivative of CurvatureDerivative
  curvatureSecondDerivative(t: number): number
}
|
#!/bin/bash
#
# Use lock-unlock.c (build it and name it "lock-unlock") for file locking.
# This simple shell script simulates a single worker. If holding a lock, it
# does some work by printing its PID. You will see that only one process at a
# time is working. Run it like this to see something:
#
#   $ ./run.sh xxx & ./run.sh xxx & ./run.sh xxx & ./run.sh xxx &
#
# We need bash for this script to get $RANDOM stuff. Also note that we use a
# busy waiting with "sleep 1". In production environment, that would not be
# the best way to wait.
#
# (c) jp@devnull.cz
#
lock=./lock-unlock

[ $# -ne 1 ] && echo "usage: $0 <lock-file>" && exit 1
# Remove the lock file on Ctrl-C/TERM so other workers are not blocked forever.
trap "rm -f $1; exit" INT TERM
[ -x "$lock" ] || { echo "program '$lock' does not exist"; exit 1; }

while true; do
	# loop until lock acquired (active/busy waiting!)
	while true; do
		"$lock" "$1" lock
		ret=$?
		# BUG FIX: 'return' is only valid inside a function or a sourced
		# script; at script top level bash reports an error and the exit
		# never happens.  Use 'exit' to abort the worker.
		[ $ret -eq 2 ] && echo "UNEXPECTED FAILURE, exiting..." && \
		    exit 1
		# already locked
		[ $ret -eq 1 ] && sleep 1
		# lock OK
		[ $ret -eq 0 ] && break
	done
	# simulate some work by printing its PID every second (1-3 times)
	n=$((RANDOM % 3 + 1))
	echo "lock acquired by $$, will do some work"
	for i in $(seq "$n"); do
		echo "  $$"
		sleep 1
	done
	echo "$$ is releasing the lock"
	"$lock" "$1" unlock
	sleep 1
done
|
def format_string(input_dict, format_string):
    """Fill ``format_string`` positionally with the dict's values, ordered by key.

    The values are taken in ascending order of the dictionary's keys, so
    ``{'b': 2, 'a': 1}`` with ``'{0} {1}'`` yields ``'1 2'``.
    """
    ordered_values = [input_dict[key] for key in sorted(input_dict)]
    return format_string.format(*ordered_values)
echo "# H1-OSPF #"
# Query each OSPF protocol instance (func1..func4, up/dn) on H1, filtering
# out the birdc banner line from every response.
for n in 1 2 3 4; do
	for d in up dn; do
		docker exec H1 birdc6 -v show ospf nei ospf_func${n}_${d} | grep -v "0001 BIRD 1.6.6 ready"
	done
done
# echo "\n# H1 inet #"
# docker exec H1 ip -6 route list vrf inet
#
# echo "\n# H1 user #"
# docker exec H1 ip -6 route list vrf user
#
# echo "\n# H1 func1_up #"
# docker exec H1 ip -6 route list vrf func1_up
#
# echo "\n# H1 func1_dn #"
# docker exec H1 ip -6 route list vrf func1_dn
#
# echo "\n# F1 route #"
# docker exec F1 ip -6 route list
|
#!/bin/bash
# Wait (at most 1 second) for the CNF machine config pool to report the
# "updated" condition.  CNF_MCP may override the default resource name
# "mcp/worker-cnf".  Exits non-zero if the condition is not met in time.
oc wait ${CNF_MCP:-"mcp/worker-cnf"} --for condition=updated --timeout 1s
|
import os
def check_files_consistency(annotation_files_dir: str, beat_files_dir: str, file_extension: str) -> str:
    """Check that both directories hold the same, non-zero number of files
    with the given extension.

    Returns an empty string on success, or a "failed! ..." diagnostic message
    when the counts differ or both directories are empty.
    (BUG FIX: the function was annotated ``-> str`` but previously fell
    through and returned None implicitly on success.)
    """
    # os.listdir accepts paths with or without a trailing slash, so the old
    # slash-normalization of beat_files_dir was dead code and is removed.
    annotation_files_url = [f for f in os.listdir(annotation_files_dir) if f.endswith(file_extension)]
    nb_annotation_files = len(annotation_files_url)
    beat_files_url = [f for f in os.listdir(beat_files_dir) if f.endswith(file_extension)]
    nb_beat_files = len(beat_files_url)
    if nb_annotation_files != nb_beat_files or nb_annotation_files == 0:
        return "failed! annotation files num:%d beat files num:%d\n" % (nb_annotation_files, nb_beat_files)
    return ""
package org.hiro;
import org.hiro.character.Human;
import org.hiro.map.AbstractCoordinate;
import org.hiro.map.Coordinate;
import org.hiro.map.RoomInfoEnum;
import org.hiro.output.Display;
import org.hiro.things.ObjectType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Passage ("tsuuro"):
 * Draws the connecting passages between the rooms of a level
 * (rogue-style dungeon generation over a 3x3 room grid).
 */
public class Passage {
    /*
     * putpass:
     * Add a passage character, or a secret passage, at cp.
     */
    static void putpass(AbstractCoordinate cp) {
        Place pp = Util.getPlace(cp);
        pp.p_flags |= Const.F_PASS;
        // Deep in the dungeon there is a small chance the passage is secret:
        // clearing F_REAL hides it instead of drawing a PASSAGE glyph.
        if (Util.rnd(10) + 1 < Human.instance.getLevel() && Util.rnd(40) == 0) {
            pp.p_flags &= ~Const.F_REAL;
        } else {
            pp.p_ch = ObjectType.PASSAGE;
        }
    }

    /*
     * do_passages:
     * Draw all the passages on a level.  First builds a random spanning tree
     * over the 9 rooms, then adds a few extra passages so the maze is not a
     * single unique path.
     */
    static void do_passages() {
        // Adjacency rows for the 3x3 room grid (row-major rooms 0..8):
        // conn[i] == 1 means room i is a neighbour of this room.
        List<RoomGraphDescription> rdes = new ArrayList<>();
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 1, 0, 1, 0, 0, 0, 0, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{1, 0, 1, 0, 1, 0, 0, 0, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 1, 0, 0, 0, 1, 0, 0, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{1, 0, 0, 0, 1, 0, 1, 0, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 1, 0, 1, 0, 1, 0, 1, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 0, 1, 0, 1, 0, 0, 0, 1};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 0, 0, 1, 0, 0, 0, 1, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 0, 0, 0, 1, 0, 1, 0, 1};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        {
            RoomGraphDescription r = new RoomGraphDescription();
            r.conn = new int[]{0, 0, 0, 0, 0, 1, 0, 1, 0};
            Arrays.fill(r.isconn, false);
            r.ingraph = false;
            rdes.add(r);
        }
        /*
         * starting with one room, connect it to a random adjacent room and
         * then pick a new room to start with.
         */
        int r1Number = Util.rnd(Const.MAXROOMS);
        RoomGraphDescription r1 = rdes.get(r1Number);
        r1.ingraph = true;
        RoomGraphDescription r2 = null;
        int roomcount = 1;
        do {
            /*
             * find a room to connect with.  The rnd(++j) == 0 test is a
             * reservoir-sampling trick: each eligible neighbour replaces the
             * previous pick with probability 1/j, giving a uniform choice.
             */
            int r2Number = 0;
            int j = 0;
            for (int i = 0; i < Const.MAXROOMS; i++) {
                if (r1.conn[i] != 0 && !rdes.get(i).ingraph && Util.rnd(++j) == 0) {
                    r2 = rdes.get(i);
                    r2Number = i;
                }
            }
            /*
             * if no adjacent rooms are outside the graph, pick a new room
             * to look from
             */
            if (j == 0) {
                // BUG FIX: r1Number must track r1, otherwise the next
                // conn(r1Number, r2Number) call digs from a stale room.
                // (In the original C, the index was recomputed from the
                // live pointer, so it could never go stale.)
                do {
                    r1Number = Util.rnd(Const.MAXROOMS);
                    r1 = rdes.get(r1Number);
                } while (!r1.ingraph);
            } else {
                /*
                 * otherwise, connect new room to the graph, and draw a tunnel
                 * to it
                 */
                r2.ingraph = true;
                conn(r1Number, r2Number);
                r1.isconn[r2Number] = true;
                r2.isconn[r1Number] = true;
                roomcount++;
            }
        } while (roomcount < Const.MAXROOMS);
        /*
         * attempt to add passages to the graph a random number of times so
         * that there isn't always just one unique passage through it.
         */
        for (roomcount = Util.rnd(5); roomcount > 0; roomcount--) {
            int from = Util.rnd(Const.MAXROOMS);
            r1 = rdes.get(from); /* a random room to look from */
            /*
             * find an adjacent room not already connected
             */
            int j = 0;
            int to = 0;
            for (int i = 0; i < Const.MAXROOMS; i++) {
                if (r1.conn[i] != 0 && !r1.isconn[i] && Util.rnd(++j) == 0) {
                    to = i;
                    r2 = rdes.get(i);
                }
            }
            /*
             * if there is one, connect it and look for the next added
             * passage
             */
            if (j != 0) {
                conn(from, to);
                r1.isconn[to] = true;
                r2.isconn[from] = true;
            }
        }
        passnum();
    }

    /*
     * conn:
     * Draw a corridor from a room in a certain direction.
     */
    static void conn(int r1, int r2) {
        int rm;
        int direc;
        // Normalize so we always dig from the lower-numbered room: right
        // ('r') to the next column, or down ('d') to the next grid row.
        if (r1 < r2) {
            rm = r1;
            if (r1 + 1 == r2) {
                direc = 'r';
            } else {
                direc = 'd';
            }
        } else {
            rm = r2;
            if (r2 + 1 == r1) {
                direc = 'r';
            } else {
                direc = 'd';
            }
        }
        Room rpf = Global.rooms.get(rm);
        /*
         * Set up the movement variables, in two cases:
         * first drawing one down.
         */
        int rmt;
        int distance = 0;
        int turn_distance = 0;
        Room rpt = null;
        AbstractCoordinate startPosition = new Coordinate();
        AbstractCoordinate endPosiiton = new Coordinate();
        AbstractCoordinate direction = new Coordinate();
        AbstractCoordinate turn_delta = new Coordinate();
        if (direc == 'd') {
            rmt = rm + 3; /* room # of dest (one grid row below) */
            rpt = Global.rooms.get(rmt); /* room pointer of dest */
            direction.setX(0);
            direction.setY(1); /* direction of move */
            startPosition = new Coordinate(rpf.r_pos); /* start of move */
            endPosiiton = new Coordinate(rpt.r_pos); /* end of move */
            if (!rpf.containInfo(RoomInfoEnum.ISGONE)) { /* if not gone pick door pos */
                do {
                    startPosition = rpf.r_pos.add(new Coordinate(Util.rnd(rpf.r_max.getX() - 2) + 1, rpf.r_max.getY() - 1));
                } while (rpf.containInfo(RoomInfoEnum.ISMAZE) && (Util.flat(startPosition) & Const.F_PASS) == 0);
            }
            if (!rpt.containInfo(RoomInfoEnum.ISGONE)) {
                do {
                    endPosiiton.setX(rpt.r_pos.getX() + Util.rnd(rpt.r_max.getX() - 2) + 1);
                } while (rpt.containInfo(RoomInfoEnum.ISMAZE) && (Util.flat(endPosiiton) & Const.F_PASS) == 0);
            }
            distance = Math.abs(startPosition.getY() - endPosiiton.getY()) - 1; /* distance to move */
            turn_delta.setX(startPosition.getX() < endPosiiton.getX() ? 1 : -1);
            turn_delta.setY(0); /* direction to turn */
            turn_distance = Math.abs(startPosition.getX() - endPosiiton.getX()); /* how far to turn */
        } else if (direc == 'r') /* setup for moving right */ {
            rmt = rm + 1;
            rpt = Global.rooms.get(rmt);
            direction.setX(1);
            direction.setY(0);
            startPosition = new Coordinate(rpf.r_pos);
            endPosiiton = new Coordinate(rpt.r_pos);
            if (!rpf.containInfo(RoomInfoEnum.ISGONE)) {
                do {
                    startPosition = rpf.r_pos.add(new Coordinate(rpf.r_max.getX() - 1, Util.rnd(rpf.r_max.getY() - 2) + 1));
                } while (rpf.containInfo(RoomInfoEnum.ISMAZE) && (Util.flat(startPosition) & Const.F_PASS) == 0);
            }
            if (!rpt.containInfo(RoomInfoEnum.ISGONE)) {
                do {
                    endPosiiton.setY(rpt.r_pos.getY() + Util.rnd(rpt.r_max.getY() - 2) + 1);
                } while (rpt.containInfo(RoomInfoEnum.ISMAZE) && (Util.flat(endPosiiton) & Const.F_PASS) == 0);
            }
            distance = Math.abs(startPosition.getX() - endPosiiton.getX()) - 1;
            turn_delta.setX(0);
            turn_delta.setY(startPosition.getY() < endPosiiton.getY() ? 1 : -1);
            turn_distance = Math.abs(startPosition.getY() - endPosiiton.getY());
        }
        boolean MASTER = true;
        // NOTE(review): dead scaffolding kept from the C original; the else
        // branch can never execute.
        if (MASTER) {
        } else {
            // debug("error in connection tables");
        }
        /* where turn starts */
        int turn_spot = Util.rnd(distance - 1) + 1;
        /*
         * Draw in the doors on either side of the passage or just put #'s
         * if the rooms are gone.
         */
        if (!rpf.containInfo(RoomInfoEnum.ISGONE)) {
            door(rpf, startPosition);
        } else {
            putpass(startPosition);
        }
        if (!rpt.containInfo(RoomInfoEnum.ISGONE)) {
            door(rpt, endPosiiton);
        } else {
            putpass(endPosiiton);
        }
        /*
         * Get ready to move...
         */
        AbstractCoordinate curr = new Coordinate(startPosition);
        while (distance > 0) {
            /*
             * Move to new position
             */
            curr = direction.add(curr);
            /*
             * Check if we are at the turn place, if so do the turn
             */
            if (distance == turn_spot)
                while (turn_distance-- > 0) {
                    putpass(curr);
                    curr = turn_delta.add(curr);
                }
            /*
             * Continue digging along
             */
            putpass(curr);
            distance--;
        }
        curr = direction.add(curr);
        if (!curr.equals(endPosiiton)) {
            // msg("warning, connectivity problem on this level");
        }
    }

    /*
     * door:
     * Add a door or possibly a secret door.  Also enters the door in
     * the exits array of the room.
     */
    static void door(Room rm, AbstractCoordinate cp) {
        Place pp;
        rm.r_exit[rm.r_nexits++] = cp;
        if (rm.containInfo(RoomInfoEnum.ISMAZE)) {
            return;
        }
        pp = Util.getPlace(cp);
        // Deep levels have a chance of a secret door: draw a wall glyph and
        // clear F_REAL so the door is hidden until searched.
        if (Util.rnd(10) + 1 < Human.instance.getLevel() && Util.rnd(5) == 0) {
            if (cp.getY() == rm.r_pos.getY() || cp.getY() == rm.r_pos.getY() + rm.r_max.getY() - 1) {
                pp.p_ch = ObjectType.Horizon;
            } else {
                pp.p_ch = ObjectType.Vert;
            }
            pp.p_flags &= ~Const.F_REAL;
        } else {
            pp.p_ch = ObjectType.DOOR;
        }
    }

    /*
     * passnum:
     * Assign a number to each passageway
     */
    static int pnum;      // current passage number being assigned
    static boolean newpnum; // true when the next numpass() call starts a new passage

    static void passnum() {
        pnum = 0;
        newpnum = false;
        for (Room rp : Global.passages) {
            rp.r_nexits = 0;
        }
        for (Room rp : Global.rooms) {
            for (int i = 0; i < rp.r_nexits; i++) {
                newpnum = true;
                numpass(rp.r_exit[i].getY(), rp.r_exit[i].getX());
            }
        }
    }

    /*
     * numpass:
     * Number a passageway square and its brethren (flood fill from a door).
     */
    static void numpass(int y, int x) {
        // NOTE(review): the x bound uses x < 0 but the y bound uses y <= 0 —
        // row 0 is excluded on purpose in the C original (status line);
        // confirm that still holds here.
        if (x >= Const.NUMCOLS || x < 0 || y >= Const.NUMLINES || y <= 0) {
            return;
        }
        int fp = Util.flat(new Coordinate(x, y));
        if ((fp & Const.F_PNUM) != 0) {
            return;
        }
        if (newpnum) {
            pnum++;
            newpnum = false;
        }
        /*
         * check to see if it is a door or secret door, i.e., a new exit,
         * or a numerable type of place
         */
        char ch = Util.INDEX(y, x).p_ch.getValue();
        if (ch == ObjectType.DOOR.getValue() ||
                ((fp & Const.F_REAL) == 0 && (ch == ObjectType.Horizon.getValue() || ch == ObjectType.Vert.getValue()))) {
            Room rp = Global.passages[pnum];
            rp.r_exit[rp.r_nexits].setY(y);
            rp.r_exit[rp.r_nexits++].setX(x);
        } else if ((fp & Const.F_PASS) == 0) {
            return;
        }
        // NOTE(review): fp is a local copy of the flags value; OR-ing pnum
        // into it only updates the local unless Util.flat returns a live
        // reference — confirm this is not a lost update versus the C code,
        // which wrote through a pointer.
        fp |= pnum; //
        /*
         * recurse on the surrounding places
         */
        numpass(y + 1, x);
        numpass(y - 1, x);
        numpass(y, x + 1);
        numpass(y, x - 1);
    }

    /*
     * add_pass:
     * Add the passages to the current window (wizard command)
     */
    void add_pass() {
        Place pp;
        int y, x;
        ObjectType ch;
        // Row 0 (status line) and the bottom row are skipped.
        for (y = 1; y < Const.NUMLINES - 1; y++)
            for (x = 0; x < Const.NUMCOLS; x++) {
                pp = Util.INDEX(y, x);
                if ((pp.p_flags & Const.F_PASS) != 0 || pp.p_ch == ObjectType.DOOR ||
                        ((pp.p_flags & Const.F_REAL) == 0 && (pp.p_ch == ObjectType.Vert || pp.p_ch == ObjectType.Horizon))) {
                    ch = pp.p_ch;
                    if ((pp.p_flags & Const.F_PASS) != 0) {
                        ch = ObjectType.PASSAGE;
                    }
                    pp.p_flags |= Const.F_SEEN;
                    Display.move(y, x);
                    if (pp.p_monst != null) {
                        pp.p_monst.setFloorTile(pp.p_ch.getValue());
                    } else if ((pp.p_flags & Const.F_REAL) != 0) {
                        Display.addch(ch.getValue());
                    } else {
                        // Hidden (non-REAL) squares are shown highlighted.
                        Display.standout();
                        Display.addch((pp.p_flags & Const.F_PASS) != 0 ? ObjectType.PASSAGE.getValue() : ObjectType.DOOR.getValue());
                        Display.standend();
                    }
                }
            }
    }
}
|
<filename>app/components/Login.js
/* eslint-disable react/button-has-type */
/* eslint-disable class-methods-use-this */
// @flow
import { remote, ipcRenderer } from 'electron';
import fs from 'fs';
import React, { Component } from 'react';
import ReactLoading from 'react-loading';
import { Redirect, Link } from 'react-router-dom';
import log from 'electron-log';
import {
config,
session,
directories,
eventEmitter,
loginCounter
} from '../index';
import navBar from './NavBar';
import routes from '../constants/routes';
// import styles from './Send.css';
// Flow props for the Login screen.
// NOTE(review): syncStatus, the balances, transactions, handleSubmit and
// transactionInProgress are not read inside this component — confirm whether
// the routing layer still passes them before removing.
type Props = {
  syncStatus: number,
  unlockedBalance: number,
  lockedBalance: number,
  transactions: Array<string>,
  handleSubmit: () => void,
  transactionInProgress: boolean
};
export default class Login extends Component<Props> {
props: Props;
constructor(props?: Props) {
super(props);
this.state = {
importkey: false,
importseed: false,
importCompleted: false,
loginInProgress: false,
userOpenedDifferentWallet: false,
darkMode: session.darkMode || false,
walletFile: session.walletFile,
wrongPassword: <PASSWORD>
};
this.handleImportFromSeed = this.handleImportFromSeed.bind(this);
this.handleImportFromKey = this.handleImportFromKey.bind(this);
this.handleInitialize = this.handleInitialize.bind(this);
this.handleLoginFailure = this.handleLoginFailure.bind(this);
this.handleLoginInProgress = this.handleLoginInProgress.bind(this);
this.refreshLogin = this.refreshLogin.bind(this);
}
componentDidMount() {
this.interval = setInterval(() => this.refresh(), 1000);
ipcRenderer.on('importSeed', this.handleImportFromSeed);
ipcRenderer.on('importKey', this.handleImportFromKey);
eventEmitter.on('initializeNewSession', this.handleInitialize);
eventEmitter.on('loginInProgress', this.handleLoginInProgress);
eventEmitter.on('refreshLogin', this.refreshLogin);
}
componentWillUnmount() {
clearInterval(this.interval);
ipcRenderer.off('importSeed', this.handleImportFromSeed);
ipcRenderer.off('importKey', this.handleImportFromKey);
eventEmitter.off('initializeNewSession', this.handleInitialize);
eventEmitter.off('loginInProgress', this.handleLoginInProgress);
eventEmitter.off('refreshLogin', this.refreshLogin);
}
handleLoginInProgress() {
log.debug('Login in progress...');
this.setState({
loginInProgress: true
});
}
handleLoginFailure() {
this.setState({
loginFailed: true,
loginInProgress: false
});
}
refreshLogin() {
this.setState({
userOpenedDifferentWallet: true
});
}
handleInitialize() {
this.setState({
importCompleted: true
});
}
handleImportFromSeed(evt, route) {
clearInterval(this.interval);
this.setState({
importseed: true
});
}
handleImportFromKey(evt, route) {
clearInterval(this.interval);
this.setState({
importkey: true
});
}
async handleSubmit(event) {
// We're preventing the default refresh of the page that occurs on form submit
event.preventDefault();
loginCounter.userLoginAttempted = true;
eventEmitter.emit('loginInProgress');
const password = event.target[0].value;
if (password === undefined) {
return;
}
eventEmitter.emit('initializeNewSession', password);
}
refresh() {
this.setState(prevState => ({
syncStatus: session.getSyncStatus()
}));
}
render() {
if (this.state.userOpenedDifferentWallet) {
return <Redirect to="/" />;
}
if (this.state.loginFailed === true) {
return <Redirect to="/login" />;
}
if (this.state.importseed === true) {
return <Redirect to="/import" />;
}
if (this.state.importCompleted === true) {
return <Redirect to="/" />;
}
return (
<div>
{this.state.darkMode === false && (
<div className="fullwindow">
{this.state.loginInProgress === false && (
<div className="mid-div">
<div
className={
this.state.wrongPassword
? 'box loginbox-fail has-background-light inner-div'
: 'box loginbox has-background-light inner-div'
}
>
<form onSubmit={this.handleSubmit}>
<div className="field">
<label className="label" htmlFor="scanheight">
Password
<div className="control">
<input
ref={input => input && input.focus()}
className={
this.state.wrongPassword
? 'input is-large is-danger'
: 'input is-large'
}
type="password"
placeholder="Enter your wallet password..."
/>
</div>
</label>
<label className="help" htmlFor="scanheight">
attempting login to {this.state.walletFile}
</label>
</div>
<div className="buttons is-right">
<button
type="submit"
className="button is-success is-large"
>
Login
</button>
</div>
</form>
</div>
</div>
)}
</div>
)}
{this.state.darkMode === true && (
<div className="fullwindow has-background-dark outer-div">
{this.state.loginInProgress === false && (
<div className="mid-div">
<div
className={
this.state.wrongPassword
? 'box loginbox-fail has-background-black inner-div'
: 'box loginbox has-background-black inner-div'
}
>
<form onSubmit={this.handleSubmit}>
<div className="field">
<label
className="label has-text-white"
htmlFor="scanheight"
>
Password
<div className="control">
<input
ref={input => input && input.focus()}
className={
this.state.wrongPassword
? 'input is-large is-danger'
: 'input is-large'
}
type="password"
placeholder="Enter your wallet password..."
/>
</div>
</label>
<label
className="help has-text-white"
htmlFor="scanheight"
>
attempting login to {this.state.walletFile}
</label>
</div>
<div className="buttons is-right">
<button
type="submit"
className="button is-success is-large"
>
Login
</button>
</div>
</form>
</div>
</div>
)}
</div>
)}
</div>
);
}
}
|
<reponame>sthagen/drone-drone<gh_stars>1000+
// Copyright 2019 Drone IO, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package starlark
import (
"encoding/json"
"fmt"
"io"
"go.starlark.net/starlark"
)
// writer combines the byte-, string- and slice-level writing methods the
// JSON emitter below needs (satisfied by e.g. *bytes.Buffer).
type writer interface {
	io.Writer
	io.ByteWriter
	io.StringWriter
}
// write serializes a Starlark value to JSON on out.  Values implementing
// json.Marshaler take precedence; otherwise None/Bool/Int/Float/String,
// any Indexable (emitted as a JSON array), and *starlark.Dict (emitted as
// a JSON object) are handled.  Unknown types yield an error.
//
// NOTE(review): errors from out.Write/WriteString are ignored throughout;
// acceptable for in-memory buffers, but confirm no fallible writer is used.
func write(out writer, v starlark.Value) error {
	if marshaler, ok := v.(json.Marshaler); ok {
		jsonData, err := marshaler.MarshalJSON()
		if err != nil {
			return err
		}
		out.Write(jsonData)
		return nil
	}
	// Order matters: more specific cases must precede starlark.Indexable,
	// which would otherwise capture any indexable concrete type.
	switch v := v.(type) {
	case starlark.NoneType:
		out.WriteString("null")
	case starlark.Bool:
		fmt.Fprintf(out, "%t", v)
	case starlark.Int:
		out.WriteString(v.String())
	case starlark.Float:
		fmt.Fprintf(out, "%g", v)
	case starlark.String:
		// Fast path: %q matches JSON quoting for printable BMP strings;
		// fall back to encoding/json for control chars and astral runes.
		s := string(v)
		if isQuoteSafe(s) {
			fmt.Fprintf(out, "%q", s)
		} else {
			data, _ := json.Marshal(s)
			out.Write(data)
		}
	case starlark.Indexable:
		out.WriteByte('[')
		for i, n := 0, starlark.Len(v); i < n; i++ {
			if i > 0 {
				out.WriteString(", ")
			}
			if err := write(out, v.Index(i)); err != nil {
				return err
			}
		}
		out.WriteByte(']')
	case *starlark.Dict:
		out.WriteByte('{')
		for i, itemPair := range v.Items() {
			key := itemPair[0]
			value := itemPair[1]
			if i > 0 {
				out.WriteString(", ")
			}
			if err := write(out, key); err != nil {
				return err
			}
			out.WriteString(": ")
			if err := write(out, value); err != nil {
				return err
			}
		}
		out.WriteByte('}')
	default:
		return fmt.Errorf("value %s (type `%s') can't be converted to JSON", v.String(), v.Type())
	}
	return nil
}
// isQuoteSafe reports whether s can be emitted with %q and still be valid
// JSON: every rune must be at least 0x20 (no control characters) and below
// 0x10000 (no runes that %q would escape with non-JSON \U syntax).
func isQuoteSafe(s string) bool {
	for _, r := range s {
		safe := r >= 0x20 && r < 0x10000
		if !safe {
			return false
		}
	}
	return true
}
|
<filename>js/sykepengesoknad-gammel-plattform/SykepengerSkjema.js<gh_stars>1-10
import React from 'react';
import PropTypes from 'prop-types';
import Stegindikator, { arbeidstakerUrler } from '../components/soknad-felles/Stegindikator';
import { sykepengesoknad as sykepengesoknadPt, childEllerChildren } from '../propTypes/index';
import SykmeldingUtdragContainer from './SykmeldingUtdragContainer';
import { TilbakelenkeSoknad } from '../sykepengesoknad/felleskomponenter/TilbakelenkeSoknad';
const SykepengerSkjema = ({ children, aktivtSteg, tittel, sykepengesoknad }) => {
return (<div>
<Stegindikator aktivtSteg={aktivtSteg} soknadId={sykepengesoknad.id} />
<TilbakelenkeSoknad aktivtSteg={aktivtSteg} soknadId={sykepengesoknad.id} urler={arbeidstakerUrler} />
<SykmeldingUtdragContainer sykepengesoknad={sykepengesoknad} />
{ tittel && <h2 className="soknad__stegtittel">{tittel}</h2> }
{children}
</div>);
};
SykepengerSkjema.propTypes = {
children: childEllerChildren,
aktivtSteg: PropTypes.string,
tittel: PropTypes.string,
sykepengesoknad: sykepengesoknadPt,
};
export default SykepengerSkjema;
|
/**
 * Task 42) Write a method that gets an integer array and returns the same array with an extra element that is the calculated average of the given array.
 * For example:
 * {1,2,3,4,5,6} -> {1,2,3,4,5,6, 3}
 */
public class Task42 {
    public static void main(String[] args) {
        int[] a = {1, 2, 3, 4, 5, 6};
        int[] b = addAvg(a);
        printArray(b);
    }

    /** Prints the array elements separated by single spaces (no newline). */
    private static void printArray(int[] b) {
        for (int i : b) {
            System.out.print(i + " ");
        }
    }

    /**
     * Returns a copy of {@code a} with one extra trailing element holding the
     * integer-truncated average of the input (per the task's example, 3.5
     * truncates to 3).  Widened from private to public for testability.
     *
     * @param a source array; must be non-empty (an empty array throws
     *          ArithmeticException on the division, as before)
     * @return new array of length {@code a.length + 1}
     */
    public static int[] addAvg(int[] a) {
        int[] b = new int[a.length + 1];
        // BUG FIX: accumulate in a long so the sum cannot overflow int for
        // large inputs; the final average is cast back to int.
        long sum = 0;
        for (int i = 0; i < a.length; i++) {
            b[i] = a[i];
            sum += a[i];
        }
        b[b.length - 1] = (int) (sum / a.length);
        return b;
    }
}
|
import * as crypto from 'crypto';
const cloneDeep = require('lodash.clonedeep') // eslint-disable-line @typescript-eslint/no-var-requires
import {Group} from './group';
import {InetHengeDataType} from './diagram'
import {Link} from './link';
import {Node} from './node';
// Pixel-space geometry snapshots persisted to localStorage.
export type GroupPosition = { x: number, y: number, width: number, height: number }
export type NodePosition = { x: number, y: number }
export type LinkPosition = { x1: number, y1: number, x2: number, y2: number }

// Diagram data extended with the stringified pop filter, so the filter
// participates in the SHA-1 cache key.
type ExtendedInetHengeDataType = InetHengeDataType & { pop: string }

// One cache record: the data hash plus the saved positions of every
// group, node and link.
type CacheDataType = {
  sha1: string,
  group: GroupPosition[],
  node: NodePosition[],
  link: LinkPosition[],
}
export class PositionCache {
private cachedSha1: string
public group: GroupPosition[]
public node: NodePosition[]
public link: LinkPosition[]
// Holds the diagram data and pop filter used to compute the cache key.
// cachedSha1 may be undefined when no cache entry existed for this page.
constructor(public data: InetHengeDataType, public pop?: RegExp, sha1?: string) {
  // NOTE: properties below can be undefined
  this.cachedSha1 = sha1;
}
static getAll(): CacheDataType[] {
return JSON.parse(localStorage.getItem('positionCache')) || {};
}
static get(): CacheDataType {
return this.getAll()[location.pathname] || {};
}
save(group: d3.Selection<Group>, node: d3.Selection<Node>, link: d3.Selection<Link>): void {
const cache = PositionCache.getAll();
cache[location.pathname] = {
sha1: this.sha1(),
group: this.groupPosition(group),
node: this.nodePosition(node),
link: this.linkPosition(link)
};
localStorage.setItem('positionCache', JSON.stringify(cache));
}
sha1(data?: ExtendedInetHengeDataType, pop?: RegExp): string {
data = <ExtendedInetHengeDataType>cloneDeep(data || this.data);
data.pop = String(pop || this.pop);
if (data.pop === 'undefined') {
data.pop = 'null'; // NOTE: unify undefined with null
}
data.nodes && data.nodes.forEach((i) => {
delete i.icon;
delete i.meta;
});
data.links && data.links.forEach((i) => {
delete i.meta;
});
const sha1 = crypto.createHash('sha1');
sha1.update(JSON.stringify(data));
return sha1.digest('hex');
}
groupPosition(group: d3.Selection<any>): GroupPosition[] { // eslint-disable-line @typescript-eslint/no-explicit-any
const position = [];
group.each((d) => {
position.push({
x: d.bounds.x,
y: d.bounds.y,
width: d.bounds.width(),
height: d.bounds.height()
});
});
return position;
}
nodePosition(node: d3.Selection<any>): NodePosition[] { // eslint-disable-line @typescript-eslint/no-explicit-any
const position = [];
node.each((d: Node) => {
position.push({
x: d.x,
y: d.y
});
});
return position;
}
linkPosition(link: d3.Selection<any>): LinkPosition[] { // eslint-disable-line @typescript-eslint/no-explicit-any
const position = [];
link.each((d) => {
position.push({
x1: d.source.x,
y1: d.source.y,
x2: d.target.x,
y2: d.target.y
});
});
return position;
}
match(data: InetHengeDataType, pop: RegExp): boolean {
return this.cachedSha1 === this.sha1(<ExtendedInetHengeDataType>data, pop);
}
static load(data: InetHengeDataType, pop: RegExp): PositionCache | undefined {
const cache = this.get();
if (cache) {
const position = new PositionCache(data, pop, cache.sha1);
if (position.match(data, pop)) { // if data and pop match saved sha1
position.group = cache.group;
position.node = cache.node;
position.link = cache.link;
return position;
}
}
}
}
|
package dbaccess
import anorm.SQL
import play.api.Play.current
import play.api.db.DB
import anorm.NamedParameter.symbol
import models.User
/**
* Data access object for user related operations.
*
* @author ob, scs
*/
trait UserDaoT {

  /**
   * Creates the given user in the database.
   *
   * @param user the user object to be stored.
   * @return the persisted user object
   */
  def addUser(user: User): User = {
    DB.withConnection { implicit c =>
      val id: Option[Long] =
        SQL("insert into Users(name, password, distance, admin, active) values (({name}), ({password}), ({distance}), ({admin}), ({active}))").on(
          // Fix: the password binding held a redacted placeholder that did not
          // compile; bind the user's password like the other fields.
          'name -> user.name, 'password -> user.password, 'distance -> user.distance, 'admin -> user.admin, 'active -> user.active).executeInsert()
      // NOTE(review): id.get throws if the insert returns no generated key.
      user.id = id.get
    }
    user
  }

  /**
   * Updates the given user's row in the database.
   *
   * @param changeUser the user object carrying the new field values
   * @return the user object that was passed in
   */
  def changeUser(changeUser: User): User = {
    DB.withConnection { implicit c =>
      val change =
        SQL("update Users SET password = ({password}), distance = ({distance}), admin = ({admin}), active =({active}) where id = ({id})").on(
          // Fix: bind the user's password (was a redacted placeholder).
          'id -> changeUser.id, 'password -> changeUser.password, 'distance -> changeUser.distance, 'admin -> changeUser.admin, 'active -> changeUser.active).executeUpdate()
    }
    changeUser
  }

  /**
   * Marks the given user as inactive (soft delete).
   *
   * @param changeUser the user to deactivate
   * @return the user object that was passed in
   */
  def disableUser(changeUser: User): User = {
    DB.withConnection { implicit c =>
      val change =
        SQL("update Users SET active = false where id = ({id})").on(
          'id -> changeUser.id).executeUpdate()
    }
    changeUser
  }

  /**
   * Removes a user by id from the database.
   *
   * @param id the users id
   * @return a boolean success flag
   */
  def rmUser(id: Long): Boolean = {
    DB.withConnection { implicit c =>
      val rowsCount = SQL("delete from Users where id = ({id})").on('id -> id).executeUpdate()
      rowsCount > 0
    }
  }

  /**
   * Returns a list of available user from the database.
   *
   * @return a list of user objects.
   */
  def registeredUsers: List[User] = {
    DB.withConnection { implicit c =>
      val selectUsers = SQL("Select * from Users;")
      // Map each result row onto a User instance.
      val users = selectUsers().map(row => User(row[Long]("id"), row[String]("name"), row[String]("password"), row[BigDecimal]("distance"), row[Boolean]("admin"), row[Boolean]("active"))).toList
      users
    }
  }
}
object UserDao extends UserDaoT
|
/**
* * unpack
*
* ? v-1.0.0-rc.1
* RP: EmiPhil
*
* * Unpacks keto stuff into a unified object for usage.
*/
function unpack (req) {
const {
utils,
// * extras is a user-writeable object that allows overwriting Keto funcs
// * without losing access to the unpack utility.
extras
} = req.app.__KETO
const {
data,
logger
} = req.__KETO
return Object.assign({}, utils, { data }, logger, extras)
}
module.exports.utils = unpack
|
<filename>crawl-ref/source/game-options.h<gh_stars>1000+
/*
* @file
* @brief Global game options controlled by the rcfile.
*/
#pragma once
#include <functional>
#include <string>
#include <set>
#include <vector>
#include "colour.h"
#include "stringutil.h"
#include "maybe-bool.h"
using std::vector;
struct game_options;
// How an rcfile line combines with an option's current value.
enum rc_line_type
{
RCFILE_LINE_EQUALS, ///< foo = bar
RCFILE_LINE_PLUS, ///< foo += bar
RCFILE_LINE_MINUS, ///< foo -= bar
RCFILE_LINE_CARET, ///< foo ^= bar
NUM_RCFILE_LINE_TYPES,
};
// Append (or, when `prepend` is set, prefix) all of `src` onto `dest`.
template<class A, class B> void merge_lists(A &dest, const B &src, bool prepend)
{
    const auto insert_at = prepend ? dest.begin() : dest.end();
    dest.insert(insert_at, src.begin(), src.end());
}
// Erase every element equal to `entry` from `lis` (erase-remove idiom),
// returning the list for chaining.
template <class L, class E>
L& remove_matching(L& lis, const E& entry)
{
    const auto new_end = std::remove(lis.begin(), lis.end(), entry);
    lis.erase(new_end, lis.end());
    return lis;
}
// Abstract base for a single rcfile-controlled option. Subclasses hold a
// reference to the actual game_options field they manage.
class GameOption
{
public:
GameOption(std::set<std::string> _names)
: names(_names), loaded(false) { }
virtual ~GameOption() {};
// XX reset, set_from, and some other stuff could be templated for most
// subclasses, but this is hard to reconcile with the polymorphism involved
virtual void reset() { loaded = false; }
virtual void set_from(const GameOption *other) = 0;
// Parse the rcfile field; returns an error message, or "" on success.
virtual string loadFromString(const std::string &, rc_line_type)
{
loaded = true;
return "";
}
const std::set<std::string> &getNames() const { return names; }
// Primary (first) name of the option.
const std::string name() const { return *names.begin(); }
bool was_loaded() const { return loaded; }
protected:
std::set<std::string> names;
bool loaded; // tracks whether the option has changed via loadFromString.
// will miss whether it was changed directly in c++ code. (TODO)
friend struct game_options;
};
// Option managing a bool flag owned by game_options.
class BoolGameOption : public GameOption
{
public:
BoolGameOption(bool &val, std::set<std::string> _names,
bool _default)
: GameOption(_names), value(val), default_value(_default) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const BoolGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
bool &value;
bool default_value;
};
// Option managing a colour value; `elemental` selects the colour namespace
// used when parsing.
class ColourGameOption : public GameOption
{
public:
ColourGameOption(unsigned &val, std::set<std::string> _names,
unsigned _default, bool _elemental = false)
: GameOption(_names), value(val), default_value(_default),
elemental(_elemental) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const ColourGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
unsigned &value;
unsigned default_value;
bool elemental;
};
// Option managing a curses attribute value (console text styling).
class CursesGameOption : public GameOption
{
public:
CursesGameOption(unsigned &val, std::set<std::string> _names,
unsigned _default)
: GameOption(_names), value(val), default_value(_default) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const CursesGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
unsigned &value;
unsigned default_value;
};
// Option managing an int clamped to [min_value, max_value] when parsed.
class IntGameOption : public GameOption
{
public:
IntGameOption(int &val, std::set<std::string> _names, int _default,
int min_val = INT_MIN, int max_val = INT_MAX)
: GameOption(_names), value(val), default_value(_default),
min_value(min_val), max_value(max_val) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const IntGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
int &value;
int default_value, min_value, max_value;
};
// Option managing a free-form string value.
class StringGameOption : public GameOption
{
public:
StringGameOption(string &val, std::set<std::string> _names,
string _default)
: GameOption(_names), value(val), default_value(_default) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const StringGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
string &value;
string default_value;
};
#ifdef USE_TILE
// Option managing a tiles VColour; only built in tile-capable binaries.
class TileColGameOption : public GameOption
{
public:
TileColGameOption(VColour &val, std::set<std::string> _names,
string _default);
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const TileColGameOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type) override;
private:
VColour &value;
VColour default_value;
};
#endif
// A (threshold, colour) pair plus the ordering used to keep a list of them
// sorted for lookup.
typedef pair<int, int> colour_threshold;
typedef vector<colour_threshold> colour_thresholds;
typedef function<bool(const colour_threshold &l, const colour_threshold &r)>
colour_ordering;
// Option managing a sorted list of (threshold, colour) pairs, e.g. for
// HP/MP status colouring.
class ColourThresholdOption : public GameOption
{
public:
ColourThresholdOption(colour_thresholds &val, std::set<std::string> _names,
string _default, colour_ordering ordering_func)
: GameOption(_names), value(val), ordering_function(ordering_func),
default_value(parse_colour_thresholds(_default)) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const ColourThresholdOption *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const string &field, rc_line_type ltyp) override;
private:
colour_thresholds parse_colour_thresholds(const string &field,
string* error = nullptr) const;
colour_thresholds &value;
colour_ordering ordering_function;
colour_thresholds default_value;
};
// T must be convertible to a string.
// Option managing a list of T parsed from a comma-separated field.
// `=` replaces the list, `+=` appends, `^=` prepends, `-=` removes entries.
template<typename T>
class ListGameOption : public GameOption
{
public:
ListGameOption(vector<T> &list, std::set<std::string> _names,
vector<T> _default = {})
: GameOption(_names), value(list), default_value(_default) { }
// Restore the default and clear the loaded flag.
void reset() override
{
value = default_value;
GameOption::reset();
}
void set_from(const GameOption *other) override
{
const auto other_casted = dynamic_cast<const ListGameOption<T> *>(other);
// ugly: I can't currently find any better way to enforce types
ASSERT(other_casted);
value = other_casted->value;
}
string loadFromString(const std::string &field, rc_line_type ltyp) override
{
if (ltyp == RCFILE_LINE_EQUALS)
value.clear();
vector<T> new_entries;
for (const auto &part : split_string(",", field))
{
if (part.empty())
continue;
if (ltyp == RCFILE_LINE_MINUS)
remove_matching(value, T(part));
else
new_entries.emplace_back(part);
}
merge_lists(value, new_entries, ltyp == RCFILE_LINE_CARET);
return GameOption::loadFromString(field, ltyp);
}
private:
vector<T> &value;
vector<T> default_value;
};
// A template for an option which can take one of a fixed list of values.
// Trying to set it to a value which isn't listed in _choices gives an error
// message, and does not alter _val.
template<typename T>
class MultipleChoiceGameOption : public GameOption
{
public:
    MultipleChoiceGameOption(T &_val, std::set<std::string> _names, T _default,
                             map<string, T> _choices)
        : GameOption(_names), value(_val), default_value(_default),
          choices(_choices) { }

    // Restore the default and clear the loaded flag.
    void reset() override
    {
        value = default_value;
        GameOption::reset();
    }

    void set_from(const GameOption *other) override
    {
        const auto other_casted = dynamic_cast<const MultipleChoiceGameOption<T> *>(other);
        // ugly: I can't currently find any better way to enforce types
        ASSERT(other_casted);
        value = other_casted->value;
    }

    // Accepts only the listed choice names; unknown names return an error
    // message listing the valid alternatives and leave `value` untouched.
    string loadFromString(const std::string &field, rc_line_type ltyp) override
    {
        const T *choice = map_find(choices, field);
        if (choice == nullptr) // was `== 0`; nullptr states the intent
        {
            string all_choices = comma_separated_fn(choices.begin(),
                choices.end(), [] (const pair<string, T> &p) {return p.first;},
                " or ");
            return make_stringf("Bad %s value: %s (should be %s)",
                                name().c_str(), field.c_str(),
                                all_choices.c_str());
        }
        else
        {
            value = *choice;
            return GameOption::loadFromString(field, ltyp);
        }
    }
private:
    T &value, default_value;
    map<string, T> choices;
};
// Parse a boolean rcfile field, falling back to def_value on bad input.
bool read_bool(const std::string &field, bool def_value);
// Parse a tri-state (true/false/maybe) rcfile field.
maybe_bool read_maybe_bool(const std::string &field);
|
<reponame>enzoferey/url-to-app-uri
import { ANDROID_TARGET, IOS_TARGET } from "../index";
// Converts an instagram.com web URL into a platform deep link.
// Returns undefined when no deep link exists for the target/URL combination.
const instagram = (href, target) => {
  // Path after the domain: "p/<shortcode>" for a photo, "<username>" for a
  // profile.
  const afterDomain = href.split(".com/")[1];
  // Guard: URLs with nothing after ".com/" previously crashed on
  // `undefined.includes(...)`.
  if (!afterDomain) {
    return undefined;
  }
  // `startsWith` instead of `includes`: a profile path such as "myapp/"
  // contains the substring "p/" but is not a photo URL.
  const isPhoto = afterDomain.startsWith("p/");
  if (target === ANDROID_TARGET) {
    if (isPhoto) {
      return `intent://instagram.com/${afterDomain}#Intent;package=com.instagram.android;scheme=https;end`;
    }
    // profile
    return `intent://instagram.com/_u/${afterDomain}#Intent;package=com.instagram.android;scheme=https;end`;
  } else if (target === IOS_TARGET) {
    if (isPhoto) {
      // No photo deep-link scheme on iOS.
      return undefined;
    }
    // profile
    return `instagram://user?username=${afterDomain}`;
  }
};
export default instagram;
|
// Validates a dotted-quad IPv4 address: exactly four '.'-separated fields,
// each 1-3 digits with a value in [0, 255].
//
// Rewritten: the original iterated `for (int num : numbers)` over an array
// of std::string (does not compile), never checked the field count, and
// accepted trailing junk in the last field.
bool is_valid_IPv4(std::string ip_address) {
    // Split on '.' — an address like "1.2.3.4" yields exactly 4 parts.
    std::vector<std::string> parts;
    std::string current;
    for (char c : ip_address) {
        if (c == '.') {
            parts.push_back(current);
            current.clear();
        } else {
            current.push_back(c);
        }
    }
    parts.push_back(current);
    if (parts.size() != 4)
        return false;
    for (const std::string &part : parts) {
        // 1-3 digit fields only; the length cap also keeps stoi in range.
        if (part.empty() || part.size() > 3)
            return false;
        for (char c : part) {
            if (!std::isdigit(static_cast<unsigned char>(c)))
                return false;
        }
        const int value = std::stoi(part);
        if (value < 0 || value > 255)
            return false;
    }
    return true;
}
import React from 'react'
import dynamic from 'next/dynamic'
import CloseIcon from '../../../../public/svg/close.svg'
import {Selected} from "../Selected";
import i18next, {t} from "i18next";
const BuyNow = dynamic(() => import('../../../components/Shopify/ShopifyBuyButton'), {
ssr: false,
})
const Pricing = ( { hide } ) => {
const { language } = i18next;
const useBuyButton = language !== 'ru'
const handleBuy = (cardId) => {
if (cardId) {
document
.getElementById(cardId)
.querySelector('iframe')
.contentDocument
.querySelector('.shopify-buy__btn')
.click()
}
}
return (
<>
<div
className={`fixed flex items-center justify-center overflow-auto z-50 bg-[#F8F9F9] left-0 right-0 top-0 bottom-0 w-full h-full`}
>
<div
className={`bg-[#F8F9F9] w-full h-full lg:container lg:mx-auto relative`}
>
<CloseIcon
className='absolute top-[30px] right-[40px] max-w-[36px] cursor-pointer'
onClick={ hide }
/>
<div className="text-primary text-32px text-center font-semibold mt-[40px] mb-32px lg:mb-10">{t('pricing.title')}</div>
<div className="flex flex-col md:grid md:grid-cols-2 md:gap-x-7 px-4 lg:px-0">
<div className="flex flex-col items-center bg-white rounded-20px p-8 mb-[1.875rem] md:mb-[50px] lg:mb-0 shadow-[0_30px_100px_-15px_rgba(0,0,0,0.15)]">
<div className='max-h-60 h-[171px] md:h-[241px]'>
<img src='/img/buy/card-3.png' alt={t('pricing.pack3.title')} className='h-[171px] md:h-full text-center mx-auto' />
</div>
<div className="text-[#0C1116] text-3xl font-semibold lg:text-[2.5rem]">
<Selected
text={t('pricing.pack3.title')}
selected={t('pricing.pack3.selected')}
classes={'text-emerald-400'}
/>
</div>
<div className='mt-3 mb-[2.375rem] lg:mb-14 text-[1.063rem] lg:text-2xl text-center text-secondary lg:max-w-[437px]'>
{t('pricing.pack3.description')}
</div>
{ !useBuyButton &&
<a
href='https://www.mvideo.ru/products/kriptovalutnyi-koshelek-tangem-wallet-nabor-iz-3-kart-10030608?utm_source=tangemcomru3х&utm_medium=cpm&utm_campaign=Vendor_Tangem_Flight_E-Р_Tangem_Wallet_26.04-31.12.2022'
className='block px-9 bg-primary text-white text-[1.375rem] lg:text-lg font-semibold rounded-[6.25rem] py-3 lg:py-4 text-center max-w-[277px] mb-10'
>
Купить сейчас
</a>
}
{useBuyButton &&
<>
<button
type="button"
onClick={() => handleBuy('buy-now-6677839577154')}
className='block px-9 bg-primary text-white text-[1.375rem] lg:text-lg font-semibold rounded-[6.25rem] py-3 lg:py-4 text-center max-w-[277px]'
>
Buy now $69.90
</button>
<div className='text-[#DD1919] text-[1.063rem] lg:text-lg font-semibold mt-6 lg:mt-[1.125rem]'>
50% OFF <span className='text-[#757575] line-through'>$148.49</span>
</div>
<BuyNow iframeId="pack-3" id='6677839577154' />
</>
}
</div>
<div className="flex flex-col items-center bg-white rounded-20px p-8 mb-24 md:mb-[50px] lg:mb-0 shadow-[0_30px_100px_-15px_rgba(0,0,0,0.15)]">
<div className='max-h-60 h-[171px] md:h-[241px]'>
<img src='/img/buy/card-2.png' alt={t('pricing.pack2.title')} className='h-[171px] md:h-full text-center mx-auto' />
</div>
<div className="text-[#0C1116] text-3xl font-semibold lg:text-[2.5rem]">
<Selected
text={t('pricing.pack2.title')}
selected={t('pricing.pack2.selected')}
classes={'text-secondary'}
/>
</div>
<div className='mt-3 mb-[2.375rem] lg:mb-14 text-[1.063rem] lg:text-2xl text-center text-secondary lg:max-w-[377px]'>
{t('pricing.pack2.description')}
</div>
{ !useBuyButton &&
<a
href='https://www.mvideo.ru/products/kriptovalutnyi-koshelek-tangem-wallet-nabor-iz-2-kart-10030607?utm_source=tangemcomru2х&utm_medium=cpm&utm_campaign=Vendor_Tangem_Flight_E-Р_Tangem_Wallet_26.04-31.12.2022'
className='block px-9 bg-[#ECEDED] text-primary text-[1.375rem] lg:text-lg font-semibold rounded-[6.25rem] py-3 lg:py-4 text-center max-w-[277px] mb-10'
>
Купить сейчас
</a>
}
{ useBuyButton &&
<>
<button
type='button'
onClick={() => handleBuy('buy-now-6677836693570')}
className='block px-9 bg-[#ECEDED] text-primary text-[1.375rem] lg:text-lg font-semibold rounded-[6.25rem] py-3 lg:py-4 text-center max-w-[277px]'
>
Buy now $54.90
</button>
<div className='text-[#DD1919] text-[1.063rem] lg:text-lg font-semibold mt-6 lg:mt-[1.125rem]'>
50% OFF <span className='text-[#757575] line-through'>$104.99</span>
</div>
<BuyNow iframeID="pack-2" id="6677836693570" />
</>
}
</div>
</div>
</div>
</div>
</>
)
}
export default Pricing
|
#ifndef DRESS_H
#define DRESS_H
#include <Arduino.h>
#include <Adafruit_NeoPixel.h>
#include <avr/power.h>
// Controls a NeoPixel strip sewn into a dress; each method plays one
// lighting animation (the *_sync variants are presumably synchronised
// across devices — confirm in the implementation).
struct Dress
{
Dress(int pin);
Adafruit_NeoPixel lights;
void blue();
void blue_sync();
void rainbow();
void rainbow_sync();
private:
// Maps a 0-255 wheel position onto a rainbow colour.
uint32_t Wheel(byte WheelPos);
};
#endif |
package org.spongycastle.tls.crypto.impl;
import java.io.IOException;
/**
* Interface for block cipher services.
*/
public interface TlsBlockCipherImpl
{
    /**
     * Set the key to be used by the block cipher implementation supporting this service.
     *
     * @param key array holding the block cipher key.
     * @param keyOff offset into the array the key starts at.
     * @param keyLen length of the key in the array.
     */
    void setKey(byte[] key, int keyOff, int keyLen) throws IOException;
    /**
     * Initialise the parameters for the cipher operation.
     *
     * @param iv array holding the initialization vector (IV).
     * @param ivOff offset into the array the IV starts at.
     * @param ivLen length of the IV in the array.
     * @throws IOException if the parameters are inappropriate.
     */
    void init(byte[] iv, int ivOff, int ivLen) throws IOException;
    /**
     * Perform the cipher encryption/decryption returning the output in output.
     * <p>
     * Note: we have to use doFinal() here as it is the only way to guarantee output from the underlying cipher.
     * </p>
     * @param input array holding input data to the cipher.
     * @param inputOffset offset into input array data starts at.
     * @param inputLength length of the input data in the array.
     * @param output array to hold the cipher output.
     * @param outputOffset offset into output array to start saving output.
     * @return the amount of data written to output.
     * @throws IOException in case of failure.
     */
    int doFinal(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset) throws IOException;
    /**
     * Return the blocksize (in bytes) of the underlying block cipher.
     *
     * @return the cipher's blocksize.
     */
    int getBlockSize();
}
|
package consul
import (
"github.com/google/wire"
consulApi "github.com/hashicorp/consul/api"
"github.com/pkg/errors"
"github.com/spf13/viper"
"go.uber.org/zap"
)
// NewOptions unmarshals the "consul" section of the viper configuration
// into a consul API client config.
func NewOptions(v *viper.Viper) (*consulApi.Config, error) {
	o := new(consulApi.Config)
	if err := v.UnmarshalKey("consul", o); err != nil {
		return nil, errors.Wrapf(err, "viper unmarshal consul options error")
	}
	return o, nil
}
// Client bundles the consul configuration together with the live API
// client constructed from it.
type Client struct {
Config *consulApi.Config
Client *consulApi.Client
}
// New constructs a consul API client from the given configuration.
// When no server address is configured it logs a warning and returns
// (nil, nil), effectively disabling the provider.
func New(o *consulApi.Config, logger *zap.Logger) (*Client, error) {
	if o.Address == "" {
		logger.Warn("The consul server address is not configured, and the provider will not take effect.")
		return nil, nil
	}
	consulCli, err := consulApi.NewClient(o)
	if err != nil {
		return nil, errors.Wrap(err, "create consul client error")
	}
	return &Client{
		Config: o,
		Client: consulCli,
	}, nil
}
var ProviderSet = wire.NewSet(New, NewOptions)
|
from typing import List, Dict, Tuple
from math import inf
Node = int  # Assuming nodes are represented as integers

def all_pairs_shortest_paths(vertices: List[Node], distances: Dict[Tuple[Node, Node], float]) -> Dict[Tuple[Node, Node], float]:
    """Floyd-Warshall all-pairs shortest paths.

    `distances` maps directed edges (u, v) to their weight; missing pairs
    are treated as unreachable (inf) and self-distances are zero.
    Returns a dict of shortest distances for every vertex pair (edge keys
    outside `vertices` are passed through unchanged, as before).
    """
    dist: Dict[Tuple[Node, Node], float] = {}
    for u in vertices:
        for v in vertices:
            dist[(u, v)] = inf
    for edge, weight in distances.items():
        dist[edge] = weight
    for v in vertices:
        dist[(v, v)] = 0
    # Relax every pair through each possible intermediate vertex.
    for k in vertices:
        for i in vertices:
            for j in vertices:
                via_k = dist[(i, k)] + dist[(k, j)]
                if via_k < dist[(i, j)]:
                    dist[(i, j)] = via_k
    return dist
import random
# Cities represented as (x, y) coordinates
cities = [(0, 0), (5, 4), (3, 7), (8, 1), (9, 6)]

# Initialize the population with random starting routes
def init_population(pop_size):
    """Return pop_size random tours, each a permutation of all city indices."""
    city_indices = range(len(cities))
    return [random.sample(city_indices, len(cities)) for _ in range(pop_size)]
# Fitness function calculating total distance traveled
def fitness(order):
    """Total Euclidean distance along consecutive cities of the tour."""
    legs = zip(order, order[1:])
    return sum(get_distance(cities[a], cities[b]) for a, b in legs)
# Function to get the Euclidean distance of two coordinates
def get_distance(xy1, xy2):
    """Euclidean distance between two (x, y) points."""
    dx = xy2[0] - xy1[0]
    dy = xy2[1] - xy1[1]
    return (dx * dx + dy * dy) ** (1 / 2)
# Generate a new population by selecting the fittest individuals, crossover and mutation
def generate_population(population, pop_size):
    """Build the next generation: selection, then crossover, then mutation."""
    return [mutate(crossover(parent)) for parent in selection(population, pop_size)]
# Selection, crossover and mutation
def selection(population, pop_size):
    """Fitness-proportionate selection (weight = inverse tour length), with replacement."""
    weights = [1 / fitness(order) for order in population]
    chosen = []
    for _ in range(pop_size):
        chosen.append(random.choices(population, weights=weights, k=1)[0])
    return chosen
def crossover(x1, x2=None):
    """Order-preserving crossover producing one valid child tour.

    Fixes three defects in the original:
    * it referenced an undefined module-level ``population`` (NameError at
      runtime);
    * naive split-point splicing produced tours with duplicated and missing
      cities;
    * it returned a tuple, which crashed the downstream ``mutate`` step.

    :param x1: parent tour (list of city indices).
    :param x2: optional second parent; when omitted, a random permutation
        of ``x1`` is used.
    :return: one child tour that is a permutation of ``x1``.
    """
    if x2 is None:
        x2 = random.sample(x1, len(x1))
    split_point = random.randint(1, len(x1) - 1)
    head = x1[:split_point]
    # Fill the tail with x2's genes in order, skipping cities already used,
    # so the child remains a valid permutation.
    tail = [gene for gene in x2 if gene not in head]
    return head + tail
def mutate(x1):
    """Swap two randomly chosen positions of the tour in place; return it."""
    i = random.randint(0, len(x1) - 1)
    j = random.randint(0, len(x1) - 1)
    x1[i], x1[j] = x1[j], x1[i]
    return x1
# Main genetic algorithm loop
def run_genetic_algorithm(pop_size, num_generations):
    """Evolve random tours for num_generations and return the best final tour.

    Fixes: the original reported ``population[0]`` as the fittest individual
    (the population is not sorted), and raised NameError when
    ``num_generations == 0``. The fittest tour is now chosen by minimal
    total distance.
    """
    # initialize population
    population = init_population(pop_size)
    fittest_order = None
    for gen in range(num_generations):
        # new population
        population = generate_population(population, pop_size)
        # get the fittest individual: lower distance == fitter tour
        fittest_order = min(population, key=fitness)
        fittest_distance = fitness(fittest_order)
        print('Generation {}: Fittest order = {}, Fittest distance = {}'
              .format(gen, fittest_order, fittest_distance))
    return fittest_order
if __name__ == '__main__':
    # Demo run: 100 random tours evolved for 4 generations.
    pop_size = 100
    num_generations = 4
    run_genetic_algorithm(pop_size, num_generations)
#!/bin/sh
# Starts the grafeas server once its database is reachable.
config='config.yaml'
timeout=10
if [ -f "$config" ]
then
    # Extract "host:port" from the config's `host: "..."` line.
    db=`grep -Eo "host:\s*\"(.+:[0-9]+)\"" $config | cut -d \" -f2`
    echo "Database: $db"
    ./wait.sh "$db" "$timeout"
    up=$?
    echo "Database up=0? $up"
    if [ $up -eq 0 ] ; then
        # Fix: run the server directly. The previous `$(...)` form captured
        # the server's stdout and tried to execute it as a command.
        ./grafeas-server --config "$config"
    else
        echo "Database is down, exiting" 1>&2
        exit 1
    fi
else
    echo "No config file is specified, exiting" 1>&2
    exit 1
fi
echo "Done"
|
'use strict';
//server constants
require('dotenv').config();
const express = require('express');
const app = express();
// Default port from the environment; falls back to 3000.
const port = process.env.PORT || 3000;
const notFoundHandler = require('./error-handlers/404');
const authRoute = require('../routes/routes');
const logger = require('../middleware/logger');
// Global middleware: request logging, JSON body parsing, auth routes.
app.use(logger);
app.use(express.json());
app.use(authRoute);
app.get('/', (req, res) => {
res.status(200).send('Hello world!');
});
// Any unmatched route falls through to the 404 handler.
app.use('*', notFoundHandler);
module.exports = {
server: app,
// start(port): begins listening; throws when no port is supplied.
// NOTE(review): the module-level `port` above is never used here — confirm
// whether start() should default to it.
start: port => {
if(!port) { throw new Error('port missing');}
app.listen(port, () => {
//console.log(`listening on ${port}`);
});
},
};
|
<filename>src/utils/featureFlag.ts
import { FeatureFlag } from 'components/core/enums';
import isProduction from './isProduction';
// Per-environment feature flag tables; production and development are
// maintained independently.
const PROD_FLAGS: Record<FeatureFlag, boolean> = {
  GENERAL_MEMBERSHIP_MINT: true,
  COLLECTORS_NOTE: true,
};

const DEV_FLAGS: Record<FeatureFlag, boolean> = {
  GENERAL_MEMBERSHIP_MINT: true,
  COLLECTORS_NOTE: true,
};

// Resolve the flag against whichever table matches the current environment.
export const isFeatureEnabled = (flag: FeatureFlag) => {
  const flags = isProduction() ? PROD_FLAGS : DEV_FLAGS;
  return flags[flag];
};
|
#!/bin/sh
# Regenerates XCTestManifests.swift for the listed test targets whenever any
# test source is newer than the existing manifest (or it is missing).
manifest="XCTestManifests.swift"
# Fix: this script runs under /bin/sh; `[[ ... ]]` is a bashism that fails on
# POSIX shells such as dash. Use the portable `[ -z ... ]` test.
if [ -z "${PROJECT_DIR}" ]
then
    testspath="${PWD}/Tests"
else
    testspath="${PROJECT_DIR}/../Tests"
fi
testdirs="CAtomicsTests"
for testdir in ${testdirs}
do
    manifestpath="${testspath}/${testdir}/${manifest}"
    if /bin/test ! -s "${manifestpath}"
    then
        # echo "$manifestpath does not exist"
        generate="yes"
    else
        newer=`/usr/bin/find "${testspath}/${testdir}" -newer "${manifestpath}"`
        if /bin/test "${newer}"
        then
            # echo "newer files than $manifestpath"
            generate="yes"
        fi
    fi
done
if /bin/test "${generate}"
then
    /usr/bin/find "${testspath}" -name "${manifest}" -exec rm -f {} \;
    echo "Regenerating test manifests"
    /usr/bin/swift test --generate-linuxmain
    prev="${PWD}"
    cd "${testspath}/../"
    /usr/bin/git apply "Utilities/test-compatibility.diff"
    cd "${prev}"
else
    echo "No need to regenerate test manifests"
fi
|
<filename>generix/core/action/command.py
"""
A module for different commands which composes into Action.
"""
from generix.core.cell.direction import Direction
def reaches_bound(board, point, direction):
    """
    Checks whether a cell reached bounds or not.
    :param direction: direction to check.
    :param board: board instance.
    :param point: Point object.
    :return: True - reached, False - not reached (None for unknown direction,
        mirroring the original if/elif chain).
    """
    checks = {
        Direction.UP: lambda: is_upper_bound(point.y),
        Direction.UP_RIGHT: lambda: is_upper_bound(point.y) or is_right_bound(point.x, board),
        Direction.RIGHT: lambda: is_right_bound(point.x, board),
        Direction.DOWN_RIGHT: lambda: is_lower_bound(point.y, board) or is_right_bound(point.x, board),
        Direction.DOWN: lambda: is_lower_bound(point.y, board),
        Direction.DOWN_LEFT: lambda: is_lower_bound(point.y, board) or is_left_bound(point.x),
        Direction.LEFT: lambda: is_left_bound(point.x),
        Direction.UP_LEFT: lambda: is_upper_bound(point.y) or is_left_bound(point.x),
    }
    check = checks.get(direction)
    return check() if check is not None else None
def is_left_bound(x):
    """
    Was left bound reached?
    :param x: x coordinate.
    :return: True - bound was reached, False - otherwise.
    """
    at_or_past_left_edge = x <= 0
    return at_or_past_left_edge
def is_upper_bound(y):
    """
    Was upper bound reached?
    :param y: y coordinate.
    :return: True - bound was reached, False - otherwise.
    """
    at_or_past_top_edge = y <= 0
    return at_or_past_top_edge
def is_lower_bound(y, board):
    """
    Was lower bound reached?
    :param y: y coordinate.
    :param board: Board instance (provides the height).
    :return: True - bound was reached, False - otherwise.
    """
    bottom_row = board.height - 1
    return y >= bottom_row
def is_right_bound(x, board):
    """
    Was right bound reached?
    :param x: x coordinate.
    :param board: Board instance (provides the width).
    :return: True - bound was reached, False - otherwise.
    """
    rightmost_column = board.width - 1
    return x >= rightmost_column
def move(board, point, cell):
    """
    Sets cell on point position.
    :param board: Board instance.
    :param point: Point instance of cell location.
    :param cell: CellId instance.
    :return: None.
    """
    # Thin delegation: the board owns placement logic.
    board.set_cell(point, cell)
def turn(cell, angle):
    """
    Turns cell to specific angle.
    :param cell: Cell object.
    :param angle: angle to turn to from current position.
    :return: None.
    """
    # Thin delegation: the cell tracks its own orientation.
    cell.turn(angle)
def is_cell_of_types(board, point, types):
    """
    Searches cell type in the list and returns an index if found.
    :param board: Board instance.
    :param point: Point instance.
    :param types: list of cell types (enum).
    :return: index if found, otherwise it returns -1.
    """
    cell_id = board.get_cell(point).id
    if cell_id in types:
        return types.index(cell_id)
    return -1
|
#!/bin/bash
# Runs every command listed in cli_tests.txt, echoing a readable form first;
# aborts on the first failure. --prompt/-p pauses before each command
# ("q" then ENTER quits).
prompt=false
[ "$1" == "--prompt" ] || [ "$1" == "-p" ] && prompt=true
mapfile -t lines < cli_tests.txt
# Fix: bash arrays are 0-indexed while `seq N` is 1-based — the old loop
# skipped the first command and read one slot past the end of the array.
for i in $(seq 0 $(( ${#lines[@]} - 1 ))) ; do
    CMD=${lines[$i]}
    echo
    # Skip blank lines and lines containing a '#' (comments).
    [ ! "$CMD" ] || [[ "$CMD" =~ \# ]] && continue
    echo "[INFO] Running: "
    count=0
    # Use a distinct loop variable so the outer index is not clobbered.
    for word in $(echo $CMD); do
        [ $count -eq 0 ] && echo "$ checksit check"
        [ $count -ge 2 ] && echo "    $word"
        let count+=1
    done
    if [ $prompt == "true" ]; then
        echo
        echo "Press ENTER to continue..."
        read waiter
        if [ "$waiter" == "q" ]; then
            echo "[INFO] Exiting!"
            exit
        fi
    fi
    $CMD
    if [ $? -ne 0 ]; then
        echo
        echo "[ERROR] The following command failed...so aborting: $CMD"
        break
    fi
    echo
    echo "=========================================================================="
done
|
package io.silverspoon.oxpresso;
import org.apache.camel.Exchange;
import java.io.File;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by ppecka on 1/24/15.
*/
public class DS18B20 {
//private static Pattern pattern = Pattern.compile("t=(\\d+)");
public DS18B20() {
}
public void transform(Exchange exchange) {
String lines[] = exchange.getIn().getBody(String.class).split("\\r?\\n");
try {
if (!lines[0].matches(".*crc=.{2}\\s+YES")) {
System.out.println(lines[0]);
throw new IOException("ERROR - sensor read with wrong CRC - could not read temperature!");
}
} catch (IOException e) {
System.out.println(e.getMessage());
exchange.getOut().setBody(e.getMessage());
}
try {
Pattern pattern = Pattern.compile("t=(\\d+)");
Matcher matcher = pattern.matcher(lines[1]);
if (matcher.find()) {
String temp = matcher.group(1);
StringBuilder output = new StringBuilder();
output.append(temp.substring(0,temp.length()-3));
output.append(".");
output.append(temp.substring(temp.length()-3,temp.length()));
Status hu = new Status();
hu.setName(new File(String.valueOf(exchange.getIn().getHeader("CamelFileAbsolutePath"))).getParentFile().getName());
hu.setValue(output.toString());
exchange.getOut().setBody(hu);
} else {
throw new Exception("ERROR: could not parse sensor temperature value!");
}
} catch (Exception e) {
System.out.println(e.getMessage());
exchange.getOut().setBody(String.format("Sensor ID: %s - could not read temperature!", "XXXXX"));
}
}
public void getSensorID(Exchange exchange) {
exchange.getIn().setHeader("DS18B20_ID", new File(String.valueOf(exchange.getIn().getHeader("CamelFileAbsolutePath"))).getParentFile().getName());
}
} |
#!/bin/bash
# Synthesize speech from a label file using a trained acoustic model.
# Edit the three variables below to select the checkpoint directory, the
# input label file and the checkpoint epoch to load.

# Model checkpoint directory (earlier experiments kept for reference):
#path='aco_ckpt_73'
#path='sattaco_N3_dff1024_embsz256_maxseqlen120_stPOSE_Noam_ckpt_73_pat20'
#path='rnn450_embsz128_aco_maxseqlen120_ckpt_73_pat20'
path='rnn1300_embsz512_aco_maxseqlen120_ckpt_73_pat20'

# Input label file:
#lab='T6B72110000.lab'
#lab='T6B73200230.lab'
lab='T6B73200187.lab'

# Checkpoint epoch to load:
#epoch='11'
#epoch='18'
epoch='12'

# Quote every expansion so paths containing spaces or glob characters
# cannot be word-split by the shell.
python synthesize.py --model_cfg "$path/main.opts" \
    --aco_model "$path/best-val_e${epoch}_aco_model.ckpt" \
    --synthesize_lab "data/tcstar/lab/73/$lab" --force-dur --cfg cfg/tcstar_73.cfg --pf 1.04 --cuda
|
package main
import (
"bytes"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strconv"
"github.com/aymerick/raymond"
"github.com/julienschmidt/httprouter"
"github.com/rlayte/toystore"
"github.com/rlayte/toystore/adapters/memory"
)
var Toy *toystore.Toystore
// Favicon serves the site icon from disk on every request.
func Favicon(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	icon, err := ioutil.ReadFile("public/favicon.ico")
	if err != nil {
		panic(err)
	}
	w.Write(icon)
}
// Home parses the home-page template once at startup and returns a handler
// that renders it with the current ring layout and stored keys.
func Home() func(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	contents, err := ioutil.ReadFile("views/home.html")
	if err != nil {
		panic(err)
	}
	tmpl, err := raymond.Parse(string(contents))
	if err != nil {
		panic(err)
	}
	return func(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
		// The ring may not exist yet (e.g. before the node has joined).
		ring := ""
		if Toy.Ring != nil {
			ring = Toy.Ring.String()
		}
		page, err := tmpl.Exec(map[string]interface{}{
			"ring": ring,
			"keys": Toy.Data.Keys(),
		})
		if err != nil {
			panic(err)
		}
		w.Write([]byte(page))
	}
}
// Get handles GET /api: looks up the value stored under the "key" form
// parameter and writes it to the response; replies 404 when absent.
func Get(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	key := r.FormValue("key")
	value, ok := Toy.Get(key)
	if !ok {
		// Send a real 404 status line.  The previous code set a "Status"
		// response *header*, which has no effect on the HTTP status code.
		w.WriteHeader(http.StatusNotFound)
		fmt.Fprint(w, "Not found\n")
		return
	}
	fmt.Fprint(w, value)
}
// Put handles POST /api: stores the "data" form value under "key" and
// reports success or failure in the response body.
func Put(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	key := r.FormValue("key")
	value := r.FormValue("data")
	if Toy.Put(key, value) {
		fmt.Fprint(w, "Success\n")
	} else {
		fmt.Fprint(w, "Failed\n")
	}
	// NOTE: the previous http.Redirect(w, r, "/", 301) here was dead code.
	// The body (and the implicit 200 status) had already been written, so
	// the Location header and 301 status were never sent to the client;
	// removing it preserves the observable behaviour and silences the
	// "superfluous response.WriteHeader" log noise.
}
// Serve stores the node in the package-level Toy, wires up all HTTP routes
// and blocks serving requests; it only returns on a fatal listen error.
func Serve(t *toystore.Toystore) {
	Toy = t

	mux := httprouter.New()
	mux.GET("/", Home())
	mux.GET("/toystore/force.csv", GraphData)
	mux.GET("/favicon.ico", Favicon)
	mux.ServeFiles("/static/*filepath", http.Dir("public"))
	mux.GET("/api", Get)
	mux.POST("/api", Put)

	log.Println("Running server on port", t.Port)
	log.Fatal(http.ListenAndServe(t.Address(), mux))
}
// GraphData emits the ring topology as a source,target,value CSV consumed by
// the d3 visualisation on the home page.
func GraphData(w http.ResponseWriter, r *http.Request, params httprouter.Params) {
	if Toy.Ring == nil {
		return
	}
	addresses := Toy.Ring.AddressList()

	var buf bytes.Buffer
	buf.WriteString("source,target,value\n")

	// Guard: the closing-edge code below indexes [1] and [len-1]; with
	// fewer than two addresses it would panic, so emit just the header.
	if len(addresses) < 2 {
		w.Write(buf.Bytes())
		return
	}

	for i, val := range addresses {
		next := addresses[(i+1)%len(addresses)]
		if val != "" && next != "" {
			buf.WriteString("localhost") // Temporary hack for d3 parsing.
			buf.WriteString(toystore.RpcToAddress(val))
			buf.WriteString(",")
			buf.WriteString("localhost") // Temporary hack for d3 parsing.
			buf.WriteString(toystore.RpcToAddress(next))
			buf.WriteString(",10\n") // Edge weight used by d3.
		}
	}

	// Also a little hacky -- connects the ring.
	// NOTE(review): this links the last address to index 1, not 0, which
	// looks like an off-by-one — the modulo loop above already wraps to
	// index 0; behaviour kept as-is pending confirmation.
	buf.WriteString("localhost") // Temporary hack for d3 parsing.
	buf.WriteString(toystore.RpcToAddress(addresses[len(addresses)-1]))
	buf.WriteString(",")
	buf.WriteString("localhost") // Temporary hack for d3 parsing.
	buf.WriteString(toystore.RpcToAddress(addresses[1]))
	buf.WriteString(",1\n")

	w.Write(buf.Bytes())
}
// main parses the port from the command line and starts either the seed
// node (no seed address) or a regular node that joins via the seed.
func main() {
	if len(os.Args) != 2 {
		fmt.Printf("usage: %s [port] [seed]\n", os.Args[0])
		os.Exit(1)
	}

	port, err := strconv.Atoi(os.Args[1])
	if err != nil {
		panic(err)
	}

	// This section is gross: the seed node and the gossip/RPC addresses
	// are hard-coded rather than taken from the command line.
	const seedPort = 3000
	seed := ":3010" // gossip-protocol seed address
	metaData := toystore.ToystoreMetaData{RPCAddress: ":3020"}

	if port == seedPort {
		log.Println("Seed node")
		seed = "" // the seed node has nothing to join
	}
	Serve(toystore.New(port, memory.New(), seed, metaData))
}
|
<gh_stars>10-100
package chylex.hee.system.collections;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
/**
 * Utility for streaming the indices of set bits in an int or long bit set.
 * Indices are emitted in ascending order, bit 0 being the least significant.
 */
public final class BitStream{
	/**
	 * Returns a stream of the indices of all set bits in {@code bitSet}.
	 */
	public static IntStream forInt(int bitSet){
		IntStream.Builder indices = IntStream.builder();
		// Repeatedly emit the lowest set bit, then clear it with x &= x-1.
		for(int remaining = bitSet; remaining != 0; remaining &= remaining - 1){
			indices.accept(Integer.numberOfTrailingZeros(remaining));
		}
		return indices.build();
	}

	/**
	 * Returns a stream of the indices of all set bits in {@code bitSet}.
	 */
	public static LongStream forLong(long bitSet){
		LongStream.Builder indices = LongStream.builder();
		// Same lowest-set-bit iteration as forInt, on 64 bits.
		for(long remaining = bitSet; remaining != 0L; remaining &= remaining - 1L){
			indices.accept(Long.numberOfTrailingZeros(remaining));
		}
		return indices.build();
	}
}
|
<filename>src/main/java/com/lewisallen/rtdptiCache/busInterfacer/SIRIResponseParser.java
package com.lewisallen.rtdptiCache.busInterfacer;
import com.lewisallen.rtdptiCache.caches.BusCodesCache;
import com.lewisallen.rtdptiCache.caches.Caches;
import com.lewisallen.rtdptiCache.logging.ErrorHandler;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.XML;
import org.springframework.http.ResponseEntity;
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.stream.Collectors;
public class SIRIResponseParser {

    // Timestamp of the SIRI response currently being parsed; used to turn
    // absolute departure times into seconds-from-now.  This is instance
    // state, so one parser instance must not run parse() concurrently.
    private OffsetDateTime responseTime;

    /**
     * Parses a response from the SIRI service and updates the cache.
     *
     * @param response An XML response received from the SIRI service.
     */
    public void parse(ResponseEntity<String> response) {
        // TODO: split up into multiple
        JSONObject siriResponse = XML.toJSONObject(response.getBody());
        Map<Object, JSONObject> cache = new ConcurrentHashMap<Object, JSONObject>();

        // Store the time of response for later calculation of time until departure.
        this.responseTime = OffsetDateTime.parse(responsePathTransverser(siriResponse, "ServiceDelivery")
                .get("ResponseTimestamp").toString());

        // Check if there are any visits, if not then we can simply wipe the cache.
        if (responsePathTransverser(siriResponse, "StopMonitoringDelivery").has("MonitoredStopVisit")) {
            List<JSONObject> monitoredStopsList = new ArrayList<>();

            // The XML converter yields a JSONObject when exactly one stop
            // visit is present and a JSONArray otherwise; handle both shapes.
            if (responsePathTransverser(siriResponse, "StopMonitoringDelivery")
                    .get("MonitoredStopVisit") instanceof JSONArray) {
                JSONArray monitoredStops = responsePathTransverser(siriResponse, "StopMonitoringDelivery")
                        .getJSONArray("MonitoredStopVisit");
                // Convert JSONArray to standard List<JSONObject>.
                for (int i = 0; i < monitoredStops.length(); i++) {
                    monitoredStopsList.add(monitoredStops.getJSONObject(i));
                }
            } else {
                // Single stop visit: wrap the lone JSONObject in the list.
                monitoredStopsList.add(responsePathTransverser(siriResponse, "MonitoredStopVisit"));
            }

            // Group stop visits into lists by their MonitoringRef (NaPTAN code).
            Map<String, List<JSONObject>> groupedList = monitoredStopsList.parallelStream()
                    .collect(Collectors.groupingBy(this::getMonitoringRef));

            // Create a cache entry per stop holding the visits plus name info.
            for (String naptanKey : groupedList.keySet()) {
                // Remove irrelevant info, add the DepartureSeconds field and
                // sort each stop's visits by time until departure.
                List<JSONObject> trimmedList = groupedList.get(naptanKey)
                        .stream()
                        .map(this::removeFields)
                        .map(this::addDeparture)
                        .sorted(Comparator.comparingInt(this::getSecondsUntilDeparture))
                        .collect(Collectors.toList());

                // Store some name information from the NaPTAN cache alongside the visits.
                JSONObject jsonToCache = new JSONObject();
                jsonToCache.put("StopName", BusCodesCache.busCodeCache.get(naptanKey).getLongDescription());
                jsonToCache.put("Identifier", BusCodesCache.busCodeCache.get(naptanKey).getIdentifier());
                jsonToCache.put("MonitoredStopVisits", trimmedList);

                // Add final object to local cache.
                cache.put(naptanKey, jsonToCache);
            }
        }

        // Copy the local cache to the global cache.
        Caches.resetBusData(cache);
    }

    /**
     * Reads the DepartureSeconds value previously added by addDeparture.
     * Returns 0 (and logs a warning) when the field is missing or malformed
     * so that sorting never throws.
     */
    private int getSecondsUntilDeparture(JSONObject o) {
        try {
            return Integer.parseInt(o.getJSONObject("MonitoredVehicleJourney")
                    .getJSONObject("MonitoredCall")
                    .get("DepartureSeconds").toString());
        } catch (Exception e) {
            String message = String.format("Error extracting departure seconds from %s", o);
            ErrorHandler.handle(e, Level.WARNING, message);
            return 0;
        }
    }

    /**
     * Removes unnecessary/unused fields from the provided JSONObject.
     *
     * @param j JSONObject to remove fields from.
     * @return Processed JSONObject.
     */
    private JSONObject removeFields(JSONObject j) {
        j.remove("RecordedAtTime");
        j.remove("MonitoringRef");
        // Hoist the repeated lookup; the original also removed DirectionRef
        // twice — the duplicate has been dropped.
        JSONObject journey = j.getJSONObject("MonitoredVehicleJourney");
        journey.remove("DirectionRef");
        journey.remove("Monitored");
        journey.remove("VehicleRef");
        journey.remove("OperatorRef");
        journey.remove("FramedVehicleJourneyRef");
        journey.remove("DirectionName");
        journey.remove("LineRef");
        return j;
    }

    /**
     * Adds a seconds-until-departure field to the provided JSONObject.
     *
     * @param j Stop visit JSONObject
     * @return Stop visit JSONObject with departure time key.
     */
    private JSONObject addDeparture(JSONObject j) {
        // Get the object that holds the time values.
        JSONObject parent = j.getJSONObject("MonitoredVehicleJourney").getJSONObject("MonitoredCall");
        long departureSeconds = getDepartureSeconds(parent);
        parent.put("DepartureSeconds", departureSeconds);
        return j;
    }

    /**
     * Calculates the seconds until departure for a provided MonitoredCall JSONObject.
     * Attempts to get the departure seconds in the following order:
     * ExpectedDepartureTime > AimedDepartureTime > ExpectedArrivalTime > AimedArrivalTime
     *
     * @param json Monitored call JSONObject
     * @return Seconds until journey departure, or 0 when no time field parses.
     */
    private long getDepartureSeconds(JSONObject json) {
        String[] jsonKeys = new String[]{
                "ExpectedDepartureTime",
                "AimedDepartureTime",
                "ExpectedArrivalTime",
                "AimedArrivalTime"
        };
        for (String key : jsonKeys) {
            try {
                return parseTime(json, key);
            } catch (Exception e) {
                // Missing key or unparseable value: fall through to the next
                // (less preferred) time field.
            }
        }
        String message = "Failed to parse any departure time in JSON: " + json.toString();
        ErrorHandler.handle(new RuntimeException(message), Level.SEVERE, message);
        return 0;
    }

    /**
     * Given a Monitored Call object and a key, parses the time from the given key.
     *
     * @param json Monitored Call JSON Object
     * @param key  key to parse
     * @return Time in seconds until departure, relative to the response time.
     */
    private long parseTime(JSONObject json, String key) {
        OffsetDateTime expectedDepartureTime = OffsetDateTime.parse(json.get(key).toString());
        return responseTime.until(expectedDepartureTime, ChronoUnit.SECONDS);
    }

    /**
     * Helper method to aid in traversing a SIRI response.
     *
     * @param siriResponse the full SIRI response
     * @param key          the key to look for in the response. If the key is not
     *                     one of the handled nodes, ServiceDelivery is returned.
     * @return JSONObject for key.
     */
    private JSONObject responsePathTransverser(JSONObject siriResponse, String key) {
        // We never need anything above the Siri and ServiceDelivery nodes.
        JSONObject result = siriResponse.getJSONObject("Siri")
                .getJSONObject("ServiceDelivery");
        if (key.equals("StopMonitoringDelivery")) {
            result = result.getJSONObject(key);
        } else if (key.equals("MonitoredStopVisit")) {
            result = result.getJSONObject("StopMonitoringDelivery")
                    .getJSONObject("MonitoredStopVisit");
        }
        return result;
    }

    /**
     * Returns the monitoring reference key for the given JSON object.
     *
     * @param json JSON to parse
     * @return value of MonitoringRef key.
     */
    private String getMonitoringRef(JSONObject json) {
        return json.get("MonitoringRef").toString();
    }
}
|
package su.zano.mainsite.ZanoSu.util;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Type-level annotation carrying a view's routing configuration: the URI it
 * is mapped to, the name to display for it, and how instances are created.
 * Retained at runtime so it can be read reflectively.
 * NOTE(review): presumably consumed by a view/navigator registry elsewhere
 * in the application — confirm against the code that reads it.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface ViewConfig {
    // Instance-creation strategy: new instance per navigation, created on
    // first use, or created eagerly up front.
    enum CreateMode{ALWAYS_NEW, LAZY_INIT, EAGER_INIT}

    // URI the view is mapped to.
    String uri();

    // Human-readable name for the view.
    String displayName();

    // Defaults to a fresh view instance every time.
    CreateMode createMode() default CreateMode.ALWAYS_NEW;
}
|
/**
* Contains classes that communicate to an Http Server.
*/
package io.opensphere.server.serverprovider.http;
|
#!/bin/bash
# Remove the build directory inside the given workspace.
# Usage: <script> <workspace-dir>

# Script args
WORKSPACE="$1"

# Fail early when no workspace is supplied: unquoted `cd $WORKSPACE` with an
# empty argument becomes plain `cd`, which changes to $HOME — and the
# following rm -rf ./build would then delete ~/build.
if [ -z "$WORKSPACE" ]; then
    echo "usage: $0 <workspace-dir>" >&2
    exit 1
fi

cd "$WORKSPACE" && rm -rf ./build
<filename>gulpfile.js
var gulp = require('gulp');
var uglify = require('gulp-uglifyjs');
var concat = require('gulp-concat');
var del = require('del');
var templateCache = require('gulp-angular-templatecache');
var htmlmin = require('gulp-htmlmin');
var htmlreplace = require('gulp-html-replace');
var cssmin = require('gulp-clean-css');
var sourcemaps = require('gulp-sourcemaps');
var replace = require('gulp-replace');
// Single build task: cleans dist/, copies + caches Angular templates,
// bundles/minifies CSS and JS, and rewrites index.html to use the bundles.
// NOTE(review): del() is asynchronous and its promise is not awaited, and
// none of the gulp.src streams are returned, so the steps below race the
// deletion and gulp cannot know when the task finishes — TODO confirm and
// serialise if builds are flaky.
gulp.task('default', function() {
    // Delete existing dist files
    del([
        'dist/**/*',
    ]);

    // All app templates except index pages and bower components.
    var templates = [
        'app/**/*.html',
        '!*/index.html',
        '!*/index-async.html',
        '!*app/bower_components/**/*.html'
    ];

    // Add templates for lazy loading
    gulp.src(templates)
        .pipe(gulp.dest('dist/'));

    // Create a template cache for eager loading
    gulp.src(templates)
        .pipe(htmlmin(
            {
                collapseWhitespace: true,
                // Angular directives are case sensitive.
                caseSensitive: true,
            }
        ))
        .pipe(templateCache('templates.min.js',{
            module:'myApp'
        }))
        .pipe(gulp.dest('dist/js'));

    // Concatenate and minify all css files
    gulp.src([
        'app/bower_components/html5-boilerplate/dist/css/normalize.css',
        'app/bower_components/html5-boilerplate/dist/css/main.css',
        'app/app.css',
    ])
        .pipe(sourcemaps.init())
        .pipe(concat('app.min.css'))
        .pipe(cssmin({'keepSpecialComments' : '0'}))
        .pipe(sourcemaps.write('../css'))
        .pipe(gulp.dest('dist/css/'));

    // Concatenate and minify all js files
    gulp.src([
        'app/**/*.js',
        '!app/bower_components/**/*.js',
        '!app/**/*_test.js'
    ])
        .pipe(concat('app.min.js'))
        .pipe(uglify({ 'outSourceMap' : true }))
        .pipe(gulp.dest('dist/js/'));

    // Clean up imports: point index.html at the minified bundles, swap
    // src/href attributes for their cdn="" alternatives, then minify.
    gulp.src('app/index.html')
        .pipe(htmlreplace({
            'css':'css/app.min.css',
            'js':['js/app.min.js','js/templates.min.js']
        }))
        .pipe(replace(/src="(.+?)"\scdn="(.+?)"/gmi, 'src="$2"'))
        .pipe(replace(/href="(.+?)"\scdn="(.+?)"/gmi, 'href="$2"'))
        .pipe(htmlmin(
            {
                collapseWhitespace: true,
                caseSensitive: true,
            }
        ))
        .pipe(gulp.dest('dist/'));

    // Copy data folder
    gulp.src('app/data/**/*')
        .pipe(gulp.dest('dist/data'));
});
#include <cstdint>
#include "Block.hpp"
// Build an inclusive block range [lower, upper].
block_range make_range(uint64_t lower, uint64_t upper) {
    return {lower, upper};
}

// Build a degenerate range covering exactly one block.
block_range make_range(uint64_t target) {
    return make_range(target, target);
}
|
import { BasicBotConfigRepository } from "domain_guild-configs";
import {
ConfigurateUsecase,
InvalidValueError,
} from "protocol_configurate-usecase";
import {
AddFunction,
addWithRecord,
CheckFunction,
guildOnly,
Pipelines,
} from "../util";
/**
 * Builds the `add` operation of the configurate usecase.
 *
 * Currently only the `disabledCommands` key is supported: it validates the
 * value as a known command name and records it as disabled for the guild.
 */
export default function (
  basicBotConfig: BasicBotConfigRepository,
  commandNames: Set<string>,
  checkUpdate: CheckFunction
): ConfigurateUsecase["add"] {
  const handlers: Record<string, AddFunction> = {
    disabledCommands: async (target, value, exec) => {
      const guild = guildOnly(target, exec);
      // Only plain strings naming a known command may be disabled.
      if (typeof value !== "string" || !commandNames.has(value)) {
        throw new InvalidValueError(value);
      }
      // Verify the executor is allowed to update this guild's config.
      await checkUpdate({ guild }, exec);
      const result = await basicBotConfig.addDisabledCommands(guild, value);
      return Pipelines.guildSet(guild)(result);
    },
  };
  return addWithRecord(handlers);
}
|
<gh_stars>0
const WebSocket = require('hyco-ws')
// Thin wrapper around hyco-ws that exposes a WebSocket server relayed
// through an Azure Relay hybrid connection.
class WsAzure {
  // config.azure is expected to provide: ns (relay namespace), path
  // (hybrid-connection path), keyrule and key (SAS credentials) —
  // TODO confirm against the config schema.
  constructor (config) {
    this.config = config.azure
    this.wss = null
  }

  /**
   * Create a relayed WebSocket server listening on the Azure Relay
   * hybrid connection described by the stored config.
   *
   * NOTE(review): the previous doc block here was copied from ws's
   * WebSocketServer and did not match this method — `options` is never
   * used; the relay URI and SAS token are built from `this.config`.
   *
   * @param {Object} options Unused; kept for call-site compatibility.
   * @param {Function} callback Invoked once per accepted WebSocket.
   * @returns {WsAzure} this, to allow chaining.
   */
  Server (options, callback) {
    const uri = WebSocket.createRelayListenUri(this.config.ns, this.config.path)
    this.wss = WebSocket.createRelayedServer({
      server: uri,
      // Token is a function so it can be re-created when the relay asks.
      token: () => WebSocket.createRelayToken(uri, this.config.keyrule, this.config.key)
    }, ws => {
      callback && callback(ws)
      /* ws.onmessage = function (event) {
        console.log('onmessage: ' + event.data)
      }
      ws.on('close', function () {
        console.log('connection closed')
      }) */
    })
    /* this.wss.on('error', err => {
      console.log('error', err)
    }) */
    return this
  }

  // Forward an event subscription to the underlying relayed server.
  on (name, listener) {
    this.wss.on(name, listener)
  }

  // Shut down the relayed server.
  close () {
    this.wss.close()
  }
}
module.exports = WsAzure
|
'''
Debian and other distributions "unbundle" requests' vendored dependencies, and
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
The problem with this is that not only requests itself imports those
dependencies, but third-party code outside of the distros' control too.
In reaction to these problems, the distro maintainers replaced
``requests.packages`` with a magical "stub module" that imports the correct
modules. The implementations were varying in quality and all had severe
problems. For example, a symlink (or hardlink) that links the correct modules
into place introduces problems regarding object identity, since you now have
two modules in `sys.modules` with the same API, but different identities::
requests.packages.urllib3 is not urllib3
With version ``2.5.2``, requests started to maintain its own stub, so that
distro-specific breakage would be reduced to a minimum, even though the whole
issue is not requests' fault in the first place. See
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
request.
'''
from __future__ import absolute_import
import sys
# On Debian we use the unbundling strategy implemented by pip inside
# pip._vendor.__init__.
def vendored(modulename):
    """Alias ``<this package>.<modulename>`` to the system-wide module.

    The bundled copy is tried first; when it is absent (the "unbundled"
    distro case), the global module is imported instead and registered in
    ``sys.modules`` under the vendored name, plus as an attribute of the
    parent package, so ``import requests.packages.<modulename>`` keeps
    working.
    """
    vendored_name = "{0}.{1}".format(__name__, modulename)

    # Prefer the bundled copy when it exists.
    try:
        __import__(vendored_name, globals(), locals(), level=0)
        return
    except ImportError:
        pass

    # Fall back to the global (unbundled) module.
    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Both imports failed.  Stay silent: the caller will hit a normal
        # ImportError at the point the module is actually used, which is a
        # clearer failure mode than raising during this aliasing pass.
        return

    # Publish the global module under the vendored name, both in
    # sys.modules and as an attribute of the parent package/module.
    sys.modules[vendored_name] = sys.modules[modulename]
    base, head = vendored_name.rsplit(".", 1)
    setattr(sys.modules[base], head, sys.modules[modulename])
# Alias every vendored dependency — and the urllib3 submodules that
# third-party code is known to import directly — to the system copies.
for _name in (
    'chardet',
    'idna',
    'urllib3',
    'urllib3._collections',
    'urllib3.connection',
    'urllib3.connectionpool',
    'urllib3.contrib',
    'urllib3.contrib.ntlmpool',
    'urllib3.contrib.pyopenssl',
    'urllib3.exceptions',
    'urllib3.fields',
    'urllib3.filepost',
    'urllib3.packages',
    'urllib3.packages.ordered_dict',
    'urllib3.packages.six',
    'urllib3.packages.ssl_match_hostname',
    'urllib3.packages.ssl_match_hostname._implementation',
    'urllib3.poolmanager',
    'urllib3.request',
    'urllib3.response',
    'urllib3.util',
    'urllib3.util.connection',
    'urllib3.util.request',
    'urllib3.util.response',
    'urllib3.util.retry',
    'urllib3.util.ssl_',
    'urllib3.util.timeout',
    'urllib3.util.url',
):
    vendored(_name)
|
import { Routes, RouterModule } from '@angular/router';
import { AboutComponent } from './components/about.component';
const notesRoutes: Routes = [
{ path: '' , component: AboutComponent },
];
export default RouterModule.forChild( notesRoutes ); |
<reponame>Lydeon/Summer2018PSU<filename>project4/phonebill/src/main/java/edu/pdx/cs410J/seung2/Project4.java
package edu.pdx.cs410J.seung2;
import edu.pdx.cs410J.web.HttpRequestHelper;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.ParseException;
import java.util.Collection;
import java.util.Date;
import java.text.SimpleDateFormat;
import java.util.Iterator;
/**
* The main class that parses the command line and communicates with the
* Phone Bill server using REST.
*/
public class Project4 {
public static String README = "\nCourse: CS 401J\nProject 1: Designing a Phone Bill Application\nProgrammer: <NAME>" +
"\nDescription: This project parses the user's command line arguments and initialize PhoneCall class and PhoneBill class or " +
"executes options given by the user.\n PhoneBill will store the customer's name and collection of PhoneCall data." +
"\n PhoneCall will store caller and callee's number and starting and ending date and time.\n\nUsage: java edu.pdx.cs410J.<login-id>.Project1 [options] <args>\n\n" +
"Arguments are in this order: with example format\n" +
" - Customer: \"First Last\" or \"First Middle Last\"\n - Caller Number: XXX-XXX-XXXX\n - Callee Number: XXX-XXX-XXXX\n - Start Time: MM/DD/YYYY HH:MM\n - End Time: MM/DD/YYYY HH:MM" +
"\n\nOptions:\n -print : Prints a description of the new phone call\n -> If no information has been provided with print function then it will print error" +
"\n -README : Prints a README for this project and exits\n -> Which you have currently!";
public static final String MISSING_ARGS = "Missing command line arguments";
// Parse variables
public static String hostName = null;
public static String portNum = null;
static int print = 0;
static int search = 0;
static String name = null;
public static void main(String... args) {
PhoneBill bill = null;
PhoneCall call = null;
Date start = null;
Date end = null;
for(int i = 0; i < args.length; i++){
if(args[i].equals("-README")){
System.out.println(README);
System.exit(0);
}
else if(args[i].equals("-print")){
print = 1;
}
else if(args[i].equals("-host")){
hostName = args[i + 1];
i++;
}
else if(args[i].equals("-port")){
portNum = args[i + 1];
boolean intCheck = isInt(portNum);
// Check if the port number is valid
if(!intCheck){
System.err.println("Port number isn't an integer");
System.exit(1);
}
// If it's just with customer name then break
if(args.length == (i+3)){
name = args[i+2];
break;
}
i++;
}
else if(args[i].equals("-search")){
search = 1;
}
// If it's customer name / start time / end time format
else if((args.length - i) == 7){
name = args[i];
String sDate = args[i+1];
String sTime = args[i+2];
String sAP = args[i+3];
String eDate = args[i+4];
String eTime = args[i+5];
String eAP = args[i+6];
start = initDate(sDate, sTime, sAP);
end = initDate(eDate, eTime, eAP);
checkTimeOrder(start, end);
bill = new PhoneBill();
bill.setCustomer(name.split(" "));
break;
}
// If it's full argument
else if((args.length - i) == 9){
name = args[i];
String caller = args[i+1];
String callee = args[i+2];
String sDate = args[i+3];
String sTime = args[i+4];
String sAP = args[i+5];
String eDate = args[i+6];
String eTime = args[i+7];
String eAP = args[i+8];
start = initDate(sDate, sTime, sAP);
end = initDate(eDate, eTime, eAP);
checkTimeOrder(start, end);
call = new PhoneCall(caller, callee, start, end);
bill = new PhoneBill();
bill.setCustomer(name.split(" "));
break;
}
else{
System.out.println(MISSING_ARGS);
System.exit(1);
}
}
// If print option was there
if(print == 1){
System.out.println(call.toString());
}
int port = Integer.parseInt(portNum);
// Set up client
PhoneBillRestClient client = new PhoneBillRestClient(hostName, port);
try {
// POST
// Call was initialized with full argument then add
if(call != null) {
client.addPhoneCall(bill.getCustomer(), call);
}
// GET
else{
// If no search option
if(search == 0) {
// Retrieve the data via getAllPhoneCalls method
Collection<PhoneCall> temp = client.getAllPhoneCalls(name);
StringWriter sw = new StringWriter();
// Print it out
Messages.formatPrettyBill(new PrintWriter(sw, true), "All list for " + name,temp);
String msg = sw.toString();
System.out.println(msg);
}
// If search option was set
else{
// Retrieve the data via getSearchCalls method
Collection<PhoneCall> temp = client.getSearchCalls(bill.getCustomer(), start, end);
StringWriter sw = new StringWriter();
// Print it out
Messages.formatPrettyBill(new PrintWriter(sw, true), "Searched list for " + name,temp);
String msg = sw.toString();
System.out.println(msg);
}
}
}catch(IOException e){
System.err.println("Connection Error!");
System.exit(1);
}
System.exit(0);
}
/**
* Makes sure that the give response has the expected HTTP status code
* @param code The expected status code
* @param response The response from the server
*/
private static void checkResponseCode( int code, HttpRequestHelper.Response response )
{
if (response.getCode() != code) {
error(String.format("Expected HTTP code %d, got code %d.\n\n%s", code,
response.getCode(), response.getContent()));
}
}
/**
* Use System.err to print out the error message
*
* @param message
* Message of error
*/
private static void error( String message )
{
PrintStream err = System.err;
err.println("** " + message);
System.exit(1);
}
/**
* Prints usage information for this program and exits
* @param message An error message to print
*/
private static void usage( String message )
{
PrintStream err = System.err;
err.println("** " + message);
err.println();
err.println("usage: java Project4 host port [word] [definition]");
err.println(" host Host of web server");
err.println(" port Port of web server");
err.println(" word Word in dictionary");
err.println(" definition Definition of word");
err.println();
err.println("This simple program posts words and their definitions");
err.println("to the server.");
err.println("If no definition is specified, then the word's definition");
err.println("is printed.");
err.println("If no word is specified, all dictionary entries are printed");
err.println();
System.exit(1);
}
/**
* Check if it's int or not
*
* @param input
* String for validating if it's int or not
* @return
* return boolean for result
*/
public static boolean isInt(String input){
boolean isIt;
// Try parsing String to Int, if can't then return false
try{
Integer.parseInt(input);
isIt = true;
}catch(NumberFormatException e){
isIt = false;
}
return isIt;
}
/**
* Use isInt for the whole array of String
*
* @param args
* Array of String to be validated
* @param name
* name of the value we're testing
*/
public static void checkInt(String[] args, String name){
// Go through all the String and check if it's integer or not
for(String arg : args){
boolean checker = isInt(arg);
if(!checker){
System.err.println("Incorrect " + name + " integer value!");
System.exit(1);
}
}
}
/**
* Check the caller/callee number's validity
*
* @param number
* String of number combination
* @param name
* Name of the tested
*/
public static void checkNumber(String number, String name){
boolean numFormat;
// Check if it's separated via -
numFormat = number.contains("-");
if(!numFormat){
System.err.println("Incorrect " + name + " number format");
System.exit(1);
}
// Split the number
String[] call = number.split("-");
// if it's not in 3 section then error
if(call.length != 3){
System.err.println("Incorrect " + name + " number format");
System.exit(1);
}
// check if it's integer values
checkInt(call, name);
}
/**
* Check Date's validity
*
* @param date
* String of date for testing
* @param name
* Name of the tested
*/
public static void checkDate(String date, String name){
boolean dateFormat;
// Check if "/" is contained and if not then error
dateFormat = date.contains("/");
if(!dateFormat){
System.err.println("Incorrect " + name + " date format");
System.exit(1);
}
String[] args = date.split("/");
// If there aren't Month, Day, and Year or more than that then error
if(args.length != 3){
System.err.println("Incorrect " + name + " date value!");
System.exit(1);
}
// If Month and Day has more than 2 digits then error
if((args[0].length() > 2) || (args[1].length() > 2)){
System.err.println("Incorrect " + name + " time value!");
System.exit(1);
}
// Change Month and Day string to int
int month = Integer.parseInt(args[0]);
int day = Integer.parseInt(args[1]);
// If the month value is outside the bound then error
if (month < 1 || month > 12) {
System.err.println("Incorrect " + name + " month value!");
System.exit(1);
}
// If the day value is outside the bound then error
if (day < 1 || day > 31) {
System.err.println("Incorrect " + name + " date value!");
System.exit(1);
}
// If year isn't in 4 digit then error
if (args[2].length() != 4) {
System.err.println("Incorrect " + name + " year value!");
System.exit(1);
}
}
/**
* Check time value
*
* @param time
* String of time for testing
* @param name
* Name of the tested
*/
public static void checkTime(String time, String name){
boolean timeFormat;
// Check if ":" is contained and if not then error
timeFormat = time.contains(":");
if (!timeFormat) {
System.err.println("Incorrect " + name + " time format");
System.exit(1);
}
String[] args = time.split(":");
// If there is less or more than 2 inputs (Hour and Min) then error
if(args.length != 2){
System.err.println("Incorrect " + name + " time value!");
System.exit(1);
}
// If hour and min is bigger than 2 digits then error
if((args[0].length() > 2) || (args[1].length() > 2)){
System.err.println("Incorrect " + name + " time value!");
System.exit(1);
}
// String to Int for hour and min value
int hour = Integer.parseInt(args[0]);
int min = Integer.parseInt(args[1]);
// If Hour is outside the bound then error
if(hour < 0 || hour > 12){
System.err.println("Incorrect " + name + " hour value!");
System.exit(1);
}
// If Min is outside the bound then error
if(min < 0 || min >= 60){
System.err.println("Incorrect " + name + " minute value!");
System.exit(1);
}
}
/**
* Check AMPM format of command line argument
*
* @param ampm
* String argument from command line
* @param name
* Name of the tested
*/
// Check AM and PM
public static void checkAMPM(String ampm, String name){
// if it's not am or pm then error
if(!ampm.equals("AM") && !ampm.equals("am") && !ampm.equals("pm") && !ampm.equals("PM")){
System.err.println(name + " is not in am / AM / pm / PM format");
System.exit(1);
}
}
/**
 * Builds a java.util.Date from command-line pieces after validating each one.
 * Any validation failure or parse failure terminates the process with status 1.
 *
 * NOTE(review): SimpleDateFormat is lenient by default, so out-of-range
 * values that slip past the checkers would roll over instead of failing —
 * consider sdf.setLenient(false); confirm against checkDate's guarantees.
 *
 * @param day
 * String of MM/DD/YYYY
 * @param hour
 * String of HH:MM
 * @param ampm
 * String of AM or PM
 * @return
 * Return the initialized Date class object
 */
public static Date initDate(String day, String hour, String ampm){
Date date = null;
// Check for error
checkInt(day.split("/"), "date");
checkDate(day, "date");
checkInt(hour.split(":"), "time");
checkTime(hour, "time");
checkAMPM(ampm, "am/pm");
String input = day + " " + hour + " " + ampm;
// Using SimpleDateFormat Class to give format and parse through the String and initialize Date class
SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm a");
try{
date = sdf.parse(input);
}catch(ParseException e){
System.err.println("Setting Date class was unsuccessful");
System.exit(1);
}
return date;
}
/**
 * Ensures the start time does not come after the end time; equal times are
 * allowed. Prints an error and terminates with status 1 when start > end.
 *
 * @param start starting time value
 * @param end   ending time value
 */
public static void checkTimeOrder(Date start, Date end){
    if(start.getTime() > end.getTime()){
        // Fixed message typo: "latter" -> "later".
        System.err.println("Start time starts later than end time");
        System.exit(1);
    }
}
/**
 * Duplicate check used by pretty print; unlike the other validators this
 * reports the result instead of calling System.exit.
 *
 * A call counts as a duplicate only when caller, callee, start time string
 * and end time string all match an entry already in the collection.
 *
 * @param calls   existing PhoneCall entries to search
 * @param calling candidate PhoneCall being checked
 * @return 1 when an identical call already exists, otherwise 0
 */
public static int checkPrettyCallDupli(Collection<PhoneCall> calls, PhoneCall calling){
    // Walk every recorded call and compare all four identifying fields.
    for (PhoneCall existing : calls) {
        boolean sameParties = existing.getCaller().equals(calling.getCaller())
                && existing.getCallee().equals(calling.getCallee());
        boolean sameTimes = sameParties
                && existing.getEndTimeString().equals(calling.getEndTimeString())
                && existing.getStartTimeString().equals(calling.getStartTimeString());
        if (sameTimes) {
            return 1;
        }
    }
    return 0;
}
} |
<reponame>Ashindustry007/competitive-programming<gh_stars>100-1000
// https://cses.fi/problemset/task/1684
#include <bits/stdc++.h>
using namespace std;
using si = unordered_set<int>;
using vc = vector<char>;
using vi = vector<int>;
using vvi = vector<vi>;
using vsi = vector<si>;
vi a, b, s;
vvi c;
vsi g, h, p;
// Kosaraju pass 1: DFS over the implication graph `g`, marking visited
// nodes in `s` and appending each node to `a` in order of completion time.
void dfs1(int u) {
s[u] = 1;
for (int v : g[u])
if (!s[v])
dfs1(v);
a.push_back(u);
}
// Kosaraju pass 2: DFS over the transpose graph `h`; every node reached
// belongs to the current SCC, so record its component id in `b` and add
// it to the newest component list c.back().
void dfs2(int u) {
s[u] = 1;
b[u] = c.size() - 1;
c.back().push_back(u);
for (int v : h[u])
if (!s[v])
dfs2(v);
}
// DFS over the condensation graph `p` (SCC -> SCC edges), appending
// components to `a` in post-order for the final assignment sweep.
void dfs3(int u) {
s[u] = 1;
for (int v : p[u])
if (!s[v])
dfs3(v);
a.push_back(u);
}
// 2-SAT via SCC (CSES "Giant Pizza"). Each variable x_i maps to two
// nodes: 2i = literal "-x_i" and 2i+1 = literal "+x_i" (see the final
// output table "-+"[u % 2]). `g` holds implication edges, `h` its transpose.
int main() {
cin.tie(0), ios::sync_with_stdio(0);
char s1, s2;
int n, m, u, v;
cin >> n >> m;
g = vsi(2 * m);
h = vsi(2 * m);
// Each clause (l1 v l2) adds the implications !l1 -> l2 and !l2 -> l1,
// mirrored into the transpose graph h.
for (int i = 0; i < n; i++) {
cin >> s1 >> u >> s2 >> v;
u = (u - 1) * 2;
v = (v - 1) * 2;
if (s1 == '-' && s2 == '-') {
g[u + 1].insert(v);
g[v + 1].insert(u);
h[v].insert(u + 1);
h[u].insert(v + 1);
} else if (s1 == '-' && s2 == '+') {
g[u + 1].insert(v + 1);
g[v].insert(u);
h[v + 1].insert(u + 1);
h[u].insert(v);
} else if (s1 == '+' && s2 == '-') {
g[u].insert(v);
g[v + 1].insert(u + 1);
h[v].insert(u);
h[u + 1].insert(v + 1);
} else if (s1 == '+' && s2 == '+') {
g[u].insert(v + 1);
g[v].insert(u + 1);
h[v + 1].insert(u);
h[u + 1].insert(v);
}
}
// Pass 1: finish-time order over g.
s = vi(2 * m);
for (int i = 0; i < 2 * m; i++)
if (!s[i])
dfs1(i);
// Pass 2: peel SCCs off the transpose in reverse finish order.
s = vi(2 * m);
b = vi(2 * m);
for (int i = 2 * m - 1; i >= 0; i--)
if (!s[a[i]]) {
c.push_back(vi());
dfs2(a[i]);
}
// Unsatisfiable iff some variable's two literals share an SCC.
for (int i = 0; i < m; i++)
if (b[2 * i] == b[2 * i + 1]) {
cout << "IMPOSSIBLE\n";
return 0;
}
// Build the condensation graph p over the k components.
int k = c.size();
p = vsi(k);
for (int i = 0; i < 2 * m; i++) {
u = b[i];
for (int j : g[i]) {
v = b[j];
if (u != v)
p[u].insert(v);
}
}
// Post-order the condensation; assigning each variable from the first
// component seen fixes every literal consistently.
s = vi(k);
a = vi();
for (int i = 0; i < k; i++)
if (!s[i])
dfs3(i);
vc r(m);
for (int i : a)
for (int u : c[i])
if (!r[u / 2])
r[u / 2] = "-+"[u % 2];
for (int i = 0; i < m; i++)
cout << r[i] << " \n"[i == m - 1];
}
|
<filename>src/array.c
#include <string.h>
#include <stdarg.h>
#include "common.h"
#include "array.h"
struct ds_array_s{
void *data;
size_t elem_size;
size_t size; /* total used size */
size_t capacity; /* total capacity */
ds_ctor_f ctor;
ds_dtor_f dtor;
ds_copy_f copy;
ds_move_f move;
ds_error_f err;
ds_allocator_t* allocator;
};
/*
 * Create a dynamic array.
 *
 * parameters
 *   elem_size  - size in bytes each element will take up
 *   start_size - starting number of elements
 * Variadic tail is a DS_END-terminated list of (tag, value) pairs:
 *   DS_FUNC_CTOR, DS_FUNC_DTOR, DS_FUNC_COPY, DS_FUNC_ERR, DS_TYPE_ALLOCATOR.
 *
 * if ctor is NULL nothing is done to initialize the element
 * if dtor is not NULL the function will be applied to each element before free;
 * the array is assumed to be responsible for freeing all memory.
 *
 * returns a pointer to a valid ds_array_t structure or NULL
 */
ds_array_t ds_array_create( size_t elem_size, size_t start_size, ... ){
    ds_ctor_f _ctor = NULL;
    ds_dtor_f _dtor = NULL;
    ds_move_f _move = NULL;
    ds_copy_f _copy = NULL;
    ds_error_f _err = ds_error_stderr;
    ds_allocator_t* allocator = NULL;
    ds_array_t array = NULL;
    int arg_type = DS_END;
    size_t i = 0;
    va_list args;
    va_start( args, start_size );
    arg_type = va_arg( args, int );
    while( arg_type != DS_END ){
        switch( arg_type ){
            case DS_FUNC_CTOR:
                _ctor = va_arg(args, ds_ctor_f);
                break;
            case DS_FUNC_DTOR:
                _dtor = va_arg(args, ds_dtor_f);
                break;
            case DS_TYPE_ALLOCATOR:
                allocator = va_arg(args, ds_allocator_t* );
                break;
            case DS_FUNC_ERR:
                _err = va_arg(args, ds_error_f);
                break;
            case DS_FUNC_COPY:
                _copy = va_arg(args, ds_copy_f);
                break; /* BUG FIX: missing break previously fell through to default */
            default:
                break;
        }
        arg_type = va_arg(args, int );
    }
    va_end( args );
    /* TODO(review): no tag ever assigns _move, so move semantics cannot be
     * enabled through this constructor even though the API documents them;
     * a DS_FUNC_MOVE case appears to be missing — confirm against ds header. */
    if( allocator == NULL ){
        allocator = &ds_default_allocator;
    }
    if( _err == NULL ){
        _err = ds_error_stderr;
    }
    if( _copy == NULL && _move == NULL ){
        _err("You must provide a copy or move function");
        return NULL;
    }
    array = (ds_array_t)allocator->calloc(1, sizeof( struct ds_array_s ));
    if ( !array ) {
        _err("unable to allocate memory for array structure" );
        return NULL;
    }
    array->elem_size = elem_size;
    array->allocator = allocator;
    array->capacity = start_size;
    array->size = 0;
    array->ctor = _ctor;
    array->dtor = _dtor;
    array->move = _move;
    array->copy = _copy;
    array->err = _err;
    array->data = allocator->calloc( elem_size, start_size );
    if( !array->data ){
        /* %zu fixes the old "%d" / size_t argument mismatch (UB). */
        _err( "unable to allocate %zu bytes for array", start_size * elem_size );
        /* BUG FIX: the struct itself was leaked on this path. */
        allocator->free( array );
        return NULL;
    }
    if( array->ctor != NULL ){
        /* Default-construct every starting slot. */
        for( i = 0; i < start_size; i++ ){
            char *element = (char*)array->data + (i * elem_size);
            array->ctor( (void*)element, allocator );
        }
    }
    return array;
}
/*
 * Destroy the array and release all memory it owns.
 * Takes a pointer to the handle so it can be NULLed out after freeing.
 * Safe to call with *arr == NULL.
 *
 * Note: the destructor is applied to every slot up to `capacity`, not just
 * `size`, because ds_array_create/ds_array_grow construct all slots.
 */
void ds_array_destroy( ds_array_t *arr ){
int i = 0;
ds_array_t array = *arr;
if( !array ){ return; }
if( array->dtor != NULL ){
/* Tear down each constructed element before freeing the buffer. */
for( i = 0; i < array->capacity; i++ ){
char *element = (char*)array->data+(i*array->elem_size);
array->dtor( (void*)(element) , array->allocator );
}
}
array->allocator->free( array->data );
array->allocator->free(*arr);
/* Clear the caller's handle so further use fails fast. */
if( (*arr) ){ *arr = NULL; }
}
/*
 * Grow the array's capacity by `amount` elements.
 * New slots are zero-filled and, when a constructor was supplied,
 * initialized with it. On realloc failure the array is left unchanged.
 */
void ds_array_grow( ds_array_t array, size_t amount ){
    size_t old_cap = array->capacity;
    size_t i = 0;
    /* Use a temporary so the original buffer survives a failed realloc. */
    void *grown = array->allocator->realloc( array->data, (old_cap+amount)*array->elem_size );
    if( !grown ){
        array->err( "unable to grow array to %zu elements", old_cap+amount );
        return;
    }
    array->data = grown;
    char *bytes = (char*)array->data;
    memset( bytes + array->elem_size*old_cap, 0, amount*array->elem_size );
    if( array->ctor != NULL ){
        /* BUG FIX: the loop previously ran while `i < amount`, so whenever
         * old_cap >= amount none of the new slots were constructed. */
        for( i = old_cap; i < old_cap + amount; i++ ){
            array->ctor( (char*)array->data + i*array->elem_size, array->allocator );
        }
    }
    array->capacity = old_cap + amount;
}
/*
 * Calculate a capacity based on index by repeated doubling.
 * Currently fairly dumb: doubles until the value reaches `idx`.
 *
 * BUG FIX: a zero starting capacity made the doubling loop spin forever
 * (0 * 2 == 0); it is now bumped to 1 before doubling.
 */
size_t ds_array_calc_grow_size( size_t capacity, size_t idx ){
    size_t new_cap = capacity;
    if( new_cap == 0 ){
        new_cap = 1;
    }
    while( new_cap < idx ){
        new_cap *= 2;
    }
    return new_cap;
}
/*
 * Store `data` at index `idx`, growing the array first when needed.
 * Returns a pointer to the stored element, or NULL for a NULL array.
 *
 * Growth quirk: the value from ds_array_calc_grow_size is used as the
 * amount to grow BY, not the new capacity, so the array over-allocates
 * (capacity becomes old + doubled value) — presumably intentional slack.
 */
void* ds_array_set( ds_array_t array, size_t idx, void *data ){
if( !array ) { return NULL; }
size_t offset = idx*array->elem_size;
if( idx >= array->capacity ){
size_t grow_by = ds_array_calc_grow_size( array->capacity, idx );
/* NOTE(review): err is used here for an informational message, not an error. */
array->err("growing by %ld elements\n", grow_by );
ds_array_grow( array, grow_by );
/*array->err( "index %d is greater than the capacity of the array (%d)", idx, array->capacity );*/
}
if( array->move ){
/* Move: ownership of *data transfers into the array. */
array->move( (char*)array->data+offset, data );
} else {
/* Copy: note copy receives the ADDRESS of the destination pointer —
 * assumes ds_copy_f may reseat it; confirm against the ds_copy_f contract. */
void *element = (char*)array->data+offset;
array->copy( &element,data,array->allocator);
}
/* Writing past the current logical end extends size. */
if( idx >= array->size ){
array->size = idx+1;
}
return ((char*)array->data + offset);
}
/*
 * Return a pointer to the element at `idx` (NULL for a NULL array).
 *
 * NOTE(review): when idx >= capacity this only reports through err and
 * still returns an out-of-bounds pointer — dereferencing it is UB.
 * NOTE(review): get also EXTENDS size (a read acts like a reservation);
 * confirm this side effect is intentional. "%d" with size_t arguments in
 * the err format is also mismatched.
 */
void* ds_array_get( ds_array_t array, size_t idx ){
if( !array ) { return NULL; }
size_t offset = idx*array->elem_size;
if( idx >= array->capacity ){
array->err( "index %d is greater than the capacity of the array (%d)", idx, array->capacity );
}
if ( idx >= array->size ){
array->size = idx+1;
}
return ((char*)array->data + offset);
}
/*int ds_array_grow( ds_array_t array, size_t idx ); */
/*int ds_array_shrink( ds_array_t array, size_t amount ); */
/*
 * Apply `func` to each element in use, threading `user` through.
 * Stops early and returns the first non-NULL value `func` returns;
 * returns NULL after visiting every element or for a NULL array.
 */
void* ds_array_foreach( ds_array_t array, ds_traverse_f func, void *user ){
    size_t idx;
    char *base;
    void *result;
    if( !array ) { return NULL; }
    base = (char*)array->data;
    for( idx = 0; idx < array->size; idx++ ){
        result = func( base + idx*array->elem_size, user );
        if( result != NULL ){
            return result;
        }
    }
    return NULL;
}
/*
 * Number of elements in use.
 * NOTE(review): returns -1 for a NULL array, which wraps to SIZE_MAX
 * because the return type is size_t — callers must compare against
 * (size_t)-1, not a negative value.
 */
size_t ds_array_size( ds_array_t arr ){
if( !arr) { return -1;}
return arr->size;
}
/*
 * Total allocated capacity in elements.
 * NOTE(review): like ds_array_size, the -1 returned for a NULL array
 * wraps to SIZE_MAX.
 */
size_t ds_array_capacity( ds_array_t arr){
if( !arr) { return -1;}
return arr->capacity;
}
|
import {
Grid,
InputLabel,
Paper
} from '@material-ui/core';
import Dialog from '@material-ui/core/Dialog';
import { Field, Form, Formik } from 'formik';
import { TextField as FTextField } from 'formik-material-ui';
import React from 'react';
import { CustomButton } from 'views/home/components/button/button';
import { EmployeeData } from '../home';
import { CreateEmployerSchema } from '../validations/ValidationSchema';
import { useStyles } from './add-employer.style';
// Props for the AddEmployerModal dialog.
interface Props {
open: boolean; // whether the dialog is currently shown
setOpen: any; // visibility setter (NOTE(review): type as (open: boolean) => void instead of any)
maxEmpId: number; // highest existing employee id; the new record gets maxEmpId + 1
employers:EmployeeData[]; // current list the new record is appended to on submit
setEmployers:any; // state setter for the employee list (NOTE(review): avoid any)
}
// Modal dialog with a Formik form for creating a new employee record.
// On submit the validated values are appended to `employers` and the
// dialog closes; the Cancel button just closes it.
export const AddEmployerModal: React.FC<Props> = ({ open, setOpen, maxEmpId,employers,setEmployers }) => {
const classes = useStyles();
// New records are auto-numbered from the current maximum id and start Active.
const initialValues = {
id: maxEmpId + 1,
name: '',
surname: '',
dateOfBirth: '',
position: '',
phoneNumber: '',
status: "Active"
};
return (
<>
<Dialog
onClose={() => {
setOpen(false)
}}
open={open}
className={classes.dialog}
>
<div className={classes.root}>
<h2 style={{ marginBottom: 10 }}>
New Employer
</h2>
{/* Validation is delegated to CreateEmployerSchema (Yup). */}
<Formik
initialValues={initialValues}
validationSchema={CreateEmployerSchema}
onSubmit={(newEmployer) => {
setEmployers([...employers,newEmployer])
setOpen(false)
}}
>
{({ submitForm }): React.ReactNode => (
<Form>
<Grid container spacing={3}>
<Grid item xs={12}>
<Paper className={classes.containerSpacing}>
<Grid container>
{/*---------------- Name------------- */}
<Grid item xs={12} className={classes.field}>
<InputLabel>Name</InputLabel>
<Field
component={FTextField}
variant="outlined"
fullWidth
name="name"
/>
</Grid>
{/*---------------- Surname------------- */}
<Grid item xs={12} className={classes.field}>
<InputLabel>Surname</InputLabel>
<Field
component={FTextField}
variant="outlined"
fullWidth
name="surname"
/>
</Grid>
{/*---------------- DateOfBirth------------- */}
<Grid item xs={12} className={classes.field}>
<InputLabel>Date of Birth</InputLabel>
<Field
component={FTextField}
variant="outlined"
fullWidth
type="date"
name="dateOfBirth"
/>
</Grid>
{/*---------------- Position------------- */}
<Grid item xs={12} className={classes.field}>
<InputLabel>Position</InputLabel>
<Field
component={FTextField}
variant="outlined"
fullWidth
name="position"
/>
</Grid>
{/*---------------- PhoneNumber------------- */}
<Grid item xs={12} className={classes.field}>
<InputLabel>Phone Number</InputLabel>
<Field
component={FTextField}
variant="outlined"
fullWidth
name="phoneNumber"
/>
</Grid>
</Grid>
{/* Action row: cancel (gray) and submit (blue); labels are Azerbaijani UI text. */}
<Grid>
<div className={classes.buttonsContainer}>
<CustomButton
title="Ləğv et"
backcolor="gray"
color="white"
func={() => {
setOpen(false)
}}
/>
<CustomButton
title="Əlavə et"
func={submitForm}
backcolor="#2662F0"
color="white"
>
Əlavə et
</CustomButton>
</div>
</Grid>
</Paper>
</Grid>
</Grid>
</Form>
)}
</Formik>
</div>
</Dialog>
</>
);
};
|
#!/bin/sh
# Generate /etc/stunnel/stunnel.conf from environment variables and exec
# stunnel in the foreground.
# Required env: SERVICE, ACCEPT, CONNECT. Optional: CLIENT (default "no").

# Guard the cd: previously a missing directory wrote stunnel.conf into
# the current working directory instead of failing.
cd /etc/stunnel || exit 1

# Check the certificate BEFORE generating the config so a failed run does
# not leave a config file behind that can never work.
if ! [ -f service.crt ]
then
echo ERROR: service certificate not found 1>&2
exit 1
fi

cat > stunnel.conf <<_EOF_
debug = 7
foreground = yes
socket = l:TCP_NODELAY=1
socket = r:TCP_NODELAY=1
[${SERVICE}]
client = ${CLIENT:-no}
accept = ${ACCEPT}
connect = ${CONNECT}
CAfile = /rootCA.crt
cert = /etc/stunnel/service.crt
key = /etc/stunnel/service.key
verify = 2
_EOF_

exec stunnel "$@"
|
/**
* @brief Check the compiler supports coroutine intrinsics
* @author github.com/luncliff (<EMAIL>)
*
* @see LLVM libc++ <experimental/coroutine>
* @see https://llvm.org/docs/Coroutines.html
* @see Microsoft STL <coroutine>
* @see https://github.com/iains/gcc-cxx-coroutines
*/
#if defined(__has_builtin)
// known functions
// These four are common to every compiler that reports __has_builtin.
static_assert(__has_builtin(__builtin_coro_done));
static_assert(__has_builtin(__builtin_coro_resume));
static_assert(__has_builtin(__builtin_coro_destroy));
static_assert(__has_builtin(__builtin_coro_promise));
#if defined(__clang__)
// Clang additionally exposes the full LLVM coroutine lowering set.
static_assert(__has_builtin(__builtin_coro_size));
static_assert(__has_builtin(__builtin_coro_frame));
static_assert(__has_builtin(__builtin_coro_free));
static_assert(__has_builtin(__builtin_coro_id));
static_assert(__has_builtin(__builtin_coro_begin));
static_assert(__has_builtin(__builtin_coro_end));
static_assert(__has_builtin(__builtin_coro_suspend));
static_assert(__has_builtin(__builtin_coro_param));
#endif
#else
/**
 * @brief Try direct use of MSVC/Clang/GCC `__builtin_coro_*` intrinsics
 * (without declaration)
 * @note For MSVC, if the test failes, we have to use `_coro_*` intrinsics
 * @see <experimental/resumable>
 */
// Compilation of this initializer is itself the feature test.
bool has_builtin = __builtin_coro_done(nullptr);
#endif
|
#!/usr/bin/env bash
# author: Jacob Bishop
#
# api_v1 image. prod
# Builds the production api_v1 docker image; the API_* variables are
# defined in resources/nodejs.env.

# Fail fast: abort on errors, unset variables, or pipeline failures so a
# missing env file or variable cannot silently build a mistagged image.
set -euo pipefail

source ../../resources/nodejs.env

docker build \
-t "${API_APP_V1_NAME}":"${API_APP_V1_VERSION}" \
--build-arg API_BASE_V1_NAME="${API_BASE_V1_NAME}" \
--build-arg API_BASE_V1_VERSION="${API_BASE_V1_VERSION}" \
-f ./Dockerfile ../..
|
#!/bin/sh
# Report whether a DeviceManager process is currently running.
APP_NAME=DeviceManager
# Collect matching PIDs, excluding this pipeline's own grep and any kill
# commands that mention the app name.
tpid=`ps -ef|grep $APP_NAME|grep -v grep|grep -v kill|awk '{print $2}'`
# BUG FIX: the old unquoted `[ ${tpid} ]` expanded multiple PIDs into
# several words and made `[` fail with "too many arguments".
if [ -n "${tpid}" ]; then
echo 'App is running.'
else
echo 'App is NOT running.'
fi
<gh_stars>0
package org.glamey.training.jvm.loader.executor;
import com.google.common.io.Files;
import java.io.File;
import java.lang.reflect.Method;
/**
 * Demonstrates class hot-swapping: the same compiled BubbleSort class file
 * is loaded ten times, each time through a FRESH HotSwapClassLoader, so a
 * recompiled class file on disk would be picked up between iterations.
 */
public class Main {
public static void main(String[] args) throws Exception {
File classFile = new File("/Users/yang.zhou/idea_project/training/algorithm/target/classes/org/glamey/training/algorithm/sort/BubbleSort.class");
byte[] bytes = Files.toByteArray(classFile);
for (int i = 0; i < 10; i++) {
// New loader per iteration: classes loaded by discarded loaders can be redefined.
HotSwapClassLoader classLoader = new HotSwapClassLoader();
Class clazz = classLoader.loadClass(bytes);
Method mainMethod = clazz.getMethod("main", new Class[]{String[].class});
// NOTE(review): new String[]{null} IS the reflective argument array here,
// so the target main() receives a null String[] — confirm that is intended.
Object invoke = mainMethod.invoke(null, new String[]{null});
System.out.println(invoke);
}
}
}
|
import { makeGrpcService } from "./enums.js";
import { grpc, grpcRename, grpcWrap, makeGrpcFetch } from "./directives.js";
import { GraphQLSchema } from "graphql";
import { printSchemaWithDirectives } from "@graphql-tools/utils";
import prettier from "prettier";
import { ProtoService } from "./protos.js";
import { makeMutationType, makeDummyQueryType } from "./objects.js";
/**
 * Build the gRPC-backed GraphQL SDL for the given service configs.
 *
 * @param {import("./typings").ServiceConfig[]} services
 * @param {{ cwd: string }} options
 * @returns {string} pretty-printed schema text
 */
export function generate(services, { cwd }) {
  // Load each proto definition relative to the caller's working directory.
  const loadedServices = services.map((serviceConfig) =>
    ProtoService.load(serviceConfig, { cwd })
  );

  // Supporting types, built in the same order as before.
  const serviceEnum = makeGrpcService(services);
  const queryPlaceholder = makeDummyQueryType();
  const mutationType = makeMutationType(loadedServices);

  // Assemble the schema with the custom gRPC directives attached.
  const schema = new GraphQLSchema({
    directives: [grpc, grpcRename, grpcWrap, makeGrpcFetch(serviceEnum)],
    types: [serviceEnum, queryPlaceholder, mutationType],
  });

  // Render with directives preserved, then normalize formatting.
  const sdl = printSchemaWithDirectives(schema);
  return prettier.format(sdl, { parser: "graphql" });
}
|
package com.steven.mall.member;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
/**
 * Spring Boot entry point for the mall member service.
 * Registers with service discovery, enables Feign clients under
 * com.steven.mall.member.feign, and scans MyBatis mappers in
 * com.steven.mall.member.dao.
 */
@EnableFeignClients(basePackages = "com.steven.mall.member.feign")
@EnableDiscoveryClient
@SpringBootApplication
@MapperScan("com.steven.mall.member.dao")
public class MallMemberApplication {
public static void main(String[] args) {
SpringApplication.run(MallMemberApplication.class, args);
}
}
|
#!/bin/sh
# Wipe all installed dependencies across the monorepo and reinstall.

# -f keeps the wipe quiet and successful when targets are already gone;
# the old `rm -r` / bare `rm` failed on a clean checkout.
rm -rf node_modules
rm -f package-lock.json

# Remove each package's own node_modules, then nested lock files.
lerna clean --yes
find . -name "package-lock.json" -delete

npm install
lerna bootstrap
import uuid from 'uuid/v4';
import { CREATE_TODO, TOGGLE_TODO } from '../actions/todo';
/**
 * Realm writer that applies todo actions; the caller is expected to have
 * an open Realm write transaction.
 *
 * @param {Realm} realm   open realm instance
 * @param {{type: string, name?: string, id?: string}} action redux-style action
 */
export default function todoWriter(realm, action) {
  switch (action.type) {
    // Braces around each case scope the `const` declarations to that case
    // (fixes the no-case-declarations hazard of lexical declarations
    // leaking across the whole switch block).
    case CREATE_TODO: {
      const { name } = action;
      realm.create('ToDo', {
        id: uuid(),
        name
      });
      break;
    }
    case TOGGLE_TODO: {
      const { id } = action;
      // ids are uuid-generated, so an exact-match query yields 0 or 1 rows.
      const todos = realm.objects('ToDo').filtered(`id = "${id}"`);
      if (todos.length === 1) {
        const todo = todos[0];
        todo.completed = !todo.completed;
      }
      break;
    }
    default:
      break;
  }
}
|
package com.kudris.silentmode;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.support.v4.content.WakefulBroadcastReceiver;
import android.util.Log;
import java.util.Calendar;
/**
 * Wakeful receiver that hands alarm broadcasts off to NightModeService and
 * manages its own repeating 30-second alarm plus the boot receiver that
 * re-registers it after restart.
 */
public class NightModeBroadcastReceiver extends WakefulBroadcastReceiver {
private AlarmManager alarmMgr;
private PendingIntent alarmIntent;
@Override
public void onReceive(Context context, Intent intent) {
// Hold a wake lock while NightModeService runs.
Intent service = new Intent(context, NightModeService.class);
startWakefulService(context, service);
}
public void scheduleAlarm(Context context) {
Log.d("LOL", " schedule alarm called");
alarmMgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
Intent alarmReceiver = new Intent(context, NightModeBroadcastReceiver.class);
// NOTE(review): flags == 0; Android 12+ requires FLAG_IMMUTABLE or
// FLAG_MUTABLE on PendingIntents — confirm target SDK.
alarmIntent = PendingIntent.getBroadcast(context, 0, alarmReceiver, 0);
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(System.currentTimeMillis());
// Inexact repeat roughly every 30 seconds starting now.
alarmMgr.setInexactRepeating(
AlarmManager.RTC_WAKEUP,
calendar.getTimeInMillis(),
1000 * 30,
alarmIntent);
// Enable the boot receiver so the alarm is restored after reboot.
ComponentName receiver = new ComponentName(context, NightModeBootReceiver.class);
PackageManager pm = context.getPackageManager();
pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
}
public void cancelAlarm(Context context) {
// If the alarm has been set, cancel it.
// NOTE(review): alarmMgr/alarmIntent are only set when scheduleAlarm ran
// on THIS instance; a fresh receiver instance cannot cancel here — confirm.
if (alarmMgr != null) {
alarmMgr.cancel(alarmIntent);
}
// Disable the boot receiver so the alarm is not restored after reboot.
ComponentName receiver = new ComponentName(context, NightModeBootReceiver.class);
PackageManager pm = context.getPackageManager();
pm.setComponentEnabledSetting(receiver, PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
}
}
|
<reponame>sqlite-mpi/smpi-iop-react-native<filename>android/src/main/java/com/sqlitempi/iop/java/OutputFn.java
package com.sqlitempi.iop.java;
// @see https://stackoverflow.com/a/18279548
/** Callback interface through which the native layer delivers output messages. */
public interface OutputFn {
/** Invoked once per output message; {@code o_msg} is the message payload. */
void outputCb(String o_msg);
}
|
#!/bin/bash
#
# SLURM job: train a BiDAF+ELMo model on actively-sampled data and evaluate
# it on SQuAD and NewsQA dev/test sets.
#SBATCH --mem=200000
#SBATCH --job-name=a-l10-e
#SBATCH --partition=titanx-long
#SBATCH --output=active-logits-10-elmo-bsize10-%A.out
#SBATCH --error=active-logits-10-elmo-bsize10-%A.err
#SBATCH --gres=gpu:1
#SBATCH --mail-type=ALL
#SBATCH --mail-user=ruppaal@cs.umass.edu
# Log what we're running and where.
echo $SLURM_JOBID - `hostname` >> ~/slurm-jobs.txt
# Reset modules and load Python + CUDA 8.0 toolchain.
module purge
module load python/3.6.1
module load cuda80/blas/8.0.44
module load cuda80/fft/8.0.44
module load cuda80/nsight/8.0.44
module load cuda80/profiler/8.0.44
module load cuda80/toolkit/8.0.44
## Change this line so that it points to your bidaf github folder
cd ../../..
# Split active learning samples into Train and Dev sets
# (Done manually)
# Create dataset for joint training
#python joint_train.py --target_sampling_ratio 0.1 --debug_ratio 1.0 --train_ratio 0.9 --source_dir "data/squad/" --target_dir "data/active_learning/softmax/10/" --output_dir "data/active_learning/softmax/10/split_0.1/"
# Train
python -m allennlp.run train training_config/bidaf_active_elmo.json -s output/active_elmo_logits10
# Evaluate on SQuAD
python -m allennlp.run evaluate output/active_elmo_logits10/model.tar.gz --evaluation-data-file "data/squad/dev-v1.1.json"
# Evaluate on NewsQA
python -m allennlp.run evaluate output/active_elmo_logits10/model.tar.gz --evaluation-data-file "data/NewsQA/test-v1.1.json"
# Checklist when cloning this script for a new experiment:
# Change:
# - output/PATH
# - job name
# - out and err paths
# - config file
|
package cn.icepear.dandelion.upm.biz.mapper;
import cn.icepear.dandelion.upm.api.domain.entity.SysDict;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
* @author rim-wood
* @description 字典管理 Mapper 接口
* @date Created on 2019-04-18.
*/
@Mapper
public interface SysDictMapper extends BaseMapper<SysDict> {
}
|
<filename>pkg/util/testutil/file_comparisons.go
//
package testutil
import (
"encoding/json"
"io/ioutil"
"strings"
"testing"
"github.com/ghodss/yaml"
"github.com/pmezard/go-difflib/difflib"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// FileName is a file path used as the from/to label in generated diffs,
// so printed patches can be applied directly to the named file.
type FileName string
// Marshals both structs into JSON, compares them and prints text diff between them.
// Convenience wrapper around JSONCompareContext with a generic "expected" label.
func JSONCompare(t *testing.T, expectedData, actualData interface{}) bool {
return JSONCompareContext(t, FileName("expected"), expectedData, actualData)
}
// Marshals both structs into JSON, compares them and prints text diff between them.
// Accepts expectedFileName to print Unix patch that can be applied on top of the file.
// Marshal errors fail the test immediately via require.
func JSONCompareContext(t *testing.T, expectedFileName FileName, expectedData, actualData interface{}) bool {
expectedResult, err := json.MarshalIndent(expectedData, "", " ")
require.NoError(t, err)
actualResult, err := json.MarshalIndent(actualData, "", " ")
require.NoError(t, err)
return TextCompare(t, string(expectedFileName), "actual", string(expectedResult), string(actualResult))
}
// Marshals both structs into YAML, compares them and prints text diff between them.
// Convenience wrapper around YAMLCompareContext with a generic "expected" label.
func YAMLCompare(t *testing.T, expectedData, actualData interface{}) bool {
return YAMLCompareContext(t, FileName("expected"), expectedData, actualData)
}
// Marshals both structs into YAML, compares them and prints text diff between them.
// Accepts expectedFileName to print Unix patch that can be applied on top of the file.
// Marshal errors fail the test immediately via require.
func YAMLCompareContext(t *testing.T, expectedFileName FileName, expectedData, actualData interface{}) bool {
expectedResult, err := yaml.Marshal(expectedData)
require.NoError(t, err)
actualResult, err := yaml.Marshal(actualData)
require.NoError(t, err)
return TextCompare(t, string(expectedFileName), "actual", string(expectedResult), string(actualResult))
}
// ReadCompare loads the expected file from disk and diffs actualData against it.
// A read failure fails the test immediately via require.
// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16 — switch to
// os.ReadFile when the module's minimum Go version allows.
func ReadCompare(t *testing.T, expectedFileName, actualFileName FileName, actualData string) bool {
expectedData, err := ioutil.ReadFile(string(expectedFileName))
require.NoError(t, err)
return FileCompare(t, expectedFileName, actualFileName, string(expectedData), actualData)
}
// functions like assert in that it returns true/false if there is no difference and marks the test as failed
// while allowing the test to continue.
// Both inputs are trimmed of surrounding whitespace before being split into lines.
func FileCompare(t *testing.T, expectedFileName, actualFileName FileName, expectedData, actualData string) bool {
expectedLines := difflib.SplitLines(strings.TrimSpace(expectedData))
actualLines := difflib.SplitLines(strings.TrimSpace(actualData))
return TextLinesCompare(t, string(expectedFileName), string(actualFileName), expectedLines, actualLines)
}
// TextCompare diffs two multi-line strings (no trimming, unlike FileCompare)
// by splitting them into lines and delegating to TextLinesCompare.
func TextCompare(t *testing.T, expectedTextName, actualTextName string, expectedText, actualText string) bool {
return TextLinesCompare(t, expectedTextName, actualTextName, difflib.SplitLines(expectedText), difflib.SplitLines(actualText))
}
// TextLinesCompare produces a unified diff (3 lines of context) between the
// two line slices. An empty diff returns true; otherwise the diff is logged,
// the test is marked failed via assert.Fail, and false is returned — the
// test continues running.
func TextLinesCompare(t *testing.T, expectedTextName, actualTextName string, expectedLines, actualLines []string) bool {
diff := difflib.UnifiedDiff{
A: expectedLines,
B: actualLines,
FromFile: expectedTextName,
ToFile: actualTextName,
Context: 3,
}
text, err := difflib.GetUnifiedDiffString(diff)
require.NoError(t, err)
if text != "" {
t.Log(text)
assert.Fail(t, "comparison failed")
return false
}
return true
}
|
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 9 11:38:05 2015
AUTHOR:
<NAME> <<EMAIL>>
Gordon College
Based Octave functions written in the spring of 1999
Python version: March 2008, October 2008
"""
import numpy
def rkf( f, a, b, x0, tol, hmax, hmin ):
    """Runge-Kutta-Fehlberg method to solve x' = f(x,t) with x(t[0]) = x0.

    USAGE:
        t, x = rkf(f, a, b, x0, tol, hmax, hmin)

    INPUT:
        f     - function equal to dx/dt = f(x,t)
        a     - left-hand endpoint of interval (initial condition is here)
        b     - right-hand endpoint of interval
        x0    - initial x value: x0 = x(a)
        tol   - maximum value of local truncation error estimate
        hmax  - maximum step size
        hmin  - minimum step size

    OUTPUT:
        t     - NumPy array of independent variable values
        x     - NumPy array of corresponding solution function values

    RAISES:
        RuntimeError when the required tolerance cannot be met without the
        step size dropping below hmin.

    NOTES:
        Implements the 4th-5th order Runge-Kutta-Fehlberg Method for the
        initial value problem dx/dt = f(x,t), x(a) = x0 on [a,b].
        Based on pseudocode presented in "Numerical Analysis", 6th Edition,
        by Burden and Faires, Brooks-Cole, 1997.
    """
    # Coefficients used to compute the independent variable argument of f
    a2  =   2.500000000000000e-01  #  1/4
    a3  =   3.750000000000000e-01  #  3/8
    a4  =   9.230769230769231e-01  #  12/13
    a5  =   1.000000000000000e+00  #  1
    a6  =   5.000000000000000e-01  #  1/2

    # Coefficients used to compute the dependent variable argument of f
    b21 =   2.500000000000000e-01  #  1/4
    b31 =   9.375000000000000e-02  #  3/32
    b32 =   2.812500000000000e-01  #  9/32
    b41 =   8.793809740555303e-01  #  1932/2197
    b42 =  -3.277196176604461e+00  # -7200/2197
    b43 =   3.320892125625853e+00  #  7296/2197
    b51 =   2.032407407407407e+00  #  439/216
    b52 =  -8.000000000000000e+00  # -8
    b53 =   7.173489278752436e+00  #  3680/513
    b54 =  -2.058966861598441e-01  # -845/4104
    b61 =  -2.962962962962963e-01  # -8/27
    b62 =   2.000000000000000e+00  #  2
    b63 =  -1.381676413255361e+00  # -3544/2565
    b64 =   4.529727095516569e-01  #  1859/4104
    b65 =  -2.750000000000000e-01  # -11/40

    # Coefficients used to compute local truncation error estimate.  These
    # come from subtracting a 4th order RK estimate from a 5th order RK
    # estimate.
    r1  =   2.777777777777778e-03  #  1/360
    r3  =  -2.994152046783626e-02  # -128/4275
    r4  =  -2.919989367357789e-02  # -2197/75240
    r5  =   2.000000000000000e-02  #  1/50
    r6  =   3.636363636363636e-02  #  2/55

    # Coefficients used to compute 4th order RK estimate
    c1  =   1.157407407407407e-01  #  25/216
    c3  =   5.489278752436647e-01  #  1408/2565
    c4  =   5.353313840155945e-01  #  2197/4104
    c5  =  -2.000000000000000e-01  # -1/5

    # Set t and x according to initial condition and assume that h starts
    # with a value that is as large as possible.
    t = a
    x = numpy.array(x0)
    h = hmax

    # Initialize arrays that will be returned
    T = numpy.array( [t] )
    X = numpy.array( [x] )

    while t < b:
        # Adjust step size when we get to last interval
        if t + h > b:
            h = b - t

        # Compute values needed to compute truncation error estimate and
        # the 4th order RK estimate.
        k1 = h * f( x, t )
        k2 = h * f( x + b21 * k1, t + a2 * h )
        k3 = h * f( x + b31 * k1 + b32 * k2, t + a3 * h )
        k4 = h * f( x + b41 * k1 + b42 * k2 + b43 * k3, t + a4 * h )
        k5 = h * f( x + b51 * k1 + b52 * k2 + b53 * k3 + b54 * k4, t + a5 * h )
        k6 = h * f( x + b61 * k1 + b62 * k2 + b63 * k3 + b64 * k4 + b65 * k5, \
                    t + a6 * h )

        # Compute the estimate of the local truncation error.  If it's small
        # enough then we accept this step and save the 4th order estimate.
        r = abs( r1 * k1 + r3 * k3 + r4 * k4 + r5 * k5 + r6 * k6 ) / h
        if len( numpy.shape( r ) ) > 0:
            r = max( r )
        if r <= tol:
            t = t + h
            x = x + c1 * k1 + c3 * k3 + c4 * k4 + c5 * k5
            T = numpy.append( T, t )
            X = numpy.append( X, [x], 0 )

        # Now compute next step size, and make sure that it is not too big or
        # too small.  BUG FIX: r can be exactly zero (e.g. when the two RK
        # estimates coincide); the old unconditional tol/r raised
        # ZeroDivisionError.  A zero error estimate now simply grows the step
        # by the maximum allowed factor.
        if r > 0.0:
            scale = min( max( 0.84 * ( tol / r )**0.25, 0.1 ), 4.0 )
        else:
            scale = 4.0
        h = h * scale
        if h > hmax:
            h = hmax
        elif h < hmin:
            # (Removed an unreachable `break` that followed this raise.)
            raise RuntimeError("Error: Could not converge to the required tolerance %e with minimum stepsize  %e." % (tol,hmin))

    return ( T, X )
|
#!/bin/bash
## Copyright (c) 2021, Oracle and/or its affiliates.
set -e
##
## d e p l o y m e n t - m a i n . s h
## Execute OGG Deployment
##
##
## a b o r t
## Terminate with an error message
##
# Print "Error - <message>" and terminate the script with exit status 1.
function abort() {
echo "Error - $*"
exit 1
}
# Defaults and required settings (`: "${VAR:=default}"` assigns only when
# unset; `: "${VAR:?}"` aborts when the variable is missing).
: "${OGG_DEPLOYMENT:=Local}"
: "${OGG_ADMIN:=oggadmin}"
: "${OGG_LISTEN_ON:=127.0.0.1}"
: "${OGG_DEPLOYMENT_HOME:?}"
[[ -d "${OGG_DEPLOYMENT_HOME}" ]] || abort "Deployment storage, '${OGG_DEPLOYMENT_HOME}', not found."
: "${OGG_TEMPORARY_FILES:?}"
[[ -d "${OGG_TEMPORARY_FILES}" ]] || abort "Deployment temporary storage, '${OGG_TEMPORARY_FILES}', not found."
: "${OGG_HOME:?}"
[[ -d "${OGG_HOME}" ]] || abort "Deployment runtime, '${OGG_HOME}'. not found."
# Pull the TLS cert/key paths out of the existing nginx configuration.
NGINX_CRT="$(awk '$1 == "ssl_certificate"     { gsub(/;/, ""); print $NF; exit }' < /etc/nginx/nginx.conf)"
NGINX_KEY="$(awk '$1 == "ssl_certificate_key" { gsub(/;/, ""); print $NF; exit }' < /etc/nginx/nginx.conf)"
export OGG_DEPLOYMENT OGG_ADMIN NGINX_CRT NGINX_KEY
##
## g e n e r a t e P a s s w o r d
## If not already specified, generate a random password with:
## - at least one uppercase character
## - at least one lowercase character
## - at least one digit character
## Skipped when OGG_ADMIN_PWD is preset or a deployment already exists.
## Retries by tail-recursing until a candidate has all three classes.
##
function generatePassword {
if [[ -n "${OGG_ADMIN_PWD}" || -d "${OGG_DEPLOYMENT_HOME}/Deployment/etc" ]]; then
return
fi
local password
password="$(openssl rand -base64 9)-$(openssl rand -base64 3)"
# Each pattern substitution only changes the string when the class is
# present, so inequality means "contains at least one such character".
if [[ "${password}" != "${password/[A-Z]/_}" && \
"${password}" != "${password/[a-z]/_}" && \
"${password}" != "${password/[0-9]/_}" ]]; then
export OGG_ADMIN_PWD="${password}"
echo "----------------------------------------------------------------------------------"
echo "--  Password for OGG administrative user '${OGG_ADMIN}' is '${OGG_ADMIN_PWD}'"
echo "----------------------------------------------------------------------------------"
return
fi
generatePassword
}
##
## l o c a t e _ j a v a
## Locate the Java installation and set JAVA_HOME (no-op when already set).
## Resolves symlinks (e.g. /usr/bin/java -> JDK bin/java) before deriving
## the home directory; aborts when no java binary is on PATH.
##
function locate_java() {
[[ -n "${JAVA_HOME}" ]] && return 0
local java
java=$(command -v java)
[[ -z "${java}" ]] && abort "Java installation not found"
JAVA_HOME="$(dirname "$(dirname "$(readlink -f "${java}")")")"
export JAVA_HOME
}
##
## r u n _ a s _ o g g
## Return a string used for running a process as the 'ogg' user
## (a setpriv prefix that drops to ogg's uid/gid; intended for command
## substitution, e.g. `$(run_as_ogg) some-command`).
##
function run_as_ogg() {
local user="ogg"
local uid gid
uid="$(id -u "${user}")"
gid="$(id -g "${user}")"
echo "setpriv --ruid ${uid} --euid ${uid} --groups ${gid} --rgid ${gid} --egid ${gid} -- "
}
##
## s e t u p _ d e p l o y m e n t _ d i r e c t o r i e s
## Create and set permissions for directories for the deployment.
## Volatile state (run, temp, db) lives under OGG_TEMPORARY_FILES and is
## re-created on every start; symlinks point the deployment tree at it.
##
function setup_deployment_directories() {
rm    -fr "${OGG_DEPLOYMENT_HOME}"/Deployment/var/{run,temp,lib/db} \
"${OGG_TEMPORARY_FILES}"/{run,temp}
mkdir -p  "${OGG_TEMPORARY_FILES}"/{run,temp,db} \
"${OGG_DEPLOYMENT_HOME}"/Deployment/var/lib
ln    -s  "${OGG_TEMPORARY_FILES}"/run  "${OGG_DEPLOYMENT_HOME}"/Deployment/var/run
ln    -s  "${OGG_TEMPORARY_FILES}"/temp "${OGG_DEPLOYMENT_HOME}"/Deployment/var/temp
ln    -s  "${OGG_TEMPORARY_FILES}"/db   "${OGG_DEPLOYMENT_HOME}"/Deployment/var/lib/db
# Hand ownership of everything (except dot entries) to the ogg user.
chown     ogg:ogg "${OGG_DEPLOYMENT_HOME}" "${OGG_TEMPORARY_FILES}"
chmod     0750    "${OGG_DEPLOYMENT_HOME}" "${OGG_TEMPORARY_FILES}"
find "${OGG_DEPLOYMENT_HOME}" "${OGG_TEMPORARY_FILES}" -mindepth 1 -maxdepth 1 -not -name '.*' -exec \
chown -R ogg:ogg {} \;
}
##
## s t a r t _ o g g
## Initialize and start the OGG installation as the ogg user, then tail the
## ServiceManager log in the background; the tail's pid (ogg_pid) is what
## the termination handler later kills.
##
function start_ogg() {
$(run_as_ogg) python3 /usr/local/bin/deployment-init.py
$(run_as_ogg) tail -F "${OGG_DEPLOYMENT_HOME}"/ServiceManager/var/log/ServiceManager.log &
ogg_pid=$!
}
##
## s t a r t _ n g i n x
## Start the NGinx reverse proxy daemon, creating a self-signed certificate
## first when either the configured cert or key file is missing, and
## validating the rendered configuration (-t) before launching.
##
function start_nginx() {
[[ ! -f "${NGINX_CRT}" || ! -f "${NGINX_KEY}" ]] && {
/usr/local/bin/create-certificate.sh
}
replace-variables.sh /etc/nginx/*.conf
/usr/sbin/nginx -t
/usr/sbin/nginx
}
##
## Termination handler
## Stops the background log tail (if any) and shuts nginx down cleanly,
## then exits 0 so container runtimes treat the stop as graceful.
##
function termination_handler() {
[[ -z "${ogg_pid}" ]] || {
kill "${ogg_pid}"
unset ogg_pid
}
[[ ! -f "/var/run/nginx.pid" ]] || {
/usr/sbin/nginx -s stop
}
exit 0
}
##
## Signal Handling for this script
## Resets any inherited SIGTERM/SIGINT traps, then installs
## termination_handler for both signals.
##
function signal_handling() {
trap -                   SIGTERM SIGINT
trap termination_handler SIGTERM SIGINT
}
##
## Entrypoint
##
generatePassword
setup_deployment_directories
locate_java
# Install the SIGTERM/SIGINT handler BEFORE launching background processes;
# previously it was installed last, so a signal arriving during startup
# killed the script without stopping OGG or nginx.
signal_handling
start_ogg
start_nginx
wait
|
package com.johnsnowlabs.nlp.annotators
import com.johnsnowlabs.nlp.annotators.common.{InfixToken, PrefixedToken, SuffixedToken}
import com.johnsnowlabs.nlp.serialization.ArrayFeature
import com.johnsnowlabs.nlp.{Annotation, AnnotatorModel, AnnotatorType}
import org.apache.spark.ml.util.Identifiable
/**
 * Lightweight whitespace tokenizer that additionally splits off configurable
 * prefix and suffix characters (quotes, punctuation, brackets, newlines)
 * from each token.
 */
class SimpleTokenizer(override val uid: String) extends AnnotatorModel[SimpleTokenizer] {

  /** Annotator reference id. Used to identify elements in metadata or to refer to this annotator type */
  def this() = this(Identifiable.randomUID("SILLY_TOKENIZER"))

  /** Strings peeled off the front of tokens; stored longest-first so the longest prefix matches first. */
  val prefixes = new ArrayFeature[String](this, "prefixes")
  def setPrefixes(p: Array[String]): this.type = set(prefixes, p.sortBy(_.length).reverse)

  /** Strings peeled off the end of tokens; stored longest-first so the longest suffix matches first. */
  val suffixes = new ArrayFeature[String](this, "suffixes")
  def setSuffixes(s: Array[String]): this.type = set(suffixes, s.sortBy(_.length).reverse)

  setDefault(prefixes, () => Array("'", "\"", "(", "[", "\n"))
  setDefault(suffixes, () => Array(".", ":", "%", ",", ";", "?", "'", "\"", ")", "]", "\n"))

  /**
   * takes a document and annotations and produces new annotations of this annotator's annotation type
   *
   * @param annotations Annotations that correspond to inputAnnotationCols generated by previous annotators if any
   * @return one TOKEN annotation per token in each input annotation; the "sentence"
   *         metadata key is carried over, defaulting to "0" when absent
   */
  override def annotate(annotations: Seq[Annotation]): Seq[Annotation] =
    annotations.flatMap { annotation =>
      tokenize(annotation.result).map { token =>
        annotation.copy(
          result = token,
          metadata = annotation.metadata.updated("sentence",
            annotation.metadata.getOrElse("sentence", "0")))
      }
    }

  // hardcoded at this time: first isolate newlines, then peel suffixes/prefixes
  private lazy val firstPass = Seq(InfixToken(Array("\n")))
  private lazy val secondPass = Seq(SuffixedToken($$(suffixes)), PrefixedToken($$(prefixes)))

  /**
   * Splits on single spaces and runs each parser pass over the running token list.
   * NOTE: the original filtered `_ != " "`, which is a no-op — `split(" ")` can
   * never yield a literal space — so empty fragments leaked into the passes;
   * `_.nonEmpty` drops them up front (they were discarded at the end anyway).
   */
  private def tokenize(text: String): Seq[String] =
    text.split(" ").filter(_.nonEmpty).flatMap { token =>
      val afterFirst = firstPass.foldLeft(Seq(token)) { (toks, parser) =>
        toks.flatMap(t => parser.separate(t).split(" "))
      }
      secondPass.foldLeft(afterFirst) { (toks, parser) =>
        toks.flatMap(t => parser.separate(t).split(" "))
      }
    }.filter(_.nonEmpty)

  override val outputAnnotatorType: AnnotatorType = AnnotatorType.TOKEN

  override val inputAnnotatorTypes: Array[String] = Array(AnnotatorType.DOCUMENT)
}
|
import requests
def get_peak_data(sample_name: str, timeout: float = 30.0) -> dict:
    """Fetch peak data for a sample from the PeakInvestigator RUN API.

    Args:
        sample_name: Name of the sample to query.
        timeout: Seconds to wait for the HTTP response before giving up
            (the original call had no timeout and could hang forever).

    Returns:
        dict with keys "peak_name", "retention_time", "intensity" on
        success, or {"error": ...} on any failure (HTTP error, network
        error, malformed/incomplete JSON payload).
    """
    api_url = "https://peakinvestigator.veritomyx.com/api/RUN"
    # SECURITY NOTE(review): hardcoded credential placeholder; prefer reading
    # the key from an environment variable or secrets store in real use.
    api_key = "your_api_key"  # Replace with your actual API key
    params = {
        "sample": sample_name,
        "key": api_key
    }
    try:
        response = requests.get(api_url, params=params, timeout=timeout)
    except requests.RequestException:
        # Network failure / timeout: report the same error shape as HTTP failure.
        return {"error": "Failed to retrieve peak data"}
    if response.status_code != 200:
        return {"error": "Failed to retrieve peak data"}
    try:
        peak_data = response.json()
        return {
            "peak_name": peak_data["peak_name"],
            "retention_time": peak_data["retention_time"],
            "intensity": peak_data["intensity"]
        }
    except (ValueError, KeyError):
        # Body was not JSON or lacked an expected field; previously this crashed.
        return {"error": "Failed to retrieve peak data"}
# Example usage — guarded so importing this module no longer fires a live
# network request as a side effect.
if __name__ == "__main__":
    sample_name = "Sample1"
    print(get_peak_data(sample_name))
#!/usr/bin/env bash
# Third-party import script: pulls a reduced copy of CLI11 into the VTK tree.
# The variables below are consumed by the shared update-common.sh framework
# sourced at the bottom of this file.
set -e
set -x
shopt -s dotglob
# Subtree name and destination inside the VTK repository.
readonly name="cli11"
readonly ownership="CLI11 Upstream <kwrobot@kitware.com>"
readonly subtree="ThirdParty/$name/vtk$name"
# Upstream mirror and the exact tag to import.
readonly repo="https://gitlab.kitware.com/third-party/cli11.git"
readonly tag="for/vtk-2021-05-13-v1.9.1"
# Only these paths are kept from the upstream archive.
readonly paths="
include/CLI/*.hpp
LICENSE
README.md
README.kitware.md
CMakeLists.vtk.txt
"
# Called by update-common.sh after checkout: reshapes the reduced upstream
# tree into the layout VTK expects. git_archive and $extractdir are
# provided by the sourced framework script — TODO confirm against update-common.sh.
extract_source () {
    git_archive
    pushd "$extractdir/$name-reduced"
    # Flatten include/ so the CLI/*.hpp headers sit at the tree root.
    mv -v include/* .
    # Activate the VTK-specific build file as the real CMakeLists.txt.
    mv CMakeLists.vtk.txt CMakeLists.txt
    popd
}
# Hand control to the shared import framework (does the clone, reduction,
# and subtree merge using the variables and hook defined above).
. "${BASH_SOURCE%/*}/../update-common.sh"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.