text stringlengths 1 1.05M |
|---|
<filename>offer/src/main/java/com/java/study/answer/zuo/bbasic/class_03/Code_07_ReverseList.java<gh_stars>1-10
package com.java.study.answer.zuo.bbasic.class_03;
public class Code_07_ReverseList {

    /** Singly linked list node. */
    public static class Node {
        public int value;
        public Node next;

        public Node(int data) {
            this.value = data;
        }
    }

    /**
     * Reverses a singly linked list in place and returns the new head.
     * Runs in O(n) time with O(1) extra space.
     */
    public static Node reverseList(Node head) {
        Node reversed = null;
        while (head != null) {
            Node following = head.next; // remember the rest of the list
            head.next = reversed;       // hook current node onto the reversed prefix
            reversed = head;
            head = following;
        }
        return reversed;
    }

    /** Doubly linked list node. */
    public static class DoubleNode {
        public int value;
        public DoubleNode last;
        public DoubleNode next;

        public DoubleNode(int data) {
            this.value = data;
        }
    }

    /**
     * Reverses a doubly linked list in place by swapping every node's
     * next/last pointers; returns the new head.
     */
    public static DoubleNode reverseList(DoubleNode head) {
        DoubleNode reversed = null;
        while (head != null) {
            DoubleNode following = head.next;
            head.next = reversed;  // old predecessor becomes successor
            head.last = following; // old successor becomes predecessor
            reversed = head;
            head = following;
        }
        return reversed;
    }

    /** Prints a singly linked list front to back. */
    public static void printLinkedList(Node head) {
        System.out.print("Linked List: ");
        for (Node cur = head; cur != null; cur = cur.next) {
            System.out.print(cur.value + " ");
        }
        System.out.println();
    }

    /** Prints a doubly linked list forwards, then backwards from its tail. */
    public static void printDoubleLinkedList(DoubleNode head) {
        System.out.print("Double Linked List: ");
        DoubleNode tail = null;
        for (DoubleNode cur = head; cur != null; cur = cur.next) {
            System.out.print(cur.value + " ");
            tail = cur;
        }
        System.out.print("| ");
        for (DoubleNode cur = tail; cur != null; cur = cur.last) {
            System.out.print(cur.value + " ");
        }
        System.out.println();
    }

    public static void main(String[] args) {
        Node head1 = new Node(1);
        head1.next = new Node(2);
        head1.next.next = new Node(3);
        printLinkedList(head1);
        head1 = reverseList(head1);
        printLinkedList(head1);

        DoubleNode head2 = new DoubleNode(1);
        head2.next = new DoubleNode(2);
        head2.next.last = head2;
        head2.next.next = new DoubleNode(3);
        head2.next.next.last = head2.next;
        head2.next.next.next = new DoubleNode(4);
        head2.next.next.next.last = head2.next.next;
        printDoubleLinkedList(head2);
        printDoubleLinkedList(reverseList(head2));
    }
}
|
<filename>mtp_api/apps/disbursement/migrations/0002_auto_20171110_1221.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-11-10 12:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 1.10.7) schema tweaks for the Disbursement model.

    dependencies = [
        ('disbursement', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='disbursement',
            name='prison',
            # PROTECT: a Prison row cannot be deleted while disbursements reference it.
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='prison.Prison'),
        ),
        migrations.AlterField(
            model_name='disbursement',
            name='prisoner_number',
            field=models.CharField(max_length=250),
        ),
    ]
|
<filename>account/forms.py
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from .models import ACCOUNT_TYPE_CHOICES
from .models import (
StudentProfile, FacultyProfile, StaffProfile
)
User = get_user_model()
class UserCreationForm(forms.ModelForm):
    """Admin form for creating a user with a confirmed password and a role.

    The selected ``account_type`` is mapped onto the boolean role flags
    (``is_staff``/``is_superuser``/``is_student``/``is_faculty``) in save().
    """
    account_type = forms.ChoiceField(
        label='Account Type', choices=ACCOUNT_TYPE_CHOICES)
    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(
        label='Confirm Password', widget=forms.PasswordInput)

    class Meta:
        model = User
        exclude = []
        # NOTE(review): ``unique_together`` is a model Meta option, not a
        # ModelForm one — Django silently ignores it here. Uniqueness must be
        # enforced on the User model itself.
        unique_together = ('username', 'email')

    def clean_password2(self):
        """Validate that both password entries match; return the confirmation."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if password2 is not None and password1 is not None and password1 != password2:
            # BUG FIX: message read "Password don't match!" (ungrammatical).
            raise forms.ValidationError('Passwords don\'t match!')
        return password2

    def save(self, commit=True):
        """Hash the password, set role flags from account_type, then save."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data['password1'])
        # Consistency: use .get() uniformly (original mixed .get() and [...]).
        account_type = self.cleaned_data.get('account_type')
        if account_type == 'Administrator':
            user.is_staff = user.is_superuser = True
        elif account_type == 'Staff':
            user.is_staff = True
        elif account_type == 'Student':
            user.is_student = True
        elif account_type == 'Faculty':
            user.is_faculty = True
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """Admin form for updating a user; the password hash is read-only here."""

    class Meta:
        model = User
        exclude = []
        labels = {'account_type': 'Change Account Type'}
        # NOTE(review): ``unique_together`` is a model Meta option, not a
        # ModelForm one — Django silently ignores it here.
        unique_together = ('username', 'email')

    password = ReadOnlyPasswordHashField()
    account_type = forms.ChoiceField(
        label='Account Type', choices=ACCOUNT_TYPE_CHOICES, initial='')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value.
        return self.initial.get('password')

    def save(self, commit=True):
        """Re-map role flags from the chosen account_type, then save.

        An unrecognised or empty account_type leaves the flags untouched.
        (Idiom fix: the original ended with a dead ``else: pass`` branch.)
        """
        user = super(UserChangeForm, self).save(commit=False)
        proxy_type = self.cleaned_data.get('account_type')
        if proxy_type == 'Administrator':
            # NOTE(review): unlike the other branches this does not clear
            # is_student/is_faculty — confirm whether that is intended.
            user.is_staff = user.is_superuser = True
        elif proxy_type == 'Staff':
            user.is_staff = True
            user.is_superuser = False
        elif proxy_type == 'Student':
            user.is_superuser = user.is_staff = user.is_faculty = False
            user.is_student = True
        elif proxy_type == 'Faculty':
            user.is_superuser = user.is_staff = user.is_student = False
            user.is_faculty = True
        if commit:
            user.save()
        return user
class UserAdmin(BaseUserAdmin):
    """Django admin integration for the custom User model."""
    add_form = UserCreationForm  # create view
    form = UserChangeForm  # update view

    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = [
        'username', 'email',
        'is_active', 'is_superuser',
        'is_staff', 'is_faculty', 'is_student'
    ]
    list_filter = ['is_active', 'is_staff',
                   'is_superuser', 'is_student', 'is_faculty']
    fieldsets = (
        ('Credentials', {'fields': ('username', 'email', 'password')}),
        ('Personal Information', {
            'fields': ('first_name', 'middle_name', 'last_name',
                       'birth_date', 'gender', 'address', 'photo', 'phone_number')
        }),
        ('Account Status', {
            'fields': ('is_active', 'account_type',)
        }),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': (
                'username', 'email', 'first_name', 'middle_name', 'last_name',
                'birth_date', 'gender', 'address', 'photo', 'phone_number',
                'is_active', 'account_type',
                # BUG FIX: these two entries were corrupted placeholders
                # ('<PASSWORD>'); they must name the password fields declared
                # on UserCreationForm or the admin "add user" view breaks.
                'password1', 'password2',
            ),
        }),
    )
    search_fields = ('email',)
    ordering = ('email',)
    filter_horizontal = ()
# for personal use only
class PersonalUserForm(forms.ModelForm):
    """Self-service form for a user editing their own account record."""
    class Meta:
        model = User
        # Identity, permission and auth fields are not self-editable.
        exclude = [
            'username', 'first_name', 'last_name',
            'middle_name', 'is_staff', 'is_superuser',
            'last_login', 'password', 'is_active'
        ]
# for faculty personal use
class PersonalFacultyForm(forms.ModelForm):
    """Self-service form for a faculty member's own profile."""
    class Meta:
        model = FacultyProfile
        # Organisational/status fields are managed elsewhere, not by the owner.
        exclude = [
            'user', 'department',
            'is_chairperson', 'status', 'updated', 'date_joined'
        ]
# for staff personal use
class PersonalStaffForm(forms.ModelForm):
    """Self-service form for a staff member's own profile."""
    class Meta:
        model = StaffProfile
        # Ownership and bookkeeping fields are not self-editable.
        exclude = [
            'user', 'date_joined', 'updated',
        ]
# for student personal use
class PersonalStudentForm(forms.ModelForm):
    """Self-service form for a student's own profile (whitelisted fields only)."""
    class Meta:
        model = StudentProfile
        fields = [
            'guardian', 'additional_information',
        ]
|
<reponame>laim0nas100/LuceneIndexAndSearch
package lt.lb.luceneindexandsearch.indexing.content;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
/**
 * A configurable Lucene {@link Analyzer} built from a tokenizer factory plus an
 * ordered list of token-filter wrappers.
 *
 * <p>Each filter is a {@code BiFunction<String, TokenStream, TokenStream>} that
 * receives the field name and the stream built so far and returns the wrapped
 * stream. Filters are applied in insertion order in
 * {@code createComponents(String)}.
 *
 * @author laim0nas100
 */
public class SimpleAnalyzer extends Analyzer {

    // Supplies a fresh Tokenizer for every createComponents call; never null.
    protected Supplier<? extends Tokenizer> tokenizerFactory;
    // Field-aware filter wrappers, applied in the order they were added.
    protected List<BiFunction<String, TokenStream, TokenStream>> filters = new ArrayList<>();

    /**
     * @param tokenizerFactory produces the base tokenizer; must not be null
     */
    public SimpleAnalyzer(Supplier<? extends Tokenizer> tokenizerFactory) {
        this.tokenizerFactory = Objects.requireNonNull(tokenizerFactory);
    }

    /**
     * @param tokenizerFactory produces the base tokenizer; must not be null
     * @param reuseStrategy    forwarded to the {@link Analyzer} super constructor
     */
    public SimpleAnalyzer(Supplier<? extends Tokenizer> tokenizerFactory, ReuseStrategy reuseStrategy) {
        super(reuseStrategy);
        this.tokenizerFactory = Objects.requireNonNull(tokenizerFactory);
    }

    /**
     * Adds a filter that ignores the field name.
     *
     * @param filter wraps the current token stream; must not be null
     */
    public void add(Function<TokenStream, TokenStream> filter) {
        Objects.requireNonNull(filter);
        add((field, stream) -> filter.apply(stream));
    }

    /**
     * Adds a field-aware filter.
     *
     * <p>NOTE(review): filters added after this analyzer has already produced
     * components may not affect streams cached by the reuse strategy — confirm
     * before mutating a live analyzer.
     *
     * @param filter wraps the current token stream; must not be null
     */
    public void add(BiFunction<String, TokenStream, TokenStream> filter) {
        Objects.requireNonNull(filter);
        filters.add(filter);
    }

    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
        // Start from a fresh tokenizer, then wrap it with each registered filter.
        final Tokenizer src = tokenizerFactory.get();
        TokenStream filtered = src;
        for (BiFunction<String, TokenStream, TokenStream> filter : filters) {
            filtered = filter.apply(fieldName, filtered);
        }
        return new TokenStreamComponents(src, filtered);
    }
}
|
#!/bin/bash
# Rebuild the "tiles" directory from map.png with gdal2tiles.
# Usage: ./script [mpz|mp|z]
#   mpz - multiprocess build, zoom levels 1-6
#   mp  - multiprocess build, default zoom levels
#   z   - single-process build, zoom levels 0-5
#   *   - single-process build, default zoom levels
rm -rf tiles
case $1 in
mpz)
    ./gda2tiles/gdal2tiles-multiprocess.py -l -p raster -z 1-6 -w none map.png tiles
    ;;
mp)
    ./gda2tiles/gdal2tiles-multiprocess.py -l -p raster -w none map.png tiles
    ;;
z)
    ./gda2tiles/gdal2tiles.py -l -p raster -w none map.png -z 0-5 tiles
    ;;
*)
    ./gda2tiles/gdal2tiles.py -l -p raster -w none map.png tiles
    ;;
esac
|
#!/bin/bash
# convert all the video file to images
# the path/to/data structure will be something like /user/data/train_data/walk/video1.avi
# Frames for the N-th video of a folder land in <folder>/<N>/00001.png ...
for folder in /Volumes/dgu\'s\ passport/datasets/\(action\)LCA/video_data/validation_data/*
do
    # BUG FIX: was `count = 0`, which runs a command named `count` instead of
    # assigning (shell assignments must not have spaces around `=`), leaving
    # $count empty for the whole inner loop.
    count=0
    for file in "$folder"/*.avi
    do
        if [[ ! -d "$folder"/$count ]]; then
            mkdir -p "$folder"/$count
        fi
        ffmpeg -i "$file" "$folder"/$count/%05d.png
        (( count++ ))
    done
done
<gh_stars>0
"use strict";

const nodemailer = require("nodemailer");

// SMTP transport configured from the environment (implicit TLS on port 465).
const transporter = nodemailer.createTransport({
  host: process.env.SMTP_SERVER,
  port: 465,
  secure: true,
  auth: {
    user: process.env.EMAIL_FROM,
    pass: process.env.EMAIL_FROM_PASSWORD,
  },
});

// Queue-triggered handler: sends one e-mail per record, all in parallel.
module.exports.handler = async (event) => {
  const emailPromises = event.Records.map((record) => {
    const message = JSON.parse(record.body).Message;
    return transporter.sendMail({
      from: `"Reservas 👻" <${process.env.EMAIL_FROM}>`,
      to: process.env.EMAIL_TO,
      subject: "Reserva Efetuada ✔",
      text: message,
      html: message,
    });
  });
  await Promise.all(emailPromises);
  console.log("Todos os e-mails foram enviados com sucesso");
  return {
    statusCode: 200,
    body: JSON.stringify({
      message: "Go Serverless v1.0! Your function executed successfully!",
    }),
  };
};
|
import numpy as np
def perform_transformation(array, axis):
    """Insert a length-1 dimension into a nested-list array at ``axis``.

    ``array`` must be a list of lists; values are converted to int32 and the
    result is returned as nested Python lists.

    Raises:
        ValueError: if ``array`` is not a list of lists, or ``axis`` lies
            outside ``[0, ndim]``.
    """
    is_nested_list = isinstance(array, list) and all(
        isinstance(row, list) for row in array
    )
    if not is_nested_list:
        raise ValueError("Input array must be multi-dimensional")

    as_np = np.array(array, dtype=np.int32)
    ndim = len(as_np.shape)
    if axis < 0 or axis > ndim:
        raise ValueError("Axis out of bounds for the input array")

    return np.expand_dims(as_np, axis=axis).tolist()
# Book a new cargo shipment; the API responds with JSON containing bookingId.
booking=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargobooking -H 'Content-Type: application/json' -d '{
  "bookingAmount": 100,
  "originLocation": "CNHKG",
  "destLocation" : "USNYC",
  "destArrivalDeadline" : "2020-01-28"
}')
# Idiom fix: dropped the useless `echo \`...\`` wrapper around jq.
bookingId=$(jq -r .bookingId <<< "${booking}")
echo "$bookingId"
# Show the optimal route for the requested itinerary.
curl -s -X GET "https://$1/cargotracker/serviceapi/voyageRouting/optimalRoute?origin=CNHKG&destination=USNYC&deadline=2020-01-28" | jq
# BUG FIX: $booking must be quoted — unquoted, the multi-word JSON payload is
# word-split and curl receives a mangled request body.
routing=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargorouting -H 'Content-Type: application/json' -d "$booking")
echo "$routing"
# Walk the booked cargo through its full handling lifecycle by posting each
# milestone (receive, per-leg load/unload, customs, claim) to the handling
# API. $1 is the cargotracker host; $bookingId comes from the booking step.

# Cargo received at origin port Hong Kong.
receive=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "CNHKG",
  "handlingType" : "RECEIVE",
  "completionTime": "2019-08-23",
  "voyageNumber" : ""
}')
echo $receive
# Leg 1: load in Hong Kong onto voyage 0100S.
firstload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "CNHKG",
  "handlingType" : "LOAD",
  "completionTime": "2019-08-25",
  "voyageNumber" : "0100S"
}')
echo $firstload
# Leg 1: unload in Hangzhou.
firstunload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "CNHGH",
  "handlingType" : "UNLOAD",
  "completionTime": "2019-08-28",
  "voyageNumber" : "0100S"
}')
echo $firstunload
# Leg 2: load in Hangzhou onto voyage 0101S.
secondload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "CNHGH",
  "handlingType" : "LOAD",
  "completionTime": "2019-09-01",
  "voyageNumber" : "0101S"
}')
echo $secondload
# Leg 2: unload in Tokyo.
secondunload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "JNTKO",
  "handlingType" : "UNLOAD",
  "completionTime": "2019-09-10",
  "voyageNumber" : "0101S"
}')
echo $secondunload
# Leg 3: load in Tokyo onto voyage 0102S.
thirdload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "JNTKO",
  "handlingType" : "LOAD",
  "completionTime": "2019-09-15",
  "voyageNumber" : "0102S"
}')
echo $thirdload
# Leg 3: unload at destination New York.
thirdunload=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "USNYC",
  "handlingType" : "UNLOAD",
  "completionTime": "2019-09-25",
  "voyageNumber" : "0102S"
}')
echo $thirdunload
# Customs clearance at destination (no voyage attached).
customs=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "USNYC",
  "handlingType" : "CUSTOMS",
  "completionTime": "2019-09-26",
  "voyageNumber" : ""
}')
echo $customs
# Final claim by the customer (no voyage attached).
claim=$(curl -s -X POST https://$1/cargotracker/serviceapi/cargohandling -H 'Content-Type: application/json' -d '{
  "bookingId" : "'$bookingId'",
  "unLocode" : "USNYC",
  "handlingType" : "CLAIM",
  "completionTime": "2019-09-28",
  "voyageNumber" : ""
}')
echo $claim
#!/bin/bash
# Run the OpenVPN install playbook against the hosts listed in ./inventory.
# Host key checking is disabled so first-time SSH connections don't prompt.
ANSIBLE_HOST_KEY_CHECKING=false ansible-playbook install-openvpn.yml -i inventory
import axios from "axios";
import unescape from "./helpers/unescape";

// Lazily-loaded theme translation store with dot-path lookup and
// {{ param }} interpolation.
export default {
  loaded: false,
  translations: {},

  // Fetch translations from the shop and cache them; announces completion
  // through the "theme:translations:loaded" DOM event.
  async load() {
    const translations = await axios
      .get("/?view=translations", {
        responseType: "json",
      })
      .then((response) => response.data);

    this.loaded = true;
    this.translations = translations;

    document.dispatchEvent(
      new CustomEvent("theme:translations:loaded", {
        detail: {
          translations,
        },
      })
    );
  },

  // Resolve a dot-separated key (e.g. "cart.empty") and interpolate
  // {{ param }} placeholders; falls back to a "translation missed" marker.
  get(name, params = {}) {
    try {
      const translation = name.split(".").reduce((node, key) => {
        // BUG FIX: the original called node.hasOwnProperty(key) directly,
        // which throws on null or prototype-less nodes (the blanket catch
        // masked it). Guard the node and go through Object.prototype.
        if (node != null && Object.prototype.hasOwnProperty.call(node, key)) {
          return node[key];
        }
        throw new Error("Translation missed");
      }, this.translations);

      return Object.keys(params).reduce((result = "", key) => {
        const regex = new RegExp(`{{(\\s+)?(${key})(\\s+)?}}`, "gm");
        return result.replace(regex, params[key]);
      }, unescape(translation));
    } catch (ignored) {
      // Intentionally swallowed: any lookup/interpolation failure falls
      // through to the "translation missed" marker below.
    }
    return `"${name}" translation missed`;
  },

  all() {
    return this.translations;
  },
};
|
#!/bin/sh
### BEGIN INIT INFO
# Provides:          vvs_notifier
# Required-Start:    $remote_fs $syslog
# Required-Stop:     $remote_fs $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Controls the vvs_notifier daemon
# Description:       Starts/stops the vvsNotifier Python app as a
#                    background daemon via start-stop-daemon.
### END INIT INFO

# Change the next 3 lines to suit where you install your script and what you want to call it
DIR=/home/pi/dev/vvsNotifier
DAEMON=$DIR/app.py
DAEMON_NAME=vvs_notifier

# Add any command line options for your daemon here
DAEMON_OPTS=""

# This next line determines what user the script runs as.
# Root generally not recommended but necessary if you are using the Raspberry Pi GPIO from Python.
DAEMON_USER=root

# The process ID of the script when it runs is stored here:
PIDFILE=/var/run/$DAEMON_NAME.pid

. /lib/lsb/init-functions

# Launch the daemon in the background and record its PID.
do_start () {
    log_daemon_msg "Starting system $DAEMON_NAME daemon"
    start-stop-daemon --start --background --pidfile $PIDFILE --make-pidfile --user $DAEMON_USER --chuid $DAEMON_USER --startas $DAEMON -- $DAEMON_OPTS
    log_end_msg $?
}

# Stop the daemon recorded in the PID file, retrying for up to 10 seconds.
do_stop () {
    log_daemon_msg "Stopping system $DAEMON_NAME daemon"
    start-stop-daemon --stop --pidfile $PIDFILE --retry 10
    log_end_msg $?
}

case "$1" in
    start|stop)
        do_${1}
        ;;
    restart|reload|force-reload)
        do_stop
        do_start
        ;;
    status)
        status_of_proc "$DAEMON_NAME" "$DAEMON" && exit 0 || exit $?
        ;;
    *)
        echo "Usage: /etc/init.d/$DAEMON_NAME {start|stop|restart|status}"
        exit 1
        ;;
esac
exit 0
|
<reponame>mikita-kandratsyeu/crypto-wallet-app
import React, { useCallback, useState } from 'react';
import { connect } from 'react-redux';
import {
View,
StyleSheet,
FlatList,
Text,
TouchableOpacity,
Image,
} from 'react-native';
// @ts-ignore
import AnimateNumber from 'react-native-countup';
import { useFocusEffect } from '@react-navigation/core';
import { MainLayoutWrapper } from '.';
import { Store } from '../store/types';
import { getHoldings, getCoinMarket } from '../store/market/market.actions';
import { colors, dummyData, fonts, messages, sizes } from '../constants';
import { BalanceInfo, Chart, Icon, IconTextButton } from '../components';
import { HomeProps } from './types';
import { getTotalWallet, getValueChange, showAlert } from './services';
// Home dashboard screen: wallet balance, transfer/withdraw shortcuts, a price
// chart for the selected (or first) coin, and the top-crypto list.
const Home: React.FC<HomeProps> = props => {
  const { myHoldings, coins, getHoldings, getCoinMarket } = props;

  // Refresh holdings and market data every time the screen gains focus.
  useFocusEffect(
    useCallback(() => {
      getHoldings(dummyData.holdings);
      getCoinMarket();
    }, []),
  );

  const [selectedCoin, setSelectedCoin] = useState<any>(null);

  const valueChange = getValueChange(myHoldings);
  const totalWallet = getTotalWallet(myHoldings);
  // BUG FIX: guard the denominator — when the wallet is empty (or the whole
  // balance equals the change) the original computed NaN/Infinity and
  // rendered it in the balance header.
  const previousWallet = totalWallet - valueChange;
  const percentChange =
    previousWallet !== 0 ? (valueChange / previousWallet) * 100 : 0;

  // Both action buttons currently surface the same informational alert.
  const alertHandler = () =>
    showAlert({
      title: messages.titleAlert,
      body: messages.bodyAlert,
      buttons: [
        {
          text: messages.buttonAlert,
          onPress: () => null,
          style: 'cancel',
        },
      ],
    });

  return (
    <MainLayoutWrapper>
      <View style={styles.root}>
        <View style={styles.walletInfoContainer}>
          <BalanceInfo
            title={messages.yourWallet}
            displayAmount={totalWallet}
            changePct={percentChange}
            containerStyle={styles.balanceInfoContainer}
          />
        </View>
        <View style={styles.iconTextButtonContainer}>
          <IconTextButton
            label={messages.transfer}
            icon="Send"
            containerStyle={styles.iconTextButton}
            onPress={alertHandler}
          />
          <IconTextButton
            label={messages.withdraw}
            icon="WithDraw"
            containerStyle={styles.iconTextButton}
            onPress={alertHandler}
          />
        </View>
        {/* Chart shows the tapped coin, defaulting to the top market coin. */}
        <Chart
          containerStyle={{ marginTop: sizes.padding }}
          chartPrices={
            selectedCoin
              ? // eslint-disable-next-line camelcase
                selectedCoin?.sparkline_in_7d?.price
              : // eslint-disable-next-line camelcase
                coins[0]?.sparkline_in_7d?.price
          }
        />
        <FlatList
          data={coins}
          keyExtractor={item => item.id}
          contentContainerStyle={styles.listCoins}
          // eslint-disable-next-line prettier/prettier
          ListHeaderComponent={(
            <View style={styles.listCoinsHeader}>
              <Text style={styles.listCoinsTextHeader}>
                {messages.topCryptoCurrency}
              </Text>
            </View>
            // eslint-disable-next-line prettier/prettier
          )}
          renderItem={({ item }) => {
            // Green for gains, red for losses, gray when flat.
            const priceColor = () => {
              if (item.price_change_percentage_7d_in_currency === 0) {
                return colors.lightGray3;
              }
              if (item.price_change_percentage_7d_in_currency > 0) {
                return colors.lightGreen;
              }
              return colors.red;
            };
            // Rotate the arrow icon to point up for gains, down for losses.
            const changeIconStyle = {
              transform:
                Number(item.price_change_percentage_7d_in_currency) > 0
                  ? [{ rotate: '45deg' }]
                  : [{ rotate: '125deg' }],
            };
            return (
              <TouchableOpacity
                style={styles.listCoinsRenderRoot}
                onPress={() => setSelectedCoin(item)}
              >
                <View style={styles.listCoinsRender}>
                  <Image
                    source={{ uri: item.image }}
                    style={styles.listCoinsIcon}
                  />
                </View>
                <View style={styles.listCoinsTextContainer}>
                  <Text style={styles.listCoinsText}>{item.name}</Text>
                </View>
                <View>
                  <Text style={styles.listCoinsRenderPrice}>
                    {`$ ${Number(item.current_price).toLocaleString(undefined, {
                      maximumFractionDigits: 2,
                    })}`}
                  </Text>
                  <View style={styles.priceChangeContainer}>
                    {item.price_change_percentage_7d_in_currency !== 0 && (
                      <View style={changeIconStyle}>
                        <Icon
                          name="UpArrow"
                          height={10}
                          width={10}
                          color={priceColor()}
                        />
                      </View>
                    )}
                    <Text
                      style={[
                        styles.priceChangeText,
                        {
                          color: priceColor(),
                        },
                      ]}
                    >
                      <AnimateNumber
                        value={Number(
                          item.price_change_percentage_7d_in_currency || 0,
                        )}
                        timing="linear"
                        interval={15}
                        formatter={(value: number) => `${value.toFixed(2)}%`}
                      />
                    </Text>
                  </View>
                </View>
              </TouchableOpacity>
            );
          }}
          ListFooterComponent={<View style={styles.footer} />}
        />
      </View>
    </MainLayoutWrapper>
  );
};
// Static styles for the Home screen.
const styles = StyleSheet.create({
  root: {
    flex: 1,
    paddingTop: 15,
  },
  // Rounded card containing the wallet balance header.
  walletInfoContainer: {
    paddingHorizontal: sizes.padding,
    paddingBottom: sizes.padding * 2,
    borderRadius: 25,
    backgroundColor: colors.gray,
    marginLeft: sizes.base,
    marginRight: sizes.base,
  },
  balanceInfoContainer: {
    marginTop: 15,
    marginBottom: 15,
  },
  // Transfer/withdraw button row, pulled up to overlap the wallet card.
  iconTextButtonContainer: {
    flexDirection: 'row',
    marginTop: -20,
    paddingHorizontal: sizes.radius * 3,
  },
  iconTextButton: {
    flex: 1,
    height: 40,
    marginRight: Math.floor(sizes.radius / 2),
    marginLeft: Math.floor(sizes.radius / 2),
  },
  // Coin list and its header/rows.
  listCoins: {
    marginTop: 30,
    paddingHorizontal: sizes.padding,
  },
  listCoinsHeader: {
    marginBottom: sizes.radius,
  },
  listCoinsText: {
    color: colors.white,
    ...fonts.h3,
  },
  listCoinsTextHeader: {
    color: colors.white,
    ...fonts.h3,
    fontSize: 18,
  },
  listCoinsRenderRoot: {
    height: 55,
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  listCoinsRender: {
    width: 35,
  },
  listCoinsIcon: {
    width: 20,
    height: 20,
  },
  listCoinsTextContainer: {
    flex: 1,
  },
  listCoinsRenderText: {
    color: colors.white,
    ...fonts.h3,
  },
  listCoinsRenderPrice: {
    textAlign: 'right',
    color: colors.white,
    ...fonts.h4,
  },
  // 7-day price change indicator (arrow + percentage).
  priceChangeContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'flex-end',
  },
  priceChangeText: {
    marginLeft: 5,
    ...fonts.body5,
    lineHeight: 15,
  },
  footer: {
    marginBottom: 50,
  },
});
// Redux wiring: expose market holdings/coins plus the two fetch actions.
const mapStateToProps = (state: Store) => ({
  myHoldings: state.marketReducer.myHoldings,
  coins: state.marketReducer.coins,
});

// NOTE(review): both dispatchers declare several required parameters, yet the
// component calls getHoldings with a single argument and getCoinMarket with
// none — the remaining values arrive as undefined. Confirm the action
// creators provide their own defaults.
const mapDispatchToProps = (dispatch: any) => ({
  getHoldings: (
    holdings: any[],
    currency: string,
    orderBy: string,
    sparkline: boolean,
    priceChangePerc: string,
    perPage: number,
    page: number,
  ) =>
    dispatch(
      getHoldings(
        holdings,
        currency,
        orderBy,
        sparkline,
        priceChangePerc,
        perPage,
        page,
      ),
    ),
  getCoinMarket: (
    currency: string,
    orderBy: string,
    sparkline: boolean,
    priceChangePerc: string,
    perPage: number,
    page: number,
  ) =>
    dispatch(
      getCoinMarket(
        currency,
        orderBy,
        sparkline,
        priceChangePerc,
        perPage,
        page,
      ),
    ),
});

export default connect(mapStateToProps, mapDispatchToProps)(Home);
|
#!/bin/bash
#install_demo_configuration.sh [-y]
# Resolve the directory this script lives in, following a symlink when
# realpath is unavailable.
SCRIPT_PATH="${BASH_SOURCE[0]}"
if ! [ -x "$(command -v realpath)" ]; then
    if [ -L "$SCRIPT_PATH" ]; then
        [ -x "$(command -v readlink)" ] || { echo "Not able to resolve symlink. Install realpath or readlink.";exit 1; }
        # try readlink (-f not needed because we know its a symlink)
        DIR="$( cd "$( dirname $(readlink "$SCRIPT_PATH") )" && pwd -P)"
    else
        DIR="$( cd "$( dirname "$SCRIPT_PATH" )" && pwd -P)"
    fi
else
    DIR="$( cd "$( dirname "$(realpath "$SCRIPT_PATH")" )" && pwd -P)"
fi
echo "Search Guard 6 Demo Installer"
echo " ** Warning: Do not use on production or public reachable systems **"
# Flag defaults; see show_help for their meaning.
OPTIND=1
assumeyes=0
initsg=0
cluster_mode=0
# Print CLI usage.
function show_help() {
    echo "install_demo_configuration.sh [-y] [-i] [-c]"
    echo "  -h show help"
    echo "  -y confirm all installation dialogues automatically"
    echo "  -i initialize Search Guard with default configuration (default is to ask if -y is not given)"
    echo "  -c enable cluster mode by binding to all network interfaces (default is to ask if -y is not given)"
}
# Parse command-line flags.
while getopts "h?yic" opt; do
    case "$opt" in
    h|\?)
        show_help
        exit 0
        ;;
    y) assumeyes=1
        ;;
    i) initsg=1
        ;;
    c) cluster_mode=1
    esac
done
shift $((OPTIND-1))
[ "$1" = "--" ] && shift
# Interactive confirmations; each is skipped when -y (or its flag) was given.
if [ "$assumeyes" == 0 ]; then
    read -r -p "Install demo certificates? [y/N] " response
    case "$response" in
    [yY][eE][sS]|[yY])
        ;;
    *)
        exit 0
        ;;
    esac
fi
if [ "$initsg" == 0 ] && [ "$assumeyes" == 0 ]; then
    read -r -p "Initialize Search Guard? [y/N] " response
    case "$response" in
    [yY][eE][sS]|[yY])
        initsg=1
        ;;
    *)
        initsg=0
        ;;
    esac
fi
if [ "$cluster_mode" == 0 ] && [ "$assumeyes" == 0 ]; then
    echo "Cluster mode requires maybe additional setup of:"
    echo " - Virtual memory (vm.max_map_count)"
    echo " See https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html"
    echo ""
    read -r -p "Enable cluster mode? [y/N] " response
    case "$response" in
    [yY][eE][sS]|[yY])
        cluster_mode=1
        ;;
    *)
        cluster_mode=0
        ;;
    esac
fi
set -e
# Locate the Elasticsearch root, assuming a .tar.gz layout first (this script
# normally sits three directory levels below the ES root).
BASE_DIR="$DIR/../../.."
if [ -d "$BASE_DIR" ]; then
    CUR="$(pwd)"
    cd "$BASE_DIR"
    BASE_DIR="$(pwd)"
    cd "$CUR"
    echo "Basedir: $BASE_DIR"
else
    echo "DEBUG: basedir does not exist"
fi
ES_CONF_FILE="$BASE_DIR/config/elasticsearch.yml"
ES_BIN_DIR="$BASE_DIR/bin"
ES_PLUGINS_DIR="$BASE_DIR/plugins"
ES_MODULES_DIR="$BASE_DIR/modules"
ES_LIB_PATH="$BASE_DIR/lib"
SUDO_CMD=""
ES_INSTALL_TYPE=".tar.gz"
#Check if its a rpm/deb install
if [ -f /usr/share/elasticsearch/bin/elasticsearch ]; then
    ES_CONF_FILE="/usr/share/elasticsearch/config/elasticsearch.yml"
    if [ ! -f "$ES_CONF_FILE" ]; then
        ES_CONF_FILE="/etc/elasticsearch/elasticsearch.yml"
    fi
    ES_BIN_DIR="/usr/share/elasticsearch/bin"
    ES_PLUGINS_DIR="/usr/share/elasticsearch/plugins"
    ES_MODULES_DIR="/usr/share/elasticsearch/modules"
    ES_LIB_PATH="/usr/share/elasticsearch/lib"
    # Package installs need root to read/write the config.
    if [ -x "$(command -v sudo)" ]; then
        SUDO_CMD="sudo"
        echo "This script maybe require your root password for 'sudo' privileges"
    fi
    ES_INSTALL_TYPE="rpm/deb"
fi
# Sanity-check sudo availability when we decided to use it.
if [ $SUDO_CMD ]; then
    if ! [ -x "$(command -v $SUDO_CMD)" ]; then
        echo "Unable to locate 'sudo' command. Quit."
        exit 1
    fi
fi
# Verify all expected files/directories exist before touching anything.
# NOTE(review): `exit -1` is non-portable (the shell reports status 255);
# prefer a positive exit code.
if $SUDO_CMD test -f "$ES_CONF_FILE"; then
    :
else
    echo "Unable to determine Elasticsearch config directory. Quit."
    exit -1
fi
if [ ! -d "$ES_BIN_DIR" ]; then
    echo "Unable to determine Elasticsearch bin directory. Quit."
    exit -1
fi
if [ ! -d "$ES_PLUGINS_DIR" ]; then
    echo "Unable to determine Elasticsearch plugins directory. Quit."
    exit -1
fi
if [ ! -d "$ES_MODULES_DIR" ]; then
    echo "Unable to determine Elasticsearch modules directory. Quit."
    #exit -1
fi
if [ ! -d "$ES_LIB_PATH" ]; then
    echo "Unable to determine Elasticsearch lib directory. Quit."
    exit -1
fi
ES_CONF_DIR=$(dirname "${ES_CONF_FILE}")
ES_CONF_DIR=`cd "$ES_CONF_DIR" ; pwd`
if [ ! -d "$ES_PLUGINS_DIR/search-guard-6" ]; then
    echo "Search Guard plugin not installed. Quit."
    exit -1
fi
# Derive the Elasticsearch / Search Guard versions from the jar file names.
# BUG FIX: the globs must sit outside the quotes — fully quoted, they never
# expand and sed reports the literal "*" as the version.
ES_VERSION=("$ES_LIB_PATH"/elasticsearch-*.jar)
ES_VERSION=$(echo $ES_VERSION | sed 's/.*elasticsearch-\(.*\)\.jar/\1/')
SG_VERSION=("$ES_PLUGINS_DIR"/search-guard-6/search-guard-6-*.jar)
SG_VERSION=$(echo $SG_VERSION | sed 's/.*search-guard-6-\(.*\)\.jar/\1/')
# BUG FIX: the LSB tool is `lsb_release`, not `sb_release` (typo meant the
# first fallback always ran).
OS=$(lsb_release -ds 2>/dev/null || cat /etc/*release 2>/dev/null | head -n1 || uname -om)
echo "Elasticsearch install type: $ES_INSTALL_TYPE on $OS"
echo "Elasticsearch config dir: $ES_CONF_DIR"
echo "Elasticsearch config file: $ES_CONF_FILE"
echo "Elasticsearch bin dir: $ES_BIN_DIR"
echo "Elasticsearch plugins dir: $ES_PLUGINS_DIR"
echo "Elasticsearch lib dir: $ES_LIB_PATH"
echo "Detected Elasticsearch Version: $ES_VERSION"
echo "Detected Search Guard Version: $SG_VERSION"
# Refuse to run twice against an already-configured node.
if $SUDO_CMD grep --quiet -i searchguard "$ES_CONF_FILE"; then
    echo "$ES_CONF_FILE seems to be already configured for Search Guard. Quit."
    # BUG FIX: `exit -1` is non-portable (yields status 255); use 1.
    exit 1
fi
set +e
read -r -d '' SG_ADMIN_CERT << EOM
-----BEGIN CERTIFICATE-----
MIIEdzCCA1+gAwIBAgIGAWLrc1O4MA0GCSqGSIb3DQEBCwUAMIGPMRMwEQYKCZIm
iZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQ
RXhhbXBsZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290
IENBMSEwHwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0EwHhcNMTgwNDIy
MDM0MzQ3WhcNMjgwNDE5MDM0MzQ3WjBNMQswCQYDVQQGEwJkZTENMAsGA1UEBwwE
dGVzdDEPMA0GA1UECgwGY2xpZW50MQ8wDQYDVQQLDAZjbGllbnQxDTALBgNVBAMM
BGtpcmswggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDCwgBOoO88uMM8
dREJsk58Yt4Jn0zwQ2wUThbvy3ICDiEWhiAhUbg6dTggpS5vWWJto9bvaaqgMVoh
ElfYHdTDncX3UQNBEP8tqzHON6BFEFSGgJRGLd6f5dri6rK32nCotYS61CFXBFxf
WumXjSukjyrcTsdkR3C5QDo2oN7F883MOQqRENPzAtZi9s3jNX48u+/e3yvJzXsB
GS9Qmsye6C71enbIujM4CVwDT/7a5jHuaUp6OuNCFbdRPnu/wLYwOS2/yOtzAqk7
/PFnPCe7YOa10ShnV/jx2sAHhp7ZQBJgFkkgnIERz9Ws74Au+EbptWnsWuB+LqRL
x5G02IzpAgMBAAGjggEYMIIBFDCBvAYDVR0jBIG0MIGxgBSSNQzgDx4rRfZNOfN7
X6LmEpdAc6GBlaSBkjCBjzETMBEGCgmSJomT8ixkARkWA2NvbTEXMBUGCgmSJomT
8ixkARkWB2V4YW1wbGUxGTAXBgNVBAoMEEV4YW1wbGUgQ29tIEluYy4xITAfBgNV
BAsMGEV4YW1wbGUgQ29tIEluYy4gUm9vdCBDQTEhMB8GA1UEAwwYRXhhbXBsZSBD
b20gSW5jLiBSb290IENBggEBMB0GA1UdDgQWBBRsdhuHn3MGDvZxOe22+1wliCJB
mDAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwIF4DAWBgNVHSUBAf8EDDAKBggr
BgEFBQcDAjANBgkqhkiG9w0BAQsFAAOCAQEAkPrUTKKn+/6g0CjhTPBFeX8mKXhG
zw5z9Oq+xnwefZwxV82E/tgFsPcwXcJIBg0f43BaVSygPiV7bXqWhxASwn73i24z
lveIR4+z56bKIhP6c3twb8WWR9yDcLu2Iroin7dYEm3dfVUrhz/A90WHr6ddwmLL
3gcFF2kBu3S3xqM5OmN/tqRXFmo+EvwrdJRiTh4Fsf0tX1ZT07rrGvBFYktK7Kma
lqDl4UDCF1UWkiiFubc0Xw+DR6vNAa99E0oaphzvCmITU1wITNnYZTKzVzQ7vUCq
kLmXOFLTcxTQpptxSo5xDD3aTpzWGCvjExCKpXQtsITUOYtZc02AGjjPOQ==
-----END CERTIFICATE-----
EOM
read -r -d '' SG_ADMIN_CERT_KEY << EOM
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDCwgBOoO88uMM8
dREJsk58Yt4Jn0zwQ2wUThbvy3ICDiEWhiAhUbg6dTggpS5vWWJto9bvaaqgMVoh
ElfYHdTDncX3UQNBEP8tqzHON6BFEFSGgJRGLd6f5dri6rK32nCotYS61CFXBFxf
WumXjSukjyrcTsdkR3C5QDo2oN7F883MOQqRENPzAtZi9s3jNX48u+/e3yvJzXsB
GS9Qmsye6C71enbIujM4CVwDT/7a5jHuaUp6OuNCFbdRPnu/wLYwOS2/yOtzAqk7
/PFnPCe7YOa10ShnV/jx2sAHhp7ZQBJgFkkgnIERz9Ws74Au+EbptWnsWuB+LqRL
x5G02IzpAgMBAAECggEAEzwnMkeBbqqDgyRqFbO/PgMNvD7i0b/28V0dCtCPEVY6
klzrg3RCERP5V9AN8VVkppYjPkCzZ2A4b0JpMUu7ncOmr7HCnoSCj2IfEyePSVg+
4OHbbcBOAoDTHiI2myM/M9++8izNS34qGV4t6pfjaDyeQQ/5cBVWNBWnKjS34S5H
rJWpAcDgxYk5/ah2Xs2aULZlXDMxbSikjrv+n4JIYTKFQo8ydzL8HQDBRmXAFLjC
gNOSHf+5u1JdpY3uPIxK1ugVf8zPZ4/OEB23j56uu7c8+sZ+kZwfRWAQmMhFVG/y
OXxoT5mOruBsAw29m2Ijtxg252/YzSTxiDqFziB/eQKBgQDjeVAdi55GW/bvhuqn
xME/An8E3hI/FyaaITrMQJUBjiCUaStTEqUgQ6A7ZfY/VX6qafOX7sli1svihrXC
uelmKrdve/CFEEqzX9JWWRiPiQ0VZD+EQRsJvX85Tw2UGvVUh6dO3UGPS0BhplMD
jeVpyXgZ7Gy5we+DWjfwhYrCmwKBgQDbLmQhRy+IdVljObZmv3QtJ0cyxxZETWzU
MKmgBFvcRw+KvNwO+Iy0CHEbDu06Uj63kzI2bK3QdINaSrjgr8iftXIQpBmcgMF+
a1l5HtHlCp6RWd55nWQOEvn36IGN3cAaQkXuh4UYM7QfEJaAbzJhyJ+wXA3jWqUd
8bDTIAZ0ywKBgFuZ44gyTAc7S2JDa0Up90O/ZpT4NFLRqMrSbNIJg7d/m2EIRNkM
HhCzCthAg/wXGo3XYq+hCdnSc4ICCzmiEfoBY6LyPvXmjJ5VDOeWs0xBvVIK74T7
jr7KX2wdiHNGs9pZUidw89CXVhK8nptEzcheyA1wZowbK68yamph7HHXAoGBAK3x
7D9Iyl1mnDEWPT7f1Gh9UpDm1TIRrDvd/tBihTCVKK13YsFy2d+LD5Bk0TpGyUVR
STlOGMdloFUJFh4jA3pUOpkgUr8Uo/sbYN+x6Ov3+I3sH5aupRhSURVA7YhUIz/z
tqIt5R+m8Nzygi6dkQNvf+Qruk3jw0S3ahizwsvvAoGAL7do6dTLp832wFVxkEf4
gg1M6DswfkgML5V/7GQ3MkIX/Hrmiu+qSuHhDGrp9inZdCDDYg5+uy1+2+RBMRZ3
vDUUacvc4Fep05zp7NcjgU5y+/HWpuKVvLIlZAO1MBY4Xinqqii6RdxukIhxw7eT
C6TPL5KAcV1R/XAihDhI18Y=
-----END PRIVATE KEY-----
EOM
read -r -d '' NODE_CERT << EOM
-----BEGIN CERTIFICATE-----
MIIEyTCCA7GgAwIBAgIGAWLrc1O2MA0GCSqGSIb3DQEBCwUAMIGPMRMwEQYKCZIm
iZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQ
RXhhbXBsZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290
IENBMSEwHwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0EwHhcNMTgwNDIy
MDM0MzQ3WhcNMjgwNDE5MDM0MzQ3WjBeMRIwEAYKCZImiZPyLGQBGRYCZGUxDTAL
BgNVBAcMBHRlc3QxDTALBgNVBAoMBG5vZGUxDTALBgNVBAsMBG5vZGUxGzAZBgNV
BAMMEm5vZGUtMC5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC
AQoCggEBAJa+f476vLB+AwK53biYByUwN+40D8jMIovGXm6wgT8+9Sbs899dDXgt
9CE1Beo65oP1+JUz4c7UHMrCY3ePiDt4cidHVzEQ2g0YoVrQWv0RedS/yx/DKhs8
Pw1O715oftP53p/2ijD5DifFv1eKfkhFH+lwny/vMSNxellpl6NxJTiJVnQ9HYOL
gf2t971ITJHnAuuxUF48HcuNovW4rhtkXef8kaAN7cE3LU+A9T474ULNCKkEFPIl
ZAKN3iJNFdVsxrTU+CUBHzk73Do1cCkEvJZ0ZFjp0Z3y8wLY/gqWGfGVyA9l2CUq
eIZNf55PNPtGzOrvvONiui48vBKH1LsCAwEAAaOCAVkwggFVMIG8BgNVHSMEgbQw
gbGAFJI1DOAPHitF9k0583tfouYSl0BzoYGVpIGSMIGPMRMwEQYKCZImiZPyLGQB
GRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBs
ZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290IENBMSEw
HwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0GCAQEwHQYDVR0OBBYEFKyv
78ZmFjVKM9g7pMConYH7FVBHMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgXg
MCAGA1UdJQEB/wQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjA1BgNVHREELjAsiAUq
AwQFBYISbm9kZS0wLmV4YW1wbGUuY29tgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZI
hvcNAQELBQADggEBAIOKuyXsFfGv1hI/Lkpd/73QNqjqJdxQclX57GOMWNbOM5H0
5/9AOIZ5JQsWULNKN77aHjLRr4owq2jGbpc/Z6kAd+eiatkcpnbtbGrhKpOtoEZy
8KuslwkeixpzLDNISSbkeLpXz4xJI1ETMN/VG8ZZP1bjzlHziHHDu0JNZ6TnNzKr
XzCGMCohFfem8vnKNnKUneMQMvXd3rzUaAgvtf7Hc2LTBlf4fZzZF1EkwdSXhaMA
1lkfHiqOBxtgeDLxCHESZ2fqgVqsWX+t3qHQfivcPW6txtDyrFPRdJOGhiMGzT/t
e/9kkAtQRgpTb3skYdIOOUOV0WGQ60kJlFhAzIs=
-----END CERTIFICATE-----
EOM
read -r -d '' NODE_KEY << EOM
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCWvn+O+rywfgMC
ud24mAclMDfuNA/IzCKLxl5usIE/PvUm7PPfXQ14LfQhNQXqOuaD9fiVM+HO1BzK
wmN3j4g7eHInR1cxENoNGKFa0Fr9EXnUv8sfwyobPD8NTu9eaH7T+d6f9oow+Q4n
xb9Xin5IRR/pcJ8v7zEjcXpZaZejcSU4iVZ0PR2Di4H9rfe9SEyR5wLrsVBePB3L
jaL1uK4bZF3n/JGgDe3BNy1PgPU+O+FCzQipBBTyJWQCjd4iTRXVbMa01PglAR85
O9w6NXApBLyWdGRY6dGd8vMC2P4KlhnxlcgPZdglKniGTX+eTzT7Rszq77zjYrou
PLwSh9S7AgMBAAECggEABwiohxFoEIwws8XcdKqTWsbfNTw0qFfuHLuK2Htf7IWR
htlzn66F3F+4jnwc5IsPCoVFriCXnsEC/usHHSMTZkL+gJqxlNaGdin6DXS/aiOQ
nb69SaQfqNmsz4ApZyxVDqsQGkK0vAhDAtQVU45gyhp/nLLmmqP8lPzMirOEodmp
U9bA8t/ttrzng7SVAER42f6IVpW0iTKTLyFii0WZbq+ObViyqib9hVFrI6NJuQS+
IelcZB0KsSi6rqIjXg1XXyMiIUcSlhq+GfEa18AYgmsbPwMbExate7/8Ci7ZtCbh
lx9bves2+eeqq5EMm3sMHyhdcg61yzd5UYXeZhwJkQKBgQDS9YqrAtztvLY2gMgv
d+wOjb9awWxYbQTBjx33kf66W+pJ+2j8bI/XX2CpZ98w/oq8VhMqbr9j5b8MfsrF
EoQvedA4joUo8sXd4j1mR2qKF4/KLmkgy6YYusNP2UrVSw7sh77bzce+YaVVoO/e
0wIVTHuD/QZ6fG6MasOqcbl6hwKBgQC27cQruaHFEXR/16LrMVAX+HyEEv44KOCZ
ij5OE4P7F0twb+okngG26+OJV3BtqXf0ULlXJ+YGwXCRf6zUZkld3NMy3bbKPgH6
H/nf3BxqS2tudj7+DV52jKtisBghdvtlKs56oc9AAuwOs37DvhptBKUPdzDDqfys
Qchv5JQdLQKBgERev+pcqy2Bk6xmYHrB6wdseS/4sByYeIoi0BuEfYH4eB4yFPx6
UsQCbVl6CKPgWyZe3ydJbU37D8gE78KfFagtWoZ56j4zMF2RDUUwsB7BNCDamce/
OL2bCeG/Erm98cBG3lxufOX+z47I8fTNfkdY2k8UmhzoZwurLm73HJ3RAoGBAKsp
6yamuXF2FbYRhUXgjHsBbTD/vJO72/yO2CGiLRpi/5mjfkjo99269trp0C8sJSub
5PBiSuADXFsoRgUv+HI1UAEGaCTwxFTQWrRWdtgW3d0sE2EQDVWL5kmfT9TwSeat
mSoyAYR5t3tCBNkPJhbgA7pm4mASzHQ50VyxWs25AoGBAKPFx9X2oKhYQa+mW541
bbqRuGFMoXIIcr/aeM3LayfLETi48o5NDr2NDP11j4yYuz26YLH0Dj8aKpWuehuH
uB27n6j6qu0SVhQi6mMJBe1JrKbzhqMKQjYOoy8VsC2gdj5pCUP/kLQPW7zm9diX
CiKTtKgPIeYdigor7V3AHcVT
-----END PRIVATE KEY-----
EOM
read -r -d '' ROOT_CA << EOM
-----BEGIN CERTIFICATE-----
MIID/jCCAuagAwIBAgIBATANBgkqhkiG9w0BAQsFADCBjzETMBEGCgmSJomT8ixk
ARkWA2NvbTEXMBUGCgmSJomT8ixkARkWB2V4YW1wbGUxGTAXBgNVBAoMEEV4YW1w
bGUgQ29tIEluYy4xITAfBgNVBAsMGEV4YW1wbGUgQ29tIEluYy4gUm9vdCBDQTEh
MB8GA1UEAwwYRXhhbXBsZSBDb20gSW5jLiBSb290IENBMB4XDTE4MDQyMjAzNDM0
NloXDTI4MDQxOTAzNDM0NlowgY8xEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJ
kiaJk/IsZAEZFgdleGFtcGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSEw
HwYDVQQLDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0ExITAfBgNVBAMMGEV4YW1w
bGUgQ29tIEluYy4gUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
ggEBAK/u+GARP5innhpXK0c0q7s1Su1VTEaIgmZr8VWI6S8amf5cU3ktV7WT9SuV
TsAm2i2A5P+Ctw7iZkfnHWlsC3HhPUcd6mvzGZ4moxnamM7r+a9otRp3owYoGStX
ylVTQusAjbq9do8CMV4hcBTepCd+0w0v4h6UlXU8xjhj1xeUIz4DKbRgf36q0rv4
VIX46X72rMJSETKOSxuwLkov1ZOVbfSlPaygXIxqsHVlj1iMkYRbQmaTib6XWHKf
MibDaqDejOhukkCjzpptGZOPFQ8002UtTTNv1TiaKxkjMQJNwz6jfZ53ws3fh1I0
RWT6WfM4oeFRFnyFRmc4uYTUgAkCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAf
BgNVHSMEGDAWgBSSNQzgDx4rRfZNOfN7X6LmEpdAczAdBgNVHQ4EFgQUkjUM4A8e
K0X2TTnze1+i5hKXQHMwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IB
AQBoQHvwsR34hGO2m8qVR9nQ5Klo5HYPyd6ySKNcT36OZ4AQfaCGsk+SecTi35QF
RHL3g2qffED4tKR0RBNGQSgiLavmHGCh3YpDupKq2xhhEeS9oBmQzxanFwWFod4T
nnsG2cCejyR9WXoRzHisw0KJWeuNlwjUdJY0xnn16srm1zL/M/f0PvCyh9HU1mF1
ivnOSqbDD2Z7JSGyckgKad1Omsg/rr5XYtCeyJeXUPcmpeX6erWJJNTUh6yWC/hY
G/dFC4xrJhfXwz6Z0ytUygJO32bJG4Np2iGAwvvgI9EfxzEv/KP+FGrJOvQJAq4/
BU36ZAa80W/8TBnqZTkNnqZV
-----END CERTIFICATE-----
EOM
set -e
# Write out the demo TLS material prepared in the heredocs above.
# tee (instead of shell redirection) lets the writes run under $SUDO_CMD.
echo "$SG_ADMIN_CERT" | $SUDO_CMD tee "$ES_CONF_DIR/kirk.pem" > /dev/null
echo "$NODE_CERT" | $SUDO_CMD tee "$ES_CONF_DIR/esnode.pem" > /dev/null
echo "$ROOT_CA" | $SUDO_CMD tee "$ES_CONF_DIR/root-ca.pem" > /dev/null
echo "$NODE_KEY" | $SUDO_CMD tee "$ES_CONF_DIR/esnode-key.pem" > /dev/null
echo "$SG_ADMIN_CERT_KEY" | $SUDO_CMD tee "$ES_CONF_DIR/kirk-key.pem" > /dev/null
# Append the Search Guard demo settings to elasticsearch.yml.
# NOTE(review): this line is the only one missing "> /dev/null", so the blank
# line is echoed to stdout as well — presumably unintentional; confirm.
echo "" | $SUDO_CMD tee -a "$ES_CONF_FILE"
echo "######## Start Search Guard Demo Configuration ########" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "# WARNING: revise all the lines below before you go into production" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.transport.pemcert_filepath: esnode.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.transport.pemkey_filepath: esnode-key.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.transport.pemtrustedcas_filepath: root-ca.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.transport.enforce_hostname_verification: false" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.http.enabled: true" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.http.pemcert_filepath: esnode.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.http.pemkey_filepath: esnode-key.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.ssl.http.pemtrustedcas_filepath: root-ca.pem" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.allow_unsafe_democertificates: true" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
# Optional auto-initialization of the Search Guard index (set by the caller).
if [ "$initsg" == 1 ]; then
echo "searchguard.allow_default_init_sgindex: true" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
echo "searchguard.authcz.admin_dn:" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "  - CN=kirk,OU=client,O=client,L=test, C=de" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.audit.type: internal_elasticsearch" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.enable_snapshot_restore_privilege: true" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo "searchguard.check_snapshot_restore_write_privileges: true" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
echo 'searchguard.restapi.roles_enabled: ["sg_all_access"]' | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
# The settings below are only appended when not already present in the file.
#cluster.routing.allocation.disk.threshold_enabled
if $SUDO_CMD grep --quiet -i "^cluster.routing.allocation.disk.threshold_enabled" "$ES_CONF_FILE"; then
: #already present
else
echo 'cluster.routing.allocation.disk.threshold_enabled: false' | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
#cluster.name
if $SUDO_CMD grep --quiet -i "^cluster.name" "$ES_CONF_FILE"; then
: #already present
else
echo "cluster.name: searchguard_demo" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
#network.host
if $SUDO_CMD grep --quiet -i "^network.host" "$ES_CONF_FILE"; then
: #already present
else
# Bind on all interfaces only when a multi-node cluster was requested.
if [ "$cluster_mode" == 1 ]; then
echo "network.host: 0.0.0.0" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
fi
#discovery.zen.minimum_master_nodes
if $SUDO_CMD grep --quiet -i "^discovery.zen.minimum_master_nodes" "$ES_CONF_FILE"; then
: #already present
else
echo "discovery.zen.minimum_master_nodes: 1" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
#node.max_local_storage_nodes
if $SUDO_CMD grep --quiet -i "^node.max_local_storage_nodes" "$ES_CONF_FILE"; then
: #already present
else
echo 'node.max_local_storage_nodes: 3' | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
#xpack.security.enabled
if $SUDO_CMD grep --quiet -i "^xpack.security.enabled" "$ES_CONF_FILE"; then
: #already present
else
# Disable X-Pack security if the module is installed, so it does not
# conflict with Search Guard's TLS/auth layer.
if [ -d "$ES_MODULES_DIR/x-pack-security" ];then
echo "xpack.security.enabled: false" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
fi
fi
echo "######## End Search Guard Demo Configuration ########" | $SUDO_CMD tee -a "$ES_CONF_FILE" > /dev/null
$SUDO_CMD chmod +x "$ES_PLUGINS_DIR/search-guard-6/tools/sgadmin.sh"
# Canonicalize the plugins path before embedding it in the generated script.
ES_PLUGINS_DIR=`cd "$ES_PLUGINS_DIR" ; pwd`
echo "### Success"
echo "### Execute this script now on all your nodes and then start all nodes"
#Generate sgadmin_demo.sh
echo "#!/bin/bash" | $SUDO_CMD tee sgadmin_demo.sh > /dev/null
echo $SUDO_CMD \""$ES_PLUGINS_DIR/search-guard-6/tools/sgadmin.sh"\" -cd \""$ES_PLUGINS_DIR/search-guard-6/sgconfig"\" -icl -key \""$ES_CONF_DIR/kirk-key.pem"\" -cert \""$ES_CONF_DIR/kirk.pem"\" -cacert \""$ES_CONF_DIR/root-ca.pem"\" -nhnv | $SUDO_CMD tee -a sgadmin_demo.sh > /dev/null
$SUDO_CMD chmod +x sgadmin_demo.sh
# Print follow-up instructions depending on whether auto-init was enabled.
if [ "$initsg" == 0 ]; then
echo "### After the whole cluster is up execute: "
$SUDO_CMD cat sgadmin_demo.sh | tail -1
echo "### or run ./sgadmin_demo.sh"
echo "### After that you can also use the Search Guard Configuration GUI, see http://docs.search-guard.com/v6/configuration-gui"
else
echo "### Search Guard will be automatically initialized."
echo "### If you like to change the runtime configuration "
echo "### change the files in ../sgconfig and execute: "
$SUDO_CMD cat sgadmin_demo.sh | tail -1
echo "### or run ./sgadmin_demo.sh"
echo "### To use the Search Guard Configuration GUI see http://docs.search-guard.com/v6/configuration-gui"
fi
echo "### To access your Search Guard secured cluster open https://<hostname>:<HTTP port> and log in with admin/admin."
echo "### (Ignore the SSL certificate warning because we installed self-signed demo certificates)"
#!/bin/bash
# Remove generated search/eval log artifacts from the working directory.
# -f: do not error (or prompt) when a glob matches nothing or a file is
# write-protected; without it a clean tree makes this script exit non-zero.
rm -rf search_logs*
rm -rf logs_eval*
rm -f logs_search/*
# NOTE(review): logs_eval* directories were already removed two lines up, so
# this line only matters if logs_eval/ survives the glob above — confirm intent.
rm -f logs_eval/*
|
<reponame>scott-wyatt/ngrx-forms<filename>src/update-function/set-value.spec.ts
import { setValue } from './set-value';
import { FormGroupValue, INITIAL_STATE } from './test-util';
import { updateGroup } from './update-group';
// Spec for the setValue update function: each case only checks that a new
// (non-identical) state object is produced, both curried and uncurried.
describe(setValue.name, () => {
  it('should call reducer for controls', () => {
    const applyUpdate = setValue<string>('A');
    const updated = applyUpdate(INITIAL_STATE.controls.inner);
    expect(updated).not.toBe(INITIAL_STATE.controls.inner);
  });

  it('should call reducer for groups', () => {
    const applyUpdate = setValue<FormGroupValue>({ inner: 'A', inner5: INITIAL_STATE.value.inner5 });
    const updated = applyUpdate(INITIAL_STATE);
    expect(updated).not.toBe(INITIAL_STATE);
  });

  it('should call reducer for arrays', () => {
    const applyUpdate = setValue<string[]>(['A']);
    const updated = applyUpdate(INITIAL_STATE.controls.inner5);
    expect(updated).not.toBe(INITIAL_STATE.controls.inner5);
  });

  it('should call reducer for controls uncurried', () => {
    const updated = setValue(INITIAL_STATE.controls.inner, 'A');
    expect(updated).not.toBe(INITIAL_STATE.controls.inner);
  });

  it('should call reducer for groups uncurried', () => {
    const updated = setValue(INITIAL_STATE, { inner: 'A', inner5: INITIAL_STATE.value.inner5 });
    expect(updated).not.toBe(INITIAL_STATE);
  });

  it('should call reducer for arrays uncurried', () => {
    const updated = setValue(INITIAL_STATE.controls.inner5, ['A']);
    expect(updated).not.toBe(INITIAL_STATE.controls.inner5);
  });

  it('should throw if curried and no state', () => {
    const applyUpdate = setValue<string>('');
    expect(() => applyUpdate(undefined as any)).toThrowError();
  });

  it('should work inside an updateGroup', () => {
    const updated = updateGroup(INITIAL_STATE, {
      inner: setValue<string>('A'),
    });
    expect(updated).not.toEqual(INITIAL_STATE);
  });

  it('should work inside an updateGroup uncurried', () => {
    const updated = updateGroup<typeof INITIAL_STATE.value>(INITIAL_STATE, {
      inner: inner => setValue<string>(inner, 'A'),
    });
    expect(updated).not.toEqual(INITIAL_STATE);
  });
});
|
/**
 * Parses a string into a boolean.
 *
 * <p>{@code null} maps to {@code false}; "true"/"false" are matched
 * case-insensitively and with surrounding whitespace ignored; anything else
 * is rejected.
 *
 * @param input the text to parse; may be {@code null}
 * @return {@code true} for "true", {@code false} for {@code null} or "false"
 * @throws IllegalArgumentException if the input is neither "true" nor "false"
 */
public static boolean stringToBoolean(String input) {
    if (input == null) {
        return false;
    }
    final String normalized = input.toLowerCase().trim();
    if ("true".equals(normalized)) {
        return true;
    }
    if ("false".equals(normalized)) {
        return false;
    }
    throw new IllegalArgumentException("Cannot convert to boolean: " + input);
}
boolean result = stringToBoolean("True");
System.out.println(result);
// Output: true |
package migrations
const (
	// migration4 modifies the database to support enrichments:
	// it tags every update_operation with a "kind" (backfilling existing
	// rows as 'vulnerability'), and adds the enrichment table plus the
	// uo_enrich join table linking enrichments to update operations.
	// see: https://github.com/quay/clair-enrichment-spec
	migration4 = `
ALTER TABLE update_operation
ADD COLUMN kind text;
CREATE INDEX on update_operation (kind);
UPDATE update_operation
SET kind = 'vulnerability';
CREATE TABLE enrichment
(
id BIGSERIAL PRIMARY KEY,
hash_kind text,
hash bytea,
updater text,
tags text[],
data jsonb
);
CREATE UNIQUE INDEX ON enrichment (hash_kind, hash);
-- use inverted index for tags index
CREATE INDEX ON enrichment USING gin (tags);
CREATE TABLE uo_enrich
(
uo BIGINT REFERENCES update_operation (id),
enrich BIGINT REFERENCES enrichment (id),
updater text,
fingerprint text,
date timestamptz,
PRIMARY KEY (uo, enrich)
);
`
)
|
import glob
import os
import os.path as osp
import cv2

# For every scene under main*_xml*: if paired "ims_*.rgbe" renders exist,
# blend them with the matching "im_*.rgbe" (1/3 im + 2/3 ims), write the
# result as .hdr and delete the .rgbe sources; otherwise just rename the
# existing .rgbe files to .hdr.
srcs = glob.glob('main*_xml*')
for src in srcs:
    scenes = glob.glob(osp.join(src, 'scene*'))
    for scene in scenes:
        print(scene)
        imsNames = glob.glob(osp.join(scene, 'ims_*.rgbe'))
        if len(imsNames) != 0:
            for imsName in imsNames:
                imName = imsName.replace('ims_', 'im_')
                im = cv2.imread(imName, -1)
                ims = cv2.imread(imsName, -1)
                # Fixed-weight blend of the two renders.
                imn = 1.0 / 3.0 * im + 2.0 / 3.0 * ims
                imnName = imName.replace('.rgbe', '.hdr')
                cv2.imwrite(imnName, imn)
                # Use os.remove instead of os.system('rm %s'): the shell form
                # breaks on paths containing spaces or shell metacharacters.
                os.remove(imName)
                os.remove(imsName)
        else:
            imNames = glob.glob(osp.join(scene, 'im_*.rgbe'))
            for imName in imNames:
                imNewName = imName.replace('.rgbe', '.hdr')
                # os.replace is the safe, shell-free equivalent of `mv`.
                os.replace(imName, imNewName)
|
<gh_stars>0
package org.acme.quarkus.sample;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.enterprise.context.ApplicationScoped;
import com.google.cloud.translate.Translate.TranslateOption;
import com.google.cloud.translate.TranslateOptions;
import com.google.cloud.translate.Translation;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Outgoing;
import io.reactivex.Flowable;
import io.smallrye.reactive.messaging.annotations.Broadcast;
import io.vertx.core.json.JsonObject;
/**
* LinguaGreeterService
*/
@ApplicationScoped
public class LinguaGreeterService {
Logger logger = Logger.getLogger(LinguaGreeterService.class.getName());
@ConfigProperty(name="cloud.profile",defaultValue = "localhost")
String cloudProfile;
@ConfigProperty(name="tick.time",defaultValue = "5")
long tickTime;
@ConfigProperty(name="google.api.translate.srcLangCode")
String srcLangCode;
@ConfigProperty(name="google.api.translate.targetLangCodes")
List<String> targetLangCodes;
@ConfigProperty(name="greetings.text")
String text;
@Outgoing("translated-greetings")
public Flowable<String> greetings(){
return Flowable.interval(tickTime, TimeUnit.SECONDS)
.map(tick -> {
logger.info("Tick : "+tick);
String targetLangCode = targetLangCodes.get(0);
Collections.rotate(targetLangCodes, 1);
String translatedText = String.format("%s(%s)",
translateText(text,targetLangCode),targetLangCode);
JsonObject transTextJson = new JsonObject().
put("translatedText",translatedText)
.put("cloud", cloudProfile)
.put("lang", value);
return transTextJson.encode();
});
}
// @Incoming("translated-greetings")
// public void logger(String greetingText){
// logger.info("Greetings:"+greetingText);
// }
private String translateText(String text, String targetLangCode){
TranslateOptions translateOptions = TranslateOptions.getDefaultInstance();
Translation translator = translateOptions
.getService()
.translate(text,
TranslateOption.sourceLanguage(srcLangCode),
TranslateOption.targetLanguage(targetLangCode));
return translator.getTranslatedText();
}
} |
import Foundation
// Model for a feed image: loads its data from `url` and caches it via the
// associated FeedCache.
// NOTE(review): @NSManaged properties normally require an NSManagedObject
// subclass; this class does not inherit from one — confirm this compiles in
// the real project context.
class FeedImage {
@NSManaged public var location: String?
@NSManaged public var url: URL?
@NSManaged public var feedCache: FeedCache?
// Stable identity of this image.
var id: UUID
// Raw image bytes, populated by retrieveImageData.
var imageData: Data?
init(id: UUID, url: URL) {
self.id = id
self.url = url
}
// Loads the image bytes from `url` on a background queue, stores them in
// `imageData`, caches them, and reports the result via `completion`
// (nil when the URL is missing or the load fails).
func retrieveImageData(completion: @escaping (Data?) -> Void) {
guard let url = url else {
completion(nil)
return
}
// Simulate asynchronous network request to retrieve image data
DispatchQueue.global().async {
do {
let data = try Data(contentsOf: url)
self.imageData = data
self.cacheImageData()
completion(data)
} catch {
completion(nil)
}
}
}
// Forwards the loaded bytes to the cache, keyed by this image's id.
private func cacheImageData() {
guard let imageData = imageData else { return }
if let feedCache = feedCache {
feedCache.cacheImageData(imageData, for: id)
}
}
}
// FeedImage already declares a stored `id: UUID`, which satisfies the
// Identifiable requirement directly. The previous computed `id` in this
// extension was an invalid redeclaration whose getter returned `self.id`
// — i.e. it called itself, recursing infinitely if ever invoked.
extension FeedImage: Identifiable {}
#!/bin/bash
# SLURM batch job: run the intron mutation simulation for the
# coordinate range [11400, 11600) on a single task.
#SBATCH -n 1
time python mu_sims_intron.py 11400 11600
const fmt = require('util').format;
const { User } = require('../../../models/User');
const OAuth = require('../../../models/OAuth');
const utils = require('./utils');
module.exports = {
command: 'create',
desc: 'Create a access token',
builder(yargs) {
yargs
.option('user-id', {
alias: 'u',
describe: 'User ID for token',
type: 'string',
required: true
})
.option('client-id', {
alias: 'c',
describe: 'Client ID for token',
type: 'string',
required: true
});
},
handler: async argv => {
try {
let user = await User.findById(argv.u);
if (!user) {
throw new Error(fmt(`User ID: "%s" doesn't exist!`, argv.u));
}
user = {
id: user._id.toString()
};
const client = await OAuth.getClientById(argv.c);
if (!client) {
throw new Error(fmt(`Client ID: "%s" doesn't exist!`, argv.c));
}
const accessTokenExpiresAt = new Date();
accessTokenExpiresAt.setSeconds(accessTokenExpiresAt.getSeconds() + 1800);
const refreshTokenExpiresAt = new Date();
refreshTokenExpiresAt.setSeconds(
refreshTokenExpiresAt.getSeconds() + 3600
);
const token = {
accessToken: OAuth.generateAccessToken(client, user, undefined),
accessTokenExpiresAt,
refreshToken: OAuth.generateAccessToken(client, user, undefined),
refreshTokenExpiresAt
};
const data = await OAuth.saveToken(token, client, user);
if (!data) {
throw new Error(
fmt(
`Something went wrong data wasn't returned after save for token: %s`,
token.accessToken
)
);
}
await utils.showToken(token.accessToken);
process.exit(0);
} catch (error) {
throw error;
}
}
};
|
@WebServlet("/auth")
public class AuthServlet extends HttpServlet {
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String email = request.getParameter("email");
String password = request.getParameter("password");
try {
// Authenticate user with email and password
User user = User.authenticate(email, password);
// If authentication is successful, return success message
if (user != null) {
response.getWriter().write("{ message: 'success' }");
} else {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
}
} catch (Exception e) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
}
} |
<html>
<head>
<script type="text/javascript">
window.onload = function(){
    // Populate the customer table body once the DOM is ready.
    var customersTable = document.getElementById('customerTable').getElementsByTagName('tbody')[0];
    // Get customer information from database
    // NOTE(review): `customers` is not defined in this page; it is presumably
    // injected by server-side templating or a preceding script — confirm.
    // Iterate on each customer
    for(var customer of customers) {
        var row = customersTable.insertRow();
        row.insertCell().innerHTML = customer.name;
        row.insertCell().innerHTML = customer.age;
        row.insertCell().innerHTML = customer.gender;
        row.insertCell().innerHTML = customer.email;
    }
}
</script>
</head>
<body>
<table id="customerTable">
    <caption> Customer Information </caption>
    <thead>
        <!-- FIX: header cells must sit inside a <tr>, and header cells
             should be <th>, not <td>. -->
        <tr>
            <th>Name</th>
            <th>Age</th>
            <th>Gender</th>
            <th>Email</th>
        </tr>
    </thead>
    <tbody>
    </tbody>
</table>
</body>
</html>
const db = require("../../models");
const ObjectID = require("mongodb").ObjectID;
module.exports = {
addConnection: async (req, res) => {
let connectionBody= req.body;
const newConnection = new db.Connection(connectionBody)
newConnection.save()
.then(saved => {
if(!saved){
res.status(400).json(false)
}
if(saved) {
return res.status(201).json("success")
}
}).catch(error => {
return res.status(500).json(error)
})
}
} |
<filename>VHClassSDKDemo/VHClassSDKDemo/Classes/Modular/Section2/Live & Interactive/interactive/VCLiveLayoutView.h<gh_stars>0
//
//  VCLiveLayoutView.h
//  VHClassSDK_bu
//
//  Created by vhall on 2019/2/19.
//  Copyright © 2019 class. All rights reserved.
//
#import <UIKit/UIKit.h>
@class VCInteractiveMaskView;
@class VCLiveLayoutView;
@class VHCInteractiveRoom;
NS_ASSUME_NONNULL_BEGIN
// Overall layout mode of the live view: one-on-one, or one-to-many.
typedef NS_ENUM(NSUInteger,ViewLayoutType) {
ViewLayoutType1v1,
ViewLayoutType1vN,
};
// Role of a render view being added to the layout.
typedef NS_ENUM(NSUInteger,AddViewType) {
AddViewTypeHost, // instructor's video view
AddViewTypeShared, // instructor's shared desktop
AddViewTypeOwn, // the student's own view
AddViewTypeOther, // another student's view
};
// UI events raised from a tile's controls: camera, microphone,
// switch screen, switch (front/back) camera.
// NOTE(review): "Stauts" below is a typo for "Status" kept for API stability.
typedef NS_ENUM(NSUInteger,LayoutEvent) {
LayoutEvent_VideoStauts, // camera on/off event
LayoutEvent_Audio, // microphone event
LayoutEvent_ScreenChange, // switch screen
LayoutEvent_VideoSwitch, // switch camera
};
// Delegate notified when a control button on a tile is tapped.
@protocol VCLiveLayoutViewDelegate <NSObject>
- (void)layoutView:(VCLiveLayoutView *)layoutView layoutEvent:(LayoutEvent)event renderView:(UIView *)renderView clickButton:(UIButton *)sender;
@end
// Container view that arranges instructor/student render views for a live
// interactive classroom session.
@interface VCLiveLayoutView : UIView
@property (nonatomic, weak) id <VCLiveLayoutViewDelegate> delegate;
- (instancetype)initWithFrame:(CGRect)frame layoutType:(ViewLayoutType)type;
- (void)addView:(UIView *)view type:(AddViewType)type;
- (void)removeView:(UIView *)view;
- (void)removAllViews;
// Microphone state change for a participant (possibly changed by another user).
- (void)room:(VHCInteractiveRoom *)room liveUser:(NSString *)joinId micphoneStatusChanged:(BOOL)isClose byUser:(NSString *)byUserId;
// Camera state change for a participant (possibly changed by another user).
- (void)liveUser:(NSString *)joinId cameraStatusChanged:(BOOL)isClose byUser:(NSString *)byUserId;
// Promote the given render view into the main (large) slot.
- (void)changeMainViewWithMaskView:(UIView *)renderView;
@property (nonatomic, weak, nullable) UIView *docView;
@end
NS_ASSUME_NONNULL_END
|
#!/bin/bash
# Build a single cloud-friendly GeoTIFF + GeoPackage pyramid for the
# swisstopo LK25 color map from individual source tiles.
BASEPATH=/geodata/ch.swisstopo.lk25.farbig/
OUTPATH=/geodata/output/ch.swisstopo.lk25.farbig/
# Mosaic all tiles into one virtual raster (with alpha band).
gdalbuildvrt -addalpha $OUTPATH/lk25_farbig.vrt $BASEPATH/*.tif
# Materialize the VRT as a tiled, DEFLATE-compressed BigTIFF in EPSG:2056.
gdal_translate $OUTPATH/lk25_farbig.vrt $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif -a_srs EPSG:2056 -co 'COMPRESS=DEFLATE' -co 'PREDICTOR=2' -co 'TILED=YES' -co 'BIGTIFF=YES'
# Build external overviews level by level (each .ovr gets its own factor-2
# overview, producing a chain of .ovr.ovr... files).
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif 2
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif.ovr 2
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif.ovr.ovr 2
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif.ovr.ovr.ovr 2
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif.ovr.ovr.ovr.ovr 2
gdaladdo --config COMPRESS_OVERVIEW DEFLATE --config PREDICTOR_OVERVIEW 2 -ro -r average $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif.ovr.ovr.ovr.ovr.ovr 2
# Fold the overview chain back into a single self-contained GeoTIFF.
gdal_translate $OUTPATH/ch.swisstopo.lk25.farbig_tmp.tif $OUTPATH/ch.swisstopo.lk25.farbig.tif -co 'COPY_SRC_OVERVIEWS=YES' -co 'COMPRESS=DEFLATE' -co 'PREDICTOR=2' -co 'TILED=YES' -co 'BIGTIFF=YES'
# Export to GeoPackage raster tiles and add its overview pyramid.
gdal_translate -expand rgba --config OGR_SQLITE_SYNCHRONOUS OFF -co APPEND_SUBDATASET=YES -co RASTER_TABLE=ch.swisstopo.lk25.farbig -co TILE_FORMAT=PNG_JPEG -of GPKG $OUTPATH/ch.swisstopo.lk25.farbig.tif $OUTPATH/ch.swisstopo.lk25.farbig.gpkg
gdaladdo --config OGR_SQLITE_SYNCHRONOUS OFF -oo TABLE=ch.swisstopo.lk25.farbig -r average $OUTPATH/ch.swisstopo.lk25.farbig.gpkg 2 4 8 16 32 64 128 256
|
string = 'abbcde'
length = 3

def count_substrings(string, length):
    """Return the number of contiguous substrings of ``string`` that have
    exactly ``length`` characters.

    Matches the original loop's behavior for all inputs:
    - negative ``length``: 0 (no slice can have negative length)
    - ``length`` == 0: len(string) + 1 empty substrings
    - ``length`` > len(string): 0
    The closed form replaces the O(n) loop whose per-iteration
    ``len(substring) == length`` check was always true for valid windows.
    """
    if length < 0:
        return 0
    return max(0, len(string) - length + 1)
#!/usr/bin/env bash
# Intended to be run after authenticating to services such as Dropbox
# Restore applications
mackup restore
# Force configuration of gcal
gcalcli agenda
npm install -g diff-so-fancy
# Start syncing mail
# NOTE(review): this encrypts the literal string "prime" into the password
# file — presumably a placeholder to be replaced with the real secret; the
# plaintext also transits the pipe. Confirm before use.
echo "prime" | gpg -e -r "William Huba" --no-tty --batch > ~/.passwords/prime.gpg
brew services start offlineimap
|
# Build the catkin workspace in Release mode, pinning CMake to the
# Python 3.6 interpreter/headers/library so ROS nodes use Python 3.
# (No comments may be interleaved below: the backslashes continue one command.)
catkin_make --cmake-args \
            -DCMAKE_BUILD_TYPE=Release \
            -DPYTHON_EXECUTABLE=/usr/bin/python3 \
            -DPYTHON_INCLUDE_DIR=/usr/include/python3.6m \
            -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython3.6m.so
|
<filename>tests/testsuite/CUDASamples/6_Advanced_mergeSort_mergeRanksAndIndicesKernel/common_merge.h
// Shared helpers for the CUDA merge-sort sample (ranks/indices kernels).
typedef unsigned int uint;
#define SHARED_SIZE_LIMIT 1024U
#define SAMPLE_STRIDE 128
// NOTE(review): unparenthesized macro args — fine for the simple call sites
// here, but umin(a+b, c) would misparse; confirm usage stays simple.
#define umin(x,y) (x < y ? x : y)
// Integer ceiling division: ceil(a / b).
__device__ static __attribute__((always_inline)) uint iDivUp(uint a, uint b)
{
return ((a % b) == 0) ? (a / b) : (a / b + 1);
}
// Number of SAMPLE_STRIDE-sized sample slots covering `dividend` elements.
__device__ static __attribute__((always_inline)) uint getSampleCount(uint dividend)
{
return iDivUp(dividend, SAMPLE_STRIDE);
}
#define W (sizeof(uint) * 8)
// Smallest power of two >= x, computed via count-leading-zeros instead of
// the classic bit-smearing loop (kept below for reference).
__device__ static __attribute__((always_inline)) uint nextPowerOfTwo(uint x)
{
/*
--x;
x |= x >> 1;
x |= x >> 2;
x |= x >> 4;
x |= x >> 8;
x |= x >> 16;
return ++x;
*/
return 1U << (W - __clz(x - 1));
}
// Binary search over the first L elements of `data` (sorted ascending when
// sortDir != 0, descending otherwise). `stride` starts at a power of two
// >= L. Inclusive variant: elements equal to `val` are counted before the
// returned position.
template<uint sortDir> __device__ static __attribute__((always_inline)) uint binarySearchInclusive(uint val, uint *data, uint L, uint stride)
{
if (L == 0)
{
return 0;
}
uint pos = 0;
for (; stride > 0; stride >>= 1)
{
uint newPos = umin(pos + stride, L);
if ((sortDir && (data[newPos - 1] <= val)) || (!sortDir && (data[newPos - 1] >= val)))
{
pos = newPos;
}
}
return pos;
}
// Exclusive variant: elements equal to `val` are NOT counted before the
// returned position (strict comparison).
template<uint sortDir> __device__ static __attribute__((always_inline)) uint binarySearchExclusive(uint val, uint *data, uint L, uint stride)
{
if (L == 0)
{
return 0;
}
uint pos = 0;
for (; stride > 0; stride >>= 1)
{
uint newPos = umin(pos + stride, L);
if ((sortDir && (data[newPos - 1] < val)) || (!sortDir && (data[newPos - 1] > val)))
{
pos = newPos;
}
}
return pos;
}
|
<reponame>AthosMatos/ATHOS-Engine
#include "D2D.h"
// Out-of-line definitions of D2D's static interface pointers, all
// initialized to NULL until the D2D/D3D interop pipeline is created.
IDXGIKeyedMutex* D2D::keyedMutex11 = NULL;
IDXGIKeyedMutex* D2D::keyedMutex10 = NULL;
ID2D1RenderTarget* D2D::D2DRenderTarget = NULL;
ID3D11Buffer* D2D::d2dIndexBuffer = NULL;
ID3D11Buffer* D2D::d2dVertBuffer = NULL;
ID3D11ShaderResourceView* D2D::d2dTexture = NULL;
IDWriteFactory* D2D::DWriteFactory = NULL;
#!/bin/sh
# Integration test for wal-g WAL-delta backups: take several delta backups
# under different compression methods, destroy the cluster, restore from
# LATEST, and verify the restored data matches a pre-crash dump.
set -e -x
export WALE_S3_PREFIX=s3://waldeltabucket
export WALG_USE_WAL_DELTA=true
/usr/lib/postgresql/10/bin/initdb ${PGDATA}
# Archive every WAL segment through wal-g and record it under /tmp/deltas.
echo "archive_mode = on" >> /var/lib/postgresql/10/main/postgresql.conf
echo "archive_command = '/usr/bin/timeout 600 /usr/bin/wal-g wal-push %p && mkdir -p /tmp/deltas/$(basename %p)'" >> /var/lib/postgresql/10/main/postgresql.conf
echo "archive_timeout = 600" >> /var/lib/postgresql/10/main/postgresql.conf
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
# Grow the database between backups, switching compression each time.
pgbench -i -s 10 postgres
wal-g backup-push ${PGDATA}
export WALG_COMPRESSION_METHOD=lz4
pgbench -i -s 20 postgres
wal-g backup-push ${PGDATA}
export WALG_COMPRESSION_METHOD=lzma
pgbench -i -s 30 postgres
wal-g backup-push ${PGDATA}
export WALG_COMPRESSION_METHOD=brotli
pgbench -i -s 40 postgres
# Snapshot the logical contents before the final backup.
pg_dumpall -f /tmp/dump1
sleep 1
wal-g backup-push ${PGDATA}
# Destroy the cluster, then restore the latest backup and replay WAL.
scripts/drop_pg.sh
wal-g backup-fetch ${PGDATA} LATEST
echo "restore_command = 'echo \"WAL file restoration: %f, %p\"&& /usr/bin/wal-g wal-fetch \"%f\" \"%p\"'" > ${PGDATA}/recovery.conf
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
# The restored dump must be identical to the pre-crash dump.
pg_dumpall -f /tmp/dump2
diff /tmp/dump1 /tmp/dump2
scripts/drop_pg.sh
echo "Several delta backup success!!!!!!"
|
/**
 * Contains <a href="http://netty.io/">Netty</a>-based HTTP-specific part of the
 * {@link stincmale.server.Server} framework.
 *
 * <p>All parameters in this package are non-null by default, per the
 * package-level {@code @ParametersAreNonnullByDefault} annotation below.
 */
@javax.annotation.ParametersAreNonnullByDefault
package stincmale.server.netty4.tcp.http;
|
from wtforms import Form, StringField
from wtforms.validators import DataRequired
class FirstRunForm(Form):
    """Form rendered on the application's first-run page.

    Collects the initial settings (currently just the gallery title)
    required before the gallery can be used.
    """

    # Required free-text title for the gallery.
    gallery_title = StringField(
        "Gallery title:",
        validators=[DataRequired()],
        description="Gallery title name",
    )
description="Gallery title name") |
class Solution:
    def findMaxAverage(self, nums: List[int], k: int) -> float:
        """Return the maximum average of any contiguous subarray of length k.

        Assumes 1 <= k <= len(nums) (as in the original). Uses a sliding
        window over the running sum. FIX: the previous version initialized
        the best average to the magic sentinel -10000, which returned a wrong
        answer whenever every window average was below -10000; the first
        window's sum is the correct initial value.
        """
        window = sum(nums[:k])
        best = window
        for r in range(k, len(nums)):
            # Slide the window one position right: add nums[r], drop nums[r-k].
            window += nums[r] - nums[r - k]
            best = max(best, window)
        return best / k
|
import React from 'react';
import s from './index.scss';

/** Props for a single showcase card. */
interface Props {
  cover?: string;
  link?: string;
  name?: string;
  descr?: string;
}

/**
 * A showcase card with a cover background that links to an external page,
 * showing the item's name and description over a mask layer.
 */
const ShowItem: React.FC<Props> = (props) => {
  const { cover, link, name, descr } = props;
  const coverStyle = { backgroundImage: `url(${cover})` };

  return (
    <div style={coverStyle} className={s.showItem}>
      <a href={link} rel='noreferrer' target='_blank' className={s.link}>
        <div className={s.title}>
          <span>{name}</span>
        </div>
        <div className={s.descr}>{descr}</div>
        <div className={s.mask} />
      </a>
    </div>
  );
};

export default ShowItem;
|
# import relevant libraries
import pandas as pd
import numpy as np
import tensorflow.keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# load the data
raw_dataset = pd.read_csv('house_prices.csv')

# one-hot-encode categorial data
dataset = pd.get_dummies(raw_dataset, columns=['no_of_bedrooms'])

# split data into training and testing sets (80/20, fixed seed)
train_dataset = dataset.sample(frac=0.8, random_state=0)
test_dataset = dataset.drop(train_dataset.index)

# get the data labels
train_labels = train_dataset.pop('price')
test_labels = test_dataset.pop('price')

# build the model: two hidden layers, single regression output
model = Sequential([
    Dense(64, activation='relu', input_shape=[len(train_dataset.keys())]),
    Dense(64, activation='relu'),
    Dense(1)
])

# compile and train the model
model.compile(loss='mse',
              optimizer=tensorflow.keras.optimizers.RMSprop(0.001),
              metrics=['mae', 'mse'])
model.fit(train_dataset, train_labels, epochs=100, verbose=0)

# evaluate the model.
# FIX: with loss='mse' and metrics=['mae', 'mse'], evaluate() returns THREE
# values: [loss, mae, mse]. The previous two-variable unpacking raised
# "ValueError: too many values to unpack".
test_loss, test_mae, test_mse = model.evaluate(test_dataset, test_labels, verbose=2)
print("Test MAE: {}, Test MSE: {}".format(test_mae, test_mse))
<filename>kattis/soylent.cc
// https://open.kattis.com/problems/soylent
#include <iostream>
using namespace std;
// For each of t test cases, print the number of 400-unit Soylent servings
// needed to cover n units: n/400 rounded up.
int main() {
  int cases;
  std::cin >> cases;
  while (cases-- > 0) {
    int n;
    std::cin >> n;
    int servings = n / 400;
    if (n % 400 != 0) {
      ++servings;
    }
    std::cout << servings << std::endl;
  }
  return 0;
}
|
import re
def is_valid_passport(passport):
    """Return True if `passport` (whitespace-separated key:value fields)
    contains all required fields and each passes its validity rule
    (Advent of Code 2020 day 4 part 2 rules)."""
    required_fields = {"byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"}
    fields = dict(field.split(":") for field in passport.split())
    if required_fields.issubset(fields.keys()):
        if not (1920 <= int(fields["byr"]) <= 2002):
            return False
        if not (2010 <= int(fields["iyr"]) <= 2020):
            return False
        if not (2020 <= int(fields["eyr"]) <= 2030):
            return False
        # Height: 59-76 inches or 150-193 centimeters.
        if not re.match(r"^(59|6\d|7[0-6])in$|^(1[5-8]\d|19[0-3])cm$", fields["hgt"]):
            return False
        if not re.match(r"^#[0-9a-f]{6}$", fields["hcl"]):
            return False
        if fields["ecl"] not in {"amb", "blu", "brn", "gry", "grn", "hzl", "oth"}:
            return False
        if not re.match(r"^\d{9}$", fields["pid"]):
            return False
        return True
    return False

def count_valid_passports(passport_data):
    """Count valid passports in `passport_data`, a list of lines where an
    empty line ("") separates one passport's lines from the next.

    FIX: the previous implementation joined all lines with a single space
    and then split on " ", which turned every individual field into its own
    "passport" and destroyed the blank-line grouping — the sample below
    returned 0 instead of the documented 2. Join with newlines and split
    passports on blank lines instead.
    """
    passports = "\n".join(passport_data).split("\n\n")
    return sum(is_valid_passport(passport) for passport in passports)

passport_data = [
    "ecl:gry pid:860033327 eyr:2020 hcl:#fffffd",
    "byr:1937 iyr:2017 cid:147 hgt:183cm",
    "",
    "iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884",
    "hcl:#cfa07d byr:1929",
    "",
    "hcl:#ae17e1 iyr:2013 eyr:2024 ecl:brn pid:760753108 byr:1931 hgt:179cm",
    "",
    "hcl:#cfa07d eyr:2025 pid:166559648 iyr:2011 ecl:brn hgt:59in"
]

print(count_valid_passports(passport_data))  # Output: 2
#include <iostream>
#include <BWAPI.h>
#include <string>
#include "tPlayManager.h"
using namespace BWAPI;
using namespace Filter;
// Rebuilds the per-frame snapshot of our Terran economy/production state
// (unit counts, in-progress counts, idle producers, depot construction)
// from BWAPI, then delegates the build decision to test().
// NOTE(review): relies on member state `busno`, `building_depot`, `kinds`
// and the Bus/PMBus types declared elsewhere in the project.
Bus tPlaymanager::refresh(PMBus r){
// Debug overlay: minerals/gas remaining after the reserved amounts in r.
Broodwar->drawTextScreen(0, 30, "%d %d", Broodwar->self()->minerals() - r.resource.mineral, Broodwar->self()->gas() - r.resource.gas);
Bus b; b.busno = busno;
map<UnitType, vector<Unit>> table;
// Seed the depot-under-construction counter from the incoming list, if present.
r.list.find(UnitTypes::Terran_Supply_Depot) != r.list.end() ? building_depot = r.list[UnitTypes::Terran_Supply_Depot] : building_depot = 0;
// n: completed-unit counts; _n: in-progress counts; idle: has an idle producer.
map<UnitType, int> n, _n;
map<UnitType, bool> idle;
unsigned C = 0;
// Start every tracked kind at zero / not idle.
for (auto& u : kinds){
n[u] = 0;
_n[u] = 0;
idle[u] = false;
}
// Fold in the caller-supplied counts (queued units and addons).
for (auto& u : r.list){
if (_n.find(u.first) == _n.end())
idle[u.first] = false;
//n[u.first] = u.second;
_n[u.first] = u.second;
}
for (auto& u : r.addons){
if (_n.find(u.first) == _n.end())
idle[u.first] = false;
_n[u.first] = u.second;
}
// Walk our live units: index buildings by type, track depot construction,
// count training buildings (C) and mark completed non-training ones idle.
for (auto& u : Broodwar->self()->getUnits()){
UnitType UT = u->getType();
if (IsBuilding(u)){
idle.find(UT) != idle.end() ? 0 : idle[UT] = false;
if (table.find(UT) != table.end())
table[UT].push_back(u);
else{
vector<Unit> UV;
UV.push_back(u);
table[UT] = UV;
}
if (UT == UnitTypes::Terran_Supply_Depot){
if (!u->isCompleted()) building_depot++;
}
if (u->isTraining()) C++;
else if (u->isCompleted()) idle[UT] = true;
}
// Incomplete units count as in-progress, complete ones as finished.
if (!u->isCompleted()) _n.find(UT) != _n.end() ? _n[UT]++ : _n[UT] = 0;
else n.find(UT) != n.end() ? n[UT]++ : n[UT] = 0;
}
// Publish the snapshot and hand off to the decision logic.
b.bb.table = table;
r.C = C;
r.number = n;
r._number = _n;
r.idle = idle;
return test(b, r);
}
// Priority-ordered decision ladder: given the refreshed counts in r, stamp at
// most one order (SCV / depot / barracks / factory / add-on / squad unit /
// research) onto the bus and return. Earlier rules win; each hit returns
// immediately with a fresh bus number.
Bus tPlaymanager::test(Bus res, PMBus r){
    // Desired supply headroom: 2 per currently-training building plus slack.
    int C = r.C * 2 + 2;
    // Start collecting gas once a barracks exists or is under construction.
    res.cb.gas2 = res.cb.gas = r.number[UnitTypes::Terran_Barracks] + r._number[UnitTypes::Terran_Barracks] > 0;
    // GATE CONDITION
    // Spend nothing while at or below the caller's mineral reservation.
    if (Broodwar->self()->minerals() <= r.resource.mineral) return res;
    // SCV
    // Keep producing workers until the target worker count r.wk is reached.
    if (r.number[UnitTypes::Terran_SCV] < r.wk && ok(r, UnitTypes::Terran_SCV)){
        res.bb.busno = ++busno;
        res.bb.UT = UnitTypes::Terran_SCV;
        return res;
    }
    // DEPOT
    // Build a depot when projected supply (incl. depots in progress, 16 supply
    // each) runs short of the headroom target, staying under the 400 cap.
    if (Broodwar->self()->supplyTotal() + building_depot * 16 - Broodwar->self()->supplyUsed() <= C && Broodwar->self()->supplyTotal() + building_depot < 400 ){ // C = PRODUCING BUILDINGS * 2
        res.cb.busno = ++busno;
        res.cb.UT = UnitTypes::Terran_Supply_Depot;
        return res;
    }
    // BARRACK
    // Exactly one barracks once the first depot is up.
    if (r.number[UnitTypes::Terran_Supply_Depot] > 0 && r.number[UnitTypes::Terran_Barracks] + r._number[UnitTypes::Terran_Barracks] == 0){
        res.cb.busno = ++busno;
        res.cb.UT = UnitTypes::Terran_Barracks;
        return res;
    }
    // FACTORY
    // Cap of 2 factories, raised to 3 once Siege Mode is researched.
    if (r.number[UnitTypes::Terran_Factory] + r._number[UnitTypes::Terran_Factory] < 2 + Broodwar->self()->hasResearched(TechTypes::Tank_Siege_Mode) && r.number[UnitTypes::Terran_Barracks] > 0){
        res.cb.busno = ++busno;
        res.cb.UT = UnitTypes::Terran_Factory;
        return res;
    }
    // MACHINE_SHOP
    // Roughly one machine shop per two factories (counting in-progress ones).
    if (r.number[UnitTypes::Terran_Machine_Shop] + r._number[UnitTypes::Terran_Machine_Shop] < (r.number[UnitTypes::Terran_Factory] + r._number[UnitTypes::Terran_Factory]) / 2 &&
        r.number[UnitTypes::Terran_Factory] > 0){
        res.bb.busno = ++busno;
        res.bb.UT = UnitTypes::Terran_Machine_Shop;
        return res;
    }
    /// SQUAD
    // Marine
    // Marines only while no factory exists yet.
    if (r.number[UnitTypes::Terran_Factory] == 0 && ok(r, UnitTypes::Terran_Marine)){
        res.bb.busno = ++busno;
        res.bb.UT = UnitTypes::Terran_Marine;
        return res;
    }
    // Tank
    // At most one queued tank per completed machine shop.
    if (ok(r, UnitTypes::Terran_Siege_Tank_Tank_Mode) && r.number[UnitTypes::Terran_Machine_Shop] > 0 && r.number[UnitTypes::Terran_Machine_Shop] > r._number[UnitTypes::Terran_Siege_Tank_Tank_Mode]){
        res.bb.busno = ++busno;
        res.bb.UT = UnitTypes::Terran_Siege_Tank_Tank_Mode;
        return res;
    }
    // Vulture
    // Fallback squad unit whenever affordable.
    if (ok(r, UnitTypes::Terran_Vulture)){
        res.bb.busno = ++busno;
        res.bb.UT = UnitTypes::Terran_Vulture;
        return res;
    }
    /// RESEARCH
    // VULTURE
    // MINE & BOOST
    // One-shot flags so each research/upgrade is ordered only once.
    // NOTE(review): function-local statics persist for the whole process and
    // are shared by every tPlaymanager instance — confirm this is intended.
    static bool _mine = true, _ion = true, _siege = true;
    if (r.number[UnitTypes::Terran_Machine_Shop] > 0 && ok(r, TechTypes::Spider_Mines) && _mine){
        _mine = false;
        res.bb.busno = ++busno;
        res.bb.TT = TechTypes::Spider_Mines;
        return res;
    }
    else if (r.number[UnitTypes::Terran_Machine_Shop] > 0 && ok(r, UpgradeTypes::Ion_Thrusters) && _ion){
        _ion = false;
        res.bb.busno = ++busno;
        res.bb.UpT = UpgradeTypes::Ion_Thrusters;
        return res;
    }
    else if (r.number[UnitTypes::Terran_Machine_Shop] > 0 && ok(r, TechTypes::Tank_Siege_Mode) && _siege){
        _siege = false;
        res.bb.busno = ++busno;
        res.bb.TT = TechTypes::Tank_Siege_Mode;
        return res;
    }
    // Nothing matched: return the bus unchanged (apart from the gas flags).
    return res;
}
// True when unit UT can be started right now: enough minerals and gas beyond
// the manager's reservation, an idle producer building, and spare supply.
bool tPlaymanager::ok(PMBus r, UnitType UT){
    Player self = Broodwar->self();
    return self->minerals() >= r.resource.mineral + UT.mineralPrice()
        && self->gas() >= r.resource.gas + UT.gasPrice()
        && r.idle[UT.whatBuilds().first]
        && self->supplyTotal() >= self->supplyUsed() + UT.supplyRequired();
}
// True when tech UT can be researched now: not already researched, resources
// available beyond the reservation, and the researching building is idle.
bool tPlaymanager::ok(PMBus r, TechType UT){
    Player self = Broodwar->self();
    if (self->hasResearched(UT)) return false;  // one-time research already done
    return self->minerals() >= r.resource.mineral + UT.mineralPrice()
        && self->gas() >= r.resource.gas + UT.gasPrice()
        && r.idle[UT.whatResearches()];
}
// True when upgrade UT can be started now: not yet at max level, resources
// available beyond the reservation, and the upgrading building is idle.
bool tPlaymanager::ok(PMBus r, UpgradeType UT){
    Player self = Broodwar->self();
    if (self->getUpgradeLevel(UT) == self->getMaxUpgradeLevel(UT)) return false;  // maxed out
    return self->minerals() >= r.resource.mineral + UT.mineralPrice()
        && self->gas() >= r.resource.gas + UT.gasPrice()
        && r.idle[UT.whatUpgrades()];
}
#!/bin/bash
# Batch-create user accounts from a file of "username groupname realname"
# lines. Each account gets a home directory, bash shell, the given group
# (created if missing), and an expiry date 5 days from today.
#
# Fixes vs. original: `if[[` lacked the mandatory space (syntax error),
# `result = $(...)` is not a valid assignment in shell, `$(date)+5` produced
# a garbage expiry string, and `cat | while` ran the loop in a subshell so
# `exit 1` could not abort the script.

# Expiry date 5 days out, in the YYYY-MM-DD format useradd -e expects.
expiredate=$(date -d "+5 days" +%Y-%m-%d)

if [[ -z "$1" ]]; then
    echo ""
    echo "Please provide one file name"
    exit 1
fi

# read -r: keep backslashes literal; realname soaks up the rest of the line.
while read -r username groupname realname
do
    # Skip malformed lines missing any field.
    if [[ -z $username || -z $groupname || -z $realname ]]; then
        continue
    fi
    # Skip users that already exist.
    if grep -Eq "^$username:" /etc/passwd; then
        echo "User '$username' already exists"
        continue
    fi
    # Create the group first if it does not exist yet.
    if ! grep -Eq "^$groupname:" /etc/group; then
        groupadd "$groupname"
    fi
    if useradd -c "$realname" \
        -d "/home/$username" \
        -e "$expiredate" \
        -f 365 \
        -g "$groupname" \
        -m \
        -s /bin/bash \
        "$username"; then
        echo "User <$username> has been added"
    else
        echo "ERROR: adding user '$username' with (group '$groupname'), (name '$realname')"
        exit 1
    fi
done < "$1"
<reponame>kuldeepkeshwar/react-webpack-boilerplate
import React from 'react';
import './styles.scss';
const Tag = props => (<label key={props.text} htmlFor className="label">{props.text}</label>);
Tag.propTypes = {
text: React.PropTypes.string.isRequired,
};
export default Tag;
|
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast: abort on errors, unset variables, and failures inside pipelines.
set -o errexit
set -o nounset
set -o pipefail

# Enable tracing in this script off by setting the TRACE variable in your
# environment to any value:
#
# $ TRACE=1 test.sh
TRACE=${TRACE:-""}
if [ -n "$TRACE" ]; then
  set -x
fi

# Tool versions to fetch and the target platform for the binaries.
k8s_version=1.19.2
etcd_version=3.4.10
goarch=amd64
goos="unknown"

# Map $OSTYPE onto a GOOS value; only Linux and macOS are supported.
if [[ "$OSTYPE" == "linux"* ]]; then
  goos="linux"
elif [[ "$OSTYPE" == "darwin"* ]]; then
  goos="darwin"
fi

if [[ "$goos" == "unknown" ]]; then
  echo "OS '$OSTYPE' not supported. Aborting." >&2
  exit 1
fi

# Turn colors in this script off by setting the NO_COLOR variable in your
# environment to any value:
#
# $ NO_COLOR=1 test.sh
NO_COLOR=${NO_COLOR:-""}
if [ -z "$NO_COLOR" ]; then
  header=$'\e[1;33m'
  reset=$'\e[0m'
else
  header=''
  reset=''
fi

# Print a highlighted section header.
function header_text {
  echo "$header$*$reset"
}

rc=0  # NOTE(review): appears unused in this excerpt — confirm before removing
tmp_root=/tmp
kb_root_dir=$tmp_root/kubebuilder
kb_orig=$(pwd)

# Skip fetching and untaring the tools by setting the SKIP_FETCH_TOOLS variable
# in your environment to any value:
#
# $ SKIP_FETCH_TOOLS=1 ./fetch_ext_bins.sh
#
# If you skip fetching tools, this script will use the tools already on your
# machine, but rebuild the kubebuilder and kubebuilder-bin binaries.
SKIP_FETCH_TOOLS=${SKIP_FETCH_TOOLS:-""}
# Prepare the staging directory: on a fresh fetch wipe everything; when
# SKIP_FETCH_TOOLS is set keep the cached tools and remove only previously
# built kubebuilder artifacts.
function prepare_staging_dir {
  header_text "preparing staging dir"
  if [ -n "$SKIP_FETCH_TOOLS" ]; then
    rm -f "$kb_root_dir/kubebuilder/bin/kubebuilder"
    rm -f "$kb_root_dir/kubebuilder/bin/kubebuilder-gen"
    rm -f "$kb_root_dir/kubebuilder/bin/vendor.tar.gz"
  else
    rm -rf "$kb_root_dir"
  fi
}
# fetch k8s API gen tools (kubectl, kube-apiserver, etcd) and unpack them
# under $tmp_root, unless SKIP_FETCH_TOOLS is set. Reuses a previously
# downloaded archive when present.
function fetch_tools {
  if [ -n "$SKIP_FETCH_TOOLS" ]; then
    return 0
  fi

  header_text "fetching tools"
  kb_tools_archive_name="kubebuilder-tools-$k8s_version-$goos-$goarch.tar.gz"
  kb_tools_download_url="https://storage.googleapis.com/kubebuilder-tools/$kb_tools_archive_name"

  kb_tools_archive_path="$tmp_root/$kb_tools_archive_name"
  # Fix: quote the operand of -f so the path cannot word-split or glob.
  if [ ! -f "$kb_tools_archive_path" ]; then
    curl -fsL "$kb_tools_download_url" -o "$kb_tools_archive_path"
  fi
  tar -zvxf "$kb_tools_archive_path" -C "$tmp_root/"
}
# Export the env vars the test suite expects: tool binaries on PATH, the
# TEST_ASSET_* locations, and a non-root $HOME.
function setup_envs {
  header_text "setting up env vars"

  # Setup env vars
  export PATH=/tmp/kubebuilder/bin:$PATH
  export TEST_ASSET_KUBECTL=/tmp/kubebuilder/bin/kubectl
  export TEST_ASSET_KUBE_APISERVER=/tmp/kubebuilder/bin/kube-apiserver
  export TEST_ASSET_ETCD=/tmp/kubebuilder/bin/etcd

  # Ensure that some home var is set and that it's not the root.
  # Fix: quote $HOME and use the POSIX "=" operator — the original unquoted
  # `[ $HOME == "/" ]` mis-parses when HOME is empty or contains spaces.
  export HOME=${HOME:=/tmp/kubebuilder-testing}
  if [ "$HOME" = "/" ]; then
    export HOME=/tmp/kubebuilder-testing
  fi
}
|
/*
* Interval.sql
* Chapter 3, Oracle10g PL/SQL Programming
* by <NAME>, <NAME>, <NAME>
*
* This script demonstrates the use of the INTERVAL types
*/
-- Reset the demo schema (triggers, procedures, tables) before the examples.
exec clean_schema.trigs
exec clean_schema.procs
exec clean_schema.tables

-- Echo DBMS_OUTPUT from the PL/SQL blocks below.
SET SERVEROUTPUT ON

PROMPT
PROMPT ** This estimates the time until my daughter leaves for college **
PROMPT

-- YEAR TO MONTH interval: add 12 years and 3 months to the base timestamp.
DECLARE
  v_college_deadline TIMESTAMP;
BEGIN
  v_college_deadline := TO_TIMESTAMP('06/06/2004', 'DD/MM/YYYY')
                      + INTERVAL '12-3' YEAR TO MONTH;
  DBMS_OUTPUT.PUT_LINE('My daughter leaves for college in '
                       ||v_college_deadline);
END;
/

PROMPT
PROMPT ** This gets the date a little more exact **
PROMPT

-- DAY TO SECOND interval: refine the estimate by 19 days and 9 hours more.
DECLARE
  v_college_deadline TIMESTAMP;
BEGIN
  v_college_deadline := TO_TIMESTAMP('06/06/2004', 'DD/MM/YYYY')
                      + INTERVAL '12-3' YEAR TO MONTH
                      + INTERVAL '19 9:0:0.0' DAY TO SECOND;
  DBMS_OUTPUT.PUT_LINE('My daughter leaves for college in '
                       ||v_college_deadline);
END;
/
|
<gh_stars>10-100
//
// Flare.h
// Flare
//
// Created by <NAME> on 8/14/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
//! Project version number for Flare.
FOUNDATION_EXPORT double FlareVersionNumber;
//! Project version string for Flare.
FOUNDATION_EXPORT const unsigned char FlareVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Flare/PublicHeader.h>
|
//*********************************************************************************
//
// Copyright(c) 2016-2017 <NAME> All Rights Reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//*********************************************************************************
package cmu.xprize.comp_questions;
import cmu.xprize.util.ILoadableObject;
import edu.cmu.xprize.listener.ListenerBase;
/**
 * Contract between the comprehension-questions (CQ) tutor component and the
 * view layer that renders the story, cloze (fill-in-the-blank) questions and
 * picture-matching activities. Implementations also receive ASR results via
 * the {@code onUpdate(...)} overloads.
 */
public interface ICQn_ViewManager extends ILoadableObject {

    /** Loads the story assets from the given path/location and binds the owning listener. */
    public void initStory(IVManListener owner, String assetPath, String assetLocation);

    /** Releases resources; call when the hosting component is destroyed. */
    public void onDestroy();

    /** Begins presentation of the loaded story. */
    void startStory();

    //UHQ
    /** Switches the view into picture-matching mode. */
    void setPictureMatch();

    /** Switches the view to a cloze page. */
    void setClozePage();

    // --- Story navigation, from coarse to fine granularity ---

    void seekToPage(int pageIndex);
    void nextPage();
    void prevPage();

    void seekToParagraph(int paraIndex);
    void nextPara();
    void prevPara();

    void seekToLine(int lineIndex);
    // NOTE(review): echo/parrot presumably replay vs. prompt-repeat the current
    // line — confirm against the implementing class.
    void echoLine();
    void parrotLine();
    void nextLine();
    void prevLine();

    void seekToWord(int wordIndex);
    void nextWord();
    void prevWord();

    /** Applies the given highlight; {@code update} controls whether the view refreshes immediately. */
    void setHighLight(String highlight, boolean update);

    //UHQ
    // --- Question selection and display ---
    void decideToPlayGenericQuestion();
    void genericQuestions();
    void displayGenericQuestion();
    void setRandomGenericQuestion();
    void setClozeQuestion();
    void displayClozeQuestion();
    void displayPictureMatching();
    // NOTE(review): despite the boolean-sounding names, these two return void.
    void hasClozeDistractor();
    void hasQuestion();

    /** @return true when the story has no further content to present. */
    public boolean endOfData();

    // --- Speech listener (ASR) callbacks ---
    public void onUpdate(String[] heardWords);
    public void onUpdate(ListenerBase.HeardWord[] heardWords, boolean finalResult);
    public void onUpdate(String[] heardWords, boolean finalResult);
    public void continueListening();

    // --- Control-state plumbing (commands are string-encoded) ---
    public void setSpeakButton(String command);
    public void setPageFlipButton(String command);

    // Image-choice buttons for picture-matching questions.
    public void resetImageButtons();
    public void showImageButtons();
    public void hideImageButtons();
    public void enableImageButtons();
    public void disableImageButtons();

    // Answer buttons for cloze questions.
    public void resetClozeButtons();
    public void showClozeButtons();
    public void hideClozeButtons();
    public void enableClozeButtons();
    public void disableClozeButtons();

    // Cloze blank / target word presentation.
    public void showClozeWordInBlank();
    public void hideClozeWordInBlank();
    public void publishClozeWord();
    public void highlightClozeWord();
    public void undoHighlightClozeWord();
    public void playClozeSentence();

    /** Dispatches a named command with an arbitrary payload to the view. */
    public void execCommand(String _command, Object _target);

    //    public int getmCurrPara();
    //
    //    public int getmCurrLine();
    //
    //    public int getmParaCount();
    //
    //    public int getmLineCount();

    //    public int getSegmentNdx();

    //    public int getNumSegments();

    //    public int getUtteranceNdx();

    //    public int getNumUtterance();

    //    public boolean getEndOfSentence();
    //
    //    public CASB_Narration[] getRawNarration();

    //    public int getUtterancePrev();

    //    public int getSegmentPrev();

    // --- Current activity mode queries ---
    boolean isClozeMode();
    boolean isGenMode();
    boolean isPicMode();
}
|
#!/bin/bash
# Run the named Gatling simulation, archive the results, and upload the
# archive to transfer.sh.
# Fixes vs. original: quote "$1" so simulation names cannot word-split, and
# abort if the cd fails instead of tarring the wrong directory.
if [ $# -le 0 ]
then
    cowsay -d "Arguments expected: gatling-test"
    exit 1
fi

# $1 = fully qualified simulation class to run.
gatling.sh -sf user-files -s "$1"

cd /opt/gatling || exit 1
tar -czvf gatling.tar.gz results
# Upload the archive; -O - -nv prints the returned download URL quietly.
wget --method PUT --body-file=gatling.tar.gz https://transfer.sh/gatling.tar.gz -O - -nv
|
#!/bin/bash -e
# Build the Maven project living next to this script inside a throwaway
# maven:3.5-jdk-8 container, mounting the project at the same absolute path.
# Fix: quote $DIR everywhere so paths containing spaces do not word-split.
DIR=$(dirname "$(readlink -f "$0")")
docker run --rm -v "$DIR:$DIR" maven:3.5-jdk-8 mvn -f "$DIR" clean install
|
<filename>representation/representation.go<gh_stars>1-10
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package representation
import (
"net/url"
)
// Representation is an HTTP resource representation.
//
// For the purposes of HTTP, a "representation" is information that is
// intended to reflect a past, current, or desired state of a given
// resource, in a format that can be readily communicated via the
// protocol, and that consists of a set of representation metadata and a
// potentially unbounded stream of representation data.
type Representation interface {
	// ContentLocation reports the URL used for the Content-Location header.
	ContentLocation() url.URL
	// ContentType reports the representation's media type (Content-Type).
	ContentType() string
	// ContentEncoding lists the applied content codings, in order.
	ContentEncoding() []string
	// ContentCharset reports the character set of the representation data.
	ContentCharset() string
	// ContentLanguage reports the natural language of the representation.
	ContentLanguage() string
	// ContentFeatures lists feature tags describing the representation.
	ContentFeatures() []string
	// SourceQuality reports the source quality value — presumably the 0..1
	// weight used in content negotiation; confirm with implementations.
	SourceQuality() float32
	// Bytes serializes the representation data.
	Bytes() ([]byte, error)
	// FromBytes populates the representation from serialized data.
	FromBytes([]byte) error
}
|
#!/usr/bin/env bash
# Configure this host as a NAT gateway/port-forwarder, then start the app server.
# Use Google public DNS for name resolution.
echo 'nameserver 8.8.8.8' > /etc/resolv.conf
# Flush any existing rules in the filter, nat and mangle tables.
iptables -F
iptables -F -t nat
iptables -F -t mangle
# Rewrite outbound source addresses to this host's eth0 address.
iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE
# Allow return traffic for established flows, and all eth1 -> eth0 traffic.
iptables -A FORWARD -i eth0 -o eth1 -m state --state RELATED,ESTABLISHED -j ACCEPT
iptables -A FORWARD -i eth1 -o eth0 -j ACCEPT
# Forward TCP port 80 arriving on eth1 for 142.1.97.172 to 10.0.0.3:8080.
iptables -t nat -A PREROUTING -p tcp -i eth1 -d 142.1.97.172 --dport 80 -j DNAT --to-destination 10.0.0.3:8080
# Hand off to the application server (blocks for the container's lifetime).
python /shared/server.py
|
# Launch the config GUI with the gmod2 configspec against the two fast*.cfg files.
python3 cfggui.py -c gmod2Configspec.cfg fast14.cfg fast19.cfg
|
<filename>7-assets/past-student-repos/dev-portfolio-master/src/components/~reusables/variables/spacing.js
//====== SPACING VARIABLES ======//
// Shared spacing scale (CSS px strings) used for margins/padding across components.
export const small_space = '12px';
export const normal_space = '20px';
export const medium_space = '32px';
export const large_space = '50px';
export const extra_large_space = '80px';
#!/bin/bash
# Entry point: optionally wire npm up to the wercker cache, then run the build.
main() {
  if [ "$WERCKER_NPM_BUILD_USE_CACHE" == "true" ]; then
    info "Using wercker cache"
    setup_cache
  fi
  # Tolerate a failing command here: npm_build handles retries and fail().
  set +e
  npm_build
  set -e
  success "Finished npm build"
}
# Point npm's package cache at a directory inside wercker's persisted cache
# dir so subsequent builds can reuse downloaded packages.
setup_cache() {
  debug 'Creating $WERCKER_CACHE_DIR/wercker/npm'
  mkdir -p "$WERCKER_CACHE_DIR/wercker/npm"

  debug 'Configuring npm to use wercker cache'
  npm config set cache "$WERCKER_CACHE_DIR/wercker/npm"
}
# Empty the npm cache (used after a failed build), but leave a .keep file so
# the directory still exists and overrides the previously stored cache.
clear_cache() {
  warn "Clearing npm cache"
  npm cache clear

  # make sure the cache contains something, so it will override cache that get's stored
  debug 'Creating $WERCKER_CACHE_DIR/wercker/npm'
  mkdir -p "$WERCKER_CACHE_DIR/wercker/npm"
  printf keep > "$WERCKER_CACHE_DIR/wercker/npm/.keep"
}
# Run "npm run build" with up to 3 attempts, optionally clearing the npm
# cache between failed attempts; calls fail() (aborting the step) if all
# retries are exhausted.
npm_build() {
  local retries=3;
  for try in $(seq "$retries"); do
    info "Starting npm build, try: $try"
    # NOTE(review): $WERCKER_NPM_INSTALL_OPTIONS is intentionally unquoted so
    # multiple options word-split into separate arguments — confirm.
    npm run build $WERCKER_NPM_INSTALL_OPTIONS && return;
    if [ "$WERCKER_NPM_BUILD_CLEAR_CACHE_ON_FAILED" == "true" ]; then
      clear_cache
    fi
  done
  fail "Failed to successfully execute npm build, retries: $retries"
}

main;
|
<reponame>thunderlink/thunderfish
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { Link } from 'react-router-dom'
import * as actions from 'store/actions'
import './CommentElement.css'
// A single comment row: shows the writer's nickname and profile link, and —
// only for the comment's author — in-place edit and delete controls.
class CommentElement extends Component {
    // editTry: whether the edit textarea is open; editText: the draft text.
    state = {
        editTry: false,
        editText: ""
    }
    constructor(props) {
        super(props)
        // Seed the draft with the comment's current body (direct state writes
        // are allowed here because the component is not mounted yet).
        this.state.editTry = false
        this.state.editText = this.props.commentDetail.comment_text
    }
    // Submit the edited text for this comment to the store/server.
    onSubmitHandler = (e) => {
        e.preventDefault()
        this.props.putCommentRequest(this.props.meetingId, this.props.commentDetail.id, this.state.editText)
    }
    // Toggle between display mode and edit mode.
    onEditHandler = (e) => {
        e.preventDefault()
        this.setState((prevState) => {
            return {...prevState, editTry: !prevState.editTry}
        })
    }
    // Delete this comment.
    onDeleteHandler = (e) => {
        e.preventDefault()
        this.props.deleteComment(this.props.meetingId, this.props.commentDetail.id)
    }
    render() {
        return (
            <div className="comment-element">
                {/* Writer info with a link to their profile page. */}
                <div className="writer">
                    <p> {this.props.commentDetail.nickname} </p>
                    <Link to={`/user/${this.props.commentDetail.writer}/`}> 정보 </Link>
                </div>
                <form onSubmit={this.onSubmitHandler}>
                {
                    // Author viewing own comment: either the edit form or the
                    // text plus edit/delete buttons; other users: text only.
                    (this.props.id === this.props.commentDetail.writer) ? (
                        (this.state.editTry) ? (
                            <div className="content">
                                <textarea
                                    type="text"
                                    value={this.state.editText}
                                    onChange={(e)=>this.setState({editText: e.target.value})}
                                />
                                <div className="button-set">
                                    <button type="submit">
                                        확인
                                    </button>
                                    <button onClick={this.onEditHandler}>
                                        취소
                                    </button>
                                </div>
                            </div>
                        ) : (
                            <div className="content">
                                <div className="text">
                                    <p> {this.props.commentDetail.comment_text}</p>
                                </div>
                                {
                                    (this.props.id === this.props.commentDetail.writer) ? (
                                        <div className="button-set">
                                            <button onClick={this.onEditHandler}>
                                                수정
                                            </button>
                                            <button onClick={this.onDeleteHandler}>
                                                삭제
                                            </button>
                                        </div>
                                    ) : (
                                        null
                                    )
                                }
                            </div>
                        )) : (
                        <div className="content">
                            <div className="text">
                                <p> {this.props.commentDetail.comment_text}</p>
                            </div>
                        </div>
                    )
                }
                </form>
            </div>
        )
    }
}
// Expose the signed-in user's id so the component can tell whether the
// current user authored this comment.
const mapStateToProps = state => ({
    id: state.user.id,
})
// Bind comment actions to dispatch. Handlers intentionally return undefined
// (bodies use braces) to match the original contract.
const mapDispatchToProps = dispatch => ({
    // Delete comment `id` belonging to meeting `pid`.
    deleteComment: (pid, id) => { dispatch(actions.comment.deleteCommentRequest(pid, id)) },
    // Replace the text of comment `id` on meeting `pid`.
    putCommentRequest: (pid, id, text) => { dispatch(actions.comment.putCommentRequest(pid, id, text)) },
})
export default connect(mapStateToProps, mapDispatchToProps)(CommentElement);
|
import {objClone} from 'ibuprofen/lib/obj'
// Default database connection settings; every field can be overridden by the
// caller's config object (see the module's default export).
const DEFAULT_CONFIG= {
    dialect: 'postgres',
    host: 'localhost',
    port: 5432,
    database: 'dibi',
    user: 'postgres',
    password: '<PASSWORD>', // placeholder — always supply a real password via config
    // Maximum/Minimum number of connection in pool
    max: 5,
    min: 0,
    // The maximum time, in milliseconds, that a connection can be idle before being released.
    // Use with combination of evict for proper working, for more details read
    // https://github.com/coopernurse/node-pool/issues/178#issuecomment-327110870
    idleTimeoutMillis: 10000
}
// Merge the caller's overrides onto a fresh copy of the defaults, leaving
// DEFAULT_CONFIG itself untouched. Omitting config yields pure defaults.
export default (config = {}) => Object.assign(objClone(DEFAULT_CONFIG), config)
|
# Sort the given file in reverse (descending) order to stdout.
# Fix: quote "$1" so filenames with spaces or glob characters work.
sort -r "$1"
|
const data = [
{
Technology: 'JavaScript',
Website: 'https://en.wikipedia.org/wiki/JavaScript',
Description: [
'A programming language for client-side web page interaction and behavior.',
'Also used for servers (Node.js), desktop applications (Electron.js), and mobile apps (React Native).',
]
},
{
Technology: 'HTML',
Website: 'https://en.wikipedia.org/wiki/HTML',
Description: [
'Hypertext Markup Language is used to create web page elements and meta data for search engines (SEO) and social media.',
'Used with CSS for styling and JavaScript for events and actions.',
],
},
{
Technology: 'CSS',
Website: 'https://en.wikipedia.org/wiki/Cascading_Style_Sheets',
Description: [
'Cascading Style Sheets is a style sheet language used for the presentation of HTML web pages.',
'Used for color, layout, animation, device responsiveness, and more.',
],
},
{
Technology: 'React.js',
Website: 'https://en.wikipedia.org/wiki/React_(web_framework)',
Description: [
'A library developed by Facebook which moves document creation from HTML to JavaScript with added simplicity and performance.',
'Used for developing single-page (GatsbyJS, Next.js, etc) and mobile (React Native) applications.',
],
},
{
Technology: 'Node.js',
Website: 'https://nodejs.org/en/about',
Description: [
'A JavaScript runtime designed to build scalable, network applications.',
'Used to run JavaScript outside of a web browser, for running servers or building native applications.',
],
},
{
Technology: 'Jest',
Website: 'https://jestjs.io',
Description: [
'A highly compatible JavaScript Testing Framework with a focus on simplicity.',
'Supports snapshot tests, integration tests, unit tests, dependency mocking, code coverage, and more.',
'Jest is a fork and superset of Jasmine, with extra features and performance improvements.',
],
},
{
Technology: 'SASS',
Website: 'https://sass-lang.com',
Description: [
'A CSS Preprocessor, SASS (or more closely, SCSS) is a superscript of CSS style sheet language with extra features.',
`Used to enhance readability, optimize output, separate large files, and improve code reuse.`,
],
},
{
Technology: 'TypeScript',
Website: 'https://en.wikipedia.org/wiki/TypeScript',
Description: [
'A superset of JavaScript programming language developed by Microsoft adding optional static typing to the language.',
'Used to catch type errors at compile time, better IDE code-suggestion, and polyfilling latest syntax and features (ES6/ES2015+) to older-browser-compatible JavaScript.',
],
},
{
Technology: 'PHP',
Website: 'https://en.wikipedia.org/wiki/PHP',
Description: [
'A general purpose scripting language, usually processed on a web server.',
'Used to create APIs or serve server-generated html built from templates with built-in scripts.',
],
},
{
Technology: 'Git',
Website: 'https://git-scm.com/about',
Description: [
'A collaboration and version control tool featuring branching, merging, history tracking, and more.',
'For collaboration, source code git repositories are usually pushed to cloud hosts such as GitHub, GitLab, Bitbucket, and more.',
],
},
{
Technology: 'Bash (Unix shell)',
Website: 'https://en.wikipedia.org/wiki/Bash_(Unix_shell)',
Description: [
'A command line interface (CLI) commonly used to read, run, navigate, and modify directories and files on Unix operating systems.',
'Git for windows also installs an emulated Bash terminal for windows operating systems.',
],
},
{
Technology: 'NPM',
Website: 'https://docs.npmjs.com/about-npm',
Description: [
'Node Package Manager is a public registry and manager for creation, publishing, and installation of JavaScript compatible modules.',
],
},
{
Technology: 'Electron.js',
Website: 'https://en.wikipedia.org/wiki/Electron_(software_framework)',
Description: [
'A framework for building desktop applications from web technologies (HTML, CSS, JavaScript, and more) by using Chromium browser engine and Node.js runtime.',
],
},
{
Technology: 'R',
Website: 'https://en.wikipedia.org/wiki/R_(programming_language)',
Description: [
'A programming language for calculating or developing software for statistics, graphing, or data analysis.',
],
},
{
Technology: 'Python',
Website: 'https://en.wikipedia.org/wiki/Python_(programming_language)',
Description: [
'A programming language known for ease of use and readability, often used for data science, web development, and software prototyping.',
],
},
{
Technology: 'C',
Website: 'https://en.wikipedia.org/wiki/C_(programming_language)',
Description: [
'A programming language from 1970s still used today for performance and low-level access.',
],
},
{
Technology: 'C++',
Website: 'https://en.wikipedia.org/wiki/C%2B%2B',
Description: [
'A superset of the C programming language with added features and version or platform specific alterations.',
],
},
{
Technology: 'Java',
Website: 'https://en.wikipedia.org/wiki/Java_(programming_language)',
Description: [
'An object oriented programming language developed by Oracle featuring high platform compatability through the Java Virtual Machine (JVM).',
],
},
{
Technology: 'Supertest',
Website: 'https://www.npmjs.com/package/supertest',
Description: [
'A JavaScript library for testing HTTP assertions via Superagent, an Ajax (Asynchronous JavaScript and XML) client',
],
},
{
Technology: 'React Testing Library',
Website: 'https://testing-library.com/docs/react-testing-library/intro',
Description: [
'A JavaScript library for testing React component rendering and functionality by mocking events and user input.',
],
},
{
Technology: 'Knex.js',
Website: 'http://knexjs.org',
Description: [
'A JavaScript library for building database agnostic SQL queries.',
],
},
{
Technology: 'TypeORM',
Website: 'https://typeorm.io/#/',
Description: [
'A TypeScript library for building database agnostic SQL queries.',
],
},
{
Technology: 'PostgreSQL',
Website: 'https://en.wikipedia.org/wiki/PostgreSQL',
Description: [
'A free and open-source relational database management system (RDBMS) emphasizing extensibility and SQL compliance.',
],
},
{
Technology: 'Redux.js',
Website: 'https://redux.js.org',
Description: [
'A JavaScript library for simplifying the sharing, modification, event response of an application\'s global state.',
'Redux simplifies state management through encapsulation (reducers) and abstraction (actions).',
],
},
{
Technology: 'Redux Thunk',
Website: 'https://www.npmjs.com/package/redux-thunk',
Description: [
'A JavaScript library for Redux.js, featuring dynamic action creators instead explicitly creating every permutation.',
'A thunk is a function that wraps an expression to delay its evaluation.',
],
},
{
Technology: 'jQuery',
Website: 'https://en.wikipedia.org/wiki/JQuery',
Description: [
'A JavaScript library designed to simplify HTML DOM tree traversal and manipulation, as well as event handling, CSS animation, and Ajax (Asynchronous JavaScript and XML).',
'As of June, 2020, jQuery is still used by more than 75% of the 10 million most popular websites (w3techs.com).',
'Modern JavaScript (ES6/2015+) has already implemented the most common jQuery features into the language.',
],
},
{
Technology: 'Bootstrap',
Website: 'https://en.wikipedia.org/wiki/Bootstrap_(front-end_framework)',
Description: [
'A CSS framework for responsive web pages, laybout, typography, forms, buttons, navigation, and more.',
],
},
{
Technology: 'Material-UI',
Website: 'https://material-ui.com/',
Description: [
'A stylized React component library for responsive web pages, laybout, typography, forms, buttons, navigation, and more.',
],
},
{
Technology: 'Angular',
Website: 'https://en.wikipedia.org/wiki/Angular_(web_framework)',
Description: [
'A TypeScript framework for developing web application from a hierarchy of components composed of an HTML template and TypeScript handler.',
],
},
{
Technology: 'Express.js',
Website: 'https://expressjs.com/',
Description: [
'A Node.js JavaScript framework simplifying the creation of HTTP servers.',
],
},
{
Technology: 'Django',
Website: 'https://www.djangoproject.com/start/overview/',
Description: [
'A Python framework for creating web applications, with built in tools for front end, back end, and security.',
],
},
{
Technology: 'SQLite',
Website: 'https://en.wikipedia.org/wiki/SQLite',
Description: [
'A relational database management system (RDBMS) for a small, embedded database engine.',
'SQLite uses a dynamically and weakly typed SQL syntax',
],
},
{
Technology: 'LESS',
Website: 'http://lesscss.org/#overview',
Description: [
'A CSS Preprocessor, LESS is a superscript of CSS style sheet language with extra features.',
'Less popular than SASS.',
],
},
{
Technology: 'Axios',
Website: 'https://www.npmjs.com/package/axios',
Description: [
'A JavaScript library which simplifies Ajax/XMLHttpRequests and adds features and security.',
'More browser-compatible than modern JavaScript\'s Fetch API.',
],
},
{
Technology: 'JWT',
Website: 'https://en.wikipedia.org/wiki/JSON_Web_Token',
Description: [
'JSON Web Token is an Internet standard for creating data with optional signature and/or optional encryption whose payload holds JSON data.',
'Used for authentication of data, but it should never package sensitive data because the payload can always be read.',
],
},
{
Technology: 'Session management',
Website: 'https://en.wikipedia.org/wiki/Session_(computer_science)',
Description: [
'Using a unique session token (via an authorization HTTP cookies) to lookup a client\'s session id and information.',
'All data, remains server-side, unlike JWTs.',
],
},
{
Technology: 'SQL',
Website: 'https://en.wikipedia.org/wiki/SQL',
Description: [
'Structured Query Language is a language used in programming and designed for managing data held in a relational database management system (RDBMS).',
'Different RDBMSs have extended the language in different ways, such as altering query output or adding custom constraints.'
],
},
{
Technology: 'Flask',
Website: 'https://en.wikipedia.org/wiki/Flask_(web_framework)',
Description: [
'A micro web framework for Python servers and web applications.',
'Has extensions for adding features of other, more robust web frameworks.',
],
},
{
Technology: 'Pipenv',
Website: 'https://pypi.org/project/pipenv/',
Description: [
'A tool that combines PIP and VitualENV for simple and contained python dependency management.',
'Prevents bloat and conflicts in the global environment.',
],
},
{
Technology: 'GraphQL',
Website: 'https://en.wikipedia.org/wiki/GraphQL',
Description: [
'A data query and manipulation language for APIs, and a runtime for fulfilling queries with existing data.',
'The query and answer both share the same shape.',
],
},
{
Technology: 'Pandas',
Website: 'https://en.wikipedia.org/wiki/Pandas_(software)',
Description: [
'A software library written for Python for data manipulation and analysis. In particular, it offers data structures and operations for manipulating numerical tables and time series.',
],
},
{
Technology: 'styled-components',
Website: 'https://styled-components.com/docs',
Description: [
'A JavaScript library for React.js which adds programmable CSS styling to component constructors.',
'Includes many additional features of CSS preprocessors (SASS, LESS, etc.).',
],
},
{
Technology: 'bcrypt',
Website: 'https://en.wikipedia.org/wiki/Bcrypt',
Description: [
'A password-hashing function based on the Blowfish cipher incorporating a salt to protect against rainbow table attacks and a slowness factor to resist brute-force search attacks.',
],
},
{
Technology: 'Markdown',
Website: 'https://en.wikipedia.org/wiki/Markdown',
Description: [
'A lightweight markup language with plain-text-formatting syntax creating rich text output from similarly readable plain text input.',
],
},
{
Technology: 'PIP',
Website: 'https://en.wikipedia.org/wiki/Pip_(package_manager)',
Description: [
'The standard package-management system used to install and manage software packages written in Python',
'Used to install individual packages or, more commonly, a list of packages in a "requirements.txt" file.',
],
},
{
Technology: 'Spring Framework',
Website: 'https://en.wikipedia.org/wiki/Spring_Framework',
Description: [
'An application framework and inversion of control container for the Java platform.',
'Used with many extensions for building web applications on top of the Java EE (Enterprise Edition) platform.',
],
},
{
Technology: 'Spring Boot',
Website: 'https://en.wikipedia.org/wiki/Spring_Framework#Spring_Boot',
Description: [
'A Spring Framework extension for simplifying the creation of stand-alone, production-grade Java applications.',
],
},
{
Technology: 'Apache Maven',
Website: 'https://maven.apache.org',
Description: [
'A software project management and comprehension tool, used for installing, extending, and building Java packages.',
],
},
{
Technology: 'Create React App',
Website: 'https://reactjs.org/docs/create-a-new-react-app.html',
Description: [
'An integrated toolchain for creating and managing React.js applications.',
'Does not support static or server-side rendering, by default.',
],
},
{
Technology: 'Gatsby.js',
Website: 'https://www.gatsbyjs.org/docs',
Description: [
'A toolchain and build tool for creating and managing React.js applications.',
'Features GraphQL and a wide range of plugins for building static files for deployment without a server.',
],
},
{
Technology: 'Heroku',
Website: 'https://en.wikipedia.org/wiki/Heroku',
Description: [
'A cloud platform as a service (PaaS) featuring simplicity of continuous integration and deployment.',
],
},
{
Technology: 'JUnit',
Website: 'https://junit.org/junit4',
Description: [
'JUnit is a simple framework to write repeatable tests for Java applications.',
],
},
{
Technology: 'AWS',
Website: 'https://en.wikipedia.org/wiki/Amazon_Web_Services',
Description: [
'Amazon Web Services provides on-demand cloud computing platforms and APIs on a metered pay-as-you-go basis.',
],
},
{
Technology: 'Apache Tomcat',
Website: 'https://en.wikipedia.org/wiki/Apache_Tomcat',
Description: [
'An open-source implementation of the Java Servlet, JavaServer Pages, Java Expression Language and WebSocket technologies.',
'Used as the default servlet for Spring Web MVC applications.',
],
},
{
Technology: 'Netty',
Website: 'https://en.wikipedia.org/wiki/Netty_(software)',
Description: [
'A non-blocking I/O client-server framework for the development of Java network applications.',
'Used as the default server for Spring WebFlux applications.'
],
},
{
Technology: 'Hibernate',
Website: 'https://en.wikipedia.org/wiki/Hibernate_(framework)',
Description: [
'An object-relational mapping (ORM) tool for the Java programming language, for mapping an object-oriented domain model to a relational database.',
'Used as the default data access technology behind Spring Data JPA, Spring Data JDBC, and more.'
],
},
{
Technology: 'Kubernetes',
Website: 'https://en.wikipedia.org/wiki/Kubernetes',
Description: [
'An open-source container-orchestration system for automating application deployment, scaling, and management.',
],
},
{
Technology: 'Docker',
Website: 'https://en.wikipedia.org/wiki/Docker_(software)',
Description: [
'A set of platform as a service (PaaS) products that uses OS-level virtualization to deliver software in containers.',
'Containers are isolated bundles of software and its libraries and configurations, all run by a single operating system kernel using fewer resources than virtual machines.',
'Often used with Kubernetes for scaling and network handling.'
],
},
{
Technology: 'Yarn',
Website: 'https://yarnpkg.com/getting-started',
Description: [
'Yarn is a package manager for JavaScript modules also using NPM registry.',
'Features greater installation speed and extra automated dependency conflict resolution than NPM.',
],
},
{
Technology: 'Webpack',
Website: 'https://en.wikipedia.org/wiki/Webpack',
Description: [
'A JavaScript bundler which generates static assets from module dependencies and web application assets.',
],
},
{
Technology: 'Babel',
Website: 'https://babeljs.io/docs/en',
Description: [
'A toolchain that is mainly used to convert ECMAScript 2015+ code into a backwards compatible version of JavaScript in current and older browsers or environments/',
],
},
{
Technology: 'MongoDB',
Website: 'https://en.wikipedia.org/wiki/MongoDB',
Description: [
'A NoSQL database program using JSON-like documents with optional schemas.',
'Outperforms SQL databases for simple queries.',
],
},
{
Technology: 'Netlify',
Website: 'https://docs.netlify.com',
Description: [
'A cloud platform as a service (PaaS) featuring simplicity of continuous integration and deployment for front ends and serverless APIs.',
],
},
{
Technology: 'Vercel',
Website: 'https://vercel.com',
Description: [
'A cloud platform as a service (PaaS) featuring simplicity of continuous integration and deployment for front ends and serverless APIs.',
],
},
{
Technology: 'Cloudflare',
Website: 'https://en.wikipedia.org/wiki/Cloudflare',
Description: [
'A combination of content-delivery-network (CDN) services, DDoS mitigation, Internet security, and distributed domain-name-server services.',
],
},
{
Technology: 'Figma',
Website: 'https://www.figma.com',
Description: [
'A cloud software as a service (SaaS) for UI/UX design, prototyping, and collaboration.',
'A web application most similar to Sketch in features, but without Mac OS requirement.',
],
},
{
Technology: 'Trello',
Website: 'https://www.figma.com',
Description: [
'A web-based Kanban-style list-making application for project management and collaboration.',
'Developed by Atlassian, the same company developing JIRA, using similar UI and features.',
],
},
{
Technology: 'Regular Expression',
Website: 'https://en.wikipedia.org/wiki/Regular_expression',
Description: [
'A sequence of characters that define a search pattern.',
'Used by string searching algorithms for "find" or "find and replace" or for input validation.',
],
},
{
Technology: 'XSLT/XPath',
Website: 'https://en.wikipedia.org/wiki/XPath',
Description: [
'A query language for selecting nodes from an XML document.',
'Used by to conditionally select HTML document nodes or their content/attributes for manipulation or parsing data.',
],
},
{
Technology: 'C#',
Website: 'https://en.wikipedia.org/wiki/C_Sharp_%28programming_language%29',
Description: [
'A type-safe object-oriented programming language designed by Microsoft alongside the .NET ecosystem.',
],
},
{
Technology: 'Team Foundation Version Control (TFVC)',
Website: 'https://docs.microsoft.com/en-us/azure/devops/repos/tfvc/what-is-tfvc',
Description: [
'A centralized version control system by Microsoft, integrated with Visual Studio.',
'Uses path-based branching and stores history only on a central server, compared to Git locally tracking all history and branching.',
],
},
{
Technology: 'Azure DevOps Server',
Website: 'https://en.wikipedia.org/wiki/Azure_DevOps_Server',
Description: [
'A Microsoft product that provides version control, reporting, requirements management, project management, automated builds, testing, and release management capabilities.',
],
},
{
Technology: 'MSTest',
Website: 'https://docs.microsoft.com/en-us/dotnet/core/testing/#mstest',
Description: [
'Microsoft\'s test framework for all .NET languages, with functionality similar to xUnit and NUnit.',
],
},
{
Technology: 'ASP.NET',
Website: 'https://en.wikipedia.org/wiki/ASP.NET',
Description: [
'An open-source, server-side web-application framework designed for web development to produce dynamic web pages.',
'Has multiple programming models for building modular web pages and APIs.'
],
},
{
Technology: 'SQL Server Management Studio (SSMS)',
Website: 'https://docs.microsoft.com/en-us/sql/ssms/sql-server-management-studio-ssms?view=sql-server-ver15',
Description: [
'An integrated environment for managing any SQL infrastructure, with built in Microsoft Azure integration',
],
},
{
Technology: 'Windows Presentation Foundation (WPF)',
Website: 'https://docs.microsoft.com/en-us/dotnet/desktop/wpf/overview/',
Description: [
'A UI framework for building Windows desktop apps, with more customization than Windows Forms.',
],
},
{
Technology: '.NET Framework',
Website: 'https://docs.microsoft.com/en-us/dotnet/framework/get-started/overview',
Description: [
'A Windows only version of .NET which is being deprecated after version 4 for better cross-platform consolidation.',
],
},
{
Technology: '.NET',
Website: 'https://docs.microsoft.com/en-us/dotnet/core/introduction',
Description: [
'.NET is a free, open-source development platform for building many kinds of apps, using the C# programming language.',
],
},
{
Technology: 'Windows Forms (WinForms)',
Website: 'https://docs.microsoft.com/en-us/dotnet/desktop/winforms/overview/',
Description: [
'A UI framework for rapidly building Windows desktop apps, simpler than Windows Presentation Foundation.',
],
},
{
Technology: 'Windows API (Win32 API)',
Website: 'https://docs.microsoft.com/en-us/windows/win32/desktop-programming',
Description: [
'The API for native C/C++ Windows applications that require direct access to Windows and hardware.',
],
},
{
Technology: 'Microsoft Teams',
Website: 'https://en.wikipedia.org/wiki/Microsoft_Teams',
Description: [
'A communication suite platform catered toward businesses which lets users communicate with voice calls, video calls, text messaging, media and files, with built in Microsoft 365 integration.',
],
},
{
Technology: 'Slack',
Website: 'https://en.wikipedia.org/wiki/Slack_(software)',
Description: [
'A communication platform catered toward businesses which lets users communicate with voice calls, video calls, text messaging, media and files.',
],
},
{
Technology: 'Discord',
Website: 'https://en.wikipedia.org/wiki/Discord_(software)',
Description: [
'A communication platform which lets users communicate with voice calls, video calls, text messaging, media and files in private chats or as part of community servers.',
],
},
{
Technology: 'Zoom',
Website: 'https://en.wikipedia.org/wiki/Zoom_(software)',
Description: [
'A video conferencing software used by businesses and academics which grew in popularity during the COVID-19 pandemic.',
],
},
{
Technology: 'OBS Studio',
Website: 'https://obsproject.com/',
Description: [
'A free and open source software for video recording and live streaming.',
],
},
{
Technology: 'Visual Studio',
Website: 'https://visualstudio.microsoft.com/vs/',
Description: [
'A comprehensive integrated development environment (IDE) for .NET and C++ developers on Windows.',
],
},
{
Technology: 'Visual Studio Code',
Website: 'https://code.visualstudio.com/',
Description: [
'A free and open source text and code editor developed by Microsoft, with extensions to support just about any programming language.',
],
},
{
Technology: 'GNU Image Manipulation Program (GIMP)',
Website: 'https://www.gimp.org/',
Description: [
'A free and open source rastorized graphics editor, with functionality similar to Adobe Photoshop.',
],
},
{
Technology: 'Notepad++',
Website: 'https://notepad-plus-plus.org/',
Description: [
'A free and open source text and code editor, with functionality similar to Visual Studio Code.',
],
},
{
Technology: 'Inkscape',
Website: 'https://inkscape.org/about/',
Description: [
'A free and open source vector graphics editor, with functionality similar to Adobe Illustrator.',
],
},
{
Technology: 'Kdenlive',
Website: 'https://kdenlive.org/en/features/',
Description: [
'A free and open source video editor, with functionality similar to Adobe Premiere Pro or DaVinci Resolve.',
],
},
{
Technology: 'Knockout (KO)',
Website: 'https://kdenlive.org/en/features/',
Description: [
'A JavaScript library for dynamic user interfaces with underlying data models, for a Model-View-ViewModel (MVVM) pattern.',
],
},
{
Technology: 'Sencha Ext JS',
Website: 'https://www.sencha.com/products/extjs/',
Description: [
'A JavaScript framework for building data-intensive, cross-platform web and mobile applications for any modern device.',
],
},
]
export default data
|
#!/bin/bash
# Render the BOSH terraform template with values pulled from the current
# terraform state, writing the result where downstream jobs expect it.
#
# Fail fast: without set -e a failed `bosh interpolate` would still leave a
# truncated/empty terraform-secrets/terraform.yml behind for later steps.
set -euo pipefail

bosh interpolate \
  broker-src/bosh/terraform.yml \
  -l terraform-yaml/state.yml \
  > terraform-secrets/terraform.yml
|
#!/bin/bash
# Run a single dieharder RNG test: -d 4 selects the test number, -g 61 the
# generator number, and -S pins the seed so the run is reproducible.
dieharder -d 4 -g 61 -S 1792677522
<reponame>jschoolcraft/urlagg
# Migration for the join table linking links to source tags, with an explicit
# ordering column and lookup indexes on both foreign keys.
class CreateSourceTaggings < ActiveRecord::Migration
  def self.up
    create_table :source_taggings do |t|
      t.references :link
      t.references :source_tag
      t.integer :position
      t.timestamps
    end
    # Index both foreign keys for fast lookups from either side.
    [:link_id, :source_tag_id].each do |column|
      add_index :source_taggings, column
    end
  end

  def self.down
    # Mirror of self.up: drop the indexes, then the table itself.
    [:link_id, :source_tag_id].each do |column|
      remove_index :source_taggings, column
    end
    drop_table :source_taggings
  end
end
$(document).ready(function() {
    // Set up the carousel itself (default options).
    $('.jcarousel').jcarousel({});

    // Wire up one prev/next control: toggle its "inactive" class from the
    // jcarouselcontrol state events, then bind it to a scroll target.
    function initControl(selector, scrollTarget) {
        $(selector)
            .on('jcarouselcontrol:active', function() {
                $(this).removeClass('inactive');
            })
            .on('jcarouselcontrol:inactive', function() {
                $(this).addClass('inactive');
            })
            .jcarouselControl({
                target: scrollTarget
            });
    }

    initControl('.jcarousel-control-prev', '-=1');
    initControl('.jcarousel-control-next', '+=1');

    // Pagination: highlight the active page marker and render one anchor
    // per carousel page.
    $('.jcarousel-pagination')
        .on('jcarouselpagination:active', 'li', function() {
            $(this).addClass('active');
        })
        .on('jcarouselpagination:inactive', 'li', function() {
            $(this).removeClass('active');
        })
        .jcarouselPagination({
            'item': function(page, carouselItems) {
                return '<li><a href="#' + page + '">' + page + '</a></li>';
            }
        });

    // The "try it" button scrolls the carousel back to the first slide.
    $('#tryit-start').on('click', function() {
        $('.jcarousel').jcarousel('scroll', 1);
    });
});
<filename>main.c
#ifdef _WIN32
#include <windows.h>
#endif
#include <stdio.h>
#include <stdlib.h>
#ifndef __APPLE__
#include <GL/gl.h>
#include <GL/glut.h>
#else
#include <OpenGL/gl.h>
#include <GLUT/glut.h>
#endif
#include <AR/gsub.h>
#include <AR/video.h>
#include <AR/param.h>
#include <AR/ar.h>
#include <AR/arMulti.h>
#include "globals.h"
#include "object.h"
#include "scene.h"
#ifdef _WIN32
/* On Windows, use the WDM capture configuration shipped with ARToolKit. */
char *vconf = "Data\\WDM_camera_flipV.xml";
#else
/* Elsewhere, an empty string selects the default video device. */
char *vconf = "";
#endif

int thresh = 100;   /* marker-detection threshold (adjustable at runtime via 't') */
int count = 0;      /* frames processed since the FPS timer was last reset */
int debugLevel = 0; /* 0..3, cycled by the 'd' key; see keyEvent() */

ARParam cparam;     /* camera parameters, rescaled to the actual frame size */
int xsize, ysize;   /* video frame dimensions in pixels */

/* Data files: multi-marker layout, camera calibration, building models. */
char *config_name = "Data/road-markers/marker.dat";
char *cparam_name = "Data/camera_para.dat";
char *model_name = "Data/road-markers/buildings.dat";

static void init(void);
static void cleanup(void);
static void keyEvent( unsigned char key, int x, int y);
static void mainLoop(void);
/*
 * Entry point: initialize GLUT, the AR configuration and the video pipeline,
 * then hand control to the ARToolKit main loop (never returns normally;
 * exit happens via keyEvent()/cleanup()).
 */
int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    init();
    arVideoCapStart();
    argMainLoop( NULL, keyEvent, mainLoop );
    return (0);
}
/*
 * Keyboard handler: ESC quits, 't' prompts for a new detection threshold,
 * 'd' cycles the debug level (0..3) and applies its side effects.
 */
static void keyEvent( unsigned char key, int x, int y)
{
    switch( key ) {
    case 0x1b:
        /* ESC: report the frame rate, then shut everything down. */
        printf("*** %f (frame/sec)\n", (double)count/arUtilTimer());
        cleanup();
        exit(0);
        break;

    case 't':
        /* Prompt on stdin for a new threshold; flush the rest of the line. */
        printf("*** %f (frame/sec)\n", (double)count/arUtilTimer());
        printf("Enter new threshold value (current = %d): ", thresh);
        scanf("%d",&thresh); while( getchar()!='\n' );
        printf("\n");
        count = 0;
        break;

    case 'd':
        /* Cycle debug level; levels >= 2 show buildings, level 3 enables
         * ARToolKit's internal debug image. */
        debugLevel = (debugLevel + 1) % 4;
        printf("*** Debug level set to %d\n", debugLevel);
        showBuildings = (debugLevel >= 2);
        printf("*** showBuildings set to %d\n", showBuildings);
        arDebug = debugLevel == 3;
        printf("*** arDebug set to %d\n", arDebug);
        if( arDebug == 0 ) {
            /* Leaving debug mode: clear both buffers to black. */
            glClearColor( 0.0, 0.0, 0.0, 0.0 );
            glClear(GL_COLOR_BUFFER_BIT);
            argSwapBuffers();
            glClear(GL_COLOR_BUFFER_BIT);
            argSwapBuffers();
        }
        count = 0;
        break;
    }
}
/*
 * main loop: grab a frame, detect markers, update per-building transforms,
 * then render the scene. Throttled to at most 100 Hz.
 */
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    ARUint8 *dataPtr;
    ARMarkerInfo *marker_info;
    int marker_num;
    double err;
    int i;

    /* grab a video frame; if none is ready yet, back off briefly */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    /* restart the FPS timer whenever the frame counter was reset */
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    } else {
        /* debug mode: show the raw frame plus ARToolKit's processed image */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
    }

    /* at debug level >= 1, outline every detected marker in green */
    if (debugLevel > 0) {
        glLineWidth( 1.5 );
        for( i = 0; i < marker_num; i++ ) {
            glColor3f( green[0], green[1], green[2] );
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
    }

    /* check for known patterns */
    for( i = 0; i < totalBuildingMarkers; i++ ) {
        int k = -1;
        int j;
        /* pick the detection with the highest confidence for this marker id */
        for( j = 0; j < marker_num; j++ ) {
            if( buildingMarkers[i].id == marker_info[j].id) {
                /* you've found a pattern */
                //printf("Found pattern: %d ",patt_id);
                if( k == -1 ) k = j;
                else /* make sure you have the best pattern (highest confidence factor) */
                    if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            buildingMarkers[i].visible = 0;
            continue;
        }

        /* calculate the transform for each marker */
        if( buildingMarkers[i].visible == 0 ) {
            /* marker just appeared: estimate the transform from scratch */
            arGetTransMat(&marker_info[k],
                          buildingMarkers[i].marker_center, buildingMarkers[i].marker_width,
                          buildingMarkers[i].trans);
        }
        else {
            /* marker was already visible: refine using the previous transform */
            arGetTransMatCont(&marker_info[k], buildingMarkers[i].trans,
                              buildingMarkers[i].marker_center, buildingMarkers[i].marker_width,
                              buildingMarkers[i].trans);
        }
        buildingMarkers[i].visible = 1;
    }

    arVideoCapNext();

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* compute the camera pose from the multi-marker set; on failure, draw
     * only the buildings whose individual markers were found */
    if( (err=arMultiGetTransMat(marker_info, marker_num, multiMarkerConfig)) < 0 ) {
        drawBuildingsThatHaveMarkers();
        argSwapBuffers();
        return;
    }
//    if (arDebug) {
//        printf("err = %f\n", err);
//    }
    /* reject poor multi-marker fits (error above 100.0) the same way */
    if(err > 100.0 ) {
        drawBuildingsThatHaveMarkers();
        argSwapBuffers();
        return;
    }

    drawBuildingsThatHaveMarkers();
    drawCarsAndStaticBuildings(ms);
    argSwapBuffers();
}
/*
 * One-time setup: open the video device, load and rescale the camera
 * calibration, load the multi-marker config and the building models, then
 * open the graphics window. Exits the process on any load failure.
 */
static void init( void )
{
    ARParam wparam;

    /* open the video path */
    if( arVideoOpen( vconf ) < 0 ) exit(0);
    /* find the size of the window */
    if( arVideoInqSize(&xsize, &ysize) < 0 ) exit(0);
    printf("Image size (x,y) = (%d,%d)\n", xsize, ysize);

    /* set the initial camera parameters */
    if( arParamLoad(cparam_name, 1, &wparam) < 0 ) {
        printf("Camera parameter load error !!\n");
        exit(0);
    }
    /* rescale the calibration data to the actual frame size */
    arParamChangeSize( &wparam, xsize, ysize, &cparam );
    arInitCparam( &cparam );
    printf("*** Camera Parameter ***\n");
    arParamDisp( &cparam );

    /* load the multi-marker configuration file */
    if( (multiMarkerConfig = arMultiReadConfigFile(config_name)) == NULL ) {
        printf("multiMarkerConfig data load error !!\n");
        exit(0);
    }
    /* load the per-building marker/model definitions */
    if( (buildingMarkers=read_ObjData(model_name, &totalBuildingMarkers)) == NULL ) {
        printf("ObjData data load error !!\n");
        exit(0);
    }
    printf("Objectfile num = %d\n", totalBuildingMarkers);

    /* open the graphics window */
    argInit( &cparam, 1.0, 0, 1, 0, 0 );
    arFittingMode = AR_FITTING_TO_IDEAL;
    arImageProcMode = AR_IMAGE_PROC_IN_HALF;
    argDrawMode = AR_DRAW_BY_TEXTURE_MAPPING;
    argTexmapMode = AR_DRAW_TEXTURE_HALF_IMAGE;
}
/* cleanup function called when program exits */
static void cleanup(void)
{
    arVideoCapStop();  /* stop frame capture */
    arVideoClose();    /* release the video device */
    argCleanup();      /* tear down the graphics window */
}
|
<reponame>Chankin026/quick4j
package com.chankin.ssms.core.entity;
/**
 * Wraps exceptions raised in the business (service) layer.
 * (Translated from the original Chinese comment: "封装业务层异常".)
 */
public class ServiceException extends UserException {
}
|
<reponame>dwoodard/TrustBuilder<gh_stars>0
import Vue from 'vue';
import {App as InertiaApp, plugin as InertiaPlugin} from '@inertiajs/inertia-vue';
import {InertiaProgress} from '@inertiajs/progress';
import {VueMaskDirective} from 'v-mask';
import PortalVue from 'portal-vue';
import VueMeta from 'vue-meta';
import vuetify from '@/plugins/vuetify';
import Layout from '@/layouts/Layout';

// App bootstrapping (side-effect imports) and app-wide Vue filters.
require('./bootstrap');
require('@/plugins/filters');

// Expose a global route() helper to every component.
// NOTE(review): `route` is not imported here — presumably a global provided
// elsewhere (e.g. Ziggy); confirm before moving this file.
Vue.mixin({methods: {route}});
Vue.use(InertiaPlugin);
Vue.use(PortalVue);
Vue.use(VueMeta);
// v-mask input-masking directive.
Vue.directive('mask', VueMaskDirective);
// Progress bar shown during Inertia page visits.
InertiaProgress.init();

const app = document.getElementById('app');

// Mount the root Inertia app onto #app; the initial page payload is embedded
// by the server in the element's data-page attribute.
new Vue({
    vuetify,
    metaInfo: {
        // NOTE(review): the page title value itself is discarded here — only
        // the " - PROJECT" suffix is kept; vue-meta templates usually
        // interpolate the title (e.g. `${title} - PROJECT`). Confirm intent.
        titleTemplate: (title) => (title ? ' - PROJECT' : 'PROJECT')
    },
    render: (h) => h(InertiaApp, {
        props: {
            initialPage: JSON.parse(app.dataset.page),
            // Lazy-load page components from ./pages, applying the default
            // Layout unless the page declares its own `layout`.
            resolveComponent: (name) => import(`./pages/${name}`)
                .then(({default: page}) => {
                    if (page.layout === undefined) {
                        page.layout = Layout;
                    }
                    return page;
                })
        }
    })
}).$mount(app);
|
#!/bin/bash
# Copy slide images from the Pictures/ subdirectory into the deck directory,
# downscaling each one to the pixel width its slide expects.
export SRCDIR="tale-numtheory-jun03-crt/Pictures"
export DSTDIR="tale-numtheory-jun03-crt"

# resize <filename> <width>: convert $SRCDIR/<filename> into $DSTDIR/<filename>
# at <width> pixels wide (height scales proportionally). Paths are quoted so
# names with spaces survive word splitting.
resize() {
    convert "$SRCDIR/$1" -resize "$2" "$DSTDIR/$1"
}

resize sequences.png 800
resize crt-example.png 400
resize blankinship3.png 440
resize torus-topology.png 440
resize square-torus-topology.png 350
|
#!/bin/bash
# Helper function to output error messages to STDERR, with red text
error() {
    # Quote "$*" so the message is printed as-is instead of being re-split
    # and re-globbed by the shell; disable xtrace inside the subshell to keep
    # the tput calls out of debug output.
    (set +x; tput -Tscreen bold
    tput -Tscreen setaf 1
    echo "$*"
    tput -Tscreen sgr0) >&2
}
# Helper function that sifts through /etc/nginx/conf.d/, looking for lines that
# contain ssl_certificate_key, and try to find domain names in them. We accept
# a very restricted set of keys: Each key must map to a set of concrete domains
# (no wildcards) and each keyfile will be stored at the default location of
# /etc/letsencrypt/live/<primary_domain_name>/privkey.pem
parse_domains() {
    # For each configuration file in /etc/nginx/conf.d/*.conf*
    for conf_file in /etc/nginx/conf.d/*.conf*; do
        # Extract <primary_domain_name> from lines of the form
        #   ssl_certificate_key /etc/letsencrypt/live/<domain>/privkey.pem;
        # then join all matches from this file onto a single line. The file
        # path is quoted in case it contains whitespace.
        sed -n -e 's&^\s*ssl_certificate_key\s*\/etc/letsencrypt/live/\(.*\)/privkey.pem;&\1&p' "$conf_file" | xargs echo
    done
}
# Given a config file path, spit out all the ssl_certificate_key file paths
parse_keyfiles() {
    # Print the path argument of every ssl_certificate_key directive in $1,
    # one per line.
    sed -n -e 's&^\s*ssl_certificate_key\s*\(.*\);&\1&p' "$1"
}
# Given a config file path, return 0 if all keyfiles exist (or there are no
# keyfiles), return 1 otherwise
keyfiles_exist() {
    for keyfile in $(parse_keyfiles "$1"); do
        # Strip any carriage returns left by Windows-edited config files.
        currentfile=${keyfile//$'\r'/}
        # Quote the path so names with spaces don't break the -f test.
        if [ ! -f "$currentfile" ]; then
            echo "Couldn't find keyfile $currentfile for $1"
            return 1
        fi
    done
    return 0
}
# Helper function that sifts through /etc/nginx/conf.d/, looking for configs
# that don't have their keyfiles yet, and disabling them through renaming
auto_enable_configs() {
    for conf_file in /etc/nginx/conf.d/*.conf*; do
        # All paths and parameter expansions are quoted so filenames with
        # whitespace are handled safely.
        if keyfiles_exist "$conf_file"; then
            if [ "${conf_file##*.}" = nokey ]; then
                # Keyfiles now exist for a disabled config: re-enable it by
                # stripping the .nokey suffix.
                echo "Found all the keyfiles for $conf_file, enabling..."
                mv "$conf_file" "${conf_file%.*}"
            fi
        else
            if [ "${conf_file##*.}" = conf ]; then
                # Keyfiles missing for an enabled config: disable it so nginx
                # doesn't fail to start on a dangling ssl_certificate_key.
                echo "Keyfile(s) missing for $conf_file, disabling..."
                mv "$conf_file" "$conf_file.nokey"
            fi
        fi
    done
}
# Helper function to ask certbot for the given domain(s). Must have defined the
# EMAIL environment variable, to register the proper support email address.
#   $1: comma-separated domain name(s)
#   $2: registration email address
get_certificate() {
    echo "Getting certificate for domain $1 on behalf of user $2"
    # NOTE(review): these are ACME v1 directory URLs, which Let's Encrypt has
    # deprecated; migrating to the acme-v02 endpoints is recommended.
    PRODUCTION_URL='https://acme-v01.api.letsencrypt.org/directory'
    STAGING_URL='https://acme-staging.api.letsencrypt.org/directory'

    if [ "${IS_STAGING}" = "1" ]; then
        letsencrypt_url=$STAGING_URL
        echo "Staging ..."
    else
        letsencrypt_url=$PRODUCTION_URL
        echo "Production ..."
    fi

    echo "running certbot ... $letsencrypt_url $1 $2"
    # Fix: the selected ACME endpoint was computed (and echoed) but never
    # passed to certbot, so IS_STAGING had no effect — pass it via --server.
    # Domain and email are quoted so each reaches certbot as one argument.
    certbot certonly --agree-tos --keep -n --text --email "$2" \
        --server "$letsencrypt_url" \
        -d "$1" --http-01-port 1337 \
        --standalone --preferred-challenges http-01 --debug
}
# Given a domain name, return true if a renewal is required (last renewal
# ran over a week ago or never happened yet), otherwise return false.
is_renewal_required() {
    # If the keyfile does not exist, assume a renewal is required.
    last_renewal_file="/etc/letsencrypt/live/$1/privkey.pem"
    [ ! -e "$last_renewal_file" ] && return

    # Renewal is required when the keyfile's mtime is more than a week old.
    # (Fixed the "is_finshed" typo and dropped the redundant nested
    # parentheses in the arithmetic expansions.)
    one_week_sec=604800
    now_sec=$(date -d now +%s)
    last_renewal_sec=$(stat -c %Y "$last_renewal_file")
    last_renewal_delta_sec=$(( now_sec - last_renewal_sec ))
    is_finished_week_sec=$(( one_week_sec - last_renewal_delta_sec ))
    [ $is_finished_week_sec -lt 0 ]
}
|
import {
Client as ErisClient,
Collection as ErisCollection,
User as ErisUser,
} from "eris";
import Dispatcher from "./Dispatcher.js";
import Registry from "./Registry.js";
import AddCommas from "../util/AddCommas.js";
import Embed from "../util/Embed.js";
import Color from "../util/Color.js";
/**
 * Eris client subclass wiring in this bot's command dispatcher, command
 * registry, and shared utility helpers.
 */
class Client extends ErisClient {
    /**
     * @param {string} token Bot token, passed straight through to Eris.
     * @param {object} options Eris client options; must include `owner`.
     */
    constructor(token, options) {
        super(token, options);
        // Keep the raw options object around for later inspection.
        this.rawOptions = options;
        this.owner = options.owner;
        // Replace the user cache with a fresh ErisUser collection; the second
        // argument is 1 — presumably a cache size limit of one entry, TODO
        // confirm against the Eris Collection API.
        this.users = new ErisCollection(ErisUser, 1);
        this.dispatcher = new Dispatcher(this);
        this.registry = new Registry(this);
        // Utility helpers exposed to commands via client.util.
        this.util = { AddCommas, Embed, Color };
    }
}

export default Client;
|
package Adapter.adapter3;
/**
 * Target interface for the adapter example: the operations a "great car"
 * must support.
 */
public interface GreatCar {
    /** Drives autonomously. */
    void autoDrive();

    /** Stops the car. */
    void stop();
}
|
package com.qtimes.views.swipelistview;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.animation.LinearInterpolator;
import android.widget.Button;
import android.widget.RelativeLayout;
import com.qtimes.views.R;
/**
 * Created by liuj on 2016/7/4.
 *
 * A {@link RelativeLayout} implementing "swipe to reveal a remove button"
 * (swipe-to-delete for list rows). Dragging the content left scrolls it aside
 * to expose an inflated remove {@link Button}; tapping the button animates the
 * row closed and then notifies the registered {@link OnRemoveListener}.
 */
public class SwipeRemoveContainer extends RelativeLayout {
    // Duration (ms) of a full-width open/close animation; shorter scrolls get
    // proportionally shorter durations (see getDuration()).
    private static final long ANIM_DURATION = 150;

    // The remove button revealed by swiping; its width is the maximum
    // horizontal scroll distance.
    private Button btnRemove;
    // Coordinates of the gesture's initial ACTION_DOWN.
    private int downX;
    private int downY;
    // Most recently observed touch coordinates (updated on every event).
    private int x;
    private int y;
    // System-configured minimum travel before a drag is recognized.
    private int touchSlop;
    // True while the user is actively dragging the content horizontally.
    private boolean isMove;
    // True when the remove button is considered fully revealed.
    private boolean isOpen;
    private OnRemoveListener listener;

    // Fires the remove callback once the close animation has finished.
    private Animator.AnimatorListener removeAnimatorListener = new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            super.onAnimationEnd(animation);
            if (listener != null) {
                listener.onRemoveClick();
            }
        }
    };

    public SwipeRemoveContainer(Context context, AttributeSet attrs) {
        super(context, attrs);
        touchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
        // Inflate the remove button and attach it as a child; clicking it
        // closes the row and triggers the remove callback.
        btnRemove = (Button) LayoutInflater.from(context).inflate(R.layout.btn_remove, this, false);
        addView(btnRemove);
        btnRemove.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                closeRemove();
            }
        });
    }

    /**
     * While the row is open, an ACTION_DOWN anywhere other than the remove
     * button closes the row and swallows the gesture: descendants receive a
     * synthesized ACTION_CANCEL instead of the original event.
     */
    @Override
    public boolean dispatchTouchEvent(MotionEvent ev) {
        Log.d("Swiperemove", isOpen + "");
        if (isOpen) {
            if (ev.getAction() == MotionEvent.ACTION_DOWN) {
                if (!canClickRemoveBtn(ev.getX(), ev.getY())) {
                    close();
                    // Replay the event as CANCEL so children abort cleanly.
                    MotionEvent cancelEvent = MotionEvent.obtain(ev);
                    cancelEvent.setAction(MotionEvent.ACTION_CANCEL);
                    super.dispatchTouchEvent(cancelEvent);
                    return false;
                }
            }
        }
        return super.dispatchTouchEvent(ev);
    }

    /**
     * Drag handling once this container owns the gesture: follow the finger
     * horizontally during MOVE, and snap open/closed on UP.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        int currentX = (int) event.getX();
        int currentY = (int) event.getY();
        Log.d("swipe", "onTouchEvent()------------action:" + event.getAction() + "," + currentX + "---------" + x);
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            autoScroll();
            downX = currentX;
            downY = currentY;
        } else if (event.getAction() == MotionEvent.ACTION_MOVE) {
            if (isMove) {
                // Scroll by the delta since the last event; a positive scroll
                // exposes the remove button.
                scrollBy(x - currentX);
            } else {
                if (judgeCanMove(currentX - downX)) {
                    return true;
                }
            }
        } else if (event.getAction() == MotionEvent.ACTION_UP) {
            isMove = false;
            getParent().requestDisallowInterceptTouchEvent(false);
            autoScroll();
        }
        x = currentX;
        y = currentY;
        return true;
    }

    /**
     * Mirrors onTouchEvent() so a horizontal drag can be claimed before
     * children see it; returns true (intercept) once a qualifying horizontal
     * move is detected.
     */
    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        int currentX = (int) ev.getX();
        int currentY = (int) ev.getY();
        if (ev.getAction() == MotionEvent.ACTION_DOWN) {
            autoScroll();
            downX = currentX;
            downY = currentY;
        } else if (ev.getAction() == MotionEvent.ACTION_MOVE) {
            if (isMove) {
                Log.d("swipe", "onInterceptTouchEvent()1");
                scrollBy(x - currentX);
            } else {
                if (judgeCanMove(currentX - downX)) {
                    return true;
                }
            }
        } else if (ev.getAction() == MotionEvent.ACTION_UP) {
            isMove = false;
            getParent().requestDisallowInterceptTouchEvent(false);
            autoScroll();
        }
        x = currentX;
        y = currentY;
        return super.onInterceptTouchEvent(ev);
    }

    // True once the finger has traveled farther than the system touch slop.
    private boolean canMove(int distance) {
        return Math.abs(distance) > touchSlop;
    }

    // Decide whether the gesture becomes a drag: leftward moves (dx < 0)
    // always may; rightward moves only while the row is open. Also tells the
    // parent not to intercept while a drag is active.
    private boolean judgeCanMove(int dx) {
        if (canMove(dx)) {
            if (dx < 0) {
                isMove = true;
            } else if (isOpen) {
                isMove = true;
            }
        }
        getParent().requestDisallowInterceptTouchEvent(isMove);
        return isMove;
    }

    // Horizontal-only convenience wrapper around scrollBy(x, y).
    private void scrollBy(int dx) {
        scrollBy(dx, 0);
    }

    // Clamp horizontal scrolling to [0, button width] so the content can
    // neither over-reveal the button nor move past its resting position.
    @Override
    public void scrollTo(int x, int y) {
        if (x > getMaxScrollWidth()) {
            x = getMaxScrollWidth();
        } else if (x < 0) {
            x = 0;
        }
        super.scrollTo(x, y);
    }

    // Snap to the nearer resting state: fully open when dragged past the
    // halfway point, otherwise fully closed.
    private void autoScroll() {
        isOpen = false;
        int scrollX = getScrollX();
        if (scrollX != 0) {
            if (scrollX > getMaxScrollWidth() / 2) {
                isOpen = true;
                Log.d("swiperemove", "isopen = true");
                animScroll(getMaxScrollWidth(), null);
            } else {
                isOpen = false;
                animScroll(0, null);
            }
        }
    }

    /**
     * Animated scroll to the given horizontal offset.
     * (Translated from the original Chinese comment: "动画滚动".)
     *
     * @param to               target scrollX value
     * @param animatorListener optional listener attached to the animation
     */
    private void animScroll(int to, Animator.AnimatorListener animatorListener) {
        ValueAnimator valueAnimator = ValueAnimator.ofInt(getScrollX(), to);
        valueAnimator.setDuration(getDuration(Math.abs(to - getScrollX())));
        valueAnimator.setInterpolator(new LinearInterpolator());
        valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                int value = (int) animation.getAnimatedValue();
                scrollTo(value, 0);
            }
        });
        if (animatorListener != null) {
            valueAnimator.addListener(animatorListener);
        }
        valueAnimator.start();
    }

    /**
     * Animation duration proportional to the distance to travel.
     * (Translated from "根据距离计算时间".)
     *
     * @param distance pixels to scroll
     * @return duration in ms; ANIM_DURATION for a full-width scroll
     */
    private long getDuration(int distance) {
        return (long) (ANIM_DURATION * ((distance * 1.0f) / getMaxScrollWidth()));
    }

    /**
     * Maximum horizontal scroll distance, i.e. the remove button's width.
     * (Translated from "获取最大的滚动距离".)
     *
     * @return button width in pixels
     */
    private int getMaxScrollWidth() {
        return btnRemove.getWidth();
    }

    // Animate the row closed WITHOUT firing the remove callback.
    public void close() {
        Log.d("Swiperemove", "close");
        isOpen = false;
        isMove = false;
        if (getScrollX() > 0) {
            animScroll(0, null);
        }
    }

    /**
     * Close the row and fire the remove callback when the animation ends.
     * (Translated from "关闭并触发移除回调".)
     */
    public void closeRemove() {
        isOpen = false;
        isMove = false;
        animScroll(0, removeAnimatorListener);
    }

    /**
     * Reset to the closed state immediately — no animation, no callback.
     * (Translated from "重置状态".)
     */
    public void reset() {
        isOpen = false;
        isMove = false;
        scrollTo(0, 0);
    }

    /**
     * Whether a touch at (x, y) should be handled by the remove button.
     * (Translated from "判断是否被删除按钮处理".)
     *
     * @param x touch x within this view
     * @param y touch y within this view (currently unused)
     * @return true only when the row is open and the touch falls within the
     *         button's horizontal band at the view's right edge
     */
    public boolean canClickRemoveBtn(float x, float y) {
        boolean canRemove = false;
        if (isOpen) {
            canRemove = (x > getWidth() - btnRemove.getWidth()) && x < getWidth();
        }
        return canRemove;
    }

    public boolean isMove() {
        return isMove;
    }

    public boolean isOpen() {
        return isOpen;
    }

    // True when this container should keep handling events: mid-drag, open,
    // or not yet back at the resting scroll position.
    public boolean handleEvent() {
        return isMove() || isOpen() || getScrollX() != 0;
    }

    public void setListener(OnRemoveListener listener) {
        this.listener = listener;
    }

    // Callback fired after closeRemove()'s closing animation completes.
    public interface OnRemoveListener {
        void onRemoveClick();
    }
}
|
#!/usr/bin/env bash
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
# INSTRUCTIONS:
#
# Run this script from the oppia root folder:
# bash scripts/run_e2e_tests.sh
#
# Optional arguments:
# --browserstack Run the tests on browserstack using the
# protractor-browserstack.conf.js file.
# --skip-install=true/false If true, skips installing dependencies. The
# default value is false.
# --sharding=true/false Disables/Enables parallelization of protractor tests.
# --sharding-instances=# Sets the number of parallel browsers to open while
# sharding.
# --prod_env Run the tests in prod mode. Static resources are served from
# build directory and use cache slugs.
# --community_dashboard_enabled Run the test after enabling the community
# dashboard page.
# Sharding must be disabled (either by passing in false to --sharding or 1 to
# --sharding-instances) if running any tests in isolation (fit or fdescribe).
# --suite=suite_name Performs test for different suites, here suites are the
# name of the test files present in core/tests/protractor_desktop/ and
# core/test/protractor/ dirs. e.g. for the file
# core/tests/protractor/accessibility.js use --suite=accessibility.
# For performing a full test, no argument is required.
#
# The root folder MUST be named 'oppia'.
#
# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a
# single test or test suite.
function cleanup {
  # Send a kill signal to the dev server and Selenium server. The awk command
  # gets just the process ID from the grepped line. The bracketed first
  # character ([Dd]ev..., [n]ode...) prevents grep from matching its own
  # command line in the ps output, so we never kill (or fail to find) the
  # grep process itself. '|| true' keeps 'set -e' from aborting cleanup when
  # a server was never started.
  kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9001" | awk '{print $2}'` || true
  kill `ps aux | grep "[n]ode_modules/webdriver-manager/selenium" | awk '{print $2}'` || true

  # Point the developer at failure screenshots if the reporter produced any.
  if [ -d "../protractor-screenshots" ]; then
    echo ""
    echo "  Note: If ADD_SCREENSHOT_REPORTER is set to true in"
    echo "  core/tests/protractor.conf.js, you can view screenshots"
    echo "  of the failed tests in ../protractor-screenshots/"
    echo ""
  fi

  echo Done!
}
# Abort early when invoked from a non-bash shell (e.g. `sh scripts/...`):
# the script relies on bash-only behavior ([[ ]], arrays in conf files).
if [ -z "$BASH_VERSION" ]
then
  echo ""
  echo "  Please run me using bash: "
  echo ""
  echo "    bash $0"
  echo ""
  return 1
fi

# Abort the whole script as soon as any command fails.
set -e
python -m scripts.install_third_party_libs
python -m scripts.setup
python -m scripts.setup_gae
if [ "$TRAVIS" == 'true' ]; then
  python -m scripts.install_chrome_on_travis
fi

# Refuse to start when something is already listening on the main dev
# server port; running two servers side by side corrupts the test run.
if ( nc -vz localhost 8181 ); then
  echo ""
  echo "  There is already a server running on localhost:8181."
  echo "  Please terminate it before running the end-to-end tests."
  echo "  Exiting."
  echo ""
  exit 1
fi
# Same guard for the e2e app-server port. (Fixed message typo:
# "There is a already a" -> "There is already a".)
if ( nc -vz localhost 9001 ); then
  echo ""
  echo "  There is already a server running on localhost:9001."
  echo "  Please terminate it before running the end-to-end tests."
  echo "  Exiting."
  echo ""
  exit 1
fi
export OPPIA_DIR=`pwd`
# Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This
# is necessary because COMMON_DIR (or subsequent variables which refer to it)
# may use it in a situation where relative paths won't work as expected (such
# as $PYTHONPATH).
export COMMON_DIR=$(cd $OPPIA_DIR/..; pwd)
export TOOLS_DIR=$COMMON_DIR/oppia_tools
export NODE_PATH=$TOOLS_DIR/node-10.15.3
export PATH=$NODE_PATH/bin:$PATH

# Forces the cleanup function to run on exit.
# Developers: note that at the end of this script, the cleanup() function at
# the top of the file is run.
trap cleanup EXIT

# Argument passed to feconf.py to help choose production templates folder.
DEV_MODE=true
RUN_ON_BROWSERSTACK=False
# Currently, the community dashboard page is disabled.
community_dashboard_status_variable="COMMUNITY_DASHBOARD_ENABLED = False"

# First pass over the CLI flags: collect everything that affects how the app
# is built, before any build command runs.
for arg in "$@"; do
  # Used to emulate running Oppia in a production environment.
  if [ "$arg" == "--prod_env" ]; then
    DEV_MODE=false
    echo "  Generating files for production mode..."
  fi

  # Used to enable the community dashboard page.
  if [ "$arg" == "--community_dashboard_enabled" ]; then
    community_dashboard_status_variable="COMMUNITY_DASHBOARD_ENABLED = True"
  fi

  # Used to run the e2e tests on browserstack.
  if [ "$arg" == "--browserstack" ]; then
    RUN_ON_BROWSERSTACK=True
    echo "  Running the tests on browserstack..."
  fi
done

# Update the community dashboard status in feconf.py file.
sed -i.bak -e s/"COMMUNITY_DASHBOARD_ENABLED = .*"/"$community_dashboard_status_variable"/ feconf.py

# Build the frontend: dev mode uses webpack's dev config and app_dev.yaml,
# prod mode uses the production build and app.yaml with cache slugs.
if [[ "$DEV_MODE" == "true" ]]; then
  constants_env_variable="\"DEV_MODE\": true"
  sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts
  python -m scripts.build
  APP_YAML_FILEPATH="app_dev.yaml"
  node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts
else
  constants_env_variable="\"DEV_MODE\": false"
  sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts
  python -m scripts.build --prod_env
  APP_YAML_FILEPATH="app.yaml"
fi
# Remove the .bak backups created by the `sed -i.bak` edits above so the
# working tree is left clean. The old code only deleted constants.ts.bak
# even though its comment claimed to handle feconf.py's backup too.
rm -f assets/constants.ts.bak feconf.py.bak
# Start a selenium server using chromedriver 2.41.
# The 'detach' option continues the flow once the server is up and running.
# The 'quiet' option prints only the necessary information about the server start-up
# process.
node_modules/.bin/webdriver-manager update --versions.chrome 2.41
node_modules/.bin/webdriver-manager start --versions.chrome 2.41 --detach --quiet

# Start a selenium process. The program sends thousands of lines of useless
# info logs to stderr so we discard them.
# TODO(jacob): Find a webdriver or selenium argument that controls log level.
(node_modules/.bin/webdriver-manager start 2>/dev/null)&

# Start a demo server in the background; cleanup() kills it on exit.
(python ../oppia_tools/google_appengine_1.9.67/google_appengine/dev_appserver.py --host=0.0.0.0 --port=9001 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true $APP_YAML_FILEPATH)&

# Wait for the servers to come up (4444 = selenium, 9001 = app server).
while ! nc -vz localhost 4444; do sleep 1; done
while ! nc -vz localhost 9001; do sleep 1; done

# Delete outdated screenshots
if [ -d "../protractor-screenshots" ]; then
  rm -r ../protractor-screenshots
fi

# Parse additional command line arguments that may be passed to protractor.
# Credit: http://stackoverflow.com/questions/192249
# Passing different suites and sharding parameters for tests.
SUITE="full"
SHARDING=true
SHARD_INSTANCES=3

for j in "$@"; do
  # Match each space-separated argument passed to the shell file to a separate
  # case label, based on a pattern. E.g. Match to -suite=*, -sharding=*, where the
  # asterisk refers to any characters following the equals sign, other than
  # whitespace.
  case $j in
    --suite=*)
      # Extract the value right of the equal sign by substringing the $i variable
      # at the equal sign.
      # http://tldp.org/LDP/abs/html/string-manipulation.html
      SUITE="${j#*=}"
      # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc.
      shift
      ;;

    --sharding=*)
      SHARDING="${j#*=}"
      shift
      ;;

    --sharding-instances=*)
      SHARD_INSTANCES="${j#*=}"
      shift
      ;;

    # Flags already consumed by the first argument pass above; just skip.
    --prod_env*)
      shift
      ;;

    --browserstack*)
      shift
      ;;

    --community_dashboard_enabled*)
      shift
      ;;

    *)
      echo "Error: Unknown command line option: $j"
      ;;
  esac
done

# Run the end-to-end tests. The conditional is used to run protractor without
# any sharding parameters if it is disabled. This helps with isolated tests.
# Isolated tests do not work properly unless no sharding parameters are passed
# in at all.
# TODO(bhenning): Figure out if this is a bug with protractor.
if [ "$RUN_ON_BROWSERSTACK" == "False" ]; then
  if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then
    node_modules/protractor/bin/protractor core/tests/protractor.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE"
  else
    node_modules/protractor/bin/protractor core/tests/protractor.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE"
  fi
else
  if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then
    node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE"
  else
    node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE"
  fi
fi
|
import nltk
from nltk.corpus import reuters
from sklearn.naive_bayes import MultinomialNB
from sklearn.feature_extraction.text import CountVectorizer

# Preparing data.
# NOTE(review): 'train' is not a Reuters *category*; the training split is
# identified by fileids that start with 'training/'. The original call
# reuters.fileids(categories='train') raises for an unknown category.
train_docs_ids = [doc_id for doc_id in reuters.fileids() if doc_id.startswith('train')]
train_docs = [reuters.raw(doc_id) for doc_id in train_docs_ids]
train_labels = [reuters.categories(doc_id) for doc_id in train_docs_ids]

# MultinomialNB cannot consume raw strings: the documents must be turned
# into numeric feature vectors first. reuters.categories() also returns a
# *list* of labels per document, which plain MultinomialNB cannot fit, so
# we train on the first category of each document.
vectorizer = CountVectorizer()
train_features = vectorizer.fit_transform(train_docs)
first_labels = [labels[0] if labels else 'unknown' for labels in train_labels]

# Training the Naive Bayes classifier.
nb_classifier = MultinomialNB().fit(train_features, first_labels)

# Classifying emails: transform (NOT fit_transform) so the same vocabulary
# learned from the training corpus is reused.
email_docs = ["This is an email about sports.",
              "This is an email about politics."]
email_features = vectorizer.transform(email_docs)
email_labels = nb_classifier.predict(email_features)
for email_doc, email_label in zip(email_docs, email_labels):
    print("\'{}\' is labelled as \'{}\'.".format(email_doc, email_label))
<filename>lib/generators/templates/db/migrate/create_page_products.rb
class CreatePageProducts < ActiveRecord::Migration
  # Create the join table linking Spree pages to products, with an
  # ordering column so products can be positioned within a page.
  def self.up
    create_table :spree_page_products do |t|
      t.references :page,    null: false, default: 0
      t.references :product, null: false, default: 0
      t.integer    :position, default: 999
      t.timestamps
    end
  end

  # Drop the join table on rollback.
  def self.down
    drop_table :spree_page_products
  end
end
|
#!/usr/bin/env sh
# Build the Docker image from the Dockerfile in the current directory and
# tag it "capstone".
docker build . -t capstone
|
import numpy as np
import sys
def load_data(fname):
    """Load a feature/label dataset from a text file.

    The file's first line holds the feature rows: rows are separated by
    ';' and values within a row by ','.  An optional later line holds the
    integer labels, separated by ';' (when several non-header lines exist,
    the last one wins, mirroring the original behavior).

    Args:
        fname: path of the file to read.

    Returns:
        (X, Y): X is a 2-D numpy array of floats; Y is a 1-D numpy array
        of ints (empty when the file has no label line).
    """
    x_str = []  # initialized so an empty file yields empty arrays, not NameError
    y_str = ''
    # 'with' guarantees the handle is closed even if parsing raises,
    # which the original open()/close() pair did not.
    with open(fname, 'r') as f:
        for ctr, line in enumerate(f):
            line = line.strip().split(';')
            if ctr == 0:
                x_str = line
            else:
                y_str = line
    X = [[float(x) for x in item.split(',')] for item in x_str]
    Y = [int(item) for item in y_str] if len(y_str) > 0 else []
    return np.array(X), np.array(Y)
def generate_training_data_binary(num):
    """Return one of four fixed, linearly separable 2-D binary datasets.

    Each of the 10 rows is [x, y, label] with label +1 or -1.  An unknown
    ``num`` prints an error and terminates the program.
    """
    if num == 1:
        # Points on the x-axis: negatives labelled +1, positives -1.
        rows = [[i - 5, 0, 1] for i in range(5)] + [[i + 1, 0, -1] for i in range(5)]
    elif num == 2:
        # Same layout rotated onto the y-axis.
        rows = [[0, i - 5, 1] for i in range(5)] + [[0, i + 1, -1] for i in range(5)]
    elif num == 3:
        rows = [[3, 2, 1], [6, 2, 1], [3, 6, 1], [4, 4, 1], [5, 4, 1],
                [-1, -2, -1], [-2, -4, -1], [-3, -3, -1], [-4, -2, -1], [-4, -4, -1]]
    elif num == 4:
        rows = [[-1, 1, 1], [-2, 2, 1], [-3, 5, 1], [-3, -1, 1], [-2, 1, 1],
                [3, -6, -1], [0, -2, -1], [-1, -7, -1], [1, -10, -1], [0, -8, -1]]
    else:
        print("Incorrect num", num, "provided to generate_training_data_binary.")
        sys.exit()
    # dtype=float matches the np.zeros buffer the original filled in.
    return np.array(rows, dtype=float)
def generate_training_data_multi(num):
    """Return [data, Y]: a fixed multi-class 2-D dataset and its class count.

    Each row of ``data`` is [x, y, class_label]; ``Y`` is the number of
    classes.  An unknown ``num`` prints an error and terminates.
    """
    if num == 1:
        # Four axis-aligned clusters of five points each, classes 1..4.
        rows = ([[i - 5, 0, 1] for i in range(5)]
                + [[i + 1, 0, 2] for i in range(5)]
                + [[0, i - 5, 3] for i in range(5)]
                + [[0, i + 1, 4] for i in range(5)])
        num_classes = 4
    elif num == 2:
        rows = [[-5, -5, 1], [-3, -2, 1], [-5, -3, 1], [-5, -4, 1], [-2, -9, 1],
                [0, 6, 2], [-1, 3, 2], [-2, 1, 2], [1, 7, 2], [1, 5, 2],
                [6, 3, 3], [9, 2, 3], [10, 4, 3], [8, 1, 3], [9, 0, 3]]
        num_classes = 3
    else:
        print("Incorrect num", num, "provided to generate_training_data_binary.")
        sys.exit()
    # dtype=float matches the np.zeros buffer the original filled in.
    return [np.array(rows, dtype=float), num_classes]
|
#!/usr/bin/env bash
#
#############################################################################
# Brandon Bernie
# bmbernie@gmail.com
#
# Post install configuration for OS X
#############################################################################
# Ask for the administrator password upfront
sudo -v

# Keep-alive: update existing `sudo` time stamp until `.osx` has finished.
# Re-validates every 60s in the background; exits when this script's PID
# ($$) is gone.
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
###############################################################################
# General UI/UX                                                               #
###############################################################################

# Set computer name (as done via System Preferences → Sharing)
#sudo scutil --set ComputerName "Orca"
#sudo scutil --set HostName "Orca"
#sudo scutil --set LocalHostName "Orca"
#sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.smb.server NetBIOSName -string "Orca"

###############################################################################
# System                                                                      #
###############################################################################

xcode-select --install
echo -e "Setting up system level configurations ..."

# Disable the sound effects on boot
sudo nvram SystemAudioVolume=" "

###############################################################################
# General                                                                     #
###############################################################################

echo -e "Setting up general configurations ..."

# Menu bar: disable transparency
defaults write NSGlobalDomain AppleEnableMenuBarTransparency -bool false

# Set sidebar icon size to medium
defaults write NSGlobalDomain NSTableViewDefaultSizeMode -int 2

# Always show scrollbars
defaults write NSGlobalDomain AppleShowScrollBars -string "Always"

# Disable opening and closing window animations
defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false

# Increase window resize speed for Cocoa applications
defaults write NSGlobalDomain NSWindowResizeTime -float 0.001

# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true

# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true

# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true

# Disable automatic termination of inactive apps
defaults write NSGlobalDomain NSDisableAutomaticTermination -bool true

# Set Help Viewer windows to non-floating mode
defaults write com.apple.helpviewer DevMode -bool true

# Reveal IP address, hostname, OS version, etc. when clicking the clock
# in the login window
sudo defaults write /Library/Preferences/com.apple.loginwindow AdminHostInfo HostName

# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1

# Increase sound quality for Bluetooth headphones/headsets
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" -int 40

# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3

# Set a blazingly fast keyboard repeat rate
# NOTE(review): 0 is below the minimum the System Preferences UI offers;
# some macOS releases clamp or ignore it — confirm on the target OS.
defaults write NSGlobalDomain KeyRepeat -int 0

# Automatically illuminate built-in MacBook keyboard in low light
defaults write com.apple.BezelServices kDim -bool true

# Turn off keyboard illumination when computer is not used for 5 minutes
defaults write com.apple.BezelServices kDimTime -int 300
###############################################################################
# Screen                                                                      #
###############################################################################

# (Fixed message typo: "Settin up" -> "Setting up".)
echo -e "Setting up display configurations ..."

# Require password immediately after sleep or screen saver begins
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0

# Save screenshots to the desktop
defaults write com.apple.screencapture location -string "${HOME}/Desktop"

# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF)
defaults write com.apple.screencapture type -string "png"

# Disable shadow in screenshots
defaults write com.apple.screencapture disable-shadow -bool true
###############################################################################
# Finder                                                                      #
###############################################################################

echo -e "Setting up the finder configurations ..."

# Finder: disable window animations and Get Info animations
defaults write com.apple.finder DisableAllAnimations -bool true

# Show icons for hard drives, servers, and removable media on the desktop
defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool true
defaults write com.apple.finder ShowHardDrivesOnDesktop -bool true
defaults write com.apple.finder ShowMountedServersOnDesktop -bool true
defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true

# Finder: show hidden files by default
defaults write com.apple.finder AppleShowAllFiles -bool true

# Finder: show all filename extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true

# Finder: show status bar
defaults write com.apple.finder ShowStatusBar -bool true

# Finder: show path bar
defaults write com.apple.finder ShowPathbar -bool true

# Finder: allow text selection in Quick Look
defaults write com.apple.finder QLEnableTextSelection -bool true

# Display full POSIX path as Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true

# When performing a search, search the current folder by default
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"

# Avoid creating .DS_Store files on network volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true

# Automatically open a new Finder window when a volume is mounted
defaults write com.apple.frameworks.diskimages auto-open-ro-root -bool true
defaults write com.apple.frameworks.diskimages auto-open-rw-root -bool true
defaults write com.apple.finder OpenWindowForNewRemovableDisk -bool true

# Show item info near icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist

# Show item info to the right of the icons on the desktop
/usr/libexec/PlistBuddy -c "Set DesktopViewSettings:IconViewSettings:labelOnBottom false" ~/Library/Preferences/com.apple.finder.plist

# Enable snap-to-grid for icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist

# Increase grid spacing for icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist

# Increase the size of icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist

# Use list view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv`, `clmv`, `Flwv`
defaults write com.apple.finder FXPreferredViewStyle -string "Nlsv"

# Disable the warning before emptying the Trash
defaults write com.apple.finder WarnOnEmptyTrash -bool false

# Empty Trash securely by default
defaults write com.apple.finder EmptyTrashSecurely -bool true

# Show the ~/Library folder
chflags nohidden ~/Library

# Remove Dropbox’s green checkmark icons in Finder
file=/Applications/Dropbox.app/Contents/Resources/emblem-dropbox-uptodate.icns
[ -e "${file}" ] && mv -f "${file}" "${file}.bak"

###############################################################################
# Dock                                                                        #
###############################################################################

echo -e "Setting up the Dock ..."

# Enable highlight hover effect for the grid view of a stack (Dock)
defaults write com.apple.dock mouse-over-hilite-stack -bool true

# Set the icon size of Dock items to 36 pixels
defaults write com.apple.dock tilesize -int 36

# Minimize windows into their application’s icon
defaults write com.apple.dock minimize-to-application -bool true

# Enable spring loading for all Dock items
defaults write com.apple.dock enable-spring-load-actions-on-all-items -bool true

#Show indicator lights for open applications in the Dock
defaults write com.apple.dock show-process-indicators -bool true

# Don’t animate opening applications from the Dock
defaults write com.apple.dock launchanim -bool false

# Speed up Mission Control animations
defaults write com.apple.dock expose-animation-duration -float 0.1

# Make Dock icons of hidden applications translucent
defaults write com.apple.dock showhidden -bool true

# Reset Launchpad
find ~/Library/Application\ Support/Dock -name "*.db" -maxdepth 1 -delete

# NOTE(review): the settings below configure Spotlight, not the Dock —
# they probably belong under their own section banner.
# Use `sudo mdutil -i off "/Volumes/foo"` to stop indexing any volume.
sudo defaults write /.Spotlight-V100/VolumeConfiguration Exclusions -array "/Volumes"

# Change indexing order and disable some file types
defaults write com.apple.spotlight orderedItems -array \
  '{"enabled" = 1;"name" = "APPLICATIONS";}' \
  '{"enabled" = 1;"name" = "SYSTEM_PREFS";}' \
  '{"enabled" = 1;"name" = "DIRECTORIES";}' \
  '{"enabled" = 1;"name" = "PDF";}' \
  '{"enabled" = 1;"name" = "FONTS";}' \
  '{"enabled" = 0;"name" = "DOCUMENTS";}' \
  '{"enabled" = 0;"name" = "MESSAGES";}' \
  '{"enabled" = 0;"name" = "CONTACT";}' \
  '{"enabled" = 0;"name" = "EVENT_TODO";}' \
  '{"enabled" = 0;"name" = "IMAGES";}' \
  '{"enabled" = 0;"name" = "BOOKMARKS";}' \
  '{"enabled" = 0;"name" = "MUSIC";}' \
  '{"enabled" = 0;"name" = "MOVIES";}' \
  '{"enabled" = 0;"name" = "PRESENTATIONS";}' \
  '{"enabled" = 0;"name" = "SPREADSHEETS";}' \
  '{"enabled" = 0;"name" = "SOURCE";}'

# Load new settings before rebuilding the index
killall mds > /dev/null 2>&1

# Make sure indexing is enabled for the main volume
sudo mdutil -i on / > /dev/null

# Rebuild the index from scratch
sudo mdutil -E / > /dev/null

###############################################################################
# Terminal                                                                    #
###############################################################################

echo -e "Set the terminal ..."

# Only use UTF-8 in Terminal.app
defaults write com.apple.terminal StringEncodings -array 4

# NOTE(review): the Address Book / Dashboard / Disk Utility tweaks below
# are not Terminal settings; consider a separate "Debug menus" section.
# Enable the debug menu in Address Book
defaults write com.apple.addressbook ABShowDebugMenu -bool true

# Enable Dashboard dev mode (allows keeping widgets on the desktop)
defaults write com.apple.dashboard devmode -bool true

# Enable the debug menu in Disk Utility
defaults write com.apple.DiskUtility DUDebugMenuEnabled -bool true
defaults write com.apple.DiskUtility advanced-image-options -bool true

###############################################################################
# Mac App Store                                                               #
###############################################################################

echo -e "Add developer tools and Debug Menus to the App Store ..."

# Enable the WebKit Developer Tools in the Mac App Store
defaults write com.apple.appstore WebKitDeveloperExtras -bool true

# Enable Debug Menu in the Mac App Store
defaults write com.apple.appstore ShowDebugMenu -bool true

#END
|
<filename>src/index.js
import SVGColorCleaner from "./SVGColorCleaner.js";

// Package entry point: expose the cleaner under a named key on the
// default export so consumers can destructure it.
const api = { SVGColorCleaner };

export default api;
|
#!/bin/bash

# Set an alias
# alias ts='bash /opt/jamesm0rr1s/Tilix-Kali-Setup/set-tilix-quake-size.sh'

# Alias name
# ts == tilix size

# Toggle between 30% and 70% quake window height when no parameters are passed in
# ts

# Change to a specific window height when one parameter is passed in. The parameter must be an integer between 0-100. Only one parameter is allowed
# ts 50

# Get the window height
windowHeight=$(gsettings get com.gexperts.Tilix.Settings quake-height-percent)

# Get the number of arguments
argumentCount=$#

# Check if a window height was not provided, or the first argument is not an integer, or if the first argument is greater than 100
# (the regex test guarantees $1 is a non-negative integer before the
# numeric -gt comparison runs, so that test cannot choke on junk input)
if [ $argumentCount -ne 1 ] || ! [[ "$1" =~ ^[0-9]+$ ]] || [ $1 -gt 100 ]; then
    # Check if the window height is 30
    if [ "$windowHeight" = "30" ]; then
        # Set the window height to 70
        gsettings set com.gexperts.Tilix.Settings quake-height-percent 70
    # The window height is not 30
    else
        # Set the window height to 30
        gsettings set com.gexperts.Tilix.Settings quake-height-percent 30
    fi
# A window height was provided
else
    # Set the window height
    gsettings set com.gexperts.Tilix.Settings quake-height-percent $1
fi
# Download and run the Miniconda installer, then create and activate the
# project environment and install its single dependency.
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
bash Miniconda3-latest-Linux-x86_64.sh
# 'conda create' requires the -n/--name flag; the original bare
# 'conda create insight' fails with "too few arguments".
conda create -n insight
# NOTE(review): 'conda activate' only works in shells where 'conda init'
# has been run — confirm the installer step above configured the shell.
conda activate insight
conda install -c anaconda zeromq
|
def concatTwoBinStrings(str1, str2):
    """Concatenate two binary strings character by character.

    Matches the original behavior exactly: a '0' stays '0' and every
    other character is emitted as '1'.
    """
    return "".join("0" if ch == "0" else "1" for ch in str1 + str2)
# Demo: concatenate two sample binary strings and print the result.
str1 = "10101"
str2 = "101"
print(concatTwoBinStrings(str1, str2))
import re
def process_output_files():
    """Collect per-job results for jobs 1..50.

    Reads simplewcoptmulti_<n>.out / simplewcoptmulti_<n>.err for each job
    and returns a list of dicts with keys 'job_number', 'word_count'
    (int or None) and 'errors' (str or None).
    """
    return [
        {
            'job_number': job,
            'word_count': extract_word_count(f'simplewcoptmulti_{job}.out'),
            'errors': extract_errors(f'simplewcoptmulti_{job}.err'),
        }
        for job in range(1, 51)
    ]
def extract_word_count(file_path):
    """Return the integer after 'Word count: ' in the file, else None.

    Also returns None when the file does not exist.
    """
    try:
        with open(file_path, 'r') as handle:
            text = handle.read()
    except FileNotFoundError:
        return None
    found = re.search(r'Word count: (\d+)', text)
    return int(found.group(1)) if found else None
def extract_errors(file_path):
    """Return the file's full contents, or None when it does not exist."""
    try:
        with open(file_path, 'r') as handle:
            return handle.read()
    except FileNotFoundError:
        return None
# Build the summary for all 50 jobs and print one section per job.
summary_report = process_output_files()
for report in summary_report:
    print(f"Job {report['job_number']}:")
    print(f"Word count: {report['word_count']}")
    print(f"Errors: {report['errors']}\n")
"""
## Questions : EASY
### 1812. [Determine Color of a Chessboard Square](https://leetcode.com/problems/determine-color-of-a-chessboard-square/)
You are given coordinates, a string that represents the coordinates of a square of the chessboard. Below is a
chessboard for your reference.
Return true if the square is white, and false if the square is black.
The coordinate will always represent a valid chessboard square. The coordinate will always have the letter first, and
the number second.
Example 1:
Input: coordinates = "a1"
Output: false
Explanation: From the chessboard above, the square with coordinates "a1" is black, so return false.
Example 2:
Input: coordinates = "h3"
Output: true
Explanation: From the chessboard above, the square with coordinates "h3" is white, so return true.
Example 3:
Input: coordinates = "c7"
Output: false
Constraints:
coordinates.length == 2
'a' <= coordinates[0] <= 'h'
'1' <= coordinates[1] <= '8'
"""
# Solutions
class Solution:
    def squareIsWhite(self, coordinates: str) -> bool:
        """Return True when the chessboard square is white.

        Files b/d/f/h pair with odd ranks to give white squares; on the
        remaining files (a/c/e/g) it is the even ranks that are white.
        """
        on_even_file = coordinates[0] in ("b", "d", "f", "h")
        has_odd_rank = int(coordinates[1]) % 2 == 1
        # White exactly when the two parities agree.
        return on_even_file == has_odd_rank
# Runtime: 28 ms, faster than 86.26% of Python3 online submissions
# Memory Usage: 14.2 MB, less than 41.67% of Python3 online submissions
class Solution:
    """
    Time Complexity: O(1)
    Space Complexity: O(1)
    """

    def squareIsWhite(self, coordinates: str) -> bool:
        """Return True when the square is white.

        Using a 0-based file index (a=0..h=7), a square is white exactly
        when file_index + rank is even.
        """
        file_index = "abcdefgh".index(coordinates[0])
        rank = int(coordinates[1])
        return (file_index + rank) % 2 == 0
# Runtime: 16 ms, faster than 99.91% of Python3 online submissions
# Memory Usage: 14.3 MB, less than 41.67% of Python3 online submissions
class Solution:
    """
    Time Complexity: O(1)
    Space Complexity: O(1)
    """

    def squareIsWhite(self, coordinates: str) -> bool:
        """Return True when the square is white.

        The two character codes (ord('a') = 97, ord('1') = 49) sum to an
        odd number exactly on white squares, so no lookup table is needed.
        """
        code_sum = ord(coordinates[0]) + ord(coordinates[1])
        return code_sum % 2 == 1
# Runtime: 32 ms, faster than 64.33% of Python3 online submissions
# Memory Usage: 14.2 MB, less than 70.66% of Python3 online submissions
|
<filename>src/components/TagInput/index.js
// Barrel file: re-export the TagInput implementation as this folder's
// default export.
import TagInput from './TagInput'

export default TagInput
|
import { post, get } from '@/utils/http';
export async function getUserDataListI() {
return get('/api/source/list');
}
export async function getDataDetailI(params) {
return get('/api/source/some', params);
}
export async function setStatusI(params) {
return post('/api/app/set-status', params, {
alertSuccess: true,
alertError: true,
});
}
export async function deleteDataI(params) {
return post('/api/source/delete', params, {
alertSuccess: true,
alertError: true,
});
}
export async function createDataI(params) {
return post('/api/source/create', params, {
alertSuccess: true,
alertError: true,
});
}
export async function updateDataI(params) {
return post('/api/source/update', params, {
alertSuccess: true,
alertError: true,
});
}
export async function getCategoryWithComponent(params) {
return get('/api/app/category/list_com', params);
}
export async function queryDataBySql(params) {
return post('/api/data/sql', params, {
alertSuccess: true,
alertError: true,
});
}
|
#!/bin/bash -eux

# should output one of 'redhat' 'centos' 'oraclelinux'
distro="$(rpm -qf --queryformat '%{NAME}' /etc/redhat-release | cut -f 1 -d '-')"

# Purge yum caches on non-RHEL distros only.
if [ "$distro" != 'redhat' ]; then
    yum -y -q clean all;
fi

# Stop NetworkManager from managing resolv.conf on the baked image.
[ -f /etc/NetworkManager/NetworkManager.conf ] && sed -i '/^plugins=ifcfg-rh/a dns=none' /etc/NetworkManager/NetworkManager.conf

# Strip machine-specific lines from the ethernet ifcfg files.
sed -i '/PEER/d' /etc/sysconfig/network-scripts/ifcfg-e*
sed -i '/HWADDR/d' /etc/sysconfig/network-scripts/ifcfg-e*
sed -i '/UUID/d' /etc/sysconfig/network-scripts/ifcfg-e*

# dhcp must be the default for network
# sed -i 's/BOOTPROTO=dhcp/BOOTPROTO=none/g' /etc/sysconfig/network-scripts/ifcfg-*

# Clean up network interface persistence
rm -f /etc/udev/rules.d/70-persistent-net.rules
# NOTE(review): creating a *directory* at the rules-file path looks like
# the packer/bento trick that stops udev from regenerating the file —
# confirm this is intentional before "fixing" it.
mkdir -p /etc/udev/rules.d/70-persistent-net.rules
rm -f /lib/udev/rules.d/75-persistent-net-generator.rules
rm -rf /dev/.udev/

# Second pass over *all* ifcfg files (anchored to line start this time);
# the loopback interface is left untouched.
for ndev in /etc/sysconfig/network-scripts/ifcfg-*; do
    if [ "$ndev" != "/etc/sysconfig/network-scripts/ifcfg-lo" ]; then
        sed -i '/^HWADDR/d' "$ndev"
        sed -i '/^UUID/d' "$ndev"
    fi
done

# delete any logs that have built up during the install
find /var/log/ -name "*.log" -exec rm -f {} \;
rm -fr /tmp/*
|
import pathlib
import sys
import typing
import shutil
def build_all(src: pathlib.Path, dest: pathlib.Path, log: typing.TextIO = sys.stdout) -> None:
    """Recursively copy the tree under ``src`` into ``dest``.

    Directories are recreated and files are copied with metadata
    (``shutil.copy2``).  All progress and failures are reported as lines
    written to ``log``; errors never propagate — they are logged and the
    affected file or subtree is skipped.

    Args:
        src: directory to copy from; logs a message and returns if missing.
        dest: directory to copy into; created (with parents) if necessary.
        log: writable text stream for progress/error messages.
    """
    if not src.exists():
        print(f"Source directory '{src}' does not exist.", file=log)
        return
    try:
        dest.mkdir(parents=True, exist_ok=True)
    except PermissionError:
        print(f"Permission denied: Unable to create directory '{dest}'", file=log)
        return
    except FileExistsError:
        # With exist_ok=True this branch only fires when 'dest' exists but
        # is NOT a directory (e.g. a regular file blocks the path); the old
        # "already exists" message was misleading for that case.
        print(f"'{dest}' exists and is not a directory.", file=log)
        return
    except Exception as e:
        print(f"Error creating directory '{dest}': {e}", file=log)
        return
    for item in src.iterdir():
        if item.is_dir():
            # Recurse into subdirectories, mirroring the tree under dest.
            build_all(item, dest / item.name, log)
        else:
            try:
                shutil.copy2(item, dest / item.name)
                print(f"Copied '{item}' to '{dest / item.name}'", file=log)
            except PermissionError:
                print(f"Permission denied: Unable to copy '{item}' to '{dest / item.name}'", file=log)
            except FileNotFoundError:
                print(f"File '{item}' not found.", file=log)
            except Exception as e:
                print(f"Error copying '{item}' to '{dest / item.name}': {e}", file=log)
import React, {useEffect, useReducer} from 'react';
import 'antd/es/tabs/style';
import 'antd/es/empty/style';
import styles from './index.module.less';
import {Empty, Tabs} from 'antd';
import Basic from './mods/basic';
import Config from './mods/config';
import {Graph} from '@antv/x6';
const {TabPane} = Tabs;
// Props for the settings side bar: the X6 canvas whose current selection
// drives which panel is shown.
interface IProps {
  flowChart: Graph;
}
// Side panel that shows the settings tabs for the single selected node,
// or an empty-state hint when zero or multiple nodes are selected.
const SettingBar: React.FC<IProps> = props => {
  const {flowChart} = props;
  // Incrementing reducer used purely to force a re-render on demand.
  const forceUpdate = useReducer(n => n + 1, 0)[1];
  useEffect(() => {
    flowChart.on('settingBar:forceUpdate', forceUpdate);
    return () => {
      // Detach only OUR handler. Calling off(event) without the callback
      // (as the old code did) removes every listener registered for this
      // event, which can break other subscribers.
      flowChart.off('settingBar:forceUpdate', forceUpdate);
    };
  }, []);
  // Edges never get a settings panel; only node selections count.
  const nodes = flowChart.getSelectedCells().filter(v => v.shape !== 'edge');
  if (nodes.length === 1) {
    return (
      <div className={styles.container}>
        <Tabs tabBarGutter={0} defaultActiveKey={'basic'} tabBarStyle={{display: 'flex', flex: 1, justifyContent: 'center', alignItems: 'center'}}>
          <TabPane tab={'基础信息'} key={'basic'}>
            <Basic selectedCell={nodes[0]}/>
          </TabPane>
          <TabPane tab={'投放配置'} key={'config'}>
            <Config selectedCell={nodes[0]}/>
          </TabPane>
        </Tabs>
      </div>
    );
  } else {
    return (
      <div className={`${styles.container} ${styles.center}`}>
        <Empty description={'请选择一个节点'} image={Empty.PRESENTED_IMAGE_SIMPLE}/>
      </div>
    );
  }
};

export default SettingBar;
|
<gh_stars>0
package org.hzero.sso.azure.service;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.userdetails.AuthenticationUserDetailsService;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.util.Assert;
import org.hzero.core.user.UserType;
import org.hzero.sso.azure.token.AzureAuthenticationToken;
import org.hzero.sso.core.domain.entity.SsoUser;
import org.hzero.sso.core.security.service.SsoUserAccountService;
import org.hzero.sso.core.security.service.SsoUserDetailsBuilder;
import org.hzero.sso.core.exception.LoginExceptions;
/**
*
* @author <EMAIL>
*/
/**
 * Loads HZERO {@code UserDetails} for a user authenticated via Azure SSO.
 *
 * <p>The token's name carries the login username and its credentials carry
 * the tenant id; the user must exist and belong to that tenant.
 */
public class AzureUserDetailsService implements AuthenticationUserDetailsService<AzureAuthenticationToken> {

    private static final Logger LOGGER = LoggerFactory.getLogger(AzureUserDetailsService.class);

    // Collaborators are required and immutable after construction.
    private final SsoUserAccountService userAccountService;
    private final SsoUserDetailsBuilder userDetailsBuilder;

    public AzureUserDetailsService(SsoUserAccountService userAccountService,
                                   SsoUserDetailsBuilder userDetailsBuilder) {
        this.userAccountService = userAccountService;
        this.userDetailsBuilder = userDetailsBuilder;
    }

    /**
     * Resolves the authenticated Azure token into application user details.
     *
     * @param token authenticated token; name = username, credentials = tenant id
     * @return the built user details, never {@code null}
     * @throws UsernameNotFoundException if the user does not exist or does not
     *         belong to the tenant carried by the token
     */
    @Override
    public UserDetails loadUserDetails(AzureAuthenticationToken token) throws UsernameNotFoundException {
        String username = token.getName();
        Long tenantId = Long.valueOf(String.valueOf(token.getCredentials()));
        LOGGER.debug("load auth2 user, username={}, tenantId={},token={}", username, tenantId, token);
        SsoUser user = userAccountService.findLoginUser(username, UserType.ofDefault());
        if (user == null) {
            // Bug fix: Assert.notNull threw IllegalArgumentException, but the
            // AuthenticationUserDetailsService contract (and the tenant check
            // below) use UsernameNotFoundException; throw it consistently so
            // callers can handle "unknown user" uniformly.
            throw new UsernameNotFoundException(LoginExceptions.USERNAME_NOT_FOUND.value());
        }
        List<Long> organizationIdList = userAccountService.findUserLegalOrganization(user.getId());
        if (!organizationIdList.contains(tenantId)) {
            throw new UsernameNotFoundException(LoginExceptions.USERNAME_NOT_FOUND.value());
        }
        return userDetailsBuilder.buildUserDetails(user);
    }
}
|
#!/bin/bash
#
# travis-build.sh - A script to build and/or release SciJava-based projects.
#

dir="$(dirname "$0")"

# Overall exit status: stays 0 until the first failing step is recorded.
success=0

# Log a non-zero exit code to stderr and remember the first one seen,
# so the script can keep going but still exit non-zero at the end.
checkSuccess() {
	if [ "$1" -ne 0 ]; then
		echo "==> FAILED: EXIT CODE $1" 1>&2
	fi
	if [ "$success" -eq 0 ]; then
		success=$1
	fi
}
# Build Maven projects.
if [ -f pom.xml ]
then
	echo travis_fold:start:scijava-maven
	echo "= Maven build ="
	echo
	echo "== Configuring Maven =="
	# NB: Suppress "Downloading/Downloaded" messages.
	# See: https://stackoverflow.com/a/35653426/1207769
	export MAVEN_OPTS="$MAVEN_OPTS -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
	# Populate the settings.xml configuration.
	mkdir -p "$HOME/.m2"
	settingsFile="$HOME/.m2/settings.xml"
	customSettings=.travis/settings.xml
	if [ -f "$customSettings" ]
	then
		cp "$customSettings" "$settingsFile"
	else
		# No repo-provided settings: generate server credentials from the
		# (encrypted) MAVEN_PASS / OSSRH_PASS environment variables.
		cat >"$settingsFile" <<EOL
<settings>
<servers>
<server>
<id>scijava.releases</id>
<username>travis</username>
<password>\${env.MAVEN_PASS}</password>
</server>
<server>
<id>scijava.snapshots</id>
<username>travis</username>
<password>\${env.MAVEN_PASS}</password>
</server>
<server>
<id>sonatype-nexus-releases</id>
<username>scijava-ci</username>
<password>\${env.OSSRH_PASS}</password>
</server>
</servers>
EOL
		# NB: Use maven.scijava.org instead of Central if defined in repositories.
		# This hopefully avoids intermittent "ReasonPhrase:Forbidden" errors
		# when the Travis build pings Maven Central; see travis-ci/travis-ci#6593.
		grep -A 2 '<repository>' pom.xml | grep -q 'maven.scijava.org' &&
		cat >>"$settingsFile" <<EOL
<mirrors>
<mirror>
<id>scijava-mirror</id>
<name>SciJava mirror</name>
<url>https://maven.scijava.org/content/groups/public/</url>
<mirrorOf>central</mirrorOf>
</mirror>
</mirrors>
EOL
		# GPG profile only activates when a keyring exists in $HOME/.gnupg.
		cat >>"$settingsFile" <<EOL
<profiles>
<profile>
<id>gpg</id>
<activation>
<file>
<exists>\${env.HOME}/.gnupg</exists>
</file>
</activation>
<properties>
<gpg.keyname>\${env.GPG_KEY_NAME}</gpg.keyname>
<gpg.passphrase>\${env.GPG_PASSPHRASE}</gpg.passphrase>
</properties>
</profile>
</profiles>
</settings>
EOL
	fi
	# Determine whether deploying will be possible.
	# Deployment requires: a resolvable ciManagement URL, secure env vars,
	# not a pull request, and the build running on the canonical repo slug.
	deployOK=
	ciURL=$(mvn -q -Denforcer.skip=true -Dexec.executable=echo -Dexec.args='${project.ciManagement.url}' --non-recursive validate exec:exec 2>&1)
	if [ $? -ne 0 ]
	then
		echo "No deploy -- could not extract ciManagement URL"
		echo "Output of failed attempt follows:"
		echo "$ciURL"
	else
		# Derive org/repo from the ciManagement URL's last two path segments.
		ciRepo=${ciURL##*/}
		ciPrefix=${ciURL%/*}
		ciOrg=${ciPrefix##*/}
		if [ "$TRAVIS_SECURE_ENV_VARS" != true ]
		then
			echo "No deploy -- secure environment variables not available"
		elif [ "$TRAVIS_PULL_REQUEST" != false ]
		then
			echo "No deploy -- pull request detected"
		elif [ "$TRAVIS_REPO_SLUG" != "$ciOrg/$ciRepo" ]
		then
			echo "No deploy -- repository fork: $TRAVIS_REPO_SLUG != $ciOrg/$ciRepo"
			# TODO: Detect travis-ci.org versus travis-ci.com?
		else
			echo "All checks passed for artifact deployment"
			deployOK=1
		fi
	fi
	# Install GPG on OSX/macOS
	if [ "$TRAVIS_OS_NAME" = osx ]
	then
		HOMEBREW_NO_AUTO_UPDATE=1 brew install gnupg2
	fi
	# Import the GPG signing key.
	keyFile=.travis/signingkey.asc
	key=$1
	iv=$2
	if [ "$key" -a "$iv" -a -f "$keyFile.enc" ]
	then
		# NB: Key and iv values were given as arguments.
		echo
		echo "== Decrypting GPG keypair =="
		openssl aes-256-cbc -K "$key" -iv "$iv" -in "$keyFile.enc" -out "$keyFile" -d
		checkSuccess $?
	fi
	if [ "$deployOK" -a -f "$keyFile" ]
	then
		echo
		echo "== Importing GPG keypair =="
		gpg --batch --fast-import "$keyFile"
		checkSuccess $?
	fi
	# Run the build.
	# Three modes: deploy a SNAPSHOT from master, perform a staged release
	# (release.properties present), or just build locally.
	BUILD_ARGS='-B -Djdk.tls.client.protocols="TLSv1,TLSv1.1,TLSv1.2"'
	if [ "$deployOK" -a "$TRAVIS_BRANCH" = master ]
	then
		echo
		echo "== Building and deploying master SNAPSHOT =="
		mvn -Pdeploy-to-scijava $BUILD_ARGS deploy
		checkSuccess $?
	elif [ "$deployOK" -a -f release.properties ]
	then
		echo
		echo "== Cutting and deploying release version =="
		mvn -B $BUILD_ARGS release:perform
		checkSuccess $?
		echo "== Invalidating SciJava Maven repository cache =="
		# Resolve the released groupId:artifactId and ask the SciJava Nexus
		# to refresh its cached metadata for it.
		curl -fsLO https://raw.githubusercontent.com/scijava/scijava-scripts/master/maven-helper.sh &&
		gav=$(sh maven-helper.sh gav-from-pom pom.xml) &&
		ga=${gav%:*} &&
		echo "--> Artifact to invalidate = $ga" &&
		echo "machine maven.scijava.org" > "$HOME/.netrc" &&
		echo " login travis" >> "$HOME/.netrc" &&
		echo " password $MAVEN_PASS" >> "$HOME/.netrc" &&
		sh maven-helper.sh invalidate-cache "$ga"
		checkSuccess $?
	else
		echo
		echo "== Building the artifact locally only =="
		mvn $BUILD_ARGS install javadoc:javadoc
		checkSuccess $?
	fi
	echo travis_fold:end:scijava-maven
fi
# Configure conda environment, if one is needed.
if [ -f environment.yml ]
then
	echo travis_fold:start:scijava-conda
	echo "= Conda setup ="
	condaDir=$HOME/miniconda
	condaSh=$condaDir/etc/profile.d/conda.sh
	if [ ! -f "$condaSh" ]; then
		echo
		echo "== Installing conda =="
		if [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then
			wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh
		else
			wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
		fi
		rm -rf "$condaDir"
		bash miniconda.sh -b -p "$condaDir"
		checkSuccess $?
	fi
	echo
	echo "== Updating conda =="
	. "$condaSh" &&
	conda config --set always_yes yes --set changeps1 no &&
	conda update -q conda &&
	conda info -a
	checkSuccess $?
	echo
	echo "== Configuring environment =="
	condaEnv=travis-scijava
	# Update the env if it already exists from a cached build, else create it.
	test -d "$condaDir/envs/$condaEnv" && condaAction=update || condaAction=create
	conda env "$condaAction" -n "$condaEnv" -f environment.yml &&
	conda activate "$condaEnv"
	checkSuccess $?
	echo travis_fold:end:scijava-conda
fi
# Execute Jupyter notebooks.
if which jupyter >/dev/null 2>/dev/null
then
	echo travis_fold:start:scijava-jupyter
	echo "= Jupyter notebooks ="
	# NB: This part is fiddly. We want to loop over files even with spaces,
	# so we use the "find ... -print0 | while read $'\0' ..." idiom.
	# However, that runs the piped expression in a subshell, which means
	# that any updates to the success variable will not persist outside
	# the loop. So we suppress all stdout inside the loop, echoing only
	# the final value of success upon completion, and then capture the
	# echoed value back into the parent shell's success variable.
	success=$(find . -name '*.ipynb' -print0 | {
		while read -d $'\0' nbf
		do
			echo 1>&2
			echo "== $nbf ==" 1>&2
			jupyter nbconvert --execute --stdout "$nbf" >/dev/null
			checkSuccess $?
		done
		echo $success
	})
	echo travis_fold:end:scijava-jupyter
fi
exit $success
|
const path = require('path')
const os = require('os')
// Assemble and return the server configuration object.
// Static defaults are declared first; filesystem paths rooted in the
// user's home directory are derived from them afterwards.
export let getConfig = () => {
  const config = {
    port: 1917,
    routes: {
      testConnection: 'test-connection',
      patchesFor: 'patches-for',
      createPatchFile: 'create-patch', // TODO: just POST on routes.patches/:id
      openFileNative: 'open-file',
      setPatchOptions: 'set-options',
      patches: 'patches',
      openStorage: 'open-storage'
    },
    accomodatingUrlMatching: true,
    recentUrlsHistoryLength: 500,
    // storageDir: path.join(process.cwd(), '/patches/'),
    // optionsJsonPath: path.join(process.cwd(), '/patches/options.json'),
    excessLengthIndicator: '_mane-patch',
    specialCommentTokenMatchList: 'patch-urls',
    specialCommentTokenWhenToRun: 'when-to-run',
    fsCacheDir: '.cache',
    fsCacheMatchListsFilename: 'fs-cache-matchlists.json',
    patchDefaultOptions: {
      on: true,
      whenToRun: 'dom'
    },
    // Patch filenames
    maxFilenameLength: 60,
    shortIdLength: 6,
    // Enums
    patchJsonSchema: {
      UserJavascriptAndCSS: 1,
      Mane: 2
    },
    assetTypes: {
      Js: 1,
      Css: 2
    },
    verbosity: 1
  }

  // Derived paths, kept out of the literal so they can reference each other.
  config.storageDir = path.join(os.homedir(), '/.mane-patches')
  config.optionsJsonPath = path.join(config.storageDir, 'options.json')
  config.fsCacheFilePath = path.join(config.storageDir, config.fsCacheDir, config.fsCacheMatchListsFilename)

  return config
}
|
#!/bin/bash
# shellcheck disable=SC1091

# Strict mode: abort on errors, unset variables, and pipeline failures.
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace # Uncomment this line for debugging purpose

# Load libraries
. /opt/bitnami/scripts/libos.sh
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/libnginx.sh

# Load NGINX environment variables
. /opt/bitnami/scripts/nginx-env.sh

# Ensure NGINX environment variables settings are valid
nginx_validate
# Ensure NGINX is stopped when this script ends
trap "nginx_stop" EXIT
# Ensure NGINX daemon user exists when running as 'root'
am_i_root && ensure_user_exists "$NGINX_DAEMON_USER" --group "$NGINX_DAEMON_GROUP"
# Run init scripts
nginx_custom_init_scripts
# Fix logging issue when running as root
# (makes the stdout/stderr symlink targets world-writable -- presumably so
# the unprivileged worker can still write logs to the console; confirm)
! am_i_root || chmod o+w "$(readlink /dev/stdout)" "$(readlink /dev/stderr)"
# Initialize NGINX
nginx_initialize
|
package com.cisco.clmsbackend.controllers;
import java.util.List;
import java.util.logging.Logger;

import javax.validation.Valid;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.cisco.clmsbackend.model.Admin;
import com.cisco.clmsbackend.model.LeaveStatus;
import com.cisco.clmsbackend.model.UserLeave;
import com.cisco.clmsbackend.service.AdminService;
import com.cisco.clmsbackend.service.UserLeaveService;
import com.fasterxml.jackson.core.JsonProcessingException;
/**
 * Administrative REST endpoints: reviewing and deciding user leave requests,
 * and managing admin accounts.
 */
@CrossOrigin(origins = "*", maxAge = 3600)
@RestController
@RequestMapping("/api/admin")
public class AdminController {

    // Proper logger replaces the previous ad-hoc System.out debug prints.
    private static final Logger LOGGER = Logger.getLogger(AdminController.class.getName());

    @Autowired
    private UserLeaveService userLeaveService;

    @Autowired
    private AdminService adminService;

    /**
     * Lists all leave records for a user.
     *
     * @param username login name whose leave records are requested
     * @return 200 with the records, or 404 when the user has none
     */
    @GetMapping("/{username}")
    public ResponseEntity<List<UserLeave>> getUserLeave(@PathVariable final String username)
            throws JsonProcessingException {
        LOGGER.fine(() -> "GET /api/admin/" + username);
        List<UserLeave> ul = userLeaveService.findUserLeaveByUsername(username);
        if (ul.isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        return ResponseEntity.ok().body(ul);
    }

    /**
     * Lists leave records, optionally filtered by status.
     *
     * @param status optional status filter; when absent all records are returned
     * @return 200 with the matching records, or 404 when none match
     */
    @GetMapping("/all")
    public ResponseEntity<List<UserLeave>> getStatus(
            @RequestParam(value = "status", required = false) LeaveStatus status) {
        LOGGER.fine(() -> "GET /api/admin/all?status=" + status);
        List<UserLeave> ul = (status == null)
                ? userLeaveService.findAll()
                : userLeaveService.findByStatus(status);
        if (ul.isEmpty()) {
            return ResponseEntity.notFound().build();
        }
        return ResponseEntity.ok().body(ul);
    }

    /**
     * Applies an admin decision (status + remark) to a pending leave request.
     *
     * @param id        leave request id (numeric path segment)
     * @param userLeave body carrying the new status and remark
     * @return 200 with the updated record; 404 if the id is unknown; 400 for a
     *         non-numeric id or missing status/remark; 406 if already decided
     */
    @PutMapping("/{id}")
    public ResponseEntity<UserLeave> updateUserLeaveAdmin(@PathVariable final String id,
            @Valid @RequestBody UserLeave userLeave) throws JsonProcessingException {
        final long leaveId;
        try {
            leaveId = Long.parseLong(id);
        } catch (NumberFormatException e) {
            // Bug fix: a non-numeric path id previously escaped as an
            // unhandled NumberFormatException (HTTP 500); report 400 instead.
            return ResponseEntity.badRequest().build();
        }
        UserLeave ul = userLeaveService.findUserLeaveById(leaveId);
        if (ul == null) {
            return ResponseEntity.notFound().build();
        }
        LeaveStatus incomingStatus = userLeave.getStatus();
        String incomingRemark = userLeave.getRemark();
        if (incomingStatus == null || incomingRemark == null) {
            return ResponseEntity.badRequest().build();
        }
        // Decisions are final: an already approved/rejected request cannot change.
        if (ul.getStatus() == LeaveStatus.APPROVED || ul.getStatus() == LeaveStatus.REJECTED) {
            return ResponseEntity.status(HttpStatus.NOT_ACCEPTABLE).build();
        }
        ul.setStatus(incomingStatus);
        ul.setRemark(incomingRemark);
        userLeaveService.saveUserLeave(ul);
        return ResponseEntity.ok().body(ul);
    }

    // ------------------------------ Admin related API --------------------------------------- //

    /** Lists every admin account. */
    @GetMapping("/admins")
    public ResponseEntity<List<Admin>> getAdmins() {
        LOGGER.fine("GET /api/admin/admins");
        return ResponseEntity.ok().body(adminService.findAll());
    }

    /**
     * Creates a new admin account.
     *
     * @return 200 with the saved admin, or 409 when the username already exists
     */
    @PostMapping("/admins")
    public ResponseEntity<Admin> addAdmin(@Valid @RequestBody Admin admin) {
        LOGGER.fine(() -> "POST /api/admin/admins username=" + admin.getUsername());
        if (adminService.findAdminByUsername(admin.getUsername()) == null) {
            return ResponseEntity.ok().body(adminService.saveAdmin(admin));
        }
        return ResponseEntity.status(HttpStatus.CONFLICT).build();
    }

    /**
     * Deletes an admin account by username.
     *
     * @return 200 on success, or 406 when no such admin exists
     */
    @DeleteMapping("/admins/{username}")
    public ResponseEntity<?> deleteAdmin(@PathVariable final String username) {
        LOGGER.fine(() -> "DELETE /api/admin/admins/" + username);
        if (adminService.findAdminByUsername(username) != null) {
            adminService.deleteAdminByUsername(username);
            return ResponseEntity.ok().build();
        }
        return ResponseEntity.status(HttpStatus.NOT_ACCEPTABLE).build();
    }
}
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility.
# NOTE(review): -d selects the test id, -g the generator id, -S the seed --
# confirm the numeric ids against `dieharder -l` for the installed version.
dieharder -d 209 -g 403 -S 3128473974
import React from 'react';
import moment from 'moment';
export const loadingScreen = (height = 50, textAlign = 'center', style = {}) => (
<div style={{height: `${height}px`}} className={`text-${textAlign}`}>
<i className='fa fa-spin fa-spinner fa-2x' style={Object.assign({color: 'orangered'}, style)} />
</div>
);
// Format a millisecond duration as "m:ss", rounding to the nearest second.
// When the remainder rounds up to a full minute (e.g. 119999 ms), the
// minute count is carried instead of printing ":60".
export const msToMinuteSeconds = ms => {
  const minutes = Math.floor(ms / 60000);
  // Number() avoids the original's fragile string-vs-number `seconds < 10`
  // comparison (toFixed returns a string).
  const seconds = Number(((ms % 60000) / 1000).toFixed(0));
  if (seconds === 60) {
    return `${minutes + 1}:00`;
  }
  return `${minutes}:${String(seconds).padStart(2, '0')}`;
};
// Convert an array of epoch-millisecond strings (as returned by the API)
// into moment instances, preserving order.
export function cleanDatesFromAPI(dates) {
  return dates.map(stamp => moment(parseInt(stamp, 10)));
}
|
//Step 1: Define user's profile and dietary needs
//This should include age, gender, height, weight, activity levels, and dietary requirements.
//Step 2: Analyze user's profile and generate a list of recommended foods
//This list should include foods that meet the user's dietary needs, preferences, and caloric requirements.
//Step 3: Monitor user's progress and adjust food recommendations accordingly
//Recommendations should be modified as the user's profile and dietary needs change over time. |
<gh_stars>1-10
package br.com.agateownz.foodsocial.modules.shared;
import org.springframework.core.io.ClassPathResource;
import org.springframework.mock.web.MockMultipartFile;
/**
 * Test helpers for building {@link MockMultipartFile} instances from
 * classpath fixtures.
 */
public final class MultipartFileMockBuilders {

    // Utility class: prevent instantiation.
    private MultipartFileMockBuilders() {
    }

    /**
     * Builds a mock multipart file whose content is the bundled
     * {@code data/image.png} classpath resource.
     *
     * @param name the multipart form-field name
     * @throws Exception if the classpath resource cannot be opened
     */
    public static MockMultipartFile mockMultipartFile(String name) throws Exception {
        return new MockMultipartFile(name, new ClassPathResource("data/image.png").getInputStream());
    }
}
|
<filename>app/components/CheckBox/index.js
import React from 'react';
import PropTypes from 'prop-types';
import Wrapper from './style';
export const CheckBox = ({ className, id, label, checked, change }) => {
return (
<Wrapper className={className}>
<input
key={id}
type="checkbox"
id={id}
checked={checked}
name={id}
onChange={() => {}}
/>
<label htmlFor="active" onClick={change}>
{label}
</label>
</Wrapper>
);
};
CheckBox.propTypes = {
className: PropTypes.string,
id: PropTypes.string,
label: PropTypes.string,
checked: PropTypes.bool,
change: PropTypes.func,
};
export default CheckBox;
|
<reponame>frunz005/RapydScript
// Collect every enumerable property name (own and inherited) of `item`,
// mirroring a simplified Python dir().
function dir(item) {
    var names = [];
    for (var key in item) {
        names.push(key);
    }
    return names;
}
// Normalize a Python-style iterable into something array-indexable:
// - arrays and strings are returned unchanged;
// - non-empty array-likes (arguments, NodeList, ...) are copied into a
//   real array via Array.prototype.slice (note: an EMPTY array-like fails
//   the `.length` check and falls through to the branches below);
// - Sets become arrays of their elements;
// - anything else is treated as a mapping and its keys are returned.
function ՐՏ_Iterable(iterable) {
    var tmp;
    if (iterable.constructor === [].constructor || iterable.constructor === "".constructor || (tmp = Array.prototype.slice.call(iterable)).length) {
        // For an array/string the slice never ran, so `tmp` is undefined and
        // `tmp || iterable` returns the original object unchanged.
        return tmp || iterable;
    }
    if (Set && iterable.constructor === Set) {
        return Array.from(iterable);
    }
    return Object.keys(iterable);
}
// Python-style range(): range(stop), range(start, stop) or
// range(start, stop, step). Returns a materialized Array of numbers.
function range(start, stop, step) {
    var length, idx, result;
    if (arguments.length <= 1) {
        // Single-argument form: count from 0 up to `start`.
        stop = start || 0;
        start = 0;
    }
    // Use the named parameter instead of the original's `arguments[2]`;
    // behavior is identical (a passed step of 0 still falls back to 1).
    step = step || 1;
    length = Math.max(Math.ceil((stop - start) / step), 0);
    // `result` no longer shadows the function name as the old local `range` did.
    result = new Array(length);
    for (idx = 0; idx < length; idx++) {
        result[idx] = start;
        start += step;
    }
    return result;
}
// Deep, Python-like equality (the RapydScript `==` helper). Handles
// primitives, arrays, plain objects, Set, Map, Date, and objects that
// define a custom __eq__ method. Both operands must share a constructor.
function ՐՏ_eq(a, b) {
    var i;
    if (a === b) {
        return true;
    }
    if (a === void 0 || b === void 0 || a === null || b === null) {
        return false;
    }
    if (a.constructor !== b.constructor) {
        return false;
    }
    if (Array.isArray(a)) {
        if (a.length !== b.length) {
            return false;
        }
        for (i = 0; i < a.length; i++) {
            if (!ՐՏ_eq(a[i], b[i])) {
                return false;
            }
        }
        return true;
    } else if (a.constructor === Object) {
        if (Object.keys(a).length !== Object.keys(b).length) {
            return false;
        }
        var keys = ՐՏ_Iterable(a);
        for (var k = 0; k < keys.length; k++) {
            i = keys[k];
            if (!ՐՏ_eq(a[i], b[i])) {
                return false;
            }
        }
        return true;
    } else if (Set && a.constructor === Set) {
        if (a.size !== b.size) {
            return false;
        }
        for (i of a) {
            if (!b.has(i)) {
                return false;
            }
        }
        return true;
    } else if (Map && a.constructor === Map) {
        // Bug fix: iterating a Map yields [key, value] entry arrays and
        // `b.has(entry)` compares by reference, so the original branch never
        // matched non-empty Maps. Compare keys and values explicitly.
        if (a.size !== b.size) {
            return false;
        }
        for (var entry of a) {
            if (!b.has(entry[0]) || !ՐՏ_eq(entry[1], b.get(entry[0]))) {
                return false;
            }
        }
        return true;
    } else if (a.constructor === Date) {
        return a.getTime() === b.getTime();
    } else if (typeof a.__eq__ === "function") {
        return a.__eq__(b);
    }
    return false;
}
var fs, path, rapydscript;
fs = require("fs");
path = require("path");
rapydscript = require("../lib/rapydscript");
// Read an entire file as UTF-8 and pass the contents to `cb(err, text)`.
// A falsy filename means "read from STDIN until EOF" instead.
function read_whole_file(filename, cb) {
    if (filename) {
        fs.readFile(filename, "utf-8", cb);
        return;
    }
    var pieces = [];
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", function(piece) {
        pieces.push(piece);
    }).on("end", function() {
        cb(null, pieces.join(""));
    });
    process.openStdin();
}
module.exports = function(start_time, argv, base_path, src_path, lib_path) {
var files, metrics, num_of_files, dropDecorators, dropImports, parseOpts;
files = argv.files.slice(0);
metrics = {};
num_of_files = files.length || 1;
dropDecorators = argv.drop_decorators.split(/\s*,\s*/);
dropImports = argv.drop_imports.split(/\s*,\s*/);
parseOpts = {
filename: "?",
readfile: fs.readFileSync,
auto_bind: argv.auto_bind,
es6: argv.ecmascript6,
libdir: path.join(src_path, "lib"),
import_dirs: rapydscript.get_import_dirs(argv.import_path),
dropDecorators: dropDecorators,
dropImports: dropImports,
dropDocstrings: argv.drop_docstrings,
beautify: argv.beautify,
private_scope: !argv.bare,
omit_baselib: argv.omit_baselib,
strict_names: argv.strict_names,
indent_min: parseInt(argv.indent_min)
};
if (!argv.omit_baselib) {
parseOpts.baselib = rapydscript.parse_baselib(src_path, parseOpts.es6);
}
if (argv.comments) {
if (/^\//.test(argv.comments)) {
parseOpts.comments = new Function("return(" + argv.comments + ")")();
} else if (argv.comments === "all") {
parseOpts.comments = true;
} else {
parseOpts.comments = function(node, comment) {
var text, type;
text = comment.value;
type = comment.type;
if (type === "comment:multiline") {
return /@preserve|@license|@cc_on/i.test(text);
}
};
}
}
function write_output(output) {
if (argv.output) {
fs.writeFileSync(argv.output, output, "utf8");
} else if (argv.execute) {
if (argv.beautify) {
console.log("\n------------ Compilation -------------\n");
console.log(output);
console.log("\n------------ Execution -------------\n");
}
require("vm").runInNewContext(output, {
"console": console,
"process": process,
"require": require,
"root": typeof window === "object" ? window : global
}, {
"filename": files[0]
});
} else {
console.log(output);
}
}
function time_it(name, cont) {
var t1, ret, spent;
t1 = new Date().getTime();
ret = cont();
spent = new Date().getTime() - t1;
if (metrics[name]) {
metrics[name] += spent;
} else {
metrics[name] = spent;
}
return ret;
}
function compile_single_file(err, code) {
var output, i;
if (err) {
console.error("ERROR: can't read file: " + files[0]);
process.exit(1);
}
parseOpts.filename = files[0];
parseOpts.basedir = path.dirname(files[0]);
if (argv.stats) {
time_it("parse", function() {
var toplevel;
toplevel = rapydscript.parse(code, parseOpts);
});
time_it("generate", function() {
output = rapydscript.output(toplevel, parseOpts);
});
} else {
output = rapydscript.compile(code, parseOpts);
}
write_output(output);
files = files.slice(1);
if (files.length) {
setImmediate(read_whole_file, files[0], compile_single_file);
return;
}
if (argv.stats) {
console.error(rapydscript.string_template("Timing information (compressed {count} files):", {
count: num_of_files
}));
for (i in metrics) {
if (metrics.hasOwnProperty(i)) {
console.error(rapydscript.string_template("- {name}: {time}s", {
name: i,
time: (metrics[i] / 1e3).toFixed(3)
}));
}
}
}
}
if (files.filter(function(el) {
return el === "-";
}).length > 1) {
console.error("ERROR: Can read a single file from STDIN (two or more dashes specified)");
process.exit(1);
}
setImmediate(read_whole_file, files[0], compile_single_file);
}; |
<filename>frontend/app/containers/SignPage/index.js<gh_stars>0
/*
* Sign Page
*/
import React from 'react';
import {
updateValidateForm,
updateDataToSign,
updateSelectedRow,
} from "./actions";
import {createStructuredSelector} from "reselect";
import {makeSelectETHAccount} from "../App/selectors";
import {
makeSelectValidateForm,
makeSelectDataToSign,
makeSelectSignature,
makeSelectSelectedRow,
makeSelectRows,
} from "./selectors";
import {connect} from "react-redux";
import reducer from './reducer';
import {withStyles} from "@material-ui/core/styles";
import styles from "./styles";
import withWidth from "@material-ui/core/withWidth";
import {compose} from "redux";
import TextField from "@material-ui/core/TextField";
import Snackbar from "@material-ui/core/Snackbar";
import IconButton from "@material-ui/core/IconButton";
import CloseIcon from "@material-ui/icons/Close";
import injectReducer from 'utils/injectReducer';
import Table from "@material-ui/core/Table";
import TableBody from "@material-ui/core/TableBody";
import WalletRow from "../../components/new-wallet-row";
import Paper from "@material-ui/core/Paper";
import ApproveRow from "../../components/approve-row";
import {approveActionThunk, getRequestsThunk} from "./middlewares";
/* eslint-disable react/prefer-stateless-function */
// Page listing pending withdrawal requests so an operator can select one and
// approve or reject it. Rows, selection, and form-validation state live in
// the redux store and arrive through props.
export class SignPage extends React.PureComponent {
  constructor(props) {
    super(props);
    // Bind handlers once so PureComponent children get stable prop identities.
    this.handleSnackbarClose = this.handleSnackbarClose.bind(this);
    this.clickApproveRow = this.clickApproveRow.bind(this);
  }

  // Dismiss the notification snackbar (clicks elsewhere on the page are ignored).
  handleSnackbarClose(event, reason) {
    if (reason === 'clickaway') {
      return;
    }
    const {formValidate, onUpdateValidateForm} = this.props;
    if (formValidate.snackBar) {
      onUpdateValidateForm(null);
    }
  }

  // Toggle row selection: clicking the currently selected row deselects it.
  clickApproveRow(row) {
    const { onUpdateSelectedRow, selectedRow } = this.props;
    if (selectedRow && selectedRow.withdrawId === row.withdrawId) {
      onUpdateSelectedRow(null);
    } else {
      onUpdateSelectedRow(row);
    }
  }

  componentDidMount() {
    // Fetch the pending requests as soon as the page mounts.
    const {onUpdateRows} = this.props;
    onUpdateRows();
  }

  render() {
    const {classes, rows, formValidate, selectedRow, onUpdateRequest} = this.props;
    // Success notifications use a distinct snackbar style.
    let snackBarDisplay = classes.snackBarContent;
    if (formValidate && formValidate.snackBar && formValidate.snackBar.isSuccess) {
      snackBarDisplay = classes.snackBarContentSuccess;
    }
    return (
      <div className={classes.root}>
        {formValidate && formValidate.snackBar && (formValidate.snackBar.isError || formValidate.snackBar.isSuccess) &&
        <Snackbar
          className={classes.snackBar}
          ContentProps={{
            className: snackBarDisplay,
          }}
          open={formValidate.snackBar.isError || formValidate.snackBar.isSuccess}
          autoHideDuration={2000}
          message={formValidate.snackBar.message}
          onClose={this.handleSnackbarClose}
          anchorOrigin={{
            vertical: 'top',
            horizontal: 'right',
          }}
          action={
            <>
              <IconButton size="small" aria-label="close" color="inherit" onClick={this.handleSnackbarClose}>
                <CloseIcon fontSize="small"/>
              </IconButton>
            </>
          }
        />
        }
        <Paper className={classes.paper}>
          <div className={classes.tableWrapper}>
            {!rows || rows.length === 0
              ?
              <div style={{display: 'flex', justifyContent: 'center', alignItems: 'center'}}>
                <h4>No Records!</h4>
              </div>
              :
              <Table className={classes.tableInsideStyle}>
                <TableBody>
                  {rows.map(row => (
                    <ApproveRow
                      key={"Approve_" + row.withdrawId}
                      row={row}
                      onUpdateSelectedRow={this.clickApproveRow}
                      selectedRow={selectedRow}
                      onUpdateRequest={onUpdateRequest}
                    />
                  ))}
                </TableBody>
              </Table>
            }
          </div>
        </Paper>
      </div>
    );
  }
}
// Map redux dispatch to the callback props consumed by SignPage.
export function mapDispatchToProps(dispatch) {
  const onUpdateValidateForm = validateForm => dispatch(updateValidateForm(validateForm));
  const onUpdateDataToSign = (dateToSign, ethAccount) => dispatch(updateDataToSign(dateToSign, ethAccount));
  const onUpdateSelectedRow = selectedRow => dispatch(updateSelectedRow(selectedRow));
  const onUpdateRequest = (selectedRow, type) => dispatch(approveActionThunk(selectedRow, type));
  const onUpdateRows = () => dispatch(getRequestsThunk());
  return {onUpdateValidateForm, onUpdateDataToSign, onUpdateSelectedRow, onUpdateRequest, onUpdateRows};
}
// Selectors exposing the slices of store state SignPage reads.
const mapStateToProps = createStructuredSelector({
  ethAccount: makeSelectETHAccount(),
  formValidate: makeSelectValidateForm(),
  signature: makeSelectSignature(),
  dataToSign: makeSelectDataToSign(),
  selectedRow: makeSelectSelectedRow(),
  rows: makeSelectRows(),
});

const withConnect = connect(
  mapStateToProps,
  mapDispatchToProps,
);

// Register this page's reducer under the 'sign' store key on mount.
const withReducer = injectReducer({ key: 'sign', reducer });
const withStylesWalletPage = withStyles(styles);
const withWidthWalletPage = withWidth();

// Compose the HOCs in the original order (reducer injection outermost).
export default compose(
  withReducer,
  withConnect,
  withStylesWalletPage,
  withWidthWalletPage,
)(SignPage);
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.