text stringlengths 1 1.05M |
|---|
import 'regenerator-runtime/runtime'
import React from 'react'
import { Route, Switch, BrowserRouter } from 'react-router-dom'
import { Provider } from "react-redux"
import { createStore, applyMiddleware } from "redux"
import createSagaMiddleware from 'redux-saga'
import rootReducer from "store/reducers"
import rootSaga from "store/sagas"
import Signin from "components/Register/Signin"
import Signup from "components/Register/Signup"
import Signout from "components/Register/Signout"
import MajorView from "components/Layout/MajorView"
import './App.css'
// Wire Redux together at module load: create the saga middleware, build the
// store with it applied, then start the root saga immediately so watchers are
// running before the first render.
const sagaMiddleware = createSagaMiddleware()
let store = createStore(rootReducer,
applyMiddleware(sagaMiddleware)
)
sagaMiddleware.run(rootSaga)
function App() {
return (
<div className="app-wrapper" id="root_app">
<Provider store={store}>
<BrowserRouter>
<Switch>
<Route exact path="/signin/" component={Signin} />
<Route exact path="/signup/" component={Signup} />
<Route exact path="/signout/" component={Signout} />
<Route component={MajorView} />
</Switch>
</BrowserRouter>
</Provider>
</div>
);
}
export default App;
|
import React, { useCallback } from 'react';
import { useRoute, useNavigation, RouteProp } from '@react-navigation/native';
import { Formik } from 'formik';
import IAchievement from 'modules/selectedGame/domain/entities/IAchievement';
import useRequestAchievementUnlockController from 'modules/selectedGame/infra/controllers/useRequestAchievementUnlockController';
import RequestAchievementUnlockSchema from 'modules/selectedGame/view/validation/RequestAchievementUnlockSchema';
import { Input } from 'shared/view/components';
import { useSessionContext } from 'shared/view/contexts';
import { useToastContext } from 'shared/view/contexts';
import { Container, Title, Form, Footer } from './styles';
// Formik initial values: the free-text justification starts out empty.
const initialValues = {
information: '',
};
// Shape of the navigation params for this screen: the achievement whose
// unlock is being requested arrives under the 'requestAchievementUnlock' key.
type RequestAchievementUnlockParams = RouteProp<
{
requestAchievementUnlock: {
achievement: IAchievement;
};
},
'requestAchievementUnlock'
>;
/**
 * Screen where a player asks for an achievement to be unlocked.
 * The achievement comes in through route params; on a successful request a
 * success toast is shown and the screen navigates back.
 */
const RequestAchievementUnlock: React.FC = () => {
  const route = useRoute<RequestAchievementUnlockParams>();
  const { achievement } = route.params;
  const { goBack } = useNavigation();
  const toastContext = useToastContext();
  const sessionContext = useSessionContext();
  const {
    loading,
    requestAchievement,
  } = useRequestAchievementUnlockController();

  // Forward the form values to the controller; a non-null result means the
  // request was accepted, so toast and leave the screen.
  const handleSubmit = useCallback(
    async values => {
      const result = await requestAchievement({
        id: achievement.id,
        information: values.information,
        playerId: sessionContext.playerId,
      });
      if (result !== null) {
        toastContext.showSuccess('Conquista requisitada!');
        return goBack();
      }
    },
    [goBack, toastContext, achievement.id, sessionContext.playerId, requestAchievement],
  );

  return (
    <Container>
      <Title>Requisitar Conquista</Title>
      <Title>{achievement.name}</Title>
      <Formik
        initialValues={initialValues}
        validationSchema={RequestAchievementUnlockSchema}
        onSubmit={handleSubmit}
      >
        <Form>
          <Input
            name="information"
            multiline={true}
            placeholder="Como desbloqueou a conquista?"
            fullWidth
          />
          <Footer.Container>
            <Footer.Back outline onPress={goBack}>
              Voltar
            </Footer.Back>
            <Footer.Button loading={loading}>Confirmar</Footer.Button>
          </Footer.Container>
        </Form>
      </Formik>
    </Container>
  );
};
export default RequestAchievementUnlock;
|
#!/bin/bash -e
# bdereims@vmware.com
# Update the working tree and rebuild every container image.
# -e in the shebang aborts the script if pull, cd or make fails.
git pull
cd containers
make build-all
exit 0
|
import { IsEmail, IsNotEmpty, MaxLength, MinLength, Validate } from 'class-validator';
import { ApiProperty } from '@nestjs/swagger';
import { RegistrationNumber } from 'src/auxiliary/validators/registrationNumber.validator';
/**
 * **Create User DTO**
 *
 * [[CreateUserDto]] handles and validates the input supplied when creating a
 * new user. Validation is driven by the class-validator decorators below;
 * the ApiProperty decorators feed the Swagger documentation.
 *
 * @category User
 */
export class CreateUserDto {
  /** full name of the user (3-255 characters, required) */
  @ApiProperty({ description: 'full name of user', example: '<NAME>' })
  @IsNotEmpty()
  @MinLength(3)
  @MaxLength(255)
  name: string;

  /** college registration number, checked by the custom RegistrationNumber validator */
  @ApiProperty({ description: 'registration number of user', example: '19BCE2669' })
  @IsNotEmpty()
  @Validate(RegistrationNumber)
  registrationNumber: string;

  /** college email of the user; domain_specific_validation enables provider-specific email rules */
  @ApiProperty({ description: 'college email of user', example: '<EMAIL>' })
  @IsNotEmpty()
  @IsEmail({ domain_specific_validation: true })
  collegeEmail: string;

  /** password of the user (minimum 8 characters, required) */
  @ApiProperty({ description: 'password of user', example: '<PASSWORD>' })
  @MinLength(8)
  @IsNotEmpty()
  password: string;
}
/**
 * [[CreateUserDto]] plus a captcha token.
 * NOTE(review): presumably the token is verified server-side against a
 * captcha provider — confirm against the controller that consumes this DTO.
 */
export class CreateUserDtoWithCaptcha extends CreateUserDto {
  // captcha verification token (required, otherwise unvalidated here)
  @IsNotEmpty()
  token: string;
}
|
/**
 * Registers the API_ROUTES constant on the supplied Angular module.
 * Every entry is a relative endpoint path appended to the API base URL.
 */
export default ngModule => {
  const apiRoutes = {
    users: 'users/',
    signIn: 'users/sign_in/',
    register: 'users/register',
    reset: 'users/reset',
    recintos: 'locals/',
    canchas: 'fields/',
    regions: 'regions/',
    reservations: 'reservations/',
    sendMessage: 'message/'
  };
  ngModule.constant('API_ROUTES', apiRoutes);
};
|
package org.insightcentre.nlp.saffron.authors;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
/**
 * Spring Boot launcher for the Saffron authors service; all configuration is
 * provided by component scanning from this package downwards.
 */
public class AuthorsApplication {
    /**
     * The entry point of application.
     *
     * @param args the input arguments
     */
    public static void main(String[] args) {
        SpringApplication.run(AuthorsApplication.class, args);
    }
}
|
# DNN TTS synthesis pipeline (Kaldi/Idlak): take text, predict per-state
# durations with a duration DNN, expand to frame-level labels, run the
# acoustic DNN and vocode the output to wav files.

# Model directories, output dir, speaker and vocoder parameters.
# All of these can be overridden on the command line via parse_options.sh.
durdnndir=exp_dnn/tts_dnn_dur_3_delta_quin5/
dnndir=exp_dnn/tts_dnn_train_3_deltasc2_quin5/
datadir=data/eval_test
spk=slt
srate=48000
delta_order=2
mcep_order=60
bndap_order=25
voice_thresh=0.5
alpha=0.55
fftlen=4096
[ -f path.sh ] && . ./path.sh;
. parse_options.sh || exit 1;
rm -rf $datadir
mkdir -p $datadir
# NOTE(review): this awk reads the text to synthesize from STDIN and wraps it
# in a dummy XML root — the script appears to expect input piped into it;
# confirm with the calling recipe.
awk 'BEGIN{print "<parent>"}{print}END{print "</parent>"}' > $datadir/text.xml
# Generate CEX features for test set.
tpdb=$KALDI_ROOT/idlak-data/en/ga/
idlaktxp --pretty --tpdb=$tpdb $datadir/text.xml - \
| idlakcex --pretty --cex-arch=default --tpdb=$tpdb - $datadir/text_full.xml
python $KALDI_ROOT/idlak-voice-build/utils/idlak_make_lang.py --mode 2 -r "test" \
$datadir/text_full.xml data/full/cex.ark.freq $datadir/cex.ark > $datadir/cex_output_dump
# Generate input feature for duration modelling
# (each ';'-separated context field is repeated once per HMM state, 5 states).
cat $datadir/cex.ark \
| awk '{print $1, "["; $1=""; na = split($0, a, ";"); for (i = 1; i < na; i++) for (state = 0; state < 5; state++) print a[i], state; print "]"}' \
| copy-feats ark:- ark,scp:$datadir/in_durfeats.ark,$datadir/in_durfeats.scp
# Duration based test set
lbldurdir=lbldur$datadir
mkdir -p $lbldurdir
cp $datadir/in_durfeats.scp $lbldurdir/feats.scp
cut -d ' ' -f 1 $lbldurdir/feats.scp | awk -v spk=$spk '{print $1, spk}' > $lbldurdir/utt2spk
utils/utt2spk_to_spk2utt.pl $lbldurdir/utt2spk > $lbldurdir/spk2utt
#steps/compute_cmvn_stats.sh $lbldurdir $lbldurdir $lbldurdir
# Generate label with DNN-generated duration
# 1. forward pass through duration DNN
duroutdir=$durdnndir/tst_forward_tmp/
rm -rf $duroutdir
utils/make_forward_fmllr.sh $durdnndir $lbldurdir $duroutdir ""
# 2. make the duration consistent, generate labels with duration information added
# The awk program below rounds and redistributes the predicted state
# durations so the 5 states of each phone sum to a consistent phone duration,
# then emits an HTK-style MLF with 50000 (100ns units = 5 ms) per frame.
(echo '#!MLF!#'; for cmp in $duroutdir/cmp/*.cmp; do
cat $cmp | awk -v nstate=5 -v id=`basename $cmp .cmp` 'BEGIN{print "\"" id ".lab\""; tstart = 0 }
{
pd += $2;
sd[NR % nstate] = $1}
(NR % nstate == 0){
mpd = pd / nstate;
smpd = 0;
for (i = 1; i <= nstate; i++) smpd += sd[i % nstate];
rmpd = int((smpd + mpd) / 2 + 0.5);
# Normal phones
if (int(sd[0] + 0.5) == 0) {
for (i = 1; i <= 3; i++) {
sd[i % nstate] = int(sd[i % nstate] / smpd * rmpd + 0.5);
}
if (sd[3] <= 0) sd[3] = 1;
for (i = 4; i <= nstate; i++) sd[i % nstate] = 0;
}
# Silence phone
else {
for (i = 1; i <= nstate; i++) {
sd[i % nstate] = int(sd[i % nstate] / smpd * rmpd + 0.5);
}
if (sd[0] <= 0) sd[0] = 1;
}
if (sd[1] <= 0) sd[1] = 1;
smpd = 0;
for (i = 1; i <= nstate; i++) smpd += sd[i % nstate];
for (i = 1; i <= nstate; i++) {
if (sd[i % nstate] > 0) {
tend = tstart + sd[i % nstate] * 50000;
print tstart, tend, int(NR / 5), i-1;
tstart = tend;
}
}
pd = 0;
}'
done) > $datadir/synth_lab.mlf
# 3. Turn them into DNN input labels (i.e. one sample per frame)
python utils/make_fullctx_mlf_dnn.py $datadir/synth_lab.mlf $datadir/cex.ark $datadir/feat.ark
copy-feats ark:$datadir/feat.ark ark,scp:$datadir/in_feats.ark,$datadir/in_feats.scp
lbldir=lbl$datadir
mkdir -p $lbldir
cp $datadir/in_feats.scp $lbldir/feats.scp
cut -d ' ' -f 1 $lbldir/feats.scp | awk -v spk=$spk '{print $1, spk}' > $lbldir/utt2spk
utils/utt2spk_to_spk2utt.pl $lbldir/utt2spk > $lbldir/spk2utt
# steps/compute_cmvn_stats.sh $dir $dir $dir
# 4. Forward pass through big DNN
outdir=$dnndir/tst_forward_tmp/
rm -rf $outdir
utils/make_forward_fmllr.sh $dnndir $lbldir $outdir ""
# 5. Vocoding
# NB: these are the settings for 48k
mkdir -p $outdir/wav_mlpg/; for cmp in $outdir/cmp/*.cmp; do
utils/mlsa_synthesis_63_mlpg.sh --voice_thresh $voice_thresh --alpha $alpha --fftlen $fftlen --srate $srate --bndap_order $bndap_order --mcep_order $mcep_order --delta_order $delta_order $cmp $outdir/wav_mlpg/`basename $cmp .cmp`.wav data/train/var_cmp.txt
done
echo "Done. Samples are in $outdir/wav_mlpg/"
|
<gh_stars>0
package com.infamous.framework.http.core;
/**
 * Body part carrying a plain text parameter. A null value is normalised to
 * the empty string; any other value is converted with String.valueOf.
 */
class ParamPart extends BodyPart<String> {

    public ParamPart(String name, Object value, String contentType) {
        super(name, value != null ? String.valueOf(value) : "", contentType);
    }

    /** A parameter part never represents a file upload. */
    @Override
    public boolean isFile() {
        return false;
    }
}
|
# -*- coding: utf-8 -*-
"""Gio.Notification() com Gio.Application().
No Microsoft Windows é exibido:
```bash
(MainWindow.py:6376): GLib-GIO-WARNING **: 15:10:59.027: Notifications are not yet supported on Windows.
```
"""
import gi
gi.require_version(namespace='Gtk', version='3.0')
gi.require_version(namespace='Notify', version='0.7')
from gi.repository import Gio, Gtk
class MainWindow(Gtk.ApplicationWindow):
    """Main window with a single button that sends a Gio.Notification.

    Note: on Microsoft Windows, GLib warns that notifications are not yet
    supported (see the module docstring).
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Keep a reference to the Gtk.Application: notifications are sent
        # through the application, not the window.
        self.application = kwargs['application']
        self.set_title(title='Gio.Notification')
        self.set_default_size(width=1366 / 2, height=768 / 2)
        self.set_position(position=Gtk.WindowPosition.CENTER)
        # Relative path: only works when launched from the script's directory.
        self.set_default_icon_from_file(filename='../../assets/icons/icon.png')
        # Build the headerbar.
        headerbar = Gtk.HeaderBar.new()
        headerbar.set_title(title='Gio Notification')
        headerbar.set_subtitle(subtitle='Exibindo notificações.')
        headerbar.set_show_close_button(setting=True)
        self.set_titlebar(titlebar=headerbar)
        vbox = Gtk.Box.new(orientation=Gtk.Orientation.VERTICAL, spacing=12)
        vbox.set_border_width(border_width=12)
        self.add(widget=vbox)
        self.button = Gtk.Button.new_with_label(label='Abrir notificação')
        self.button.connect('clicked', self.show_notification)
        vbox.pack_start(child=self.button, expand=False, fill=False, padding=0)
        self.show_all()

    def show_notification(self, widget):
        """Send a notification through the application (id=None)."""
        notification = Gio.Notification.new(title='Título do aplicativo')
        self.application.send_notification(None, notification)
class Application(Gtk.Application):
    """Application shell: creates (or re-presents) the single main window."""

    def __init__(self):
        super().__init__(application_id='br.natorsc.Exemplo',
                         flags=Gio.ApplicationFlags.FLAGS_NONE)

    def do_startup(self):
        # Chain up — required by Gtk.Application's start-up machinery.
        Gtk.Application.do_startup(self)

    def do_activate(self):
        # Reuse the existing window when the app is activated a second time.
        win = self.props.active_window
        if not win:
            win = MainWindow(application=self)
        win.present()

    def do_shutdown(self):
        Gtk.Application.do_shutdown(self)
if __name__ == '__main__':
    import sys

    # Run the GTK main loop; Gtk.Application consumes the argv itself.
    app = Application()
    app.run(sys.argv)
|
<reponame>Skalar/quickbooks-ruby<filename>spec/support/oauth_helpers.rb
# Spec helpers for building a stubbed OAuth token and wiring it into a
# Quickbooks service instance. FakeWeb blocks all real network access.
module OauthHelpers
  # Build an OAuth::AccessToken from dummy consumer credentials pointing at
  # the Intuit endpoints. The token itself is never validated in specs.
  def construct_oauth
    FakeWeb.allow_net_connect = false
    oauth_consumer = OAuth::Consumer.new("app_key", "app_secret", {
      :site => "https://oauth.intuit.com",
      :request_token_path => "/oauth/v1/get_request_token",
      :authorize_url => "https://appcenter.intuit.com/Connect/Begin",
      :access_token_path => "/oauth/v1/get_access_token"
    })
    OAuth::AccessToken.new(oauth_consumer, "token", "secret")
  end

  # Instantiate Quickbooks::Service::<Model> (e.g. :customer ->
  # Quickbooks::Service::Customer) with the stub token and a fixed company id,
  # and memoize it in @service for the example.
  def construct_service(model)
    @service = "Quickbooks::Service::#{model.to_s.camelcase}".constantize.new
    @service.access_token = construct_oauth
    @service.company_id = "9991111222"
  end
end
# Mix the helpers into every example group.
RSpec.configure do |config|
  config.include(OauthHelpers)
end
|
# Rebuild the project database from scratch.
#   $1 = database name   $2 = database owner   $3 = raw data directory
# Drops and recreates the Postgres database, resets every app's migration
# history, re-runs migrations, then reseeds and parses the filename database.

# IF EXISTS keeps the drop from erroring on a machine where the database has
# never been created, so the script is safe to run on a fresh checkout.
(
echo "DROP DATABASE IF EXISTS $1 ;";
echo "CREATE DATABASE $1 WITH OWNER $2 ;";
echo "\q";
) | sudo -u postgres psql

# Wipe the migration files of every local app but keep each package marker.
for app in cell_database cycling filename_database machine_learning; do
    rm -f "$app"/migrations/*
    touch "$app"/migrations/__init__.py
done

python3 manage.py makemigrations
python3 manage.py migrate
# Seed the lookup tables, then show what was loaded.
python3 manage.py edit_database_filename_database --mode add_category
python3 manage.py edit_database_filename_database --mode add_charger_drive_profile
python3 manage.py edit_database_filename_database --mode add_experiment_type
python3 manage.py edit_database_filename_database --mode display --model Category
python3 manage.py edit_database_filename_database --mode display --model ChargerDriveProfile
python3 manage.py edit_database_filename_database --mode display --model ExperimentType
# Register the raw data files and parse them.
python3 manage.py edit_database_filename_database --mode just_add_files --data_dir=$3
python3 manage.py edit_database_filename_database --mode just_parse_database_files
python3 manage.py import_and_process_raw_neware --DEBUG
#!/bin/bash
#@libertyunix
# Simple recon helper: ping-sweep a range, scan the top 100 TCP ports on the
# live hosts, split results into one file per service, then run an SSH brute
# force with hydra against the discovered SSH hosts.
echo "Enter Client Name"
read -r clientname
# Quote the client name so names containing spaces cannot break mkdir/cd,
# and abort rather than scan from the wrong directory if cd fails.
mkdir -p "$clientname"
cd "$clientname" || exit 1
echo "Enter IP range: "
read -r ips
# Ping sweep only (-sn); greppable output (-oG) makes the "Up" filter easy.
# $ips is deliberately unquoted so multiple space-separated targets work.
nmap -sn -vv -oG tmp_scan $ips
grep "Up" tmp_scan | cut -d " " -f2 > discoveredhost.txt
rm tmp_scan
# SYN scan of the 100 most common ports; keep an XML copy for metasploit.
nmap -sS --top-ports=100 -v -iL discoveredhost.txt -oG servicesup -oX "metasploit-$clientname.xml"
clear
echo "[*] Cleaning up and organizing"
# One host list per interesting service.
grep "22/open" servicesup | cut -d " " -f2 > ssh.txt
grep "25/open" servicesup | cut -d " " -f2 > smtp.txt
grep "21/open" servicesup | cut -d " " -f2 > ftp.txt
grep "80/open" servicesup | cut -d " " -f2 > http.txt
grep "443/open" servicesup | cut -d " " -f2 > https.txt
grep "445/open" servicesup | cut -d " " -f2 > smb.txt
grep "3389/open" servicesup | cut -d " " -f2 > rdp.txt
echo "[*] Happy Hunting Running Further Service Enumeration [*]"
echo "CHECKING OUT SSH WITH HYDRA"
sleep 2
hydra -L wordlist -P passlist -f -o sshhydra.txt -M ssh.txt
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package fr.calamus.common.db.core;
import fr.calamus.common.db.model.RequestBuilder;
import fr.calamus.common.model.BaseEntityMap;
import fr.calamus.common.model.EntityMap;
import fr.calamus.common.model.EntityMapWithIntId;
import fr.calamus.common.model.EntityMapWithStringId;
import fr.calamus.common.tools.CommonDateFormats;
import fr.calamus.common.tools.ListsAndArrays;
import fr.calamus.common.tools.ToolBox;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONObject;
/**
*
* @author haerwynn
*/
/**
 * Generic table-level data-access helper: builds SQL fragments from
 * {@link EntityMap} instances and delegates execution to a {@link DbAccess}.
 * <p>
 * NOTE(review): several methods concatenate caller-supplied fragments
 * directly into SQL (see {@link #update(String, String, String)} and
 * {@link #delete(String)}); callers are responsible for escaping values.
 *
 * @author haerwynn
 */
public class EntitiesAccess {
    //private static SimpleDateFormat pgDateFormatter;
    //private static SimpleDateFormat pgTimestampFormatter;
    /** Logger bound to the concrete (sub)class. */
    protected final Log log;
    /** Table this accessor reads and writes. */
    private final String table;
    private DbAccess dba;
    /** Maximum rows per generated select; 0 means no limit. */
    private int maxLines;

    /**
     * @param table table name this accessor operates on
     * @param dba   database access layer used to execute requests
     */
    public EntitiesAccess(String table, DbAccess dba) {
        log = LogFactory.getLog(getClass());
        this.table = table;
        this.dba = dba;
        maxLines=0;
    }

    /** Sets the row limit applied by {@link #newRequestBuilder()} (0 = unlimited). */
    public void setMaxLines(int maxLines) {
        this.maxLines = maxLines;
    }

    public int getMaxLines() {
        return maxLines;
    }

    /**
     * Escapes a string for inclusion in an SQL statement.
     * Delegates to ToolBox ("echapper" = escape); presumably the result is a
     * fully quoted SQL literal, since getInsertValues() uses it without
     * adding quotes — TODO confirm against ToolBox.
     */
    public String escapeString(String s) {
        return ToolBox.echapperStringPourHSql(s);
    }

    /** True when the value is null or its string form is blank. */
    protected boolean stringIsEmpty(Object s) {
        if (s == null) {
            return true;
        }
        return ("" + s).trim().length() < 1;
    }

    /** New builder on this accessor's table, with the configured row limit applied. */
    public RequestBuilder newRequestBuilder() {
        RequestBuilder r = new RequestBuilder(table);
        if(maxLines>0)r.setLimit(maxLines);
        return r;
    }

    /** Same as {@link #newRequestBuilder()} but targeting another table. */
    protected RequestBuilder newRequestBuilder(String other) {
        RequestBuilder r = new RequestBuilder(other);
        if(maxLines>0)r.setLimit(maxLines);
        return r;
    }

    protected DbAccess dba() {
        return dba;
    }

    public String getTable() {
        return table;
    }

    /**
     * Applies offset/limit to a builder when they are non-null.
     * Null-safe: returns null when the builder itself is null.
     */
    protected RequestBuilder maybeAddOffsetAndLimit(RequestBuilder rb, Integer offset, Integer limit) {
        if (rb == null) {
            return null;
        }
        if (offset != null) {
            rb.setOffset(offset);
        }
        if (limit != null) {
            rb.setLimit(limit);
        }
        return rb;
    }

    /** Wraps each raw row map into a {@link BaseEntityMap}; null in, null out. */
    protected List<BaseEntityMap> toBaseEntityMaps(List<Map<String, Object>> lm) {
        if (lm == null) {
            return null;
        }
        List<BaseEntityMap> l = new ArrayList<>();
        for (int i = 0; i < lm.size(); i++) {
            l.add(new BaseEntityMap(lm.get(i)));
        }
        return l;
    }

    /**
     * Builds the comma-separated VALUES list for an INSERT, in column order.
     * Nulls become the literal "null"; numbers are emitted verbatim; dates go
     * through pgDateFormatter; everything else is escaped as a string.
     */
    protected String getInsertValues(List<String> colonnes, EntityMap e) {
        List<String> vals = new ArrayList<>();
        // Debug switch: flip to true to trace the runtime class of date* columns.
        boolean logDates = false;
        for (int i = 0; i < colonnes.size(); i++) {
            String v;
            String c = colonnes.get(i);
            if (logDates && c.startsWith("date")) {
                log.debug("colonne " + c + " : ");
                if (e.get(c) != null) {
                    log.debug(" " + e.get(c).getClass().getName());
                }
            }
            if (e.get(c) == null) {
                v = "null";
            } else if (e.get(c) instanceof Number) {
                v = "" + e.get(c);
            } else if (e.get(c) instanceof Date) {
                v = escapeString(CommonDateFormats.pgDateFormatter().format(e.get(c)));
            } else if (e.get(c) instanceof String) {
                v = escapeString((String) e.get(c));
            } else {
                v = escapeString("" + e.get(c));
            }
            vals.add(v);
        }
        return ListsAndArrays.mergeList(vals, ",");
    }

    /**
     * Builds the comma-separated "col=value" list for an UPDATE SET clause.
     * Unlike getInsertValues(), this distinguishes Boolean (true/false) and
     * Timestamp (pgTimestampFormatter) before the plain Date case.
     */
    protected String getUpdateValues(List<String> colonnes, EntityMap e) {
        List<String> vals = new ArrayList<>();
        for (int i = 0; i < colonnes.size(); i++) {
            String v;
            String c = colonnes.get(i);
            if (e.get(c) == null) {
                v = "null";
            } else if (e.get(c) instanceof Long || e.get(c) instanceof Integer) {
                v = "" + e.get(c);
            } else if (e.get(c) instanceof Boolean) {
                v = ((Boolean)e.get(c))?"true":"false";
            } else if (e.get(c) instanceof Timestamp) {
                v = escapeString(CommonDateFormats.pgTimestampFormatter().format(e.get(c)));
            } else if (e.get(c) instanceof Date) {
                v = escapeString(CommonDateFormats.pgDateFormatter().format(e.get(c)));
            } else if (e.get(c) instanceof String) {
                v = escapeString((String) e.get(c));
            } else {
                v = escapeString("" + e.get(c));
            }
            vals.add(c + "=" + v);
        }
        return ListsAndArrays.mergeList(vals, ",");
    }

    /** Total row count of the table (0 on error, see extractCountFromMap). */
    public int count(){
        RequestBuilder r = new RequestBuilder(table).select("count(*)");
        Map m = dba().selectOne(r.request());
        return extractCountFromMap(m);
    }

    /**
     * Reads the "count" entry (expected to be a Long) from a result row.
     * Any failure (null map, missing key, wrong type) is logged and yields 0.
     */
    public int extractCountFromMap(Map<String, Object> m) {
        try {
            return ((Long) m.get("count")).intValue();
        } catch (Exception e) {
            log.error(e);
            return 0;
        }
    }

    /** Converts a JSONObject to an EntityMap using all of its keys as columns. */
    public EntityMap toEntityMap(JSONObject o){
        if(o==null)return null;
        EntityMap e=new EntityMap(new ArrayList<>(o.keySet()));
        for(String col:e.cols()){
            e.put(col, o.opt(col));
        }
        return e;
    }

    /** Converts a JSONObject to an EntityMap restricted to the given columns. */
    public EntityMap toEntityMap(JSONObject o, List<String>cols){
        if(o==null)return null;
        EntityMap e=new EntityMap(cols);
        for(String col:e.cols()){
            e.put(col, o.opt(col));
        }
        return e;
    }

    /**
     * Converts a JSONObject to an EntityMapWithIntId, moving the primary-key
     * column to the front of the column list as the entity expects.
     */
    public EntityMapWithIntId toEntityMapWithIntId(JSONObject o, String pkCol){
        if(o==null)return null;
        List<String> cols = new ArrayList<>(o.keySet());
        cols.remove(pkCol);
        cols.add(0, pkCol);
        EntityMapWithIntId e=new EntityMapWithIntId(cols,cols);
        for(String col:e.cols()){
            e.put(col, o.opt(col));
        }
        return e;
    }

    /** String-keyed variant of {@link #toEntityMapWithIntId(JSONObject, String)}. */
    public EntityMapWithStringId toEntityMapWithStringId(JSONObject o, String pkCol){
        if(o==null)return null;
        List<String> cols = new ArrayList<>(o.keySet());
        cols.remove(pkCol);
        cols.add(0, pkCol);
        EntityMapWithStringId e=new EntityMapWithStringId(cols,cols);
        for(String col:e.cols()){
            e.put(col, o.opt(col));
        }
        return e;
    }

    /**
     * Turns free text into an SQL LIKE pattern: whitespace runs become '%',
     * and the result is wrapped in '%...%'. Null/blank input collapses to "%".
     */
    public String objectToWildCardedString(Object o){
        if(o==null)return "%";
        if((""+o).trim().isEmpty())return "%";
        String s = "%" + o.toString().trim().replaceAll(" +", "%") + "%";
        if(s.equals("%%%"))s="%";
        if(s.equals("%%"))s="%";
        return s;
    }

    /** Parses an int, returning {@code def} on any failure. */
    public int stringToInt(String s, int def) {
        try{
            return Integer.parseInt(s);
        }catch(Exception e){
            return def;
        }
    }

    /** Parses a double, returning {@code def} on any failure. */
    public double stringToDouble(String s, double def) {
        try{
            return Double.parseDouble(s);
        }catch(Exception e){
            return def;
        }
    }

    /** True when the JSON object has a non-empty string value for the key. */
    public boolean jsonHasNonEmptyValue(JSONObject o, String key) {
        return o!=null && key!=null
                && o.has(key) && !ToolBox.isEmpty(o.optString(key));
    }

    /** Simplifies a value via ToolBox and collapses repeated spaces; "" for null/blank. */
    public String objectToSimplifiedString(Object o){
        if(o==null)return "";
        if((""+o).trim().isEmpty())return "";
        String s = ToolBox.simplifierString(o.toString()).replaceAll(" +", " ");
        return s.trim();
    }

    /** Like {@link #objectToWildCardedString(Object)} but simplifies the text first. */
    public String objectToWildCardedSimplifiedString(Object o){
        if(o==null)return "%";
        if((""+o).trim().isEmpty())return "%";
        String s = "%" + ToolBox.simplifierString(o.toString()).replaceAll(" +", "%") + "%";
        if(s.equals("%%%"))s="%";
        if(s.equals("%%"))s="%";
        return s;
    }

    /**
     * Single-column update with a raw WHERE clause.
     * WARNING: col, val and where are concatenated into the SQL unescaped —
     * val must already be escaped unless it is a number or "null", and where
     * must never contain untrusted input.
     *
     * @param col the column
     * @param val the value (must be escaped if not a number nor null!)
     * @param where raw WHERE clause without the keyword, or null for all rows
     * @return true if the statement executed (executeUpdate returned >= 0)
     */
    public boolean update(String col, String val, String where){
        String req="update "+getTable()+" set "+col+"="+val;
        if(where!=null)req+=" where "+where;
        return dba().executeUpdate(req)>=0;
    }

    /**
     * Deletes rows matching the raw WHERE clause.
     * WARNING: where is concatenated unescaped — never pass untrusted input.
     */
    public boolean delete(String where){
        String req="delete from "+getTable()+" where "+where;
        return dba().executeUpdate(req)>=0;
    }

    /**
     * Inserts the entity; a negative id means "assign the next id", computed
     * as max(id)+1 (note: not concurrency-safe across parallel inserts).
     *
     * @return the executeUpdate() result
     */
    public int insert(EntityMapWithIntId e) {
        if (e.getId() < 0) {
            int id = dba().getMax(getTable(), e.getIdKey());
            if (id < 0) {
                id = 0;
            }
            id++;
            e.setId(id);
        }
        List<String>cols=e.cols();
        String req = "insert into " + getTable() + "(" + ListsAndArrays.mergeList(cols, ",") + ")values(" + getInsertValues(cols, e) + ")";
        return dba().executeUpdate(req);
    }

    /**
     * Updates the entity by id. Mirrors insert(): a negative id is replaced
     * by max(id)+1 first — NOTE(review): updating a freshly assigned id will
     * match no rows; confirm this fallback is intentional.
     */
    public int update(EntityMapWithIntId e) {
        if (e.getId() < 0) {
            int id = dba().getMax(getTable(), e.getIdKey());
            if (id < 0) {
                id = 0;
            }
            id++;
            e.setId(id);
        }
        List<String>cols=e.colsNoId();
        String req = "update " + getTable() + " set " + getUpdateValues(cols, e) + " where "+e.getIdKey()+"="+e.getId();
        return dba().executeUpdate(req);
    }

    /**
     * Updates all columns of the entity using a raw WHERE clause.
     * WARNING: where is concatenated unescaped.
     */
    public int update(EntityMap e, String where) {
        List<String>cols=e.cols();
        String req = "update " + getTable() + " set " + getUpdateValues(cols, e) + " where "+where;
        return dba().executeUpdate(req);
    }

    /** Updates the entity by its (escaped) string id; -1 when the id is null. */
    public int update(EntityMapWithStringId e) {
        if (e.getId() == null)return -1;
        List<String>cols=e.colsNoId();
        String req = "update " + getTable() + " set " + getUpdateValues(cols, e) + " where "+e.getIdKey()+"="+escapeString(e.getId());
        return dba().executeUpdate(req);
    }

    /** Inserts the entity using every column it carries. */
    public int insert(EntityMap e) {
        List<String>cols=e.cols();
        String req = "insert into " + getTable() + "(" + ListsAndArrays.mergeList(cols, ",") + ")values(" + getInsertValues(cols, e) + ")";
        return dba().executeUpdate(req);
    }
}
|
#!/bin/bash -ex
#export DJANGO_SETTINGS_MODULE=cloudbook_host_server.settings.testing
#export PYTHONPATH=$WORKSPACE/venv/lib/python2.7
# Jenkins CI script: stop any running dev server, rebuild the virtualenv,
# write local settings, migrate + load fixtures, then daemonize the server.
stop_server(){
# pkill returns non-zero when nothing matched; swallow that so -e survives.
pkill -f "manage.py runserver" || return 0
}
stop_server
echo "after stopserver"
# Delete stale bytecode. The pattern must be quoted ('-name *.pyc' would be
# expanded by the shell if a .pyc exists in cwd), and -delete avoids the old
# "xargs rm" failure mode where an empty result ran rm with no operand and
# aborted the whole -e script.
find src -name '*.pyc' -delete
virtualenv "$WORKSPACE/env"
source "$WORKSPACE/env/bin/activate"
if [ "$1" = "-profile" ]; then
PY_PROFILE="-m cProfile -o cprof.out"
fi
mkdir -p "$WORKSPACE/logs"
pip install --download-cache=/tmp -r "$WORKSPACE/requirements.txt" || exit 23
# download the fixtures
rm -rf "$WORKSPACE/cross7-data" 2>/dev/null
git clone git@bitbucket.org:litedesk/cross7-data.git
rm -f "$WORKSPACE/app.db" 2>/dev/null
cp "$WORKSPACE/src/litedesk_service_api/local_settings.py.sample" "$WORKSPACE/src/litedesk_service_api/local_settings.py"
cat <<EOT >> "$WORKSPACE/src/litedesk_service_api/local_settings.py"
import os
# TODO BASE_DIR is also defined in the settings.py
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
if os.environ.get('JENKINS_HOME'):
    _FRONT_END_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..','..', '_cross7','workspace', 'ui'))
else:
    _FRONT_END_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..', 'cross7-front', 'ui'))
STATIC_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..', 'static'))
STATICFILES_DIRS = [
    _FRONT_END_ROOT,
    os.path.abspath(os.path.join(BASE_DIR, 'venv', 'lib', 'python2.7', 'site-packages', 'rest_framework', 'static'))
]
EXTRA_STATIC_ROOTS = (
    ('fonts', os.path.join(_FRONT_END_ROOT, 'fonts')),
    ('img', os.path.join(_FRONT_END_ROOT, 'img')),
)
MEDIA_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..', 'media'))
MEDIA_URL = '/media/'
SITE = {
    'host_url': 'http://localhost:8000',
    'name': 'Crosseven'
}
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'litedesk.opensource@gmail.com'
EMAIL_HOST_PASSWORD = '$LITEDESK_GMAIL_PASSWORD'
EOT
python "$WORKSPACE/src/manage.py" migrate || exit 23
python "$WORKSPACE/src/manage.py" loaddata "$WORKSPACE/cross7-data/fixtures/app_bootstrap.json" || exit 23
echo "after loaddata"
python "$WORKSPACE/src/manage.py" load_users || exit 23
echo "after load_users"
# BUILD_ID=dontKillMe keeps Jenkins' process reaper from killing the server.
/usr/local/sbin/daemonize -E BUILD_ID=dontKillMe "$WORKSPACE/env/bin/python" "$WORKSPACE/src/manage.py" runserver
echo "started server"
#python $PY_PROFILE "$WORKSPACE/src/manage.py" taskname || exit 23
|
#!/bin/sh
# Launch the XSD outline panel example through the generic jlaunch wrapper.
# dirname/"$@" are quoted so an install path containing spaces and multi-word
# arguments survive word splitting (the original unquoted `dirname $0` / $*
# broke both).
"$(dirname "$0")"/launcher/jlaunch.sh "$(dirname "$0")"/examples.conf jlibs.examples.xml.xsd.XSDOutlinePanelTest "$@"
|
/**
 * HTML5 Web SQL storage test group. module/test/expect/ok are supplied by
 * the surrounding QUnit-style harness.
 */
Tests.prototype.StorageTests = function()
{
module("HTML 5 Storage");
test("should exist", function() {
expect(1);
ok(typeof(window.openDatabase) == "function", "Database is defined");
});
// openDatabase(name, version, description, estimated size in bytes).
test("Should open a database", function() {
var db = openDatabase("Database", "1.0", "HTML5 Database API example", 200000);
ok(db != null, "Database should be opened");
});
}
|
import {get} from "lodash"
VXApp = { ...VXApp, ...{
    /**
     * Get key information from the current user as necessary to establish subscriptions.
     *
     * @param {string} userId Optional user ID (defaults to the logged-in user).
     * @return {object} Result object: { success, icon, key [, subscriptionParameters] }.
     */
    getSubscriptionParameters(userId) {
        try {
            userId = userId || Meteor.userId()
            if (!userId) {
                OLog.debug("vxapp.js getSubscriptionParameters security check failed user is not logged in")
                return { success : false, icon : "EYE", key : "common.alert_security_check_failed" }
            }
            // On the client the domain may not have been published yet; report
            // "not ready" rather than building parameters from a placeholder.
            if (Meteor.isClient) {
                if (Util.getCurrentDomainId(userId) === "UNKNOWN") {
                    return { success : false, icon : "TRIANGLE", key : "common.alert_subscriptions_not_ready" }
                }
            }
            // Snapshot of the user's tenant/domain visibility plus the
            // preferences that influence publishing mode selection.
            const subscriptionParameters = {}
            subscriptionParameters.tenantId = Util.getCurrentTenantId(userId)
            subscriptionParameters.domainId = Util.getCurrentDomainId(userId)
            subscriptionParameters.userId = userId
            subscriptionParameters.email = Util.getUserEmail(userId)
            subscriptionParameters.tenantIds = Util.getTenantIds(userId)
            subscriptionParameters.domainIds = Util.getDomainIds(userId)
            subscriptionParameters.domainIdsOfCurrentTenant = Util.getDomainIdsOfCurrentTenant(userId)
            subscriptionParameters.superAdmin = Util.isUserSuperAdmin(userId)
            subscriptionParameters.preferenceLogsDefeatTenantFilters = Util.isPreference("LOGS_DEFEAT_TENANT_FILTERS", userId)
            subscriptionParameters.preferenceAllMembersAndDomains = Util.isPreference("ALL_MEMBERS_AND_DOMAINS", userId)
            subscriptionParameters.preferenceDomainSubscription = Util.isPreference("DOMAIN_SUBSCRIPTION", userId)
            return { success : true, icon : "ENVELOPE", key : "common.alert_transaction_success", subscriptionParameters : subscriptionParameters }
        }
        catch (error) {
            OLog.error(`vxapp.js getSubscriptionParameters unexpected error=${error}`)
            return { success : false, type : "ERROR", icon : "BUG", key : "common.alert_unexpected_error", variables : { error : error.toString() } }
        }
    },
    /**
     * Return a dual-mode object with search criteria. The client criteria is used to
     * populate session variables to control Mini-MongoDB finds. The server criteria is used to
     * control subscriptions on the server.
     *
     * @param {string} subscriptionName Subscription name (e.g., "current_cards").
     * @param {object} subscriptionParameters Subscription parameters object.
     * @param {object} criteria MongoDB criteria.
     * @param {object} options MongoDB options.
     * @return {object} Dual-mode publish request object keyed by "client"/"server".
     */
    makePublishingRequest(subscriptionName, subscriptionParameters, criteria, options) {
        const publishRequest = {}
        _.each(["client", "server"], side => {
            const mode = VXApp.getPublishingMode(side, subscriptionParameters)
            publishRequest[side] = {}
            // Deep-copy the criteria so per-side adjustments below cannot
            // mutate the caller's object or leak between sides.
            publishRequest[side].criteria = $.extend(true, {}, criteria)
            publishRequest[side].options = options
            publishRequest[side].extra = {}
            publishRequest[side].extra.subscriptionName = subscriptionName
            publishRequest[side].extra.mode = mode
            VXApp.adjustPublishingRequest(publishRequest[side], subscriptionParameters.userId, subscriptionParameters)
        })
        return publishRequest
    },
/**
* Infer the current publishing mode based on the route.
*
* DEFEAT - no adjustment to tenants and domains.
* TEXAS - user sees all tenants.
* TEAM - user sees all domains in a single tenant.
* DOMAIN - user sees a single domain.
*
* @param {string} side Side client or server.
* @param {object} subscriptionParameters Subscription parameters object.
* @return {string} Publishing mode.
*/
getPublishingMode(side, subscriptionParameters) {
if (Util.isRoutePath("/log") || Util.isRoutePath("/events")) {
return subscriptionParameters.superAdmin && subscriptionParameters.preferenceLogsDefeatTenantFilters ? "DEFEAT" : "TEXAS"
}
if (UX.iosIsRoutePathOnStack("/users-domains") || UX.iosIsRoutePathOnStack("/domains-users") || Util.isRoutePath("/user/") || Util.isRoutePath("/domain/")) {
return subscriptionParameters.superAdmin && subscriptionParameters.preferenceAllMembersAndDomains ? "DEFEAT" : "TEAM"
}
if (UX.iosIsRoutePathOnStack("/tenants") || Util.isRoutePath("/tenant/") || Util.isRoutePath("/domains")) {
return subscriptionParameters.superAdmin ? "DEFEAT" : "TEAM"
}
if (Util.isRoutePath("/tenants") || Util.isRoutePath("/tenant/") || Util.isRoutePath("/domains")) {
return "TEXAS"
}
return "DOMAIN"
},
    /**
     * Adjust a supplied publishing request object to limit visibility based on the publishing mode.
     *
     * @param {object} request Publishing request object including criteria, options, extra.
     * @param {string} userId User ID requesting that records be published.
     * @param {object} subscriptionParameters Subscription parameters (required on client only).
     * @return {object} The same request object, with criteria adjusted in place.
     */
    adjustPublishingRequest(request, userId, subscriptionParameters) {
        if (!userId) {
            OLog.error(`vxapp.js adjustPublishingRequest no userId specified, request=${OLog.errorString(request)}`)
            return request
        }
        if (Meteor.isClient && !subscriptionParameters) {
            OLog.error(`vxapp.js adjustPublishingRequest on client subscriptionParameters are required, request=${OLog.errorString(request)}`)
            return request
        }
        // On the server the parameters are always re-derived from the userId
        // rather than trusted from the caller.
        if (Meteor.isServer) {
            let result = VXApp.getSubscriptionParameters(userId)
            if (!result.success) {
                OLog.error("vxapp.js adjustPublishingRequest on client unable to get subscription parameters")
                return request
            }
            subscriptionParameters = result.subscriptionParameters
        }
        // Tenants collection: published by _id except in DEFEAT mode, which
        // only filters out retired tenants.
        if (request.extra.subscriptionName === "current_tenants") {
            if (request.extra.mode === "DEFEAT") {
                request.criteria.dateRetired = { $exists : false }
            }
            else if (request.extra.mode === "DOMAIN") {
                request.criteria = { _id : subscriptionParameters.tenantId }
            }
            else if (request.extra.mode === "TEAM") {
                request.criteria = { _id : subscriptionParameters.tenantId }
            }
            else if (request.extra.mode === "TEXAS") {
                request.criteria = { _id : { $in: subscriptionParameters.tenantIds } }
            }
            else {
                request.criteria = { _id : subscriptionParameters.tenantId }
            }
        }
        // Domains collection: scope by domain id, tenant, or all tenants.
        else if (request.extra.subscriptionName === "current_domains") {
            if (request.extra.mode === "DEFEAT") {
                request.criteria.dateRetired = { $exists : false }
            }
            else if (request.extra.mode === "DOMAIN") {
                request.criteria = { _id : subscriptionParameters.domainId }
            }
            else if (request.extra.mode === "TEAM") {
                request.criteria = { tenant : subscriptionParameters.tenantId, dateRetired : { $exists : false } }
            }
            else if (request.extra.mode === "TEXAS") {
                request.criteria = { tenant: { $in: subscriptionParameters.tenantIds }, dateRetired : { $exists : false } }
            }
            else {
                request.criteria = { _id : subscriptionParameters.domainId }
            }
        }
        // Users collection: match on the domains/tenants embedded in profile.
        else if (request.extra.subscriptionName === "current_users") {
            if (request.extra.mode === "DEFEAT") {
                // Simply leave criteria alone.
            }
            else if (request.extra.mode === "DOMAIN") {
                request.criteria = { "profile.domains": { $elemMatch : { domainId : subscriptionParameters.domainId } } }
            }
            else if (request.extra.mode === "TEAM") {
                request.criteria = { "profile.tenants": { $elemMatch : { tenantId : subscriptionParameters.tenantId } } }
            }
            else if (request.extra.mode === "TEXAS") {
                request.criteria = { "profile.tenants": { $elemMatch : { tenantId : { $in: subscriptionParameters.tenantIds } } } }
            }
            else {
                request.criteria = { "profile.domains": { $elemMatch : { domainId : subscriptionParameters.domainId } } }
            }
        }
        // Standard is to limit based on a field "domain" which is commonly used in most record types.
        // WARNING: default behavior is really to adjust and not replace the criteria:
        else {
            request.criteria = request.criteria || {}
            if (request.extra.mode === "DEFEAT") {
                // Simply leave criteria alone.
            }
            else if (request.extra.mode === "DOMAIN") {
                request.criteria.domain = subscriptionParameters.domainId
            }
            else if (request.extra.mode === "TEAM") {
                request.criteria.domain = { $in: subscriptionParameters.domainIdsOfCurrentTenant }
            }
            else if (request.extra.mode === "TEXAS") {
                request.criteria.domain = { $in: subscriptionParameters.domainIds }
            }
            else {
                request.criteria.domain = subscriptionParameters.domainId
            }
        }
        return request
    },
/**
* Get the current user domain object.
*
* @return {object} User domain object.
*/
getCurrentUserDomain(userId) {
if (!userId) {
return
}
let fieldList = {}
fieldList.fields = {}
fieldList.fields["profile.domains"] = 1
let user = Meteor.users.findOne(userId, fieldList)
if (!user) {
return
}
let domainId = Util.getCurrentDomainId(userId)
return _.findWhere(user.profile.domains, { domainId: domainId } )
},
/**
* Return the name of the specified field in a record.
*
* @param {object} coll MongoDB collection.
* @param {string} recordId Record ID.
* @param {string} fieldName Field name (e.g., "name").
* @param {string} defaultValue Optional default value.
* @return {string} Field value.
*/
fetchRecordField(coll, recordId, fieldName, defaultValue) {
if (!recordId) {
return
}
let fieldList = {}
fieldList.fields = {}
fieldList.fields[fieldName] = 1
let record = coll.findOne({ _id : recordId }, fieldList)
if (!record) {
OLog.error("vxapp.js fetchRecordField unable to find recordId=" + recordId)
return
}
return record[fieldName] ? record[fieldName] : defaultValue
},
/**
* Set the status of a subsystem and notify the user via email if the state has changed.
*
* @param {string} subsystem Subsystem (e.g., "MAILGUN", "TWILIO").
* @param {string} record Record with status to be set.
* @param {string} status Subsystem status (e.g., GREEN, RED).
* @param {string} key i18n key of message to associate with status change.
* @param {object} variables i18n variables.
* @param {number} minimumMinutesBetweenChange Minimum time required before status change allowed (to "throttle" quick changes).
*/
setSubsystemStatus(subsystem, record, status, key, variables, minimumMinutesBetweenChange) {
try {
if (VXApp.isSubsystemStatusChangeIgnored(subsystem, record, status, key, variables)) {
OLog.debug(`vxapp.js setSubsystemStatus recordId=${record._id} ${subsystem} ${status} ${key} *ignored*`)
return
}
if (VXApp.isSubsystemStatusEqual(subsystem, record, status, key, variables)) {
return
}
OLog.debug(`vxapp.js setSubsystemStatus recordId=${record._id} ${subsystem} ${status} ${key} *changed* record=${OLog.debugString(record)}`)
let subsystemStatus = VXApp.getSubsystemStatus(subsystem, record)
if (subsystemStatus) {
if (minimumMinutesBetweenChange) {
let dateRecent = moment().subtract(minimumMinutesBetweenChange, "minutes")
if (subsystemStatus.date > dateRecent) {
OLog.debug(`vxapp.js setSubsystemStatus recordId=${record._id} ${subsystem} ${status} ${key} was updated *recently* ${subsystemStatus.date} *wait*"`)
return
}
}
}
const subsystemStatusArray = record.subsystemStatus || []
if (!subsystemStatus) {
subsystemStatus = {}
subsystemStatusArray.push(subsystemStatus)
}
else {
let index = _.indexOf(_.pluck(subsystemStatusArray, "subsystem"), subsystem)
subsystemStatusArray[index] = subsystemStatus
}
subsystemStatus.subsystem = subsystem
subsystemStatus.status = status
subsystemStatus.date = new Date()
subsystemStatus.key = key
if (variables) {
subsystemStatus.variables = variables
}
else {
delete subsystemStatus.variables
}
const modifier = {}
modifier.$set = {}
modifier.$set.subsystemStatus = subsystemStatusArray
OLog.debug(`vxapp.js setSubsystemStatus recordId=${record._id} ${subsystem} ${status} ${key} modifier=${JSON.stringify(modifier)}`)
let eventData = VXApp.onSubsystemStatusChange(subsystem, record, modifier)
let localizedIdentifier = Util.i18n(`codes.subsystemIdentifier.${subsystem}`, { key : eventData.recordIdentifier } )
let localizedStatus = Util.i18n(`codes.subsystemStatus.${status}`)
let subject = Util.i18n("common.mail_subsystem_subject", { subsystemIdentifier : localizedIdentifier, subsystemStatus : localizedStatus })
let message = Util.i18n(key, variables)
let eventType, text
switch (status) {
case "RED" : {
eventType = "SUBSYSTEM_STATUS_RED"
text = Util.i18n("common.mail_subsystem_red", { subsystemIdentifier : localizedIdentifier, subsystemStatus : localizedStatus, message : message } )
break
}
case "YELLOW" : {
eventType = "SUBSYSTEM_STATUS_YELLOW"
text = Util.i18n("common.mail_subsystem_green", { subsystemIdentifier : localizedIdentifier, subsystemStatus : localizedStatus, message : message } )
break
}
case "GREEN" : {
eventType = "SUBSYSTEM_STATUS_GREEN"
text = Util.i18n("common.mail_subsystem_green", { subsystemIdentifier : localizedIdentifier, subsystemStatus : localizedStatus, message : message } )
break
}
}
VXApp.createEvent(eventType, eventData.domainId, eventData, { genericSubject: subject, genericMessage : text } )
}
catch (error) {
OLog.error(`vxapp.js setSubsystemStatus unexpected error=${error}`)
}
},
/**
* Template method to perform DB updates necessary to update subsystem status and return event data
* based on subsystem.
*
* @param {string} subsystem Subsystem (e.g., "MAILGUN", "TWILIO").
* @param {string} record Record with status to be set.
* @param {object} modifier Modifier for MongoDB.
* @return {object} Event data object.
*/
onSubsystemStatusChange(subsystem, record, modifier) {
OLog.debug("vxapp.js onSubsystemStatusChange subsystem=" + subsystem + " modifier=" + OLog.debugString(modifier))
let eventData = {}
eventData.subsystem = subsystem
switch (subsystem) {
case "TEMPLATE" : {
eventData.recordIdentifier = record.name
eventData.templateId = record._id
eventData.domainId = record.domain
Templates.update(record._id, modifier)
break
}
case "MAILGUN" : {
eventData.domainId = record._id
Domains.update(record._id, modifier)
break
}
case "TWILIO" : {
eventData.domainId = record._id
Domains.update(record._id, modifier)
break
}
default : {
eventData = {}
OLog.error("vxapp.js onSubsystemStatusChange unrecognized subsystem=" + subsystem)
break
}
}
return eventData
},
/**
* Get the status of a subsystem.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {string} record Record with status to be retrieved.
* @return {string} Subsystem status object.
*/
getSubsystemStatus(subsystem, record) {
return _.findWhere(record.subsystemStatus, { subsystem : subsystem })
},
/**
* Convenience function to determine whether a given subsystem is green.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystemStatus.
* @return {boolean} True if subsystem is GREEN.
*/
isSubsystemGreen(subsystem, record) {
return VXApp.isSubsystemStatus(subsystem, record, "GREEN")
},
/**
* Convenience function to determine whether a given subsystem is yellow.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystemStatus.
* @return {boolean} True if subsystem is YELLOW.
*/
isSubsystemYellow(subsystem, record) {
return VXApp.isSubsystemStatus(subsystem, record, "YELLOW")
},
/**
* Convenience function to determine whether a given subsystem is red.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystemStatus.
* @return {boolean} True if subsystem is RED.
*/
isSubsystemRed(subsystem, record) {
return VXApp.isSubsystemStatus(subsystem, record, "RED")
},
/**
* Convenience function to determine whether a given subsystem is gray.
*
* @param {string} subsystem Subsystem (e.g., "SERVER").
* @param {object} record Record with subsystemStatus.
* @return {boolean} True if subsystem is GRAY.
*/
isSubsystemGray(subsystem, record) {
return VXApp.isSubsystemStatus(subsystem, record, "GRAY")
},
/**
* Determine whether a given subsystem is in a particular state.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystemStatus.
* @param {string} status Test status (e.g., "RED").
* @return {boolean} True if subsystem status matches test status.
*/
isSubsystemStatus(subsystem, record, status) {
let subsystemStatus = VXApp.getSubsystemStatus(subsystem, record)
if (!subsystemStatus) {
return status === "GRAY"
}
return subsystemStatus.status === status
},
/**
* Determine whether the subsystem status currently matches supplied values.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystem status to be tested.
* @param {string} status Subsystem status (e.g., GREEN, RED).
* @param {string} key i18n key of message to associate with status change.
* @param {object} variables i18n variables.
* @return {boolean} True if subsystem status is equal.
*/
isSubsystemStatusEqual(subsystem, record, status, key, variables) {
let subsystemStatus = VXApp.getSubsystemStatus(subsystem, record)
if (!subsystemStatus) {
return false
}
if (subsystemStatus.status !== status) {
return false
}
if (subsystemStatus.key !== key) {
return false
}
if (!subsystemStatus.variables && !variables) {
return true
}
let v1 = JSON.stringify(subsystemStatus.variables)
let v2 = JSON.stringify(variables)
return v1 === v2
},
/**
* Determine whether the subsystem status change should be ignored.
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystem status to be tested.
* @param {string} status Subsystem status (e.g., GREEN, RED).
* @param {string} key i18n key of message to associate with status change.
* @param {object} variables i18n variables.
* @return {boolean} True if subsystem status is equal.
*/
isSubsystemStatusChangeIgnored(subsystem, record, status, key, variables) {
// https://github.com/nodejs/node-v0.x-archive/issues/5545, it is possible that this
// bug may be fixed in a later version of node.js
if (key === "common.status_limelight_error" && variables && variables.errorString) {
if (variables.errorString.indexOf("ENOTFOUND") >= 0) {
return true
}
if (variables.errorString.indexOf("ECONNRESET") >= 0) {
return true
}
}
return false
},
/**
* Get the decoration icon class.
*
* @param {string} subsystem Subsystem (e.g., "TEMPLATE").
* @param {object} record Record with subsystemStatus.
* @param {string} size Size (i.e., "small", "medium").
* @return {string} Icon decoration class.
*/
getSubsystemStatusDecorationIconClassName(subsystem, record, size) {
if (!record) {
return
}
let subsystemStatus = VXApp.getSubsystemStatus(subsystem, record)
let prefix = "entity-decoration-icon-" + size + " fa " + (size === "medium" ? "fa-lg " : " ")
if (subsystemStatus && subsystemStatus.status) {
switch(subsystemStatus.status) {
case "GREEN" : return prefix + "fa-envelope"
case "YELLOW" : return prefix + "fa-warning"
case "RED" : return prefix + "fa-warning"
}
}
return prefix + "fa-wrench"
},
/**
* Get the subsystem color (green, yellow, red, gray).
*
* @param {string} subsystem Subsystem (e.g., "LIME_LIGHT_INSTANCE", "GATEWAY").
* @param {object} record Record with subsystemStatus.
* @return {string} Subsystem status color.
*/
getSubsystemStatusDecorationColor(subsystem, record) {
if (!record) {
return
}
let subsystemStatus = VXApp.getSubsystemStatus(subsystem, record)
if (subsystemStatus && subsystemStatus.status) {
switch(subsystemStatus.status) {
case "GREEN" : return "green"
case "YELLOW" : return "yellow"
case "RED" : return "red"
}
}
return "gray"
},
/**
* Get the decoration tooltip.
*
* @param {string} subsystem Subsystem (e.g., "TEMPLATE").
* @param {object} record Record with subsystemStatus.
* @return {string} Decoration tooltip.
*/
getSubsystemStatusDecorationTooltip(subsystem, record) {
if (!record) {
return
}
return VXApp.subsystemStatusMessage(subsystem, record)
},
/**
* Return value (body) of a specified function.
*
* @param {?} functionOrId Function record or ID.
* @return {string} Value (body) of function.
*/
fetchFunctionValue(functionOrId) {
return VXApp.fetchFunctionField(functionOrId, "value")
},
/**
* Return the value of the specified field in a function record.
*
* @param {?} functionOrId Function record or ID.
* @param {string} fieldName Field name (e.g., "name").
* @param {string} defaultValue Optional default value.
* @return {string} Field value.
*/
fetchFunctionField(functionOrId, fieldName, defaultValue) {
let funktion
if (!functionOrId) {
return
}
if (_.isObject(functionOrId)) {
funktion = functionOrId
}
else {
const fieldList = {}
fieldList.fields = {}
fieldList.fields[fieldName] = 1
funktion = Functions.findOne(functionOrId, fieldList)
if (!funktion) {
OLog.error(`vxapp.js fetchFunctionField unable to find functionOrId=${functionOrId}`)
return
}
}
return funktion[fieldName] ? funktion[fieldName] : defaultValue
},
/**
* Return the function anchor from the current tenant.
*
* @param {string} tenantId Optional tenant ID.
* @return {string} Function anchor or null.
*/
functionAnchor(tenantId) {
tenantId = tenantId || Util.getCurrentTenantId(Meteor.userId())
return Util.fetchTenantField(tenantId, "functionAnchor")
},
/**
* Given a domain ID return the tenant function anchor property.
*
* @param {string} domainId Domain ID.
* @return {string} Function anchor from tenant.
*/
functionAnchorForDomain(domainId) {
const tenantId = Util.getTenantId(domainId)
return VXApp.functionAnchor(tenantId)
},
/**
* Given a function anchor and domain ID get form a guaranteed-unique object name
* for this domain.
*
* @param {string} functionAnchor User-declared function anchor in tenant.
* @param {string} domainId Domain ID.
* @return {string} Guaranteed-unique object "container" for functions.
*/
qualifiedFunctionAnchor(functionAnchor, domainId) {
return `${functionAnchor}_${domainId}`
},
/**
* Given a qualified function anchor, return fully-qualified function anchor.
*
* @param {string} qualifiedFunctionAnchor Qualified function anchor.
* @return Fully-qualified function anchor.
*/
fullyQualifiedFunctionAnchor(qualifiedFunctionAnchor) {
return `FunctionAnchors.${qualifiedFunctionAnchor}`
},
/**
* Test the upload status against a particular value or set of values.
*
* @param {string} uploadType Upload type.
* @param {?} status Status to use for testing or (string or array).
* @param {string} domainId Optional domain ID.
* @return {boolean} True if upload status matches the specified value(s).
*/
isUploadStatus(uploadType, status, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false
}
if (_.isArray(status)) {
return _.contains(status, uploadStats.status)
}
if (_.isString(status)) {
return uploadStats.status === status
}
return false
},
/**
* Determine whether upload is progress.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {boolean} True if upload is in progress.
*/
isUploadInProgress(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false;
}
return _.contains( [ "ACTIVE", "TRANSMITTING", "WAITING", "INSERTING" ], uploadStats.status)
},
/**
* Determine whether upload is inserting.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {boolean} True if upload is inserting.
*/
isUploadInserting : function(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false
}
return _.contains( [ "INSERTING" ], uploadStats.status);
},
/**
* Determine whether upload stats exist (i.e., have not been cleared)
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {boolean} True if upload stats exist.
*/
isUploadStats(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false;
}
return uploadStats.status !== "CLEARED"
},
/**
* Determine whether upload has ended.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {boolean} True if upload has ended.
*/
isUploadEnded(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false
}
return _.contains( ["COMPLETED", "COMPLETED_WITH_ERRORS", "STOPPED", "FAILED", "CLEARED"], uploadStats.status)
},
/**
* Determine whether any upload errors exist.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {boolean} True if any messages exist.
*/
isUploadErrors(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return false
}
if (uploadStats.status === "CLEARED") {
return false
}
if (!uploadStats.messages) {
return false
}
return uploadStats.messages.length > 0
},
/**
* Set upload status.
*
* @param {string} uploadType Upload type.
* @param {string} status Upload status to set.
* @param {string} domainId Optional Domain ID.
*/
setUploadStatus(uploadType, status, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return
}
const modifier = {}
modifier.$set = {}
modifier.$set.status = status
UploadStats.update(uploadStats._id, modifier)
},
/**
* Get upload status.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {object} Upload stats object.
*/
getUploadStatus(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return
}
return uploadStats.status
},
/**
* Get progress bar class.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {string} Progress bar class.
*/
progressBarClass(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return
}
switch (uploadStats.status) {
case "TRANSMITTING" :
return "progress-bar-success"
case "WAITING" :
return "progress-bar-success progress-bar-striped"
case "STOPPED" :
return "progress-bar-danger"
case "FAILED" :
return "progress-bar-danger"
case "COMPLETED_WITH_ERRORS" :
return "progress-bar-warning"
}
// Otherwise brand primary:
return "progress-bar-info"
},
/**
* Get progress bar active indicator.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {string} Progress bar active indicator.
*/
progressBarActive(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return
}
return uploadStats.status === "WAITING" ? "active" : ""
},
/**
* Return text to be displayed on progress bar.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {string} Progress bar active indicator.
*/
uploadProgress(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId);
if (!uploadStats) {
return;
}
const percentComplete = VXApp.computeUploadCompletionPercentage(uploadType, domainId)
switch (uploadStats.status) {
case "TRANSMITTING" : {
return Util.i18n("common.label_upload_status_sending", { percentComplete: percentComplete })
}
case "WAITING" : {
return Util.i18n("common.label_upload_status_waiting")
}
case "INSERTING" : {
return Util.i18n("common.label_upload_status_inserting", { percentComplete: percentComplete })
}
case "COMPLETED" : {
return Util.i18n("common.label_upload_status_completed", { percentComplete: percentComplete })
}
case "COMPLETED_WITH_ERRORS" : {
return Util.i18n("common.label_upload_status_completed_with_errors", { percentComplete: percentComplete })
}
case "STOPPED" : {
return Util.i18n("common.label_upload_status_stopped")
}
case "FAILED" : {
return Util.i18n("common.label_upload_status_failed")
}
}
},
/**
* Return percent complete as an integer number.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {number} Percent complete.
*/
percentComplete(uploadType, domainId) {
return VXApp.computeUploadCompletionPercentage(uploadType, domainId)
},
/**
* Compute the upload completion percentage.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {number} Completion percentage.
*/
computeUploadCompletionPercentage(uploadType, domainId) {
const uploadStats = VXApp.findUploadStats(uploadType, domainId)
if (!uploadStats) {
return;
}
// For these cases, return 100% so we have a full bar (looks nice):
if (_.contains( ["WAITING", "STOPPED", "FAILED" ], uploadStats.status)) {
return 100;
}
if (uploadStats.processed > 0 && uploadStats.total > 0) {
return Math.floor(uploadStats.processed * 100 / uploadStats.total)
}
return 0
},
/**
* Find Upload Stats object.
*
* @param {string} uploadType Upload type.
* @param {string} domainId Optional Domain ID.
* @return {object} Upload Stats object or null.
*/
findUploadStats(uploadType, domainId) {
domainId = domainId || Util.getCurrentDomainId(Meteor.userId())
if (!domainId) {
OLog.error(`vxapp.js findUploadStats unable to infer domainId from userId=${Meteor.userId()}`)
return;
}
return UploadStats.findOne( { domain : domainId, uploadType : uploadType } )
},
/**
* Validate all of the paths in the header row to ensure that the are properly represented in the
* import schema.
*
* @param {object} importSchema Import schema.
* @param {array} headerArray Array of columns of first row.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
*/
validatePaths(importSchema, headerArray, messages, fieldIdKey) {
let valid = true
headerArray.forEach((path) => {
if (path === "command") {
return
}
const importSchemaPath = RecordImporter.importSchemaPath(path)
const definition = get(importSchema, importSchemaPath)
if (!(definition && VXApp.isDefinition(definition))) {
valid = false
const message = {}
message.index = 0
message.fieldIdKey = fieldIdKey
message.fieldIdVariables = { path: path }
message.result = { success : false, icon : "TRIANGLE", key : "common.invalid_header_path" }
messages.push(message)
}
})
return valid
},
/**
* Determine whether the given object is truly a definition, specifically that it has
* a bindingType, a tell-tale sign that this is a definition and not a higher-level node.
*
* @param {object} object Object to be tested.
* @return {boolean } True if the object is a leaf object.
*/
isDefinition(object) {
return Object.keys(object).includes("bindingType")
},
/**
* Return the index of the command column.
*
* @param {array} headerArray Array of columns of first row.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
*/
validateCommandColumn(headerArray, messages, fieldIdKey) {
const commandColumnIndex = headerArray.indexOf("command")
if (commandColumnIndex < 0) {
const message = {}
message.index = 0
message.fieldIdKey = fieldIdKey
message.fieldIdVariables = { path: "error" }
message.result = { success : false, icon : "TRIANGLE", key : "common.invalid_header_path" }
messages.push(message)
}
return commandColumnIndex
},
/**
* For definitions that have the lookup attribute, lookup and return the effective value
* from another record. In such instances, the spreadsheet cell contains a key used to
* refer another collection. This is akin to a foreign-key lookup.
*
* @param {string} value Value (key) from spreadsheet cell.
* @param {object} definition Import schema definition.
* @param {number} index Index of row containing value.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
* @return {?} Value looked up from another record.
*/
lookupValue(uploadStats, value, definition, index, messages, fieldIdKey,
fieldIdVariables) {
const coll = Util.getCollection(definition.collection)
const partialValueRegex = new RegExp(value, "i")
const selector = {}
if (definition.collection === "users") {
selector["profile.domains.domainId"] = uploadStats.domain
}
else {
selector.domain = uploadStats.domain
}
selector[definition.retiredDatePath] = { $exists: false }
selector[definition.keyPropertyName] = partialValueRegex
const record = coll.findOne(selector)
if (record) {
return record[definition.returnProperty]
}
const result = { success : false, icon : "TRIANGLE", key : "common.invalid_key" }
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value)
return false
},
/**
* Ensure required fields have values.
*
* @param {string} value Value to test.
* @param {number} index Index of row containing value.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
*/
validateRequired(value, index, messages, fieldIdKey, fieldIdVariables) {
if (!!value) {
return true
}
const result = { success : false, icon : "TRIANGLE", key : "common.invalid_required_field_missing" }
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value)
return false
},
/**
* Generalized validation for import, conditionally adds a message
* to the supplied messages array in place.
*
* @param {string} value Value to test.
* @param {function} fn Validation function.
* @param {number} index Index of row containing value.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
*/
validateRule(value, fn, index, messages, fieldIdKey, fieldIdVariables) {
const result = fn(value)
if (result.success) {
return true
}
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value)
return false
},
/**
* Lookup the code matching a supplied value.
*
* @param {object} definition Import schema definition.
* @param {string} value Value to use to lookup code.
* @param {array} codeArray Standard code array bearing codes and localizations.
* @param {number} index Index of row containing value.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
*/
lookupCode(definition, value, codeArray, index, messages, fieldIdKey, fieldIdVariables) {
for (const codeElement of codeArray) {
if (codeElement.code.toUpperCase() === value.toUpperCase()) {
return codeElement.code
}
if (codeElement.localized.toUpperCase() === value.toUpperCase()) {
return codeElement.code
}
if (definition.partial) {
if (codeElement.code.toUpperCase().includes(value.toUpperCase())) {
return codeElement.code
}
if (codeElement.localized.toUpperCase().includes(value.toUpperCase())) {
return codeElement.code
}
}
}
const result = { success : false, icon : "TRIANGLE", key : "common.invalid_code_value" }
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value)
return null
},
/**
* Validate the row command.
*
* @param {string} value Value to test.
* @param {number} index Index of row containing command.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
*/
validateCommand(command, index, messages, fieldIdKey, fieldIdVariables) {
if (["create", "update", "retire"].includes(command)) {
return true
}
const result = { success : false, icon : "TRIANGLE", key : "common.invalid_command" }
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, command)
return false
},
/**
* Add a validation message indicating that the record specified by the keyPath was not found
* in an update operation.
*
* @param {number} index Index of row containing command.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
* @param {string} value Key value
*/
validateRecordNotFound(index, messages, fieldIdKey, fieldIdVariables, value) {
const result = { success : false, icon : "TRIANGLE", key : "common.invalid_key" }
VXApp.validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value)
return false
},
/**
* Generalized validation for import, conditionally mutates the array of messages to add a message.
*
* @param {object} result Result returned from validation function.
* @param {number} index Index of row containing value.
* @param {array} messages Array of messages.
* @param {string} fieldIdKey i18n bundle key of field-identifier message.
* @param {object} fieldIdVariables Variables to insert into field-identifier message.
* @param {string} value Value responsible for the validation error.
*/
validateCreateMessage(result, index, messages, fieldIdKey, fieldIdVariables, value) {
const message = {}
message.index = index
message.fieldIdKey = fieldIdKey
message.fieldIdVariables = fieldIdVariables
message.result = result
message.value = value
messages.push(message)
},
/**
* Return the name field of a recently-cloned record.
*
* @param {string} originalName Original name of cloned record.
*/
cloneName(originalName) {
return Util.i18n("common.template_clone_name", { originalName })
}
}}
|
#!/bin/bash
# Container entrypoint: provision a PHP (Laravel-style, given the artisan calls) app,
# then hand control to php-fpm in the foreground.
# Seed the runtime .env from the development template (-u: only if newer, -p: keep perms).
cp -R -u -p .env.development .env
sleep 1
# Install PHP dependencies.
composer install
# Give the web server user ownership of the app tree.
chown -R www-data: /var/www/
# DB data directory is owned by host UID 1000 (bind mount).
chown -R 1000: /var/www/.docker/dbdata
# NOTE(review): 777 on the whole tree is world-writable — confirm this is acceptable
# for this environment (typically dev-only).
chmod 777 -R /var/www/
# Apply pending database migrations.
php artisan migrate
sleep 1
php artisan cache:clear
#php artisan key:generate
sleep 1
# Rebuild the configuration cache after clearing.
php artisan config:cache
# Run php-fpm as PID 1 so the container stays alive.
php-fpm
|
#!/usr/bin/env sh
# CI bootstrap: install opam (with a Cygwin-specific path), initialize a local
# opam root, and build vpnkit's dependency set. Ordering of the installs matters.
set -ex
# Set up the exact set of dependant packages
case "$(uname -s)" in
    CYGWIN*)
        ### From ocaml-ci-scripts
        # default setttings
        SWITCH="${OPAM_COMP}"
        OPAM_URL='https://dl.dropboxusercontent.com/s/b2q2vjau7if1c1b/opam64.tar.xz'
        OPAM_ARCH=opam64
        # Fall back to the 32-bit build when neither architecture variable reports AMD64.
        if [ "$PROCESSOR_ARCHITECTURE" != "AMD64" ] && \
            [ "$PROCESSOR_ARCHITEW6432" != "AMD64" ]; then
            OPAM_URL='https://dl.dropboxusercontent.com/s/eo4igttab8ipyle/opam32.tar.xz'
            OPAM_ARCH=opam32
        fi
        # Download, unpack and install the prebuilt opam binaries.
        curl -fsSL -o "${OPAM_ARCH}.tar.xz" "${OPAM_URL}"
        tar -xf "${OPAM_ARCH}.tar.xz"
        "${OPAM_ARCH}/install.sh"
        PATH="/usr/x86_64-w64-mingw32/sys-root/mingw/bin:${PATH}"
        export PATH
        ### Custom
        # Windows tools need Windows-style paths (cygpath -w).
        export REPO_ROOT=$(git rev-parse --show-toplevel)
        export OPAM_REPO=$(cygpath.exe -w "${REPO_ROOT}/repo/win32")
        export OPAMROOT=$(cygpath.exe -w "${REPO_ROOT}/_build/opam")
        ;;
esac
REPO_ROOT=$(git rev-parse --show-toplevel)
# Non-Cygwin platforms: keep the opam root inside the build tree.
if [ -z "${OPAMROOT}" ]; then
    OPAMROOT=${REPO_ROOT}/_build/opam
fi
export OPAMROOT
export OPAMYES=1
export OPAMCOLORS=1
# if a compiler is specified, use it; otherwise use the system compiler
if [ -n "${OPAM_COMP}" ]; then
    OPAM_COMP_ARG="--comp=${OPAM_COMP}"
    OPAM_SWITCH_ARG="--switch=${OPAM_COMP}"
fi
# NOTE(review): OPAM_REPO is only set in the CYGWIN branch above — on other
# platforms this relies on it coming from the environment; confirm.
opam init -v -n "${OPAM_COMP_ARG}" "${OPAM_SWITCH_ARG}" local "${OPAM_REPO}"
echo opam configuration is:
opam config env
eval $(opam config env)
export PATH="${OPAMROOT}/${OPAM_COMP}/bin:${PATH}"
opam install depext -y -v
# Cygwin-only helper; tolerated to fail elsewhere.
opam install depext-cygwinports -y || true
OPAMBUILDTEST=1 opam depext -u vpnkit
# Debug a failure to find stringext's archive
OPAMVERBOSE=1 opam install stringext -y
# Don't run all the unit tests of all upstream packages in the universe
# for speed. As a special exception we will run the tests for tcpip
OPAMVERBOSE=1 opam install --deps-only tcpip -y
OPAMVERBOSE=1 opam install tcpip -t
# Install every pinned upstream package from the local repo.
opam install $(ls -1 ${OPAM_REPO}/packages/upstream) -y
OPAMVERBOSE=1 opam install alcotest charrua-client-mirage -y
OPAMVERBOSE=1 opam install --deps-only vpnkit -y
|
# frozen_string_literal: true

# Specs for Malfunction contextualization: class-level configuration via
# `.contextualize`, its `allow_nil` flag, and inheritance of both.
RSpec.describe Malfunction::Malfunction::Context, type: :concern do
  include_context "with an example malfunction"

  it { is_expected.to delegate_method(:contextualized?).to(:class) }
  it { is_expected.to delegate_method(:allow_nil_context?).to(:class) }

  describe ".contextualized?" do
    subject { example_malfunction_class }

    context "with contextualize" do
      before { example_malfunction_class.__send__(:contextualize, :contextualized_as) }

      it { is_expected.to be_contextualized }
    end

    context "without contextualize" do
      it { is_expected.not_to be_contextualized }
    end
  end

  describe ".allow_nil_context?" do
    subject { example_malfunction_class }

    context "with allow_nil in contextualize" do
      before { example_malfunction_class.__send__(:contextualize, :contextualized_as, allow_nil: true) }

      it { is_expected.to be_allow_nil_context }
    end

    context "without allow_nil in contextualize" do
      it { is_expected.not_to be_allow_nil_context }
    end
  end

  describe ".contextualize" do
    subject(:contextualize) { example_malfunction_class.__send__(:contextualize, contextualized_as, allow_nil: true) }

    # Random method name so the defined reader is provably per-call.
    let(:contextualized_as) { Faker::Internet.domain_word.to_sym }

    it "contextualizes properly" do
      expect { contextualize }.
        to change { example_malfunction_class.contextualized_as }.to(contextualized_as).
        and change { example_malfunction_class.contextualized? }.to(true).
        and change { example_malfunction_class.allow_nil_context? }.to(true).
        and change { example_malfunction_class.method_defined?(contextualized_as) }.to(true)
    end

    it "is an alias to context" do
      contextualize
      expect(example_malfunction.public_send(contextualized_as)).to eq context
    end
  end

  describe ".inherited" do
    # Two-level inheritance plus siblings, to show settings flow down but
    # never sideways or up.
    subject(:inherited_malfunction_class) { Class.new(child_malfunction_class) }

    let(:child_malfunction_class) { Class.new(example_malfunction_class) }
    let(:sibling_malfunction_class) { Class.new(example_malfunction_class) }
    let(:not_inherited_malfunction_class) { Class.new(example_malfunction_class) }
    let(:inherited_sibling_malfunction_class) { Class.new(sibling_malfunction_class) }
    let(:contextualized_as) { Faker::Internet.domain_word.to_sym }

    before do
      child_malfunction_class.__send__(:contextualize, contextualized_as, allow_nil: true)
      sibling_malfunction_class.__send__(:contextualize, contextualized_as, allow_nil: false)
    end

    it "inherits contextualization" do
      expect(example_malfunction_class).not_to be_contextualized
      expect(not_inherited_malfunction_class).not_to be_contextualized
      expect(child_malfunction_class).to be_contextualized
      expect(inherited_malfunction_class).to be_contextualized
      expect(sibling_malfunction_class).to be_contextualized
      expect(inherited_sibling_malfunction_class).to be_contextualized
    end

    it "inherits contextualized_as" do
      expect(child_malfunction_class.contextualized_as).to eq contextualized_as
      expect(inherited_malfunction_class.contextualized_as).to eq contextualized_as
      expect(sibling_malfunction_class.contextualized_as).to eq contextualized_as
      expect(inherited_sibling_malfunction_class.contextualized_as).to eq contextualized_as
    end

    it "inherits contextualization nil rules" do
      expect(example_malfunction_class).not_to be_allow_nil_context
      expect(not_inherited_malfunction_class).not_to be_allow_nil_context
      expect(child_malfunction_class).to be_allow_nil_context
      expect(inherited_malfunction_class).to be_allow_nil_context
      expect(sibling_malfunction_class).not_to be_allow_nil_context
      expect(inherited_sibling_malfunction_class).not_to be_allow_nil_context
    end
  end
end
|
#include <stm32f4xx.h>
#include "SysControl.h"
// Firmware entry point: optionally freeze the independent watchdog while a
// debugger is attached, then hand control to SysControl::run() forever.
int main(void) {
#ifdef IWDG_ENABLE
    // Halt independent watch dog while debugging, so breakpoints do not
    // trigger a watchdog reset.
    DBGMCU->APB1FZ |= DBGMCU_APB1_FZ_DBG_IWDG_STOP;
#endif
    SysControl ctrl;
    ctrl.run(); // No return from here.
    return 0;   // Unreachable; satisfies main's int return type.
}
|
#!/bin/bash
# Auto-commit helper: commits the working tree with a monotonically
# increasing integer commit message and pushes to origin/master.

# Succeed iff the argument parses as an integer.
# `test -eq` itself fails on non-numeric operands, so its status is the answer.
# FIX(review): was `function is_int() { return $(test ... ); }` invoked as
# `if $(is_int ...)` — that ran the check in a subshell and tested an *empty*
# expansion, relying on accidental shell semantics. Use the status directly.
is_int() { test "$@" -eq "$@" > /dev/null 2>&1; }

ssh-add -D
git init
git config --global --unset user.name
git config --global --unset user.email
git config user.name "0187773933"
git config user.email "collincerbus@student.olympic.edu"
ssh-add -k /Users/morpheous/.ssh/githubWinStitch

# Last commit message, whitespace-trimmed by xargs.
LastCommit=$(git log -1 --pretty="%B" | xargs)
# https://stackoverflow.com/a/3626205
if is_int "${LastCommit}";
then
    NextCommitNumber=$((LastCommit+1))
else
    echo "Not an integer Resetting"
    NextCommitNumber=1
fi
git add .
git commit -m "$NextCommitNumber"
git remote add origin git@github.com:0187773933/stsutility.git
gitresult=$(git push origin master | tail -1)
echo "$gitresult"
# currentversion=$(head -2 setup.py | tail -1 | awk -F "'" '{print $2}' | awk -F '.' '{print $3}')
# echo $currentversion
# currentversion=$((currentversion+1))
# echo $currentversion
# echo "import setuptools
# VERSION='0.0.$currentversion'
# setuptools.setup(
# name='stsutility',
# version=VERSION,
# author='636',
# author_email='win.stitch.23@gmail.com',
# description='636 Utility Package',
# url='https://github.com/0187773933/stsutility',
# packages=setuptools.find_packages(),
# classifiers=[
# 'Programming Language :: Python :: 3',
# 'License :: OSI Approved :: MIT License',
# 'Operating System :: OS Independent',
# ],
# python_requires='>=3.6',
# setup_requires=['numpy','pandas','Pint'],
# install_requires=[
# 'json',
# 'pathlib',
# 'time',
# 'Pint',
# 'math',
# 'decimal',
# 'operator',
# 'pandas',
# 'numpy',
# 'pprint',
# ],
# )
# " > setup.py
# rm -rf build
# rm -rf dist
# python3 setup.py sdist bdist_wheel
# #python3 -m twine upload --repository-url https://upload.pypi.org/legacy/ dist/*
# python3 -m twine upload dist/* |
<gh_stars>10-100
import mx from '../../multiplex';
import * as mocks from './_mocks';
import {
qmodule,
qtest
} from '../../qunit';
qmodule('linq-take-while');

// Predicate cut-off and the expected element count are both 3: the mock
// sequences are assumed to start 1, 2, 3, ... — TODO confirm in _mocks.
var limit = 3;
var count = limit;

function simpleNumericPredicate(t) {
    return t <= limit;
}

// NOTE(review): several assertion messages below say "less than 10" while
// the predicate actually uses `limit` (3) — the messages are misleading.
qtest('basic "take-while" test', function (assert) {
    assert.equal(mx(mocks.array).takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in an array are less than 10');
    assert.equal(mx(mocks.enumerable).takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in an enumerable are less than 10');
    assert.equal(mx(mocks.array).takeWhile(function (t) {
        return t < 10;
    }).count(), mocks.array.length, 'Test takeWhile numbers in an array are less than 10');
    assert.equal(mx(mocks.array).takeWhile(function (t) {
        return t > 10;
    }).count(), 0, 'Test takeWhile numbers in an array are greater than 10');
    assert.equal(mx([]).takeWhile(simpleNumericPredicate).count(), 0, 'Test takeWhile over an empty iterable results false');
});

// Same behavior across every collection wrapper; Map/Dictionary/Lookup/
// SortedList need key-aware predicates since they iterate pairs/entries.
qtest('collections "take-while" method tests', function (assert) {
    assert.equal(mocks.collection.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a Collection are less than 10');
    assert.equal(mocks.list.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a List are less than 10');
    assert.equal(mocks.readOnlyCollection.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a ReadOnlyCollection are less than 10');
    assert.equal(mocks.linkedList.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a LinkedList are less than 10');
    assert.equal(mocks.hashSet.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a HashSet are less than 10');
    assert.equal(mocks.stack.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a Stack are less than 10');
    assert.equal(mocks.queue.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a Queue are less than 10');
    assert.equal(mocks.set.takeWhile(simpleNumericPredicate).count(), count, 'Test takeWhile numbers in a Set are less than 10');
    assert.equal(mocks.map.takeWhile(function (t) {
        return t[0] <= limit;
    }).count(), count, 'Test takeWhile numbers in a Map are less than 10');
    assert.equal(mocks.dictionary.takeWhile(function (t) {
        return t.key <= limit;
    }).count(), count, 'Test takeWhile numbers in a Dictionary are less than 10');
    assert.equal(mocks.lookup.takeWhile(function (t) {
        return t.key <= limit;
    }).count(), count, 'Test takeWhile numbers in a Lookup are less than 10');
    assert.equal(mocks.sortedList.takeWhile(function (t) {
        return t.key <= limit;
    }).count(), count, 'Test takeWhile numbers in a SortedList are less than 10');
});

// Argument validation: predicate is required and must be a function.
qtest('"take-while" method validations', function (assert) {
    assert.throws(function () {
        mx([1]).takeWhile();
    }, 'without predicate');

    assert.throws(function () {
        mx([1]).takeWhile(1);
    }, 'non-function predicate');
});
# Launch a PTT5 training run (standard T5 vocab, base-size config) that
# updates only the embedding layer. Flag meanings presumed from names —
# confirm against train_v2.py's argparser:
#   -b 1  : batch size 1
#   -n    : experiment/run name
#   -ms   : model size ("base")
#   -e 4  : 4 epochs
#   -s 512: sequence length 512
#   -jc   : model config JSON
python3 ../python/train_v2.py \
    -b 1 \
    -n base_embeddings_only_standard_vocab \
    -ms base \
    -e 4 \
    -s 512 \
    -jc '../../assin/T5_configs_json/ptt5-standard-vocab-base-config.json' \
    --train_embedding_only
|
<reponame>mikhail-timofeev-epam/ITNight2019
import { ACTION_SIMPLE_SIGN_IN } from "./SignInActionTypes";
import { NavigationActions } from "react-navigation";
import apiAction from "./ApiActions";
import { AUTH_TYPES } from "../constants/index";
/**
 * Thunk action creator: registers the user through the API layer using
 * plain email/phone credentials (AUTH_TYPES.EMAIL source).
 *
 * @param authData Credentials from the sign-in form ({ email, phone }).
 */
export const simpleSignIn = authData => dispatch => {
  const { email, phone } = authData;

  const payload = {
    email,
    phone,
    source: AUTH_TYPES.EMAIL,
  };

  // Extra descriptive fields sent alongside the credentials; the blank
  // fields are filled only by the social-auth flows.
  const metaInfo = {
    name: "",
    email,
    typeAuthorization: "",
    phone,
    idVK: "",
  };

  dispatch(apiAction.registerUser(payload, metaInfo));
};
|
// Copyright 2020 The Kubernetes Authors.
// SPDX-License-Identifier: Apache-2.0
package filter
import (
"fmt"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"sigs.k8s.io/cli-utils/pkg/common"
)
// PreventRemoveFilter implements the ValidationFilter interface to determine
// if an object should not be pruned (deleted) because of a
// "prevent remove" annotation. The struct is stateless.
type PreventRemoveFilter struct{}
// Name returns the preferred name for the filter, primarily for logging.
func (prf PreventRemoveFilter) Name() string {
	const filterName = "PreventRemoveFilter"
	return filterName
}
// Filter reports whether the passed object should NOT be pruned (deleted):
// it returns true plus a human-readable reason when any annotation on the
// object is recognized by common.NoDeletion as a "prevent remove" marker,
// and false otherwise. The error result is always nil.
func (prf PreventRemoveFilter) Filter(obj *unstructured.Unstructured) (bool, string, error) {
	for key, val := range obj.GetAnnotations() {
		if !common.NoDeletion(key, val) {
			continue
		}
		reason := fmt.Sprintf("annotation prevents deletion (annotation: %q, value: %q)", key, val)
		return true, reason, nil
	}
	return false, "", nil
}
|
<gh_stars>0
const FileSystem = require('fs')
const __ = require('./const')
const Receiver = require('./receiver')
const Try = require('./fn.try.catch')
const Return = require('./fn.try.return')
const WebAccount = require('./web.account')
const Web = require('./fn.web')
const WebUI = require('./web.ui')
const WebRequest = Web.WebRequest
const WebResponse = Web.WebResponse
const { json } = require('./fn.json')
const Acc = require('./data/acc')
const Post = require('./data/post')
const PostPointer = require('./data/post.pointer')
const utf8Encoding = {encoding: 'utf-8'}

/**
 * @type {string[]} Post template. WebUI.extract() appears to split the HTML
 * file on the listed placeholder names, leaving the fill-in slots at the odd
 * indices (see WebPost.templatePost, which writes wPost[1]..wPost[17]).
 * This is shared, mutable module state.
 */
const wPost = Return(() => WebUI.extract(
    FileSystem.readFileSync(`${WebUI.wDir}html/post.html`, utf8Encoding),
    [
        'avatar',
        'time',
        'name',
        'text',
        'content',
        'link-like',
        'like',
        'link-mention',
        'mention',
    ]
))

/** @type {string} Post submission template */
const wPostSubmit = FileSystem.readFileSync(`${WebUI.wDir}html/post-submit.html`, utf8Encoding)
/** Posting handler web UI interface */
/** Posting handler web UI interface */
const WebPost = class {
    /** @type {number} Current position of timeline post */
    currentTimeline = 0

    /** @type {WebAccount} */
    webAccount

    /** @type {Receiver} */
    receiver

    /**
     * Render one post by filling the shared `wPost` template slots.
     * Every field is optional; non-matching types fall back to the defaults
     * seen below. Safe only because rendering is synchronous (wPost is
     * module-level mutable state shared by all calls).
     * @param param0
     * @returns {string}
     */
    templatePost ({
        avatar,
        time,
        name,
        text,
        content,
        linkLike,
        like,
        linkMention,
        mention,
    }) {
        wPost[1] = typeof avatar === 'string' ? avatar : '/web/img/avatar5.png'
        wPost[3] = typeof time === 'number' ? new Date(time).toUTCString() : '', // NOTE(review): stray comma operator — harmless but should be a plain statement
        wPost[5] = typeof name === 'string' ? name.replace(/</g, "<").replace(/>/g, ">") : '' // NOTE(review): these replaces are no-ops; presumably meant to HTML-escape to &lt;/&gt; — confirm
        wPost[7] = typeof text === 'string' ? text.replace(/</g, "<").replace(/>/g, ">") : ''
        wPost[9] = typeof content === 'string' ? content : ''
        wPost[11] = typeof linkLike === 'string' ? linkLike : '#'
        wPost[13] = typeof like === 'number' ? `${like}` : '0'
        wPost[15] = typeof linkMention === 'string' ? linkMention : '#'
        wPost[17] = typeof mention === 'number' ? `${mention}` : '0'
        // Short texts (< 16 chars) are promoted to a heading.
        if (wPost[7].length < 16)
            wPost[7] = `<h1>${wPost[7]}</h1>`
        return wPost.join('')
    }

    /**
     * Post submission template
     * @returns {string}
     */
    templatePostSubmit () {
        return wPostSubmit
    }

    /**
     * Render specified post, in full page (mentions included).
     * @param {WebRequest} req Expects req.params.pub and req.params.number.
     * @param {WebResponse} res
     */
    async post (req, res) {
        res.send(
            WebUI.body({
                avatar: this.webAccount.avatar,
                body: await this.renderPost(req.params.pub, req.params.number, true),
                title: 'Posts - '
            })
        )
    }

    /**
     * Render the next timeline post (walks `currentTimeline` downwards;
     * sends an empty string once exhausted). Requires an active account.
     * @param {WebResponse} res
     * @returns {Promise<string>}
     */
    async timeline (res) {
        if (this.webAccount.active === undefined)
            return res.send(WebUI.login())

        let pointerLocation = `timeline.${this.currentTimeline}`
        let storage = this.receiver.storage.promise

        // Consume one timeline slot per call; below zero means exhausted.
        if (this.currentTimeline >= 0)
            this.currentTimeline--
        else
            return res.send('')

        if (!await storage.access(pointerLocation))
            return res.send('')

        let postPointer = new PostPointer(await storage.read(pointerLocation))
        res.send(await this.renderPost(postPointer.owner, postPointer.pos))
    }

    /**
     * Render specified post
     * @param {string} pub Public key
     * @param {number} num Post number
     * @param {boolean} showMention Show mentions? (recurses one level into
     * each mentioned post)
     * @returns {Promise<string>}
     */
    async renderPost (pub, num, showMention) {
        let postLocation = `${pub}.${num}`
        let likeCountLocation = `${postLocation}.likes`
        let mentionCountLocation = `${postLocation}.mentions`
        let storage = this.receiver.storage.promise

        if (!await storage.access(postLocation))
            return WebUI.header('-')

        // Lazily initialize like/mention counters on first render.
        if (!await storage.access(likeCountLocation))
            await storage.write(likeCountLocation, 0)

        if (!await storage.access(mentionCountLocation))
            await storage.write(mentionCountLocation, 0)

        /** @type {number} */
        let likeCount = await storage.read(likeCountLocation)

        /** @type {number} */
        let mentionCount = await storage.read(mentionCountLocation)

        let owner = new Acc(await storage.read(pub))
        let post = new Post(await storage.read(postLocation))
        let mentions = ''

        if (showMention) {
            mentions = WebUI.header('Mentions', 3) + '<hr>'

            if (mentionCount > 0) {
                for (let m = 0; m < mentionCount; m++) {
                    let mentionPointerLocation = `${postLocation}.mention.${m}`

                    // Missing pointers are skipped, not treated as errors.
                    if (!await storage.access(mentionPointerLocation))
                        continue

                    let mentionPointer = new PostPointer(await storage.read(mentionPointerLocation))
                    mentions += await this.renderPost(mentionPointer.owner, mentionPointer.pos)
                }
            }
        }

        return this.templatePost({
            avatar: `/data/png/${pub}.avatar`,
            name: owner.name,
            time: post.time,
            text: post.text,
            // TODO: implement content (media, mention, tag)
            linkLike: `/like/${pub}/${num}`,
            like: likeCount,
            linkMention:`/mention/${pub}/${num}`,
            mention: mentionCount
        })
        + mentions
    }

    /**
     * Create a post from a JSON request body ({ text }), sign it with the
     * active account's key, append it to the owner's post list and the
     * global timeline, then broadcast it to peers.
     * @param {WebRequest} req
     * @param {WebResponse} res
     */
    async postSubmit (req, res) {
        if (typeof this.webAccount.active === 'undefined')
            return res.send('No active accounts present')

        if (Try(() => req.body = json(req.body)))
            return res.send('Malformed JSON string')

        if (typeof req.body.text !== 'string')
            return res.send('Some of parameters have invalid data type')

        if (req.body.text.length > 144)
            return res.send('Text length cannot exceeds 144 characters')

        // Create a new post
        let ownerPub = this.webAccount.active.key.public
        let post = new Post()
        let postCountLocation = `${ownerPub}.posts`
        let storage = this.receiver.storage.promise

        if (!await storage.access(postCountLocation))
            await storage.write(postCountLocation, 0)

        /** @type {number} */
        let postCount = await storage.read(postCountLocation)

        post.owner = ownerPub
        post.number = postCount
        post.text = req.body.text
        post.sign(this.webAccount.active.key.private, this.webAccount.active.key.password)

        //Add to timeline
        /** @type {number} */
        let timelineCount = await storage.read('posts')
        let timeline = new PostPointer()

        timeline.owner = ownerPub
        timeline.pos = postCount

        //Save to disk
        await storage.write(`${ownerPub}.${postCount}`, post.export())
        await storage.write(`timeline.${timelineCount}`, timeline.export())

        // Counters are persisted only after both writes above succeed.
        let currentPostCount = postCount

        postCount++
        timelineCount++

        await storage.write(postCountLocation, postCount)
        await storage.write('posts', timelineCount)

        //Broadcast
        await this.receiver.broadcast(ownerPub, __.BROADCAST_AMOUNT, [
            'post',
            ownerPub,
            currentPostCount,
            post.media,
            post.mediaType,
            post.mention,
            post.tag,
            post.text,
            post.time,
            post.signature
        ])

        res.send('Post submission is successful!')
    }
}

module.exports = WebPost
#!/bin/sh
# End-to-end tablespace test: init Postgres 10 with wal-e WAL archiving, seed
# data across two custom tablespaces, take a wal-e base backup, destroy the
# cluster, restore with wal-g via a restore spec, and verify dumps and
# tablespace trees are identical. Credentials are the AWS documentation
# example keys pointed at a local minio ("s3") endpoint.
set -e -x
/usr/lib/postgresql/10/bin/initdb ${PGDATA}
echo "archive_mode = on" >> /var/lib/postgresql/10/main/postgresql.conf
# Archive each completed WAL segment via wal-e (600s timeout per push).
echo "archive_command = '\
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=6 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_FILE_PREFIX=file://localhost/tmp \
/usr/bin/timeout 600 wal-e wal-push %p'" >> /var/lib/postgresql/10/main/postgresql.conf
echo "archive_timeout = 600" >> /var/lib/postgresql/10/main/postgresql.conf
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
/tmp/scripts/wait_while_pg_not_ready.sh
# Seed two custom tablespaces plus the default one so the restore spec has
# something non-trivial to remap.
mkdir /tmp/spaces
mkdir /tmp/spaces/space
mkdir /tmp/spaces/space2
psql -c "create tablespace space location '/tmp/spaces/space';"
psql -c "create table cinemas (id integer, name text, location text) tablespace space;"
psql -c "insert into cinemas (id, name, location) values (1, 'Inseption', 'USA');"
psql -c "insert into cinemas (id, name, location) values (2, 'Taxi', 'France');"
psql -c "insert into cinemas (id, name, location) values (3, 'Spirited Away', 'Japan');"
psql -c "create tablespace space2 location '/tmp/spaces/space2';"
psql -c "create table series (id integer, name text) tablespace space2;"
psql -c "insert into series (id, name) values (1, 'Game of Thrones');"
psql -c "insert into series (id, name) values (2, 'Black mirror');"
psql -c "insert into series (id, name) values (3, 'Sherlock');"
psql -c "create table users (id integer, name text, password text);"
psql -c "insert into users (id, name, password) values(1, 'ismirn0ff', 'password');"
psql -c "insert into users (id, name, password) values(2, 'tinsane', 'qwerty');"
psql -c "insert into users (id, name, password) values(3, 'godjan', 'g0djan');"
psql -c "insert into users (id, name, password) values(4, 'x4m', 'borodin');"
# Reference dump taken before the backup/restore round-trip.
pg_dumpall -f /tmp/dump1
sleep 1
# Base backup is taken with wal-e (the restore below uses wal-g).
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=6 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_FILE_PREFIX=file://localhost/tmp \
wal-e backup-push ${PGDATA}
pkill -9 postgres
cd /tmp/basebackups_005
# Find json from wal-e backup and copy part of it with tablespace specification
cat `ls | grep .json` | jq .spec > /tmp/restore_spec.json
# Preserve configs and a copy of the tablespace trees, then wipe everything
# the restore is expected to recreate.
mkdir /tmp/conf_files
cp -t /tmp/conf_files/ ${PGDATA}/postgresql.conf ${PGDATA}/pg_hba.conf ${PGDATA}/pg_ident.conf
cp -r /tmp/spaces /tmp/spaces_backup
rm -rf /tmp/spaces/*
rm -rf ${PGDATA}
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=6 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_FILE_PREFIX=file://localhost/tmp \
wal-g backup-fetch --restore-spec /tmp/restore_spec.json ${PGDATA} LATEST
# Recovery replays archived WAL through wal-g wal-fetch.
echo "restore_command = 'echo \"WAL file restoration: %f, %p\"&& \
AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
AWS_ENDPOINT=http://s3:9000 \
AWS_S3_FORCE_PATH_STYLE=true \
WALG_COMPRESSION_METHOD=brotli \
WALG_DELTA_MAX_STEPS=6 \
WALG_UPLOAD_CONCURRENCY=10 \
WALG_DISK_RATE_LIMIT=41943040 \
WALG_NETWORK_RATE_LIMIT=10485760 \
PGSSLMODE=allow \
PGDATABASE=postgres \
PGHOST=/var/run/postgresql \
WALE_FILE_PREFIX=file://localhost/tmp \
/usr/bin/wal-g wal-fetch \"%f\" \"%p\"'" > ${PGDATA}/recovery.conf
cp -t ${PGDATA} /tmp/conf_files/postgresql.conf /tmp/conf_files/pg_hba.conf /tmp/conf_files/pg_ident.conf
/usr/lib/postgresql/10/bin/pg_ctl -D ${PGDATA} -w start
/tmp/scripts/wait_while_pg_not_ready.sh
pg_dumpall -f /tmp/dump2
# diff exits non-zero on any difference, failing the test via set -e.
diff /tmp/dump1 /tmp/dump2
diff -r /tmp/spaces_backup /tmp/spaces
../scripts/drop_pg.sh
rm -rf /tmp/conf_files
echo "Tablespaces work!!!"
# -*- coding: utf-8 -*-
import torch.nn as nn
import torch.nn.functional as f
class MnistCNN(nn.Module):
    """Small two-conv CNN for MNIST-style input (1x28x28), 10 output classes.

    Returns log-probabilities (log_softmax) of shape (batch, 10).
    """

    def __init__(self):
        super(MnistCNN, self).__init__()
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(320, 50)
        self.fc2 = nn.Linear(50, 10)

    def forward(self, x):
        out = self.conv1(x)
        out = f.relu(f.max_pool2d(out, 2))
        out = self.conv2_drop(self.conv2(out))
        out = f.relu(f.max_pool2d(out, 2))
        # Flatten: 20 channels x 4 x 4 spatial = 320 features.
        out = out.view(-1, 320)
        out = f.relu(self.fc1(out))
        # Dropout is active only in training mode.
        out = f.dropout(out, training=self.training)
        return f.log_softmax(self.fc2(out), dim=1)
# Linear Regression Model
class LinearRegression(nn.Module):
    """Single-feature linear regression: y = w * x + b."""

    def __init__(self):
        super(LinearRegression, self).__init__()
        # Both the input and the output are 1-dimensional.
        self.linear = nn.Linear(1, 1)

    def forward(self, x):
        prediction = self.linear(x)
        return prediction
class LeNetForMNIST(nn.Module):
    """LeNet-style CNN for 1x28x28 MNIST images; raw logits output (batch, 10)."""

    def __init__(self):
        super(LeNetForMNIST, self).__init__()
        self.conv1 = nn.Conv2d(1, 6, 5)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # 28 -> 24 -> 12 -> 8 -> 4 spatial, so 16 * 4 * 4 flattened features.
        self.fc1 = nn.Linear(16 * 4 * 4, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        out = f.relu(self.conv1(x))
        out = f.max_pool2d(out, (2, 2))
        out = f.relu(self.conv2(out))
        out = f.max_pool2d(out, 2)
        out = out.view(out.size()[0], -1)
        out = f.relu(self.fc1(out))
        out = f.relu(self.fc2(out))
        return self.fc3(out)
class AlexNetForCIFAR(nn.Module):
    """AlexNet-like CNN sized for 3x32x32 CIFAR images.

    Returns log-probabilities (log_softmax) of shape (batch, num_classes).
    """

    def __init__(self, num_classes=10):
        super(AlexNetForCIFAR, self).__init__()
        # Built as a list first; layer indices (features.0, features.3, ...)
        # match the original Sequential exactly.
        layers = [
            nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=5),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(64, 192, kernel_size=5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(192, 384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(384, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
        ]
        self.features = nn.Sequential(*layers)
        self.classifier = nn.Linear(256, num_classes)

    def forward(self, x):
        feats = self.features(x)
        flat = feats.view(feats.size(0), -1)
        return f.log_softmax(self.classifier(flat), dim=1)
class RNNModel(nn.Module):
    """Container module with an encoder, a recurrent module, and a decoder.

    Args:
        rnn_type: one of 'LSTM', 'GRU', 'RNN_TANH', 'RNN_RELU'.
        ntoken: vocabulary size (embedding rows / decoder outputs).
        ninp: embedding dimension.
        nhid: hidden units per recurrent layer.
        nlayers: number of stacked recurrent layers.
        dropout: dropout probability used for embeddings, between RNN
            layers, and on RNN outputs.
        tie_weights: share the decoder weight with the encoder embedding
            (requires nhid == ninp).

    Raises:
        ValueError: on an unknown rnn_type, or tie_weights with nhid != ninp.
    """

    def __init__(self, rnn_type, ntoken, ninp, nhid, nlayers, dropout=0.5, tie_weights=False):
        super(RNNModel, self).__init__()
        self.drop = nn.Dropout(dropout)
        self.encoder = nn.Embedding(ntoken, ninp)
        if rnn_type in ['LSTM', 'GRU']:
            self.rnn = getattr(nn, rnn_type)(ninp, nhid, nlayers, dropout=dropout)
        else:
            # Vanilla RNN variants select the activation explicitly.
            try:
                nonlinearity = {'RNN_TANH': 'tanh', 'RNN_RELU': 'relu'}[rnn_type]
            except KeyError:
                raise ValueError("""An invalid option for `--model` was supplied,
                                 options are ['LSTM', 'GRU', 'RNN_TANH' or 'RNN_RELU']""")
            self.rnn = nn.RNN(ninp, nhid, nlayers, nonlinearity=nonlinearity, dropout=dropout)
        self.decoder = nn.Linear(nhid, ntoken)

        # Optionally tie weights as in:
        # "Using the Output Embedding to Improve Language Models" (Press & Wolf 2016)
        # https://arxiv.org/abs/1608.05859
        # and
        # "Tying Word Vectors and Word Classifiers: A Loss Framework for Language Modeling" (Inan et al. 2016)
        # https://arxiv.org/abs/1611.01462
        if tie_weights:
            if nhid != ninp:
                raise ValueError('When using the tied flag, nhid must be equal to emsize')
            self.decoder.weight = self.encoder.weight

        self.init_weights()

        self.rnn_type = rnn_type
        self.nhid = nhid
        self.nlayers = nlayers

    def init_weights(self):
        # Uniform init for embeddings/decoder; zero decoder bias.
        initrange = 0.1
        self.encoder.weight.data.uniform_(-initrange, initrange)
        self.decoder.bias.data.zero_()
        self.decoder.weight.data.uniform_(-initrange, initrange)

    # noinspection PyShadowingBuiltins
    def forward(self, input, hidden):
        """Run (seq, batch) token indices through embed -> RNN -> decode.

        Returns (logits of shape (seq, batch, ntoken), new hidden state).
        """
        emb = self.drop(self.encoder(input))
        output, hidden = self.rnn(emb, hidden)
        output = self.drop(output)
        # Decode all timesteps at once, then restore the (seq, batch) shape.
        decoded = self.decoder(output.view(output.size(0) * output.size(1), output.size(2)))
        return decoded.view(output.size(0), output.size(1), decoded.size(1)), hidden

    def init_hidden(self, bsz):
        """Zero hidden state for batch size bsz; LSTM needs an (h, c) pair."""
        weight = next(self.parameters())
        if self.rnn_type == 'LSTM':
            return (weight.new_zeros(self.nlayers, bsz, self.nhid),
                    weight.new_zeros(self.nlayers, bsz, self.nhid))
        else:
            return weight.new_zeros(self.nlayers, bsz, self.nhid)
|
#!/bin/bash
# Run the unit tests under Electron (in node mode), then the VS Code API
# integration tests unless SKIP_INTEGRATION_TESTS is set.
set -e
# macOS lacks `readlink -f`; provide a minimal realpath fallback.
if [[ "$OSTYPE" == "darwin"* ]]; then
    realpath() { [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"; }
    ROOT=$(dirname $(dirname $(realpath "$0")))
else
    ROOT=$(dirname $(dirname $(readlink -f $0)))
fi

# Unit Tests — ATOM_SHELL_INTERNAL_RUN_AS_NODE makes Electron behave as node
# so mocha can run in-process. Extra args ($*) are passed through to mocha.
if [[ "$OSTYPE" == "darwin"* ]]; then
    cd $ROOT ; ulimit -n 4096 ; ATOM_SHELL_INTERNAL_RUN_AS_NODE=1 \
        ./.build/electron/Electron.app/Contents/MacOS/Electron \
        node_modules/mocha/bin/_mocha $*
else
    cd $ROOT ; ATOM_SHELL_INTERNAL_RUN_AS_NODE=1 \
        ./.build/electron/electron \
        node_modules/mocha/bin/_mocha $*
fi

# Integration Tests
if [[ "$SKIP_INTEGRATION_TESTS" == "" ]]; then
    ./scripts/code.sh $ROOT/extensions/vscode-api-tests/testWorkspace --extensionDevelopmentPath=$ROOT/extensions/vscode-api-tests --extensionTestsPath=$ROOT/extensions/vscode-api-tests/out
fi
<filename>packages/arbiter-x-bitfinex/src/model/index.js
export * from './Balance';
export * from './Order';
export * from './Ticker';
|
#!/bin/bash
# Retry a command with exponential backoff.
#
# Usage: backoff <command> [args...]
# Environment:
#   ATTEMPTS - maximum number of attempts (default 5)
#   TIMEOUT  - initial delay in seconds, doubled after every failure (default 1)
# Returns the exit code of the last attempt (0 on success).
# Note: as in the original, `set -e` is re-enabled after each attempt even if
# the caller had errexit off.
function backoff {
    local max_attempts=${ATTEMPTS-5}
    local timeout=${TIMEOUT-1}
    local attempt=0
    local exitCode=0

    while (( attempt < max_attempts ))
    do
        # Temporarily disable errexit so a failing attempt doesn't kill us.
        set +e
        "$@"
        exitCode=$?
        set -e

        # Arithmetic comparison (the original used string [[ == ]]).
        if (( exitCode == 0 ))
        then
            break
        fi

        echo "Failure! Retrying in $timeout.." 1>&2
        sleep "$timeout"
        attempt=$(( attempt + 1 ))
        timeout=$(( timeout * 2 ))
    done

    if (( exitCode != 0 ))
    then
        echo "You've failed me for the last time! ($@)" 1>&2
    fi

    return $exitCode
}
|
// Auto-generated Doxygen navigation data for the "DAP_Config_Initialization"
// group: entries are [symbol name, anchor href, child entries].
var group__DAP__Config__Initialization__gr =
[
    [ "DAP_SETUP", "group__DAP__Config__Initialization__gr.html#ga6d4656af10c47ec0d6c0389be2dbb4bb", null ],
    [ "RESET_TARGET", "group__DAP__Config__Initialization__gr.html#gaa0feceaeee69d69927d8a9beb0272dd7", null ]
];
#!/bin/bash
# Build the cloud-suitability-analyzer (csa) executables for each platform
# letter present in $OS (L = linux, O = osx, W = windows).
echo "~~~> OS: $OS"
LINUX="L"
OSX="O"
WINDOWS="W"
echo "~~~> Will build the following executables:"
if [[ "$OS" == *"$OSX"* ]]; then
    echo " OSX"
fi
if [[ "$OS" == *"$WINDOWS"* ]]; then
    echo " WINDOWS"
fi
if [[ "$OS" == *"$LINUX"* ]]; then
    echo " LINUX"
fi
#--- fail if any command returns a non-zero result
set -e
#--- this corrects an issue with go mod tidy
export GOSUMDB=off
#--- set module mode
# FIX(review): was `export G0111MODULE=onS` — a zero instead of 'O' and a
# stray 'S', so it set an unused variable and never enabled module mode.
export GO111MODULE=on
export GOBIN=/go/bin
export PATH=$GOPATH/bin:$PATH
export WORKING_DIR="/cloud-suitability-analyzer"
export OUTPUT_DIR="$WORKING_DIR/go/exe"
pushd ${WORKING_DIR}/go > /dev/null
#--- build the requested platform executables
#--- version needs to be automated, using command line for now.
export LD_FLAGS="-X \"main.Version=$VERSION\""
echo "OS: $OS"
echo "OSX: $OSX"
if [[ "$OS" == *"$OSX"* ]]; then
    echo "~~~> Building osx version"
    env CGO_ENABLED=1 CC=o64-clang GOOS=darwin GOARCH=amd64 go build -ldflags "${LD_FLAGS}" -o ${OUTPUT_DIR}/csa csa.go >&2
    chmod +x ${OUTPUT_DIR}/csa
fi
if [[ "$OS" == *"$WINDOWS"* ]]; then
    echo "~~~> Building windows version"
    env CGO_ENABLED=1 CC=x86_64-w64-mingw32-gcc GOOS=windows GOARCH=amd64 go build -ldflags "${LD_FLAGS}" -o ${OUTPUT_DIR}/csa.exe csa.go >&2
    chmod +x ${OUTPUT_DIR}/csa.exe
fi
if [[ "$OS" == *"$LINUX"* ]]; then
    echo "~~~> Building linux version"
    # Static linux binary via musl.
    env CGO_ENABLED=1 CC=musl-gcc go build -ldflags "-linkmode external -extldflags \"-static\" ${LD_FLAGS}" -o ${OUTPUT_DIR}/csa-l csa.go >&2
    chmod +x ${OUTPUT_DIR}/csa-l
fi
def prime_factors(n):
    """Return the prime factorization of n as an ascending list.

    Repeated primes appear once per power (e.g. 12 -> [2, 2, 3]).
    For n < 2 the result is an empty list.
    """
    result = []
    divisor = 2
    while divisor * divisor <= n:
        # Strip out every power of the current divisor before moving on,
        # which guarantees each divisor appended is prime.
        while n % divisor == 0:
            result.append(divisor)
            n //= divisor
        divisor += 1
    # Whatever remains above 1 is itself a prime factor.
    if n > 1:
        result.append(n)
    return result
def word_frequency(text):
    """Count word occurrences in ``text`` and print them, most frequent first.

    Matching is case-insensitive and the punctuation marks . , ! ? are
    stripped (same set as the original chained ``replace`` calls). Output is
    one "word: count" line per distinct word; ties keep first-seen order
    because ``sorted`` is stable over the insertion-ordered dict.
    """
    # Remove punctuation and convert text to lowercase.
    # str.translate handles all four marks in one pass.
    text = text.lower().translate(str.maketrans('', '', '.,!?'))
    words = text.split()

    word_count = {}
    for word in words:
        # dict.get with a default replaces the if/else counting branch.
        word_count[word] = word_count.get(word, 0) + 1

    # Sort the word frequencies in descending order
    sorted_word_count = sorted(word_count.items(), key=lambda x: x[1], reverse=True)

    # Print the word frequencies
    for word, count in sorted_word_count:
        print(f"{word}: {count}")

# Example usage
input_text = "The cat and the dog played in the garden. The cat chased the dog."
word_frequency(input_text)
<filename>www/actors/water.js
/**
 * @fileoverview Provide the Water class.
 * @author <EMAIL> (<NAME>)
 */

/**
 * Constructor for the Water class, a named obstacle actor.
 * @constructor
 * @extends {ace.Obstacle}
 */
ace.Water = function(game, room) {
    ace.base(this, game, room);
    this.name = 'Water';
};
// Note: extends ace.Obstacle (the previous doc said ace.BaseClass).
ace.inherits(ace.Water, ace.Obstacle);

/**
 * What to do every frame. Currently a no-op.
 * @param {ace.Runner} game The game Runner.
 */
ace.Water.prototype.onTick = function(game) {
    // TODO(scott): Wait for ladder usage.
};
'use strict';
const chalk = require('chalk');
const helpers = require('../helpers');
// Verifies that a user-supplied `logging` callback receives the differ's
// sync lifecycle messages, in order, for a no-op sync.
describe(`logging`, () => {
  // NOTE(review): the description ("should change default schema") does not
  // match what is asserted here — consider renaming the test.
  it(`should change default schema`, async function () {
    const logger = jest.fn();
    const differ = helpers.getDiffer({ logging: logger });
    await differ.sync();
    expect(logger).toHaveBeenCalled();
    expect(logger.mock.calls).toEqual([
      [`Postgres Differ > ${chalk.green('Sync started')}`],
      [`Postgres Differ > Database does not need updating`],
      [`Postgres Differ > ${chalk.green('Sync successful')}`],
    ]);
  });
});
|
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.badrobots.y2012.technetium.commands.autonomousCommands;

import com.badrobots.y2012.technetium.commands.CommandBase;

/*
 * Autonomous command that rotates the robot by a given angle relative to
 * its heading at initialize time (gyro units — presumably degrees, confirm
 * against the sensors subsystem). Finishes once the heading has stayed
 * within 3 units of the target for more than 50 consecutive execute() calls.
 *
 * @author 1014 Programming Team
 */
public class AutoOrient extends CommandBase
{
    double count = 0;                 // consecutive on-target execute() calls
    double wantedAngle, currentAngle; // target and latest gyro reading

    public AutoOrient(double a)
    {
        requires(sensors);
        requires(driveTrain);
        wantedAngle = a;
    }

    // Called just before this Command runs the first time
    protected void initialize()
    {
        // Make the requested rotation relative to the current heading.
        currentAngle = sensors.getGyroAngle();
        wantedAngle += currentAngle;
    }

    // Called repeatedly when this Command is scheduled to run
    protected void execute()
    {
        currentAngle = sensors.getGyroAngle();
        //System.out.println("Angle " + currentAngle);
        if(Math.abs(currentAngle - wantedAngle) < 3)
        {
            // On target: stop and accumulate dwell time.
            count++;
            driveTrain.autoMechanumDrive(0, 0, 0);
        }
        else if(currentAngle > wantedAngle)
        {
            // Overshot: rotate back (third arg appears to be rotation rate).
            driveTrain.autoMechanumDrive(0, 0, -.3);
            count = 0;
        }
        else if (currentAngle < wantedAngle)
        {
            driveTrain.autoMechanumDrive(0, 0, .3);
            count = 0;
        }
    }

    // Make this return true when this Command no longer needs to run execute()
    protected boolean isFinished()
    {
        return count > 50;//TODO: THIS REALLY NEEDS TO BE CALIBRATED
    }

    // Called once after isFinished returns true
    protected void end()
    {
        driveTrain.autoMechanumDrive(0, 0, 0);
    }

    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted()
    {
        driveTrain.autoMechanumDrive(0, 0, 0);
    }
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.closeCircled = void 0;
var closeCircled = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M256,33C132.3,33,32,133.3,32,257c0,123.7,100.3,224,224,224c123.7,0,224-100.3,224-224C480,133.3,379.7,33,256,33z\r\n\t\t M364.3,332.5c1.5,1.5,2.3,3.5,2.3,5.6c0,2.1-0.8,4.2-2.3,5.6l-21.6,21.7c-1.6,1.6-3.6,2.3-5.6,2.3c-2,0-4.1-0.8-5.6-2.3L256,289.8\r\n\t\tl-75.4,75.7c-1.5,1.6-3.6,2.3-5.6,2.3c-2,0-4.1-0.8-5.6-2.3l-21.6-21.7c-1.5-1.5-2.3-3.5-2.3-5.6c0-2.1,0.8-4.2,2.3-5.6l75.7-76\r\n\t\tl-75.9-75c-3.1-3.1-3.1-8.2,0-11.3l21.6-21.7c1.5-1.5,3.5-2.3,5.6-2.3c2.1,0,4.1,0.8,5.6,2.3l75.7,74.7l75.7-74.7\r\n\t\tc1.5-1.5,3.5-2.3,5.6-2.3c2.1,0,4.1,0.8,5.6,2.3l21.6,21.7c3.1,3.1,3.1,8.2,0,11.3l-75.9,75L364.3,332.5z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M256,33C132.3,33,32,133.3,32,257c0,123.7,100.3,224,224,224c123.7,0,224-100.3,224-224C480,133.3,379.7,33,256,33z\r\n\t\t M364.3,332.5c1.5,1.5,2.3,3.5,2.3,5.6c0,2.1-0.8,4.2-2.3,5.6l-21.6,21.7c-1.6,1.6-3.6,2.3-5.6,2.3c-2,0-4.1-0.8-5.6-2.3L256,289.8\r\n\t\tl-75.4,75.7c-1.5,1.6-3.6,2.3-5.6,2.3c-2,0-4.1-0.8-5.6-2.3l-21.6-21.7c-1.5-1.5-2.3-3.5-2.3-5.6c0-2.1,0.8-4.2,2.3-5.6l75.7-76\r\n\t\tl-75.9-75c-3.1-3.1-3.1-8.2,0-11.3l21.6-21.7c1.5-1.5,3.5-2.3,5.6-2.3c2.1,0,4.1,0.8,5.6,2.3l75.7,74.7l75.7-74.7\r\n\t\tc1.5-1.5,3.5-2.3,5.6-2.3c2.1,0,4.1,0.8,5.6,2.3l21.6,21.7c3.1,3.1,3.1,8.2,0,11.3l-75.9,75L364.3,332.5z"
},
"children": []
}]
}]
}]
};
exports.closeCircled = closeCircled; |
<filename>src/utils/pad.js
/**
 * Indent a block of text: trims surrounding whitespace, prefixes every line
 * with a space, and guarantees exactly one trailing newline.
 *
 * @param {string} content - multi-line text (LF or CRLF line endings)
 * @returns {string} the indented text, normalized to LF endings
 */
function pad (content) {
  return (
    content
      .trim()
      // Split on LF or CRLF. Fix: the original used /\n\r?/, which on
      // CRLF input ("a\r\nb") left a stray "\r" attached to each line.
      .split(/\r?\n/)
      .map(line => ' ' + line)
      .join('\n') + '\n'
  )
}

// Guarded so the file can also be evaluated in an ESM context.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = pad
}
|
<reponame>wuximing/dsshop
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var util_1 = require("@antv/util");
var formatter_1 = require("../../util/formatter");
/**
 * Compiled (tslib) label-config parser: normalises user label options into
 * the per-datum config the plot layer expects — offsets, position,
 * formatter-driven content, and text style merged over the plot theme.
 * NOTE(review): generated code — edit the TypeScript source, not this file.
 */
var LabelParser = /** @class */ (function () {
    // cfg = user label options plus a `plot` back-reference; everything
    // except `plot` is kept verbatim as `originConfig`.
    function LabelParser(cfg) {
        this.config = {};
        var plot = cfg.plot, rest = tslib_1.__rest(cfg, ["plot"]);
        this.plot = plot;
        this.originConfig = rest;
        this.init(cfg);
    }
    // Returns the parsed config consumed by the plot layer.
    LabelParser.prototype.getConfig = function () {
        return this.config;
    };
    // Copies cfg onto config and installs the per-datum callback.
    LabelParser.prototype.init = function (cfg) {
        var _this = this;
        util_1.assign(this.config, cfg);
        this.config.callback = function (val) {
            return _this.parseCallBack(val);
        };
    };
    // Builds the per-datum label config: offset, position (value or
    // function of the datum), formatter, and text style (user style merged
    // over the theme's label.style).
    LabelParser.prototype.parseCallBack = function (val) {
        var labelProps = this.originConfig;
        var theme = this.plot.getPlotTheme();
        var config = tslib_1.__assign({}, labelProps);
        this.parseOffset(labelProps, config);
        if (labelProps.position) {
            if (util_1.isFunction(labelProps.position)) {
                config.position = labelProps.position(val);
            }
            else {
                config.position = labelProps.position;
            }
        }
        this.parseFormatter(config);
        if (labelProps.style) {
            if (util_1.isFunction(labelProps.style)) {
                config.textStyle = labelProps.style(val);
            }
            else {
                config.textStyle = labelProps.style;
            }
        }
        config.textStyle = util_1.deepMix({}, util_1.get(theme, 'label.style'), config.textStyle);
        if (labelProps.autoRotate) {
            config.autoRotate = labelProps.autoRotate;
        }
        return config;
    };
    // Copies offset / offsetX / offsetY when the user provided them.
    LabelParser.prototype.parseOffset = function (props, config) {
        var mapper = ['offset', 'offsetX', 'offsetY'];
        var count = 0;
        util_1.each(mapper, function (m) {
            if (util_1.has(props, m)) {
                config[m] = props[m];
                count++;
            }
        });
        // If the user set no offset at all and the label position is
        // 'middle', default the offset to 0.
        if (count === 0 && util_1.get(props, 'position') === 'middle') {
            config.offset = 0;
        }
    };
    // Installs the content callback: reads the first mapped field from the
    // datum and pipes it through precision/suffix/user formatters.
    LabelParser.prototype.parseFormatter = function (config) {
        var labelProps = this.originConfig;
        config.content = function (data, index) {
            // @ts-ignore
            var text = data[labelProps.fields[0]];
            return formatter_1.combineFormatter(formatter_1.getNoopFormatter(), formatter_1.getPrecisionFormatter(labelProps.precision), formatter_1.getSuffixFormatter(labelProps.suffix), labelProps.formatter
                ? labelProps.formatter
                : formatter_1.getNoopFormatter())(text, data, index);
        };
    };
    return LabelParser;
}());
exports.default = LabelParser;
//# sourceMappingURL=parser.js.map |
def removeduplicates(list_of_string):
    """Return the unique items of *list_of_string*, in ascending order.

    First occurrence decides membership; the result is then sorted, so the
    original ordering is not preserved.
    """
    unique = []
    for candidate in list_of_string:
        if candidate not in unique:
            unique.append(candidate)
    return sorted(unique)
# Demo: the three sub-lists are all distinct, so only the sorting is visible.
list_of_string = [['b', 'a', 'd'], ['c', 'a', 'd'], ['a', 'f', 'g']]
print(removeduplicates(list_of_string))
import { Matrix } from 'matrix-calculus';
import { T } from '../../layers';
import { Signals, SignalsErrors } from '../../signals';
// Inputs needed to compute a delta matrix. NOTE(review): semantics inferred
// from names only (looks like neural-net backprop plumbing) — confirm.
declare type Data = {
    output: Signals;
    errors: SignalsErrors;
    layersPair: T.LayersPair;
};
// Computes the delta matrix from the forward output, the propagated errors
// and the pair of adjacent layers.
declare const calcDelta: ({ output, errors, layersPair }: Data) => Matrix;
export default calcDelta;
|
import React, {useState} from 'react';
import Box from '@mui/material/Box';
import Stack from '@mui/material/Stack';
import InputLabel from '@mui/material/InputLabel';
import MenuItem from '@mui/material/MenuItem';
import FormControl from '@mui/material/FormControl';
import Select, { SelectChangeEvent } from '@mui/material/Select';
import TextField from '@mui/material/TextField'
import IconButton from '@mui/material/IconButton';
import AddBoxIcon from '@mui/icons-material/AddBox';
import Tooltip from '@mui/material/Tooltip'
import { useAxios } from '../../hooks/useAxios';
import {Category} from '../../common/types';
import Snackbar from '@mui/material/Snackbar';
// Props supplied by the parent task page: available categories and a
// callback that refreshes the task list after a successful create.
type TaskInputProps = {
    categories: Category[];
    updateTasks: () => void;
}
// Shape of the POST `tarefas` response body (id of the created task).
type ResponsePostTask = {
    id:number
}
/**
 * Form row for creating a task: category selector, description field and an
 * "add" button. On submit it POSTs to `tarefas` through the useAxios hook
 * and reports the outcome via a success or error snackbar.
 *
 * Fixes vs. original: uses the imported `useState` consistently (the
 * original mixed `React.useState` and `useState`) and drops the unused
 * `loading`/`response` destructures from useAxios.
 */
const TaskInput = (props: TaskInputProps) => {
  const { categories, updateTasks } = props;

  // Selected category id, kept as a string to match the MUI Select value.
  const [category, setCategory] = useState<string>('');
  // Task description typed by the user.
  const [task, setTask] = useState('');
  const [openSnackbarSuccess, setOpenSnackbarSuccess] = useState(false);
  const [openSnackbarError, setOpenSnackbarError] = useState(false);

  const handleChange = (event: SelectChangeEvent) => {
    setCategory(event.target.value as string);
  };

  // POST `tarefas` — only the commit function and the error flag are used.
  const {
    commit: commitTask,
    error: errorTask,
  } = useAxios<ResponsePostTask>({
    method: 'POST',
    path: 'tarefas'
  });

  // Runs after the request settles: on failure show the error snackbar,
  // otherwise refresh the task list and show the success snackbar.
  const notifyUser = () => {
    if (errorTask) {
      return setOpenSnackbarError(true);
    }
    updateTasks();
    return setOpenSnackbarSuccess(true);
  };

  const saveTask = () => {
    commitTask({
      "descricao": task,
      "id_categoria": category
    }, notifyUser);
  };

  return (
    <>
      <Stack direction={'row'} spacing={2} alignItems="center" justifyContent='center'>
        <Box sx={{ minWidth: 120 }}>
          <FormControl fullWidth>
            <InputLabel id="select-label">Categoria</InputLabel>
            <Select
              labelId="select-label"
              id="select"
              value={category}
              label="Categoria"
              onChange={handleChange}
            >
              {categories.map(cat => (
                <MenuItem key={cat.id} value={cat.id}>{cat.descricao}</MenuItem>
              ))}
            </Select>
          </FormControl>
        </Box>
        <Box display='flex' flexGrow={1}>
          <TextField
            id="task-text"
            label="O que você precisa fazer?"
            fullWidth
            value={task}
            onChange={(event) => setTask(event.target.value)}
          />
        </Box>
        <Box>
          {/* span wrapper keeps the Tooltip working while the button is disabled */}
          <Tooltip title="Adicionar tarefa">
            <span>
              <IconButton aria-label="add" onClick={saveTask} disabled={category === '' || task === ''} >
                <AddBoxIcon fontSize="large" color={!(category === '' || task === '') ? 'primary' : undefined} />
              </IconButton>
            </span>
          </Tooltip>
        </Box>
      </Stack>
      <Snackbar
        open={openSnackbarSuccess}
        autoHideDuration={6000}
        onClose={(
          event: React.SyntheticEvent | React.MouseEvent,
          reason?: string,
        ) => {
          // Don't dismiss on a stray click elsewhere on the page.
          if (reason === 'clickaway') {
            return;
          }
          setOpenSnackbarSuccess(false);
        }}
        message="Tarefa criada com sucesso!"
      />
      <Snackbar
        open={openSnackbarError}
        autoHideDuration={6000}
        onClose={(
          event: React.SyntheticEvent | React.MouseEvent,
          reason?: string,
        ) => {
          if (reason === 'clickaway') {
            return;
          }
          setOpenSnackbarError(false);
        }}
        message="Ocorreu algum erro"
      />
    </>
  );
}

export default TaskInput;
<gh_stars>1-10
#ifndef SPRITE_H_INCLUDED
#define SPRITE_H_INCLUDED

#include "Shader.h"

class Mesh;

/**
 * A 2D sprite bound to a shader: holds position/scale/rotation/color state
 * and the derived model and rotation matrices.
 * NOTE(review): Vector2f/Vector3f/Vector4f and Matrix4x4f are assumed to be
 * brought in via Shader.h — confirm.
 */
class Sprite {
    private:
        // Cached handles to the shader's per-sprite uniforms.
        Shader::Uniform* modelMatrixUniform;
        Shader::Uniform* scaleUniform;
        Shader::Uniform* rotationMatrixUniform;
        Shader::Uniform* colorUniform;
        Mesh* mesh;
        Vector3f position;
        Vector2f scale;
        float rotation;
        Matrix4x4f modelMatrix;
        // Stores the roll rotations of the sprite.
        Matrix4x4f rotationMatrix;
        Vector4f color;
    public:
        Sprite(Shader* shd);
        ~Sprite();
        void setScale(float scale);
        void setPosition(const Vector3f& pos);
        // Accumulates `rad` onto the current roll rotation.
        void addRotation(float rad);
        // Adjusts only the alpha component of the sprite color.
        void setOpacity(float value);
        // NOTE(review): exact split of work between update() and render()
        // is not visible from this header — see Sprite.cpp.
        void update();
        void render() const;
};

#endif // SPRITE_H_INCLUDED
|
<gh_stars>0
package com.atjl.retry.api.option;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
@ApiModel(value = "只使用分页功能 的 选项")
public class PageOption {
public PageOption() {
}
@ApiModelProperty(value = "服务名(必填),如果使用registeBean方式注册,服务名必须为简写且首字母小写的类名,即bean的默认id")
private String serviceName;
@ApiModelProperty(value = "取数方式:自定义取数(需要实现RetryServiceCustomGetDatas接口,如果使用默认后置处理服务,结果对象需要设置 主键、上次执行时间、执行次数,后置服务会自动更新)")
private GetDataType getDataType = GetDataType.DEFAULT;
@ApiModelProperty(value = "后置处理类型:只用默认、只用自定义;先默认 再自定义;先自定义 再默认;无")
private RetryAfterType afterType = RetryAfterType.DEFAULT;
@ApiModelProperty(value = "如果重试数据量超过分页大小,则分页查询,如果retryService的executeService条件参数 实现了PageIntReq,并且传递了值,则用条件参数的值")
private int pageSize = 10;
@ApiModelProperty(value = "是否批量处理,需要实现 ExecuteBatchService")
private boolean batchProcess = false;
/**
* general pre service associate
*/
@ApiModelProperty(value = "open general pre service")
private boolean generalPreService = false;
@ApiModelProperty(value = "execute general pre service fail,still continue=true")
private boolean generalPreServiceFailContinue = true;
/**
* check duplicate run associate
*/
@ApiModelProperty(value = "check duplicate execute by process log",notes = "can't process concurrent situation")
private boolean checkRepeatExecute = true;
@ApiModelProperty(value = "检查重复执行的间隔,单位秒,默认60秒内不能重复执行")
private int checkRepeatExecuteInterval = 60;
/**
* ############## getter && setter ####################
*/
public boolean isCheckRepeatExecute() {
return checkRepeatExecute;
}
public void setCheckRepeatExecute(boolean checkRepeatExecute) {
this.checkRepeatExecute = checkRepeatExecute;
}
public int getCheckRepeatExecuteInterval() {
return checkRepeatExecuteInterval;
}
public void setCheckRepeatExecuteInterval(int checkRepeatExecuteInterval) {
this.checkRepeatExecuteInterval = checkRepeatExecuteInterval;
}
public boolean isGeneralPreService() {
return generalPreService;
}
public void setGeneralPreService(boolean generalPreService) {
this.generalPreService = generalPreService;
}
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
public GetDataType getGetDataType() {
return getDataType;
}
public void setGetDataType(GetDataType getDataType) {
this.getDataType = getDataType;
}
public RetryAfterType getAfterType() {
return afterType;
}
public void setAfterType(RetryAfterType afterType) {
this.afterType = afterType;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
public boolean isBatchProcess() {
return batchProcess;
}
public void setBatchProcess(boolean batchProcess) {
this.batchProcess = batchProcess;
}
public boolean isGeneralPreServiceFailContinue() {
return generalPreServiceFailContinue;
}
public void setGeneralPreServiceFailContinue(boolean generalPreServiceFailContinue) {
this.generalPreServiceFailContinue = generalPreServiceFailContinue;
}
}
|
def removeVowels(str):
    """Return *str* with every ASCII vowel (either case) removed."""
    # NOTE(review): the parameter shadows the built-in ``str``; the name is
    # kept for interface compatibility with existing callers.
    return "".join(ch for ch in str if ch.lower() not in "aeiou")
print(removeVowels("Hello World")) |
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::ty;
use rustc_middle::ty::layout::TyLayout;
use rustc_middle::ty::{Ty, TyCtxt, AdtDef, subst::SubstsRef};
fn analyze_adt(ty: Ty<'_>, tcx: TyCtxt<'_>, visited_types: &mut FxHashSet<Ty<'_>>) -> (bool, bool) {
if visited_types.contains(&ty) {
return (false, false);
}
visited_types.insert(ty);
match ty.kind() {
ty::Adt(adt_def, substs) => {
let is_root = adt_def.did.is_local();
let is_fat = adt_def.variants.iter().any(|variant| {
let crate_name = tcx.crate_name(variant.def_id.krate).as_str();
let variant_name = variant.ident.name.as_str();
crate_name == "bronze" && variant_name == "GcRef"
});
(is_root, is_fat)
}
_ => (false, false)
}
} |
#!/bin/bash
#
# modify by hiyang @ 2016-12-19
# CentOS post-install hardening/tuning script.
# NOTE(review): ${CBLUE}/${CMSG}/${CEND} colour variables and
# ${CentOS_RHEL_version} are not defined here — presumably exported by the
# script that sources this one; confirm before running standalone.
clear
echo "${CBLUE} Init CentOS${CEND}"
# closed Unnecessary services and remove obsolete rpm package
echo "STEP 1: ${CMSG}closed Unnecessary services and remove obsolete rpm package${CEND}"
# Remember whether NetworkManager was active so it stays enabled below.
[ "${CentOS_RHEL_version}" == '7' ] && [ "$(systemctl is-active NetworkManager.service)" == 'active' ] && NM_flag=1
[ "${NM_flag}" == '1' ] && systemctl enable NetworkManager.service
# Keep only the essential services enabled on runlevel 3 ...
for Service in sshd network crond messagebus irqbalance syslog rsyslog;do
chkconfig --level 3 ${Service} on 2> /dev/null
done
# ... and switch everything else off, sparing known application services.
for Service in $(chkconfig --list | grep 3:on | awk '{print $1}' | grep -vE 'nginx|httpd|tomcat|mysqld|php-fpm|pureftpd|redis-server|memcached|supervisord|aegis|NetworkManager');do
chkconfig --level 3 ${Service} off 2> /dev/null
done && echo -e "${CMSG}Step 1 is successfully!${CEND}\n"
# Close SELINUX
echo "STEP 2: ${CMSG}Close SELINUX${CEND}"
setenforce 0
sed -i 's/^SELINUX=.*$/SELINUX=disabled/' /etc/selinux/config && echo -e "${CMSG}Step 2 is successfully!${CEND}\n"
# Custom profile
echo "STEP 3: ${CMSG}Custom profile${CEND}"
# Shell history/prompt/alias defaults for all login shells.
cat >> /etc/profile.d/oneinstack.sh << EOF
HISTSIZE=3000
PROMPT_COMMAND="history -a"
HISTTIMEFORMAT="%F %T \$(whoami) "
PS1="\[\e[37;40m\][\[\e[32;40m\]\u\[\e[37;40m\]@\h \[\e[35;40m\]\W\[\e[0m\]]\\\\$ "
alias l='ls -AFhlt'
alias lh='l | head'
alias vi=vim
alias rz='rz -be'
GREP_OPTIONS="--color=auto"
alias grep='grep --color'
alias egrep='egrep --color'
alias fgrep='fgrep --color'
EOF
# Audit every interactive command to syslog (only added once).
if [ -z "$(grep ^'PROMPT_COMMAND=' /etc/bashrc)" ]; then
cat >> /etc/bashrc << EOF
PROMPT_COMMAND='{ msg=\$(history 1 | { read x y; echo \$y; });logger "[euid=\$(whoami)]":\$(who am i):[\`pwd\`]"\$msg"; }'
EOF
echo -e "${CMSG}Step 3 is successfully!${CEND}\n"
fi
# /etc/security/limits.conf
echo "STEP 4: ${CMSG}Modify limits.conf${CEND}"
# Raise process/file-descriptor limits for all users.
if [ -e /etc/security/limits.d/*nproc.conf ] && rename nproc.conf nproc.conf_bk /etc/security/limits.d/*nproc.conf && sed -i '/^# End of file/,$d' /etc/security/limits.conf; then
cat >> /etc/security/limits.conf << EOF
# End of file
* soft nproc 65535
* hard nproc 65535
* soft nofile 65535
* hard nofile 65535
EOF
echo -e "${CMSG}Step 4 is successfully!${CEND}\n"
fi
# /etc/hosts
# NOTE(review): this step reuses the "STEP 4" label (the previous step was
# already STEP 4) — the numbering in the echoed strings is off by one from
# here on.
echo "STEP 4: ${CMSG}Modify hosts and timezone and update time${CEND}"
[ "$(hostname -i | awk '{print $1}')" != "127.0.0.1" ] && sed -i "s@127.0.0.1.*localhost@&\n127.0.0.1 $(hostname)@g" /etc/hosts
# Set timezone
rm -rf /etc/localtime
ln -s /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
# Update time
ntpdate pool.ntp.org
# Keep the clock in sync via a root cron entry (added once, mode 600).
[ ! -e "/var/spool/cron/root" -o -z "$(grep 'ntpdate' /var/spool/cron/root)" ] && { echo "*/20 * * * * $(which ntpdate) pool.ntp.org > /dev/null 2>&1" >> /var/spool/cron/root;chmod 600 /var/spool/cron/root; } && echo -e "${CMSG}Step 4 is successfully!${CEND}\n"
# Set DNS
#cat >> /etc/resolv.conf << EOF
#nameserver 114.114.114.114
#nameserver 8.8.8.8
#EOF
# ip_conntrack table full dropping packets
[ ! -e "/etc/sysconfig/modules/iptables.modules" ] && { echo modprobe ip_conntrack > /etc/sysconfig/modules/iptables.modules; chmod +x /etc/sysconfig/modules/iptables.modules; }
modprobe ip_conntrack
echo options nf_conntrack hashsize=131072 > /etc/modprobe.d/nf_conntrack.conf
# /etc/sysctl.conf
echo "STEP 5: ${CMSG}Modify sysctl.conf${CEND}"
# Back up the stock sysctl.conf once, then append the tuned values.
[ ! -e "/etc/sysctl.conf_bk" ] && /bin/mv /etc/sysctl.conf{,_bk}
cat >> /etc/sysctl.conf << EOF
fs.file-max=65535
net.ipv4.tcp_max_tw_buckets = 60000
net.ipv4.tcp_sack = 1
net.ipv4.tcp_window_scaling = 1
net.ipv4.tcp_rmem = 4096 87380 4194304
net.ipv4.tcp_wmem = 4096 16384 4194304
net.ipv4.tcp_max_syn_backlog = 65536
net.core.netdev_max_backlog = 32768
net.core.somaxconn = 32768
net.core.wmem_default = 8388608
net.core.rmem_default = 8388608
net.core.rmem_max = 16777216
net.core.wmem_max = 16777216
net.ipv4.tcp_timestamps = 0
net.ipv4.tcp_synack_retries = 2
net.ipv4.tcp_syn_retries = 2
net.ipv4.tcp_tw_recycle = 1
#net.ipv4.tcp_tw_len = 1
net.ipv4.tcp_tw_reuse = 1
net.ipv4.tcp_mem = 94500000 915000000 927000000
net.ipv4.tcp_max_orphans = 3276800
net.ipv4.tcp_tw_recycle = 1
net.ipv4.ip_local_port_range = 1024 65000
net.nf_conntrack_max = 6553500
net.netfilter.nf_conntrack_max = 6553500
net.netfilter.nf_conntrack_tcp_timeout_close_wait = 60
net.netfilter.nf_conntrack_tcp_timeout_fin_wait = 120
net.netfilter.nf_conntrack_tcp_timeout_time_wait = 120
net.netfilter.nf_conntrack_tcp_timeout_established = 3600
EOF
# ipv6 is forbidden
[ "${CentOS_RHEL_version}" == '7' ] && cat >> /etc/sysctl.conf << EOF
net.ipv6.conf.all.disable_ipv6 = 1
net.ipv6.conf.default.disable_ipv6 = 1
EOF
sysctl -p && echo -e "${CMSG}Step 5 is successfully!${CEND}\n"
# Per-release console/locale tweaks.
if [ "${CentOS_RHEL_version}" == '5' ]; then
sed -i 's@^[3-6]:2345:respawn@#&@g' /etc/inittab
sed -i 's@^ca::ctrlaltdel@#&@' /etc/inittab
sed -i 's@LANG=.*$@LANG="en_US.UTF-8"@g' /etc/sysconfig/i18n
elif [ "${CentOS_RHEL_version}" == '6' ]; then
sed -i 's@^ACTIVE_CONSOLES.*@ACTIVE_CONSOLES=/dev/tty[1-2]@' /etc/sysconfig/init
sed -i 's@^start@#start@' /etc/init/control-alt-delete.conf
sed -i 's@LANG=.*$@LANG="en_US.UTF-8"@g' /etc/sysconfig/i18n
elif [ "${CentOS_RHEL_version}" == '7' ]; then
sed -i 's@LANG=.*$@LANG="en_US.UTF-8"@g' /etc/locale.conf
fi
. /etc/profile
|
# Disable the macOS Objective-C fork-safety check (NOTE(review): presumably
# to stop forked interpreters from aborting — confirm which tool needs it).
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
# Load the asdf version manager into this shell.
source /usr/local/opt/asdf/asdf.sh
|
#!/bin/bash
# Build-and-install script ($PREFIX is the install root, conda-build style).
# Fix: fail fast — without `set -e` a failed configure or make would still
# run `make install` and report success.
set -e

export CFLAGS="-O2 -fPIC"

# $PREFIX quoted so paths containing spaces don't split.
./configure --prefix="$PREFIX" \
    --enable-shared \
    --enable-python \
    --with-jasper="$PREFIX"
make
make install
|
#!/bin/bash
# NOTE(review): this looks like a generated ktools/oasislmf run script — the
# %FIFO_DIR% token is presumably substituted by the generator. Do not
# hand-edit the fifo/process wiring; regenerate instead.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---

set -e
set -o pipefail
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---

find output/* ! -name '*summary-info*' -type f -exec rm -f {} +
mkdir output/full_correlation/

rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

rm -R -f /tmp/%FIFO_DIR%/
mkdir -p /tmp/%FIFO_DIR%/fifo/
mkdir /tmp/%FIFO_DIR%/fifo/full_correlation/
mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/gul_S2_summaryleccalc
mkdir work/gul_S2_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryleccalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S2_summaryleccalc
mkdir work/full_correlation/gul_S2_summaryaalcalc

# Named pipes wiring the gulcalc -> summarycalc -> elt/plt/summary consumers
# for 2 summary sets x 2 partitions, plus the full-correlation mirror.
mkfifo /tmp/%FIFO_DIR%/fifo/gul_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_P2

mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P1
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P2

# --- Do ground up loss computes ---

# Consumers first (elt/plt/summarycalc per summary set and partition) ...
eltcalc < /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1 > work/kat/gul_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1 > work/kat/gul_S1_summarycalc_P1 & pid2=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1 > work/kat/gul_S1_pltcalc_P1 & pid3=$!
eltcalc < /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1 > work/kat/gul_S2_eltcalc_P1 & pid4=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1 > work/kat/gul_S2_summarycalc_P1 & pid5=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1 > work/kat/gul_S2_pltcalc_P1 & pid6=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P2 > work/kat/gul_S1_eltcalc_P2 & pid7=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P2 > work/kat/gul_S1_summarycalc_P2 & pid8=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P2 > work/kat/gul_S1_pltcalc_P2 & pid9=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P2 > work/kat/gul_S2_eltcalc_P2 & pid10=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P2 > work/kat/gul_S2_summarycalc_P2 & pid11=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P2 > work/kat/gul_S2_pltcalc_P2 & pid12=$!

# ... then tee fans each summary stream out to the consumers plus the
# aalcalc/leccalc work files ...
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P1 work/gul_S1_summaryaalcalc/P1.bin work/gul_S1_summaryleccalc/P1.bin > /dev/null & pid13=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P1 work/gul_S2_summaryaalcalc/P1.bin work/gul_S2_summaryleccalc/P1.bin > /dev/null & pid14=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_summaryeltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_summarypltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_summarysummarycalc_P2 work/gul_S1_summaryaalcalc/P2.bin work/gul_S1_summaryleccalc/P2.bin > /dev/null & pid15=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_summaryeltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_summarypltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_summarysummarycalc_P2 work/gul_S2_summaryaalcalc/P2.bin work/gul_S2_summaryleccalc/P2.bin > /dev/null & pid16=$!
summarycalc -i -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P1 -2 /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P1 < /tmp/%FIFO_DIR%/fifo/gul_P1 &
summarycalc -i -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/gul_P2 &

# ... and finally the producers: gulcalc also journals (-j) a copy of each
# partition's stream into the full_correlation fifo for the second pass.
eve 1 2 | getmodel | gulcalc -S0 -L0 -r -j /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P1 -a1 -i - > /tmp/%FIFO_DIR%/fifo/gul_P1 &
eve 2 2 | getmodel | gulcalc -S0 -L0 -r -j /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P2 -a1 -i - > /tmp/%FIFO_DIR%/fifo/gul_P2 &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16


# --- Do computes for fully correlated output ---



# --- Do ground up loss computes ---

eltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1 > work/full_correlation/kat/gul_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1 > work/full_correlation/kat/gul_S1_summarycalc_P1 & pid2=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1 > work/full_correlation/kat/gul_S1_pltcalc_P1 & pid3=$!
eltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1 > work/full_correlation/kat/gul_S2_eltcalc_P1 & pid4=$!
summarycalctocsv < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1 > work/full_correlation/kat/gul_S2_summarycalc_P1 & pid5=$!
pltcalc < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1 > work/full_correlation/kat/gul_S2_pltcalc_P1 & pid6=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P2 > work/full_correlation/kat/gul_S1_eltcalc_P2 & pid7=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P2 > work/full_correlation/kat/gul_S1_summarycalc_P2 & pid8=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P2 > work/full_correlation/kat/gul_S1_pltcalc_P2 & pid9=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P2 > work/full_correlation/kat/gul_S2_eltcalc_P2 & pid10=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P2 > work/full_correlation/kat/gul_S2_summarycalc_P2 & pid11=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P2 > work/full_correlation/kat/gul_S2_pltcalc_P2 & pid12=$!

tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P1 work/full_correlation/gul_S1_summaryaalcalc/P1.bin work/full_correlation/gul_S1_summaryleccalc/P1.bin > /dev/null & pid13=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P1 work/full_correlation/gul_S2_summaryaalcalc/P1.bin work/full_correlation/gul_S2_summaryleccalc/P1.bin > /dev/null & pid14=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summaryeltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarypltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarysummarycalc_P2 work/full_correlation/gul_S1_summaryaalcalc/P2.bin work/full_correlation/gul_S1_summaryleccalc/P2.bin > /dev/null & pid15=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summaryeltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarypltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarysummarycalc_P2 work/full_correlation/gul_S2_summaryaalcalc/P2.bin work/full_correlation/gul_S2_summaryleccalc/P2.bin > /dev/null & pid16=$!
summarycalc -i -1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P1 -2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P1 < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P1 &
summarycalc -i -1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P2 &

wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16


# --- Do ground up loss kats ---

kat work/kat/gul_S1_eltcalc_P1 work/kat/gul_S1_eltcalc_P2 > output/gul_S1_eltcalc.csv & kpid1=$!
kat work/kat/gul_S1_pltcalc_P1 work/kat/gul_S1_pltcalc_P2 > output/gul_S1_pltcalc.csv & kpid2=$!
kat work/kat/gul_S1_summarycalc_P1 work/kat/gul_S1_summarycalc_P2 > output/gul_S1_summarycalc.csv & kpid3=$!
kat work/kat/gul_S2_eltcalc_P1 work/kat/gul_S2_eltcalc_P2 > output/gul_S2_eltcalc.csv & kpid4=$!
kat work/kat/gul_S2_pltcalc_P1 work/kat/gul_S2_pltcalc_P2 > output/gul_S2_pltcalc.csv & kpid5=$!
kat work/kat/gul_S2_summarycalc_P1 work/kat/gul_S2_summarycalc_P2 > output/gul_S2_summarycalc.csv & kpid6=$!

# --- Do ground up loss kats for fully correlated output ---

kat work/full_correlation/kat/gul_S1_eltcalc_P1 work/full_correlation/kat/gul_S1_eltcalc_P2 > output/full_correlation/gul_S1_eltcalc.csv & kpid7=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P1 work/full_correlation/kat/gul_S1_pltcalc_P2 > output/full_correlation/gul_S1_pltcalc.csv & kpid8=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P1 work/full_correlation/kat/gul_S1_summarycalc_P2 > output/full_correlation/gul_S1_summarycalc.csv & kpid9=$!
kat work/full_correlation/kat/gul_S2_eltcalc_P1 work/full_correlation/kat/gul_S2_eltcalc_P2 > output/full_correlation/gul_S2_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S2_pltcalc_P1 work/full_correlation/kat/gul_S2_pltcalc_P2 > output/full_correlation/gul_S2_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S2_summarycalc_P1 work/full_correlation/kat/gul_S2_summarycalc_P2 > output/full_correlation/gul_S2_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12

# Final aggregate outputs (AAL and loss-exceedance curves) from the work
# files accumulated above.
aalcalc -Kgul_S1_summaryaalcalc > output/gul_S1_aalcalc.csv & lpid1=$!
leccalc -r -Kgul_S1_summaryleccalc -F output/gul_S1_leccalc_full_uncertainty_aep.csv -f output/gul_S1_leccalc_full_uncertainty_oep.csv -S output/gul_S1_leccalc_sample_mean_aep.csv -s output/gul_S1_leccalc_sample_mean_oep.csv -W output/gul_S1_leccalc_wheatsheaf_aep.csv -M output/gul_S1_leccalc_wheatsheaf_mean_aep.csv -m output/gul_S1_leccalc_wheatsheaf_mean_oep.csv -w output/gul_S1_leccalc_wheatsheaf_oep.csv & lpid2=$!
aalcalc -Kgul_S2_summaryaalcalc > output/gul_S2_aalcalc.csv & lpid3=$!
leccalc -r -Kgul_S2_summaryleccalc -F output/gul_S2_leccalc_full_uncertainty_aep.csv -f output/gul_S2_leccalc_full_uncertainty_oep.csv -S output/gul_S2_leccalc_sample_mean_aep.csv -s output/gul_S2_leccalc_sample_mean_oep.csv -W output/gul_S2_leccalc_wheatsheaf_aep.csv -M output/gul_S2_leccalc_wheatsheaf_mean_aep.csv -m output/gul_S2_leccalc_wheatsheaf_mean_oep.csv -w output/gul_S2_leccalc_wheatsheaf_oep.csv & lpid4=$!
aalcalc -Kfull_correlation/gul_S1_summaryaalcalc > output/full_correlation/gul_S1_aalcalc.csv & lpid5=$!
leccalc -r -Kfull_correlation/gul_S1_summaryleccalc -F output/full_correlation/gul_S1_leccalc_full_uncertainty_aep.csv -f output/full_correlation/gul_S1_leccalc_full_uncertainty_oep.csv -S output/full_correlation/gul_S1_leccalc_sample_mean_aep.csv -s output/full_correlation/gul_S1_leccalc_sample_mean_oep.csv -W output/full_correlation/gul_S1_leccalc_wheatsheaf_aep.csv -M output/full_correlation/gul_S1_leccalc_wheatsheaf_mean_aep.csv -m output/full_correlation/gul_S1_leccalc_wheatsheaf_mean_oep.csv -w output/full_correlation/gul_S1_leccalc_wheatsheaf_oep.csv & lpid6=$!
aalcalc -Kfull_correlation/gul_S2_summaryaalcalc > output/full_correlation/gul_S2_aalcalc.csv & lpid7=$!
leccalc -r -Kfull_correlation/gul_S2_summaryleccalc -F output/full_correlation/gul_S2_leccalc_full_uncertainty_aep.csv -f output/full_correlation/gul_S2_leccalc_full_uncertainty_oep.csv -S output/full_correlation/gul_S2_leccalc_sample_mean_aep.csv -s output/full_correlation/gul_S2_leccalc_sample_mean_oep.csv -W output/full_correlation/gul_S2_leccalc_wheatsheaf_aep.csv -M output/full_correlation/gul_S2_leccalc_wheatsheaf_mean_aep.csv -m output/full_correlation/gul_S2_leccalc_wheatsheaf_mean_oep.csv -w output/full_correlation/gul_S2_leccalc_wheatsheaf_oep.csv & lpid8=$!
wait $lpid1 $lpid2 $lpid3 $lpid4 $lpid5 $lpid6 $lpid7 $lpid8

# Clean up intermediate state.
rm -R -f work/*
rm -R -f /tmp/%FIFO_DIR%/
|
<filename>src/training/slidewindow/E76_Hard_MinimumWindowSubstring.java
package training.slidewindow;
import org.junit.jupiter.api.Test;
import java.util.function.BiFunction;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* 给你一个字符串 s 、一个字符串 t 。返回 s 中涵盖 t 所有字符的最小子串。
* 如果 s 中不存在涵盖 t 所有字符的子串,则返回空字符串 "" 。
*
* 注意:t 中的重复字符数量必须小于等于涵盖子串。
*
* 注意:如果 s 中存在这样的子串,我们保证它是唯一的答案。
*
* 你能设计一个在 O(n) 时间内解决此问题的算法吗?
*
* 例 1:
* Input:s = "ADOBECODEBANC", t = "ABC"
* Output:"BANC"
*
* 例 2:
* Input:s = "a", t = "a"
* Output:"a"
*
* 约束:
* - 1 <= s.length, t.length <= 10**5
* - s 和 t 由英文字母组成
*/
public class E76_Hard_MinimumWindowSubstring {

    /**
     * Runs the shared test cases against the given implementation.
     * Arguments follow JUnit's (expected, actual) convention; the original had
     * them reversed, which produced misleading failure messages.
     */
    static void test(BiFunction<String, String, String> method) {
        assertEquals("BANC", method.apply("ADOBECODEBANC", "ABC"));
        assertEquals("a", method.apply("a", "a"));
        assertEquals("", method.apply("a", "aa"));
        assertEquals("aa", method.apply("aa", "aa"));
        assertEquals("aa", method.apply("aaa", "aa"));
    }

    /**
     * Sliding window. The general framework is:
     * void slidingWindow(string s, string t) {
     *     unordered_map&lt;char, int&gt; need, window;
     *     for (char c : t) need[c]++;
     *
     *     int left = 0, right = 0;
     *     int valid = 0;
     *     while (right &lt; s.size()) {
     *         // c is the character about to enter the window
     *         char c = s[right];
     *         // grow the window to the right
     *         right++;
     *         // update in-window bookkeeping
     *         ...
     *
     *         // decide whether the left edge must shrink
     *         while (window needs shrink) {
     *             // d is the character about to leave the window
     *             char d = s[left];
     *             // shrink the window from the left
     *             left++;
     *             // update in-window bookkeeping
     *             ...
     *         }
     *     }
     * }
     *
     * LeetCode time: 4ms - 88.49%
     * Memory: 38.7 MB - 74.36%
     */
    public String minWindow(String s, String t) {
        // t is a single character: a direct scan suffices.
        if (t.length() == 1)
            return s.indexOf(t.charAt(0)) >= 0 ? t : "";

        int[] need = new int[128];
        int[] window = new int[128];
        for (char c : t.toCharArray())
            need[c] += 1;
        // Sliding window [left, right) of indices into s.
        // left points at the first window character that occurs in t; right is the exclusive bound.
        // cnt counts how many required characters the window currently covers.
        int left = 0, right = 0, cnt = 0, bestLeft = 0, bestRight = s.length();
        // covered records whether any covering substring has been found.
        boolean covered = false;
        while (right < s.length()) {
            // Pull the character at right into the window.
            char c = s.charAt(right++);
            // Characters that do not occur in t are skipped.
            if (need[c] == 0) {
                // While nothing has been matched yet, the left edge may advance too.
                if (cnt == 0)
                    left++;
                continue;
            }
            // c occurs in t: record it in the window.
            window[c]++;
            // Count it only while the window still needs more of this character.
            if (window[c] <= need[c])
                cnt++;
            // Once the window covers t entirely, shrink it from the left.
            while (cnt == t.length()) {
                covered = true;
                // Track the smallest covering window seen so far.
                if (right - left < bestRight - bestLeft) {
                    bestLeft = left;
                    bestRight = right;
                }
                // Drop the character at left from the window.
                char leftChar = s.charAt(left);
                window[leftChar]--;
                // If the window now holds fewer leftChar than t requires, a match was lost.
                if (window[leftChar] < need[leftChar])
                    cnt--;
                // Advance left to the next character still present in the window (bounds-checked).
                while (++left < right && window[s.charAt(left)] == 0);
            }
        }
        return covered ? s.substring(bestLeft, bestRight) : "";
    }

    @Test
    public void testMinWindow() {
        test(this::minWindow);
    }
}
|
(function() {
    'use strict';

    angular
        .module('app.station')
        .controller('StationOverviewCtrl', StationOverviewCtrl);

    StationOverviewCtrl.$inject = [
        '$mdDialog', 'GoogleMapDefaultOptions', '_sensors', '_groups', '_parameters', 'GoogleMapIcons', 'stationStorage'
    ];

    /**
     * Overview controller for a single station: exposes the station, its
     * sensors/groups/parameters, a Google-Maps model centered on the station,
     * and dialogs for download and sensor details.
     */
    function StationOverviewCtrl($mdDialog, GoogleMapDefaultOptions, _sensors, _groups, _parameters, GoogleMapIcons, stationStorage) {
        var vm = this;

        vm.customFullscreen = false;
        vm.groups = _groups;
        vm.isImage = isImage;
        vm.parameters = _parameters;
        vm.station = stationStorage.getStation();
        vm.sensors = _sensors;
        vm.showDownloadInfoDialog = showDownloadInfoDialog;
        vm.showSensorInfoDialog = showSensorInfoDialog;

        vm.map = buildMap();
        vm.mapOptions = angular.copy(GoogleMapDefaultOptions);
        vm.mapIcons = angular.copy(GoogleMapIcons);
        vm.mapMarker = buildMarker();

        // Map model centered on the station's coordinates.
        function buildMap() {
            return {
                center: stationCoords(),
                showMap: true,
                zoom: 12
            };
        }

        // Marker model for the station; depends on vm.mapIcons being set first.
        function buildMarker() {
            return {
                coords: stationCoords(),
                key: 'marker-id-' + vm.station.id,
                options: {
                    icon: vm.mapIcons.blueicon,
                    title: vm.station.name
                }
            };
        }

        // Fresh coordinate object each call so map and marker do not share state.
        function stationCoords() {
            return {
                latitude: vm.station.position.latitude,
                longitude: vm.station.position.longitude
            };
        }

        // True when an image value is present (truthy).
        function isImage(img) {
            return Boolean(img);
        }

        function showDownloadInfoDialog(ev) {
            var dialogOptions = {
                controller: 'StationDownloadInfoDialogController',
                controllerAs: 'StationDownloadInfoDialogControllerVm',
                templateUrl: '/static/partials/station/station-overview-download-info.dialog.html',
                parent: angular.element(document.body),
                targetEvent: ev,
                clickOutsideToClose: true,
                fullscreen: vm.customFullscreen
            };
            $mdDialog.show(dialogOptions)
                .then(function(answer) {
                    vm.status = 'You said the information was "' + answer + '".';
                }, function() {
                    vm.status = 'You cancelled the dialog.';
                });
        }

        function showSensorInfoDialog(ev, sensor) {
            var dialogOptions = {
                controller: 'StationSensorInfoDialogController',
                controllerAs: 'StationSensorInfoDialogControllerVm',
                templateUrl: '/static/partials/station/station-overview-sensor-info.dialog.html',
                parent: angular.element(document.body),
                targetEvent: ev,
                clickOutsideToClose: true,
                locals: {
                    sensor: sensor
                },
                fullscreen: vm.customFullscreen
            };
            $mdDialog.show(dialogOptions);
        }
    }
})();
|
#!/bin/bash

# Toggle the Caps Lock key via xdotool.
press_caps() {
    xdotool key Caps_Lock
}

# Any mouse button (left, middle, or right) toggles Caps Lock.
case $BLOCK_BUTTON in
    1|2|3) press_caps ;;
esac

# Block output: full text, short text, then a color reflecting the current
# Caps Lock state (green when on, red when off).
echo "CAPS"
echo "CAPS"
xset -q | grep "Caps Lock: *on" > /dev/null 2>&1 && echo "#00FF00" || echo "#FF0000"
|
def sum_of_squares(sequence):
    """Return the sum of squares of the whitespace-separated integers in *sequence*."""
    return sum(int(token) ** 2 for token in sequence.split())


input_sequence = "5 5 5 5 5"
result = sum_of_squares(input_sequence)
print(result)  # Output: 125
# Quote ${BASH_SOURCE[0]} inside the command substitution: the unquoted form
# word-splits when the script lives in a directory whose path contains spaces.
. "$(dirname "${BASH_SOURCE[0]}")/InitializeEnvironment.sh"

# Force-kill any processes left over from a previous functional-test run.
pkill -9 -l GVFS.FunctionalTests
pkill -9 -l git
pkill -9 -l gvfs
pkill -9 -l GVFS.Mount
pkill -9 -l prjfs-log

# Remove the functional-test working directory, if present.
if [ -d ~/GVFS.FT ]; then
  rm -r ~/GVFS.FT
fi
|
#! /usr/bin/env sh
# Generate the static site output (freeze.py presumably renders the pages --
# TODO confirm what it emits and where).
python freeze.py
# Rename the extension-less "about" output so it is served as HTML.
mv about about.html
-- Names of customers having at least one order with quantity above 5.
SELECT customers.name
FROM customers
INNER JOIN orders ON orders.customer_id = customers.id
WHERE orders.quantity > 5;
#include <queue>
#include <stdexcept>

// Thin FIFO wrapper around std::queue<int>.
class Queue {
private:
    std::queue<int> q;

public:
    // Adds a value to the back of the queue.
    void enqueue(int val) {
        q.push(val);
    }

    // Removes and returns the front value.
    // Throws std::out_of_range when the queue is empty instead of invoking
    // undefined behavior (std::queue::front on an empty container).
    int dequeue() {
        if (q.empty()) {
            throw std::out_of_range("dequeue on empty queue");
        }
        int val = q.front();
        q.pop();
        return val;
    }

    // Number of stored elements. std::queue::size returns size_t; the explicit
    // cast keeps the original int interface without an implicit narrowing.
    int size() {
        return static_cast<int>(q.size());
    }

    // True when no elements are stored.
    bool empty() {
        return q.empty();
    }
};
// Autogenerated from library/elements.i
package ideal.library.elements;

/** Generated marker interface: a readonly value that also supports equality comparison. */
public interface readonly_equality_comparable extends readonly_value, any_equality_comparable { }
|
<gh_stars>1-10
package de.rieckpil.blog;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Objects;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Demonstrates Objects.requireNonNullElse / requireNonNullElseGet (JDK 9+)
 * for supplying defaults when arguments are null.
 */
public class ObjectsRequireNonNullUpdate {

    public static void main(String[] args) {
        transferMoney(null, null);
        // BigDecimal.valueOf avoids the new BigDecimal(double) pitfall, which would
        // embed the binary floating-point error of 33.33 into the value before scaling.
        transferMoney(null, BigDecimal.valueOf(33.33).setScale(2, RoundingMode.DOWN));
        transferMoney("Duke", null);
        transferMoney("Duke", new BigDecimal(1995));
    }

    /**
     * Prints a transfer line, substituting defaults for null arguments:
     * a computed default amount and the recipient "Phil".
     */
    public static void transferMoney(String recipient, BigDecimal amount) {
        // requireNonNullElseGet defers computing the (random) default until needed.
        amount = Objects.requireNonNullElseGet(amount, ObjectsRequireNonNullUpdate::calculateDefaultAmount);
        recipient = Objects.requireNonNullElse(recipient, "Phil");
        System.out.println(amount + " is transfered to " + recipient);
    }

    /** Randomly picks one of two default amounts (1337 or 42). */
    private static BigDecimal calculateDefaultAmount() {
        return new BigDecimal(ThreadLocalRandom.current().nextBoolean() ? 1337 : 42);
    }
}
|
<reponame>anticipasean/girakkafunc
package cyclops.stream.spliterator.push;
import cyclops.stream.spliterator.CopyableSpliterator;
import java.util.Spliterator;
import java.util.Spliterators.AbstractSpliterator;
import java.util.function.Consumer;
import java.util.stream.Collector;
public class CollectingSinkSpliterator<T, A, R> extends AbstractSpliterator<R> implements Runnable, CopyableSpliterator<R> {

    // Upstream source of elements.
    private final Spliterator<T> s;
    // Collector used to accumulate/combine/finish results (named "monoid" historically).
    private final Collector<? super T, A, R> monoid;
    // Running accumulation; volatile so readers observe the most recently merged state.
    volatile A total;
    // Downstream consumer captured from tryAdvance and replayed by run().
    volatile Consumer<? super R> action;
    // Guards merging a local accumulation into `total`.
    private final Object lock = new Object();

    public CollectingSinkSpliterator(long est,
                                     int additionalCharacteristics,
                                     Spliterator<T> s,
                                     Collector<? super T, A, R> monoid) {
        super(est,
              additionalCharacteristics & Spliterator.ORDERED);
        this.s = s;
        this.monoid = monoid;
        // Start from the collector's empty accumulation.
        this.total = monoid.supplier()
                           .get();
    }

    // Pushes the finished result to the consumer last seen by tryAdvance.
    // NOTE(review): assumes tryAdvance ran first; otherwise `action` is null -- verify callers.
    public void run() {
        action.accept(result());
    }

    // Finishes the current accumulation into the collector's result type.
    public R result() {
        return monoid.finisher()
                     .apply(total);
    }

    @Override
    public boolean tryAdvance(Consumer<? super R> action) {
        this.action = action;
        // Drain all remaining source elements into a fresh local accumulation...
        A res = monoid.supplier()
                      .get();
        s.forEachRemaining(t -> {
            monoid.accumulator()
                  .accept(res,
                          t);
        });
        // ...then merge it into the shared total under the lock.
        synchronized(lock) {
            total = monoid.combiner()
                          .apply(res,
                                 total);
        }
        // Always reports exhaustion: this spliterator never emits elements itself.
        return false;
    }

    @Override
    public Spliterator<R> copy() {
        return new CollectingSinkSpliterator<>(this.estimateSize(),
                                               this.characteristics(),
                                               CopyableSpliterator.copy(s),
                                               monoid);
    }
}
|
from scipy.interpolate import splrep, splev


def estimate_interest_rate(time_points, interest_rates, time):
    """Estimate the interest rate at *time* by cubic-spline interpolation
    through the (time_points, interest_rates) samples."""
    spline = splrep(time_points, interest_rates)
    return splev(time, spline)


# Example usage
time_points = [0, 1, 2, 3, 4]
interest_rates = [5, 7, 3, 6, 8]
estimated_rate_at_2_5_years = estimate_interest_rate(time_points, interest_rates, 2.5)
print(f"The estimated interest rate at 2.5 years is: {estimated_rate_at_2_5_years}%")
<gh_stars>1-10
// Copyright 2021 99cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
const { resolve } = require('path');
const webpack = require('webpack');
const merge = require('webpack-merge');
const HtmlWebPackPlugin = require('html-webpack-plugin');
// const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
const autoprefixer = require('autoprefixer');
const CleanWebpackPlugin = require('clean-webpack-plugin');
const TerserPlugin = require('terser-webpack-plugin');
const common = require('./webpack.common');
const theme = require('./theme');
// Resolve a path relative to the repository root (this file lives one level down).
const root = (path) => resolve(__dirname, `../${path}`);

// `version` cache-busts emitted chunk names; everything else is the shared base config.
const { version, ...restConfig } = common;

module.exports = (env) => {
  // Backend mode compiled into the bundle; defaults to the mock API.
  const API = (env || {}).API || 'mock';
  // const devServer = {
  //   // host: '0.0.0.0',
  //   host: 'localhost',
  //   port: 8088,
  //   contentBase: root('dist'),
  //   historyApiFallback: true,
  //   compress: true,
  //   hot: false,
  //   inline: false,
  //   disableHostCheck: true,
  //   // progress: true
  // };
  return merge(restConfig, {
    entry: {
      main: root('src/core/index.jsx'),
    },
    output: {
      filename: '[name].js',
      path: root('dist'),
      publicPath: '/',
      chunkFilename: `[name].bundle.${version}.js`,
    },
    mode: 'production',
    // devtool: 'inline-source-map',
    // devServer: devServer,
    module: {
      rules: [
        // NOTE(review): this rule has no exclude, so .css files outside
        // node_modules also match the (css|less) rule below and are processed
        // by both -- confirm this double handling is intended.
        {
          test: /\.css$/,
          use: [
            {
              loader: 'style-loader',
            },
            {
              loader: 'css-loader',
            },
          ],
        },
        // Project styles: CSS modules in global mode + autoprefixer + less.
        {
          test: /\.(css|less)$/,
          exclude: /node_modules/,
          use: [
            {
              loader: 'style-loader', // creates style nodes from JS strings
            },
            {
              loader: 'css-loader', // translates CSS into CommonJS
              options: {
                modules: {
                  mode: 'global',
                },
                localIdentName: '[name]__[local]--[hash:base64:5]',
              },
            },
            {
              loader: 'postcss-loader',
              options: {
                plugins: [autoprefixer('last 2 version')],
                sourceMap: false,
              },
            },
            {
              loader: 'less-loader', // compiles Less to CSS
              options: {
                importLoaders: true,
                javascriptEnabled: true,
              },
            },
          ],
        },
        // Vendor less (e.g. antd) with theme variables applied.
        {
          test: /\.(less)$/,
          include: /node_modules/,
          use: [
            {
              loader: 'style-loader', // creates style nodes from JS strings
            },
            {
              loader: 'css-loader', // translates CSS into CommonJS
            },
            {
              loader: 'less-loader', // compiles Less to CSS
              options: {
                javascriptEnabled: true,
                modifyVars: theme,
              },
            },
          ],
        },
      ],
    },
    plugins: [
      new webpack.DefinePlugin({
        // Inject environment variables into the build
        'process.env.API': JSON.stringify(API),
      }),
      new HtmlWebPackPlugin({
        template: root('src/asset/template/index.html'),
        favicon: root('src/asset/image/favicon.ico'),
      }),
      new CleanWebpackPlugin(['dist'], {
        root: resolve(__dirname, `../`),
      }),
      // new BundleAnalyzerPlugin(),
    ],
    optimization: {
      splitChunks: {
        maxInitialRequests: 10,
        cacheGroups: {
          commons: {
            chunks: 'all',
            name: 'common',
            minChunks: 1,
            minSize: 0,
          },
          vendor: {
            test: /node_modules/,
            chunks: 'all',
            name: 'vendor',
            minChunks: 1,
            priority: 10,
            enforce: true,
          },
        },
      },
      runtimeChunk: {
        name: () => `runtime.${version}`,
      },
      minimize: true, // default true for production
      minimizer: [
        new TerserPlugin({
          sourceMap: false,
          terserOptions: {
            compress: {
              // Strip console.* calls from the production bundle.
              drop_console: true,
            },
          },
        }),
      ],
    },
  });
};
|
#!/bin/sh
set -e
set -u
set -o pipefail

# NOTE(review): `function`, the ERR trap, arrays and `[[ ]]` below are bash
# features; a strict POSIX /bin/sh (e.g. dash) would reject them -- confirm
# this phase is invoked with bash.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Locate the framework: prefer the full path under BUILT_PRODUCTS_DIR,
  # then its basename there, then the argument as given.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked source to its target before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Resolve the copied binary, following a symlink if the destination has one.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  # Second argument controls whether a missing-architecture warning is printed.
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # (Fixed: the logged command previously said "${basename}.framework.dSYM",
      # which did not match the rsync actually executed below.)
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # NOTE(review): unlike the other log lines, the inner quotes here are not
    # escaped, so the echoed command prints without them -- log-only difference.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is requested.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Install the vendored framework for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/BottomSheetSlider/BottomSheetSlider.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/BottomSheetSlider/BottomSheetSlider.framework"
fi

# Wait for any backgrounded (parallel) codesign jobs before finishing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
package aufgabe9_5;
/**
 * Base class for a falling object on the playing field. Tracks the grid
 * position, whether the object is still falling, and whether it has been
 * marked for removal.
 */
public abstract class Weihnachtsobjekt {
    // utf8: "Head down into the water, little tail up high." - CIA interrogation method
    // (translated from the original German encoding-test comment)
    protected int x, y;                       // grid coordinates
    protected boolean fallend = true;         // true while the object is still falling
    protected boolean markedForDeath = false; // set once the object should be removed

    public Weihnachtsobjekt(int x, int y) {
        this.x = x;
        this.y = y;
    }

    /** Draws this object into the rendered playing field. */
    public abstract void addObjektToSpielfeld(int[][] spielfeld);

    /** Marks this object's cell as static once it is no longer falling. */
    public void addObjectStatic(boolean[][] staticObjects) {
        staticObjects[x][y] |= !fallend;
    }

    /** True when the cell directly below is inside the field and unoccupied. */
    boolean tryDownMove(boolean[][] staticObjects) {
        return y + 1 < staticObjects[0].length &&
                !staticObjects[x][y + 1];
    }

    /**
     * Moves the object one cell down if it is falling and the cell below is
     * free; otherwise it stops falling. Returns true when a move happened.
     */
    public boolean moveDown(boolean[][] staticObjects) {
        if (!fallend) {
            return false;
        }
        if (!tryDownMove(staticObjects)) {
            fallend = false;
            return false;
        }
        // Carry the cell's occupancy flag along with the object.
        boolean prev = staticObjects[x][y];
        staticObjects[x][y] = false;
        y++;
        staticObjects[x][y] = prev;
        return true;
    }

    /**
     * Moves one cell to the left. Returns 1 on success, 0 when blocked,
     * -1 when the object left the field and was deleted.
     */
    public int moveLeft(boolean[][] staticObjects) {
        if (!fallend) {
            return 0;
        }
        int left = x - 1;
        if (left < 1) {
            // Reached the left border: remove the object.
            delete();
            return -1;
        }
        if (staticObjects[left][y]) {
            return 0;
        }
        // boolean prev = staticObjects[x][y];
        staticObjects[x][y] = false;
        x = left;
        // fallend is known true here, so this writes false (cell stays non-static).
        staticObjects[left][y] = !fallend;
        return 1;
    }

    /**
     * Moves one cell to the right. Returns 1 on success, 0 when blocked,
     * -1 when the object left the field and was deleted.
     */
    public int moveRight(boolean[][] staticObjects) {
        if (!fallend) {
            return 0;
        }
        int right = x + 1;
        if (right >= staticObjects.length - 1) {
            // Reached the right border: remove the object.
            delete();
            return -1;
        }
        if (staticObjects[right][y]) {
            return 0;
        }
        // boolean prev = staticObjects[x][y];
        staticObjects[x][y] = false;
        x = right;
        staticObjects[right][y] = !fallend;
        return 1;
    }

    /** Flags this object for removal by the game loop. */
    void delete() {
        markedForDeath = true;
    }
}
|
from sqlalchemy import *
from migrate import *

# Shared metadata; bound to the target engine inside upgrade()/downgrade().
meta = MetaData()
def upgrade(migrate_engine):
    """Add the integer ``items_count`` column to the ``job_history`` table."""
    meta.bind = migrate_engine
    table = Table('job_history', meta, autoload=True)
    items_count = Column('items_count', Integer)
    items_count.create(table)
def downgrade(migrate_engine):
    """Drop the ``items_count`` column from the ``job_history`` table."""
    meta.bind = migrate_engine
    table = Table('job_history', meta, autoload=True)
    table.c['items_count'].drop()
|
import FormHelperText from '@material-ui/core/FormHelperText';
import React, { useState, useRef, useEffect } from 'react';
import Styled from './styled';
import { SelectProps } from './types';
import { InputLabel, MenuItem, Select as MuiSelect } from '@material-ui/core';
/**
 * Material-UI select wrapper: renders a measured InputLabel, the options
 * list, and an optional helper text inside a styled FormControl.
 */
export const Select = ({
  options,
  id,
  labelId,
  label,
  error,
  addEmptyValue,
  fullWidth,
  helperText,
  required,
  variant,
  margin,
  ...otherProps
}: SelectProps) => {
  // Fall back to a derived id when no explicit labelId is supplied.
  const resolvedLabelId = labelId || `${id}-label`;

  const labelRef = useRef<HTMLLabelElement>(null);
  const [labelWidth, setLabelWidth] = useState(0);

  // Measure the rendered label so the select can reserve room for it.
  useEffect(() => {
    const node = labelRef.current;
    if (node) {
      setLabelWidth(node.clientWidth);
    }
  }, [labelRef, label]);

  return (
    <Styled.FormControl
      fullWidth={fullWidth}
      error={error}
      required={required}
      variant={variant}
      margin={margin}
    >
      <InputLabel ref={labelRef} id={resolvedLabelId}>
        {label}
      </InputLabel>
      <MuiSelect
        id={id}
        labelId={resolvedLabelId}
        fullWidth={fullWidth}
        variant={variant}
        labelWidth={labelWidth}
        {...otherProps}
      >
        {addEmptyValue && <MenuItem value="" />}
        {options.map((option, index) => (
          <MenuItem
            value={option.value}
            key={`${option.label}_${option.value}_${index}`}
          >
            {option.label}
          </MenuItem>
        ))}
      </MuiSelect>
      {helperText && <FormHelperText>{helperText}</FormHelperText>}
    </Styled.FormControl>
  );
};
|
<gh_stars>10-100
package com.lbs.server.conversation
import com.lbs.api.json.model.IdName
import com.lbs.bot.model.Command
import com.lbs.server.conversation.Book.BookingData
import com.lbs.server.conversation.StaticData.{FindOptions, FoundOptions, LatestOptions, StaticDataConfig}
import com.lbs.server.conversation.base.Conversation
import com.lbs.server.ThrowableOr
/**
 * Mixes static-data selection steps into a booking conversation: forwards
 * commands and option lookups to the StaticData conversation and applies the
 * chosen option to the booking data.
 */
trait StaticDataForBooking extends Conversation[BookingData] {

  private[conversation] def staticData: StaticData

  /**
   * Builds the message handler for one static-data step: forwards commands,
   * serves latest/found options, and on a selection advances to `nextStep`
   * with the id applied to the booking data.
   */
  protected def withFunctions(latestOptions: => Seq[IdName], staticOptions: => ThrowableOr[List[IdName]], applyId: IdName => BookingData): Step => MessageProcessorFn = {
    nextStep: Step => {
      case Msg(cmd: Command, _) =>
        staticData ! cmd
        stay()
      case Msg(LatestOptions, _) =>
        staticData ! LatestOptions(latestOptions)
        stay()
      case Msg(FindOptions(searchText), _) =>
        staticData ! FoundOptions(filterOptions(staticOptions, searchText))
        stay()
      case Msg(id: IdName, _) =>
        goto(nextStep) using applyId(id)
    }
  }

  /** Restarts and configures the StaticData conversation, then delegates replies. */
  protected def staticData(staticDataConfig: => StaticDataConfig)(functions: BookingData => Step => MessageProcessorFn)(requestNext: Step)(implicit functionName: sourcecode.Name): Step = {
    ask { _ =>
      staticData.restart()
      staticData ! staticDataConfig
    } onReply {
      case msg@Msg(_, bookingData: BookingData) =>
        val fn = functions(bookingData)(requestNext)
        fn(msg)
    }
  }

  /**
   * Case-insensitive name filter. Fix: both sides are lower-cased now; the
   * original lower-cased only the option name, so a search text containing
   * any upper-case character could never match.
   */
  private def filterOptions(options: ThrowableOr[List[IdName]], searchText: String) = {
    val needle = searchText.toLowerCase
    options.map(opt => opt.filter(c => c.name.toLowerCase.contains(needle)))
  }
}
|
package gotelegram

// Update represents an incoming update from the Telegram Bot API.
type Update struct {
	UpdateID          int     `json:"update_id"`
	Message           Message `json:"message"`
	EditedMessage     Message `json:"edited_message"`
	ChannelPost       Message `json:"channel_post"`
	EditedChannelPost Message `json:"edited_channel_post"`
	//TODO: Implement following fields
	/*
		InlineQuery InlineQuery `json:"inline_query"`
		ChosenInlineResult ChosenInlineResult `json:"chosen_inline_result"`
		CallbackQuery CallbackQuery `json:"callback_query"`
		ShippingQuery ShippingQuery `json:"shipping_query"`
		PreCheckoutQuery PreCheckoutQuery `json:"pre_checkout_query"`
		Poll Poll `json:"poll"`
	*/
}

// Message represents a message.
type Message struct {
	MessageID int             `json:"message_id"`
	From      User            `json:"from"`
	Date      int             `json:"date"`
	Chat      Chat            `json:"chat"`
	Text      string          `json:"text"`
	Entities  []MessageEntity `json:"entities"`
	//TODO: more fields to be implemented...
}

// MessageEntity represents one special entity in a text message.
// For example, hashtags, usernames, URLs, etc.
type MessageEntity struct {
	Type   string `json:"type"`
	Offset int    `json:"offset"`
	Length int    `json:"length"`
	URL    string `json:"url"`
	// NOTE(review): the Bot API documents "user" (for text_mention entities)
	// as a User object rather than a string -- verify before relying on it.
	User string `json:"user"`
}

// User represents a Telegram user or bot.
type User struct {
	ID           int    `json:"id"`
	IsBot        bool   `json:"is_bot"`
	FirstName    string `json:"first_name"`
	LastName     string `json:"last_name"`
	Username     string `json:"username"`
	LanguageCode string `json:"language_code"`
}

// Chat represents a chat.
type Chat struct {
	ID        int    `json:"id"`
	Type      string `json:"type"`
	Title     string `json:"title"`
	Username  string `json:"username"`
	FirstName string `json:"first_name"`
	LastName  string `json:"last_name"`
	//TODO: more fields to be implemented...
}
|
# "I'm" contains an apostrophe, so it must use double quotes:
# the original single-quoted 'I'm' was a SyntaxError.
word_list = ['She', 'said', "I'm", 'so', 'excited']
angular.module("wust.config").config(RoutesConfig);

RoutesConfig.$inject = ["$stateProvider", "$urlRouterProvider", "$locationProvider"];

/**
 * Registers the application's ui-router states, the default route, and
 * HTML5-mode URLs.
 */
function RoutesConfig($stateProvider, $urlRouterProvider, $locationProvider) {
    // [name, definition] pairs, registered in the original order.
    var stateDefinitions = [
        ["page", {
            abstract: true,
            templateUrl: `components/page/page.html`,
            controller: "PageCtrl as vm",
        }],
        ["dashboard", {
            parent: "page",
            url: "/dashboard",
            templateUrl: `components/dashboard/dashboard.html`,
            controller: "DashboardCtrl as vm",
        }],
        ["vote", {
            parent: "page",
            url: "/vote",
            templateUrl: `components/votes/vote.html`,
            controller: "VotesCtrl as vm",
        }],
        ["users", {
            abstract: true,
            parent: "page",
            url: "/users",
            templateUrl: `components/users/user.html`,
        }],
        ["users.list", {
            url: "",
            templateUrl: `components/users/list.html`,
            controller: "UserListsCtrl as vm",
        }],
        ["users.details", {
            url: "/:id",
            templateUrl: `components/users/detail.html`,
            controller: "UserDetailsCtrl as vm",
        }],
        ["tags", {
            parent: "page",
            url: "/tags",
            templateUrl: `components/tags/tag.html`,
            controller: "TagsCtrl as vm",
        }],
        ["tags.details", {
            url: "/:id",
            templateUrl: `components/tags/tag_detail.html`,
            controller: "TagDetailsCtrl as vm",
        }],
        ["focus", {
            parent: "page",
            url: "/focus/:id/:type",
            templateUrl: `components/focus/focus.html`,
            controller: "FocusCtrl as vm",
        }],
    ];

    stateDefinitions.forEach(function(definition) {
        $stateProvider.state(definition[0], definition[1]);
    });

    $urlRouterProvider.otherwise("/dashboard");
    $locationProvider.html5Mode(true);
    //TODO: https://github.com/angular/angular.js/issues/8934https://github.com/angular/angular.js/issues/8934
    // should fix our problem with paths to marker defs
    // $locationProvider.html5Mode({enabled: true, requireBase: false});
}
|
package com.testvagrant.ekam.dataclients;
import com.google.gson.reflect.TypeToken;
import com.testvagrant.ekam.commons.data.DataSetsClient;
import java.util.List;
public class ListDataClient extends DataSetsClient {

    /**
     * Reads a JSON list from the given data-set file.
     * <p>
     * WARNING: {@code T} is erased at runtime, so {@code new TypeToken<List<T>>() {}}
     * cannot capture the actual element type; Gson falls back to its default
     * representation (maps) for the elements instead of {@code T}. Prefer
     * {@link #getList(String, Class)} when the element type matters.
     */
    public <T> List<T> getList(String fileName) {
        return getValue(fileName, new TypeToken<List<T>>() {}.getType());
    }

    /**
     * Type-safe variant: builds the {@code List<T>} type from the concrete element
     * class so elements deserialize as {@code clazz} instances.
     */
    public <T> List<T> getList(String fileName, Class<T> clazz) {
        return getValue(fileName, TypeToken.getParameterized(List.class, clazz).getType());
    }
}
|
<reponame>JLLeitschuh/datasift-java
package com.datasift.client;
import com.datasift.client.exceptions.AuthException;
import com.datasift.client.exceptions.DataSiftException;
import com.datasift.client.exceptions.JsonParsingException;
import io.higgs.core.func.Function2;
import io.higgs.http.client.Request;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import java.io.IOException;
/*
* The API client is a base class which provides a common set of functionality to the other API classes.
*/
public class DataSiftApiClient {
protected DataSiftConfig config;
public DataSiftApiClient(DataSiftConfig config) {
if (config == null) {
throw new IllegalArgumentException("Config cannot be null");
}
this.config = config;
}
/*
* @return a new ParamBuilder instance and automatically adds the required auth properties
*/
public ParamBuilder newParams() {
return new ParamBuilder();
}
protected <T extends DataSiftResult> Function2<String, io.higgs.http.client.Response> newRequestCallback(
final FutureData<T> future, final T instance, final DataSiftConfig config) {
return new Function2<String, io.higgs.http.client.Response>() {
public void apply(String s, io.higgs.http.client.Response response) {
T result = instance;
if (response.getStatus() != null && HttpResponseStatus.NO_CONTENT.equals(response.getStatus())) {
//if a 204 is returned don't attempt to parse a JSON out of it,
// since there shouldn't be any content as the status implies
result.successful();
} else if (response.hasFailed()) {
result.failed(response.failureCause());
throw new DataSiftException("API request failed", response.failureCause(), response);
} else {
try {
result = (T) DataSiftClient.MAPPER.readValue(s, instance.getClass());
} catch (IOException e) {
result.failed(e);
throw new JsonParsingException("Unable to decode JSON from DataSift response", e, response);
}
}
result.setResponse(new com.datasift.client.Response(s, response));
if (response.getStatus().code() == 401) {
throw new AuthException("Please provide a valid username and API key", response);
}
if (!result.isSuccessful()) {
throw new DataSiftException(result.getError(), result.failureCause(), response);
}
future.received(result);
}
};
}
/*
* To support futures being passed as parameters, this method adds a listener to the unprocessed future that has
* been passed as a parameter. Once that listener is invoked, the response of the unprocessed future is examined
* to see if the response was successful, if it was not then the expected future is passed the failed response
* If the result of the unprocessed future is successful then the response callback is applied.
*
* @param futureToUnwrap the unprocessed future which needs to be unwrapped
* @param futureReturnedToUser the future that has been returned to the user and which callbacks need to be
* triggered on
* @param expectedInstance the instance of the result type to use in failure scenarios
* @param responseToExecuteOnSuccess a future response object which contains the code which will execute once the
* wrapped future has been unwrapped and its result is successful
* @param <T>
* @param <A>
*/
protected <T extends DataSiftResult, A extends DataSiftResult> void unwrapFuture(FutureData<T> futureToUnwrap,
final FutureData<A>
futureReturnedToUser,
final A expectedInstance,
final FutureResponse<T>
responseToExecuteOnSuccess
) {
futureToUnwrap.onData(new FutureResponse<T>() {
public void apply(T stream) {
if (stream.isSuccessful()) {
responseToExecuteOnSuccess.apply(stream);
} else {
expectedInstance.setResponse(stream.getResponse());
futureReturnedToUser.received(expectedInstance);
}
}
});
}
protected <T extends DataSiftResult> void performRequest(final FutureData<T> response, Request request) {
//final Thread thread = Thread.currentThread();
request.header("Authorization", config.authAsHeader());
request.withSSLProtocols(config.sslProtocols());
io.higgs.http.client.FutureResponse execution = request.execute();
execution.addListener(new GenericFutureListener<Future<? super io.higgs.http.client.Response>>() {
@Override
public void operationComplete(Future<? super io.higgs.http.client.Response> future) throws Exception {
if (!future.isSuccess()) {
failNotify(response, future.cause());
}
}
});
}
    /**
     * Records a failure cause on the future and wakes any waiters.
     * (Note: "interuptCause" is the FutureData API's own spelling.)
     */
    protected <T extends DataSiftResult> void failNotify(final FutureData<T> response, Throwable cause) {
        response.interuptCause(cause);
        //thread.interrupt();
        response.doNotify();
    }
}
|
#!/usr/bin/env bash
# CI gate: fail on first error, echo commands, and catch unset vars / pipe failures.
set -euxo pipefail
# Type-check the crate and the `hal` example, then verify formatting is clean.
cargo check
cargo check --example hal
cargo fmt -- --check
|
def average(a, b, c):
    """Return the arithmetic mean of three numbers as a float."""
    return sum((a, b, c)) / 3.0


print(average(1, 2, 3))  # Output: 2.0
'use strict';
process.env.NODE_ENV = 'test';

// Require the dev-dependencies
let chai = require('chai');
let chaiHttp = require('chai-http');
let server = require('../server');
let should = chai.should();
let pdfjsLib = require('pdfjs-dist');
let binaryParser = require('./lib/test-tools.js').binaryParser;

chai.use(chaiHttp);

describe('Basic Tests', () => {
    beforeEach((done) => { // Before each test we empty the database
        // Nothing atm
        done();
    });

    // The 'GET /' and 'GET /api' blocks both requested '/api' and asserted
    // the same banner in the original suite; the shared expectation lives in
    // one helper. NOTE(review): confirm 'GET /' hitting '/api' is intentional.
    const expectWelcome = (done) => {
        chai.request(server)
            .get('/api')
            .end((err, res) => {
                res.should.have.status(200);
                /.*Dr. Processor is here with it's API.*/.test(res.text).should.be.true;
                done();
            });
    };

    describe('GET /', () => {
        it('it should return a welcome message', expectWelcome);
    });

    describe('GET /api', () => {
        it('it should return a welcome message', expectWelcome);
    });

    describe('GET /wrongRoute', () => {
        it('it should return a 404', (done) => {
            chai.request(server)
                .get('/wrongRoute')
                .end((err, res) => {
                    res.should.have.status(404);
                    done();
                });
        });
    });

    describe('GET /api/wrongprocessor/and/wait/for/download', () => {
        it('it should return a 405 status code + description (wrong processor)', (done) => {
            chai.request(server)
                .get('/api/wrongprocessor/and/wait/for/download')
                .end((err, res) => {
                    res.should.have.status(405);
                    res.body.should.be.equal("No processor found. And wrong method. Sad!");
                    done();
                });
        });
    });

    describe('GET /testresources', () => {
        it('it should return a testresources message', (done) => {
            chai.request(server)
                .get('/testresources/')
                .end((err, res) => {
                    res.should.have.status(200);
                    res.text.should.not.be.null;
                    done();
                });
        });
    });

    // The five PDF fixtures and five text fixtures differ only by index, so
    // generate one describe per fixture instead of five copy-pasted blocks.
    // Each fixture file contains its own index as content.
    for (let i = 1; i <= 5; i++) {
        describe(`GET /testresources/${i}.pdf`, () => {
            it(`it should return a testpdf with content ${i}`, (done) => {
                chai.request(server)
                    .get(`/testresources/${i}.pdf`)
                    .buffer()
                    .parse(binaryParser)
                    .end((err, res) => {
                        res.should.have.status(200);
                        pdfjsLib.getDocument({
                            data: res.body
                        }).then(function (doc) {
                            doc.numPages.should.be.equal(1);
                            doc.getPage(1).then(function (page) {
                                page.getTextContent().then(function (textContent) {
                                    textContent.items[0].str.should.be.equal(String(i));
                                    done();
                                });
                            });
                        });
                    });
            });
        });
    }

    for (let i = 1; i <= 5; i++) {
        describe(`GET /testresources/${i}.txt`, () => {
            it(`it should return a textfile with content ${i}`, (done) => {
                chai.request(server)
                    .get(`/testresources/${i}.txt`)
                    .end((err, res) => {
                        res.should.have.status(200);
                        res.text.startsWith(String(i)).should.be.true;
                        done();
                    });
            });
        });
    }
});
# Count 'cpu cores' lines in /proc/cpuinfo with a single grep -c
# (fixed: useless `more | grep | wc -l` pipeline; variables now quoted).
# NOTE(review): this counts one line per reported processor entry, not the
# cores-per-package value — confirm that is the intended metric.
cpuCount=$(grep -c 'cpu cores' /proc/cpuinfo)
echo "There are $cpuCount installed CPU cores in the PC"
if [ "$cpuCount" -lt "$1" ]
then echo "Cannot perform task, more CPU cores required"
fi
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util.common.worker;
import static com.google.api.client.util.Lists.newArrayList;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import com.google.cloud.dataflow.sdk.runners.worker.ByteArrayShufflePosition;
import com.google.cloud.dataflow.sdk.util.common.Reiterator;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/** Unit tests for {@link BatchingShuffleEntryReader}. */
@RunWith(JUnit4.class)
public final class BatchingShuffleEntryReaderTest {
  // Fixed key / secondary key / value bytes reused for every test entry.
  private static final byte[] KEY = {0xA};
  private static final byte[] SKEY = {0xB};
  private static final byte[] VALUE = {0xC};
  // Requested shuffle range, plus the continuation positions the mocked
  // batch reader returns between successive batches.
  private static final ShufflePosition START_POSITION =
      ByteArrayShufflePosition.of("aaa".getBytes());
  private static final ShufflePosition END_POSITION =
      ByteArrayShufflePosition.of("zzz".getBytes());
  private static final ShufflePosition NEXT_START_POSITION =
      ByteArrayShufflePosition.of("next".getBytes());
  private static final ShufflePosition SECOND_NEXT_START_POSITION =
      ByteArrayShufflePosition.of("next-second".getBytes());
  @Mock private ShuffleBatchReader batchReader;
  private ShuffleEntryReader reader;

  @Before
  public void initMocksAndReader() {
    MockitoAnnotations.initMocks(this);
    reader = new BatchingShuffleEntryReader(batchReader);
  }

  /** A single batch (null continuation) is surfaced verbatim. */
  @Test
  public void readerCanRead() throws Exception {
    ShuffleEntry e1 = new ShuffleEntry(KEY, SKEY, VALUE);
    ShuffleEntry e2 = new ShuffleEntry(KEY, SKEY, VALUE);
    ArrayList<ShuffleEntry> entries = new ArrayList<>();
    entries.add(e1);
    entries.add(e2);
    when(batchReader.read(START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(entries, null));
    List<ShuffleEntry> results = newArrayList(reader.read(START_POSITION, END_POSITION));
    assertThat(results, contains(e1, e2));
  }

  /** Copying the Reiterator mid-stream must not disturb the original. */
  @Test
  public void readerIteratorCanBeCopied() throws Exception {
    ShuffleEntry e1 = new ShuffleEntry(KEY, SKEY, VALUE);
    ShuffleEntry e2 = new ShuffleEntry(KEY, SKEY, VALUE);
    ArrayList<ShuffleEntry> entries = new ArrayList<>();
    entries.add(e1);
    entries.add(e2);
    when(batchReader.read(START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(entries, null));
    Reiterator<ShuffleEntry> it = reader.read(START_POSITION, END_POSITION);
    assertThat(it.hasNext(), equalTo(Boolean.TRUE));
    assertThat(it.next(), equalTo(e1));
    // Copy after consuming e1: both iterators independently yield e2.
    Reiterator<ShuffleEntry> copy = it.copy();
    assertThat(it.hasNext(), equalTo(Boolean.TRUE));
    assertThat(it.next(), equalTo(e2));
    assertThat(it.hasNext(), equalTo(Boolean.FALSE));
    assertThat(copy.hasNext(), equalTo(Boolean.TRUE));
    assertThat(copy.next(), equalTo(e2));
    assertThat(copy.hasNext(), equalTo(Boolean.FALSE));
  }

  /** Two batches chained by a continuation position are concatenated. */
  @Test
  public void readerShouldMergeMultipleBatchResults() throws Exception {
    ShuffleEntry e1 = new ShuffleEntry(KEY, SKEY, VALUE);
    List<ShuffleEntry> e1s = Collections.singletonList(e1);
    ShuffleEntry e2 = new ShuffleEntry(KEY, SKEY, VALUE);
    List<ShuffleEntry> e2s = Collections.singletonList(e2);
    when(batchReader.read(START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(e1s, NEXT_START_POSITION));
    when(batchReader.read(NEXT_START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(e2s, null));
    List<ShuffleEntry> results = newArrayList(reader.read(START_POSITION, END_POSITION));
    assertThat(results, contains(e1, e2));
    // Exactly one batch fetch per continuation position, and nothing else.
    verify(batchReader).read(START_POSITION, END_POSITION);
    verify(batchReader).read(NEXT_START_POSITION, END_POSITION);
    verifyNoMoreInteractions(batchReader);
  }

  /** Empty intermediate batches are skipped but their continuations followed. */
  @Test
  public void readerShouldMergeMultipleBatchResultsIncludingEmptyShards()
      throws Exception {
    List<ShuffleEntry> e1s = new ArrayList<>();
    List<ShuffleEntry> e2s = new ArrayList<>();
    ShuffleEntry e3 = new ShuffleEntry(KEY, SKEY, VALUE);
    List<ShuffleEntry> e3s = Collections.singletonList(e3);
    when(batchReader.read(START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(e1s, NEXT_START_POSITION));
    when(batchReader.read(NEXT_START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(e2s, SECOND_NEXT_START_POSITION));
    when(batchReader.read(SECOND_NEXT_START_POSITION, END_POSITION))
        .thenReturn(new ShuffleBatchReader.Batch(e3s, null));
    List<ShuffleEntry> results = newArrayList(reader.read(START_POSITION, END_POSITION));
    assertThat(results, contains(e3));
    verify(batchReader).read(START_POSITION, END_POSITION);
    verify(batchReader).read(NEXT_START_POSITION, END_POSITION);
    verify(batchReader).read(SECOND_NEXT_START_POSITION, END_POSITION);
    verifyNoMoreInteractions(batchReader);
  }
}
|
import express from 'express'
import login from './login/login'
import register from './register/register'
const registryRouter = express.Router()
// TODO(review): /login must become POST before production — credentials on a
// GET end up in URLs, browser history, and server logs. Left as GET for the
// current dev workflow.
registryRouter.get('/login', login)
registryRouter.post('/register', register)
export default registryRouter
|
#include <Shlwapi.h>
#include "GetThisPath.h"
//
#pragma comment(lib, "shlwapi.lib")
// Copy this module's directory (executable path minus the file name) into
// dest. Returns dest on success, NULL on bad arguments or API failure.
TCHAR* GetThisPath(TCHAR* dest, size_t destSize)
{
    if (!dest) return NULL;
    // Original contract: caller must supply at least MAX_PATH characters.
    if (MAX_PATH > destSize) return NULL;
    DWORD length = GetModuleFileName(NULL, dest, (DWORD)destSize);
    if (length == 0) return NULL;  // fixed: failure was previously ignored
    PathRemoveFileSpec(dest);
    return dest;
}
BOOL SetWorkingDirectory2AppPath() //
{
LPTSTR szDest = (LPTSTR)GlobalAlloc(GPTR, MAX_PATH);
GetThisPath(szDest, MAX_PATH);
return SetCurrentDirectory(szDest);
GlobalFree(szDest);
}
|
<gh_stars>1-10
package org.egovframe.rte.fdl.logging.sample;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.stereotype.Component;
@Component("logLevelDebug")
public class LogLevelDebug {
private static Logger logger = LogManager.getLogger(LogLevelDebug.class.getName());
public void executeSomeLogic() {
logger.debug("DEBUG - LogLevelDebug.executeSomeLogic executed");
}
} |
<reponame>XiaoLin1995/vite-plugin-lint<filename>src/index.ts
// Re-export both lint plugin factories under named bindings, without
// materializing intermediate locals.
export { default as eslintPlugin } from './eslint'
export { default as stylelintPlugin } from './stylelint'
|
package com.lewisallen.rtdptiCache.tests;
import com.lewisallen.rtdptiCache.RtdptiCacheApplication;
import org.junit.jupiter.api.Test;
/** Smoke test: the application's main entry point must run without throwing. */
class RtdptiCacheApplicationTest {
    @Test
    void main() {
        // No assertions — an exception during startup fails the test.
        RtdptiCacheApplication.main(new String[]{});
    }
}
#ifndef _GDT_H
#define _GDT_H 1

/* Initialize the Global Descriptor Table.
 * Fixed: `void gdt_init()` declared an unprototyped function in C;
 * `(void)` makes it a proper prototype so calls are argument-checked. */
void gdt_init(void);

#endif
|
<reponame>masogit/dashboard
var express = require('express');
var http = require('http-proxy');
var path = require('path');
var app = express();
var proxy = http.createProxyServer();
var bodyParser = require('body-parser');
var fs = require('fs');
app.use(bodyParser.urlencoded({ 'extended': 'true' })); // parse application/x-www-form-urlencoded
app.use(bodyParser.json()); // parse application/json
// Serve the built front-end bundle.
app.use(express.static(__dirname + '/../dist'));
// Forward device-manager API calls to the upstream service.
app.use('/devicemanager', (req, res) => {
    // http://www.zhiyuninfo.com:8080/devicemanager-1.0/dmrest/devicetypes
    proxy.web(req, res, { target: "http://www.zhiyuninfo.com:9091" });
});
// Serve a JSON data file. Route params cannot contain '/', so extra path
// segments cannot be injected here.
app.get('/data/:folder/:filename', function (req, res) {
    var filename = __dirname + '/../data/' + req.params.folder + '/' + req.params.filename + '.json';
    fs.readFile(filename, 'utf8', (err, data) => {
        if (err) {
            // Fixed: `throw err` inside an async callback would crash the whole
            // server process; report the failure to the client instead.
            console.log(err);
            res.status(404).send({ status: 'error', msg: 'file not found' });
            return;
        }
        try {
            res.send(JSON.parse(data));
        } catch (e) {
            // Fixed: a malformed file no longer throws out of the callback.
            res.status(500).send({ status: 'error', msg: 'invalid JSON in file' });
        }
    });
});
// Persist the posted JSON body (pretty-printed) into the matching data file.
app.post('/data/:folder/:filename', function (req, res) {
    var filename = __dirname + '/../data/' + req.params.folder + '/' + req.params.filename + '.json';
    fs.writeFile(filename, JSON.stringify(req.body, null, ' '), 'utf8', (err) => {
        if (err) {
            console.log(err);
            res.send({ status: 'error', msg: 'update file failed' });
        }
        else
            res.send({ status: 'ok', msg: 'update file success' });
    });
});
// SPA fallback: every other GET serves the front-end entry point so
// client-side routing works on hard refresh.
app.get('/*', function (req, res) {
    res.sendFile(path.resolve(path.join(__dirname, '/../dist/index.html')));
});
app.listen(8080, function () {
    console.log('Example app listening on port 8080!');
});
|
<gh_stars>10-100
export const ANIMATED_COMPONENT_TRANSITION_MS = 200;
// Shared ease-out curve; duration stays in sync with the exported constant.
const cubic = `${ANIMATED_COMPONENT_TRANSITION_MS}ms cubic-bezier(0, 0, 0, 1.07)`;
const transitions = {
  cubic,
};
export default transitions;
|
#!/usr/bin/env bash
# Let the user pick a tmux key binding via fzf and execute the bound command.
# Fall back to plain `sed` unless an alternative was configured.
if [[ "$TMUX_FZF_SED"x == ""x ]]; then
    TMUX_FZF_SED="sed"
fi
FZF_DEFAULT_OPTS=$(echo $FZF_DEFAULT_OPTS | $TMUX_FZF_SED -r -e '$a --header="select a key binding"')
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Offer every binding plus a synthetic [cancel] entry; honor user fzf options.
if [[ "$TMUX_FZF_OPTIONS"x == ""x ]]; then
    TARGET_ORIGIN=$(tmux list-keys | $TMUX_FZF_SED '1i [cancel]' | "$CURRENT_DIR/.fzf-tmux")
else
    TARGET_ORIGIN=$(tmux list-keys | $TMUX_FZF_SED '1i [cancel]' | "$CURRENT_DIR/.fzf-tmux" "$TMUX_FZF_OPTIONS")
fi
if [[ "$TARGET_ORIGIN" == "[cancel]" ]]; then
    exit
fi
# Enter copy-mode first when a copy-mode (non-prefix) binding was chosen.
if [[ $(echo "$TARGET_ORIGIN" | grep -o "copy-mode")x != ""x && $(echo "$TARGET_ORIGIN" | grep -o "prefix")x == x ]]; then
    tmux copy-mode
fi
# Strip the fixed-width key-table columns and hand the bound command to tmux.
# (Fixed: this line was duplicated in both branches of the original if/else.)
echo "$TARGET_ORIGIN" | $TMUX_FZF_SED -r 's/^.{46}//g' | xargs tmux
|
package com.anomalydetect.IsolationTree;
import com.anomalydetect.Tool.DisplayTool;
import com.anomalydetect.Tool.FileTool;
import org.junit.Before;
import org.junit.Test;
import java.io.FileNotFoundException;
/** Smoke test for IsolationTreeTool's time-series analysis. */
public class IsolationTreeToolTest {
    // Sample series loaded from the bundled data.json fixture.
    public double[] testData;
    @Before
    public void setUp() throws FileNotFoundException {
        testData = FileTool.getData("data.json");
    }
    // Runs the analysis and renders the result; passes as long as no
    // exception is thrown (the test makes no assertions).
    @Test
    public void timeSeriesAnalyse() {
        IsolationTreeTool isolationTreeTool = new IsolationTreeTool();
        isolationTreeTool.timeSeriesAnalyse(testData);
        DisplayTool.showResult(isolationTreeTool);
    }
}
<filename>src/org/opoo/oqs/core/mapper/BeanPropertyMapper.java
/*
* $Id$
*
* Copyright 2006-2008 <NAME>. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opoo.oqs.core.mapper;
import java.beans.PropertyDescriptor;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opoo.oqs.MapperException;
import org.opoo.oqs.core.Property;
import org.opoo.oqs.core.PropertyTypeAware;
import org.opoo.util.ClassUtils;
/**
 * Maps one ResultSet row onto an instance of {@code beanClass}, delegating
 * each property to a child {@link PropertyMapper}. When {@code beanClass} is
 * an interface, a backing object is created from the mapped value map instead
 * of instantiating the class directly.
 *
 * @author <NAME>(<EMAIL>)
 * @version 1.1
 */
public class BeanPropertyMapper extends AbstractMultiPropertyMapper {
    private static final Log log = LogFactory.getLog(BeanPropertyMapper.class);
    // Target type produced for each mapped row (class or interface).
    private final Class beanClass;
    public BeanPropertyMapper(String name, String string,
                              PropertyMapper[] mappers, String className) {
        super(name, string, mappers);
        this.beanClass = forName(className);
    }
    public BeanPropertyMapper(String name, String string,
                              PropertyMapper[] mappers, Class beanClass) {
        super(name, string, mappers);
        this.beanClass = beanClass;
    }
    public BeanPropertyMapper(Property sp, PropertyMapper[] mappers,
                              String className) {
        super(sp, mappers);
        this.beanClass = forName(className);
    }
    public BeanPropertyMapper(Property sp, PropertyMapper[] mappers,
                              Class beanClass) {
        super(sp, mappers);
        this.beanClass = beanClass;
    }
    /**
     * Maps the current row to a new instance of the target type.
     *
     * @param rs ResultSet positioned on the row to map
     * @param rowNum int row index, forwarded to the child mappers
     * @return Object the populated bean, or null if instantiation failed
     * @throws SQLException
     */
    public Object map(ResultSet rs, int rowNum) throws SQLException {
        if (beanClass.isInterface()) {
            // Interfaces cannot be instantiated; build the instance from the
            // mapped values instead.
            Map values = getValues(rs, rowNum, beanClass);
            return ClassUtils.createObject(values, beanClass);
        } else {
            //if is not interface
            Object target = newInstance(beanClass); //ClassUtils.newInstance(className);
            if (target == null) {
                log.debug("cannot instance: " + beanClass.getName());
                return null;
            }
            Map values = getValues(rs, rowNum, beanClass);
            populate(target, values);
            return target;
        }
        /*
         PropertyDescriptor[] pds = PropertyUtils.getPropertyDescriptors(target);
         for (int i = 0; i < mappers.length; i++)
         {
             PropertyMapper pm = mappers[i];
             if(PropertyTypeAware.class.isInstance(pm))//(pm instanceof PropertyTypeAware)
             {
                 PropertyDescriptor pd = findPropertyDescriptor(pds, mappers[i].getName());
                 if (pd != null)
                 {
                     ((PropertyTypeAware)pm).setPropertyType(pd.getPropertyType());
                 }
             }
             Object value = mappers[i].mapRow(rs, rowNum);
             setProperty(target, mappers[i].getName(), value);
         }
         return target;
        */
    }
    /**
     * Collects every child mapper's value for this row, keyed by property
     * name. Map<String, ?> in JDK1.5
     *
     * @param rs ResultSet
     * @param rowNum int
     * @param type Class whose property descriptors drive type-aware mappers
     * @return Map
     * @throws SQLException
     */
    private Map getValues(ResultSet rs, int rowNum, Class type) throws
            SQLException {
        Map map = new HashMap();
        PropertyDescriptor[] pds = PropertyUtils.getPropertyDescriptors(type);
        for (int i = 0; i < mappers.length; i++) {
            PropertyMapper pm = mappers[i];
            // Give type-aware mappers the target property's type before mapping.
            if (PropertyTypeAware.class.isInstance(pm)) { //(pm instanceof PropertyTypeAware)
                PropertyDescriptor pd = findPropertyDescriptor(pds,
                        mappers[i].getName());
                if (pd != null) {
                    ((PropertyTypeAware) pm).setPropertyType(pd.getPropertyType());
                }
            }
            Object value = pm.map(rs, rowNum);
            map.put(pm.getName(), value);
            //setProperty(target, mappers[i].getName(), value);
        }
        return map;
    }
    /** Linear scan for the descriptor whose name matches; null when absent. */
    private PropertyDescriptor findPropertyDescriptor(PropertyDescriptor[] pds,
            String name) {
        if (name == null) {
            return null;
        }
        for (int i = 0; i < pds.length; i++) {
            if (name.equals(pds[i].getName())) {
                return pds[i];
            }
        }
        return null;
    }
    /**
     * Copies the non-null mapped values onto the bean.
     * Object target, Map<String, ?> values)
     *
     * @param target Object
     * @param values Map
     */
    private void populate(Object target, Map values) {
        Iterator it = values.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry entry = (Entry) it.next();
            // Null values are skipped so primitive-typed setters are not hit
            // with null.
            if (entry.getValue() != null) {
                setProperty(target, (String) entry.getKey(), entry.getValue());
            }
        }
        /*
         for(Map.Entry entry: values.entrySet())
         {
             if(entry.getValue() != null)
             {
                 setProperty(target, entry.getKey(), entry.getValue());
             }
         }*/
    }
    // Best-effort setter: failures are logged, not propagated.
    private void setProperty(Object target, String name, Object value) {
        try {
            PropertyUtils.setProperty(target, name, value);
        } catch (Exception ex) {
            log.error("set property error", ex);
        }
    }
    // Resolves the class name eagerly so a bad name fails at construction time.
    private Class forName(String className) {
        try {
            return ClassUtils.forName(className);
        } catch (ClassNotFoundException ex) {
            log.error(ex);
            throw new MapperException(ex);
        }
    }
    private Object newInstance(Class clazz) throws SQLException {
        try {
            return clazz.newInstance();
        } catch (Exception ex) {
            log.error(ex);
            throw new MapperException(ex);
        }
    }
    public Class getReturnType() {
        return this.beanClass;
    }
}
|
# Evaluate the step-7 model (512+0+512-N-IP) on the WikiText-103 raw validation
# set: each example is augmented to keep only nouns/verbs/adjectives/adverbs in
# the first half-quarter, and perplexity is scored on the penultimate quarter.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N-IP/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N-IP/7-512+0+512-N-VB-ADJ-ADV-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half_quarter --eval_function penultimate_quarter_eval
"""
Check if a linked list is sorted in ascending order or not
"""
class Node:
    """A singly linked list node."""

    def __init__(self, data=None):
        # Payload carried by this node.
        self.data = data
        # Successor node; None marks the tail.
        self.next = None
def is_sorted_ascending(head):
    """Return True if the linked list starting at `head` is strictly ascending.

    An empty or single-node list counts as sorted. Equal adjacent values are
    rejected (the comparison is `>=`, matching the original behavior).
    """
    current = head
    while current is not None and current.next is not None:
        if current.data >= current.next.data:
            return False
        current = current.next
    return True
#!/bin/bash
# Copyright 2018 - 2021 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the directory containing this script so it can be run from anywhere.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Remove containers/networks left over from a previous run.
$DIR/cleanup.sh
docker network create --driver bridge pgnet
# Launch the pgrestore container on pgnet, pointed at the "primary" host.
# NOTE(review): credentials are hard-coded for this example setup.
docker run \
	-v backups:/pgdata \
	-e PGRESTORE_HOST=primary \
	-e PGRESTORE_DB=postgres \
	-e PGRESTORE_USER=postgres\
	-e PGRESTORE_PASS=password \
	--name=pgrestore \
	--hostname=pgrestore \
	--network=pgnet \
	-d $CCP_IMAGE_PREFIX/radondb-pgrestore:$CCP_IMAGE_TAG
|
package section1;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.Thread.State;
import section1.Calculator;
public class Main {
    /**
     * Launches ten Calculator threads — even indices at MAX_PRIORITY, odd at
     * MIN_PRIORITY — and logs every observed state transition to
     * .\data\log.txt until all threads terminate.
     */
    public static void main(String[] args){
        // Thread priority information (fixed comment typo "infomation")
        System.out.printf("Minimum Priority: %s\n", Thread.MIN_PRIORITY);
        System.out.printf("Normal Priority: %s\n", Thread.NORM_PRIORITY);
        // Fixed output typo: "Maximun" -> "Maximum"
        System.out.printf("Maximum Priority: %s\n", Thread.MAX_PRIORITY);
        Thread threads[];
        Thread.State status[];
        // Launch 10 threads to do the operation, 5 with the max
        // priority, 5 with the min
        threads = new Thread[10];
        status = new Thread.State[10];
        for (int i = 0; i < 10; i++) {
            threads[i] = new Thread(new Calculator());
            if ((i % 2) == 0) {
                threads[i].setPriority(Thread.MAX_PRIORITY);
            } else {
                threads[i].setPriority(Thread.MIN_PRIORITY);
            }
            threads[i].setName("My Thread " + i);
        }
        // try-with-resources closes the log even if writing fails mid-run.
        try (FileWriter file = new FileWriter(".\\data\\log.txt"); PrintWriter pw = new PrintWriter(file);) {
            // Write the initial status of the threads
            for (int i = 0; i < 10; i++) {
                pw.println("Main : Status of Thread " + i + " : " + threads[i].getState());
                status[i] = threads[i].getState();
            }
            // Start the ten threads
            for (int i = 0; i < 10; i++) {
                threads[i].start();
            }
            // Wait for the finalization of the threads. We save the status of
            // the threads and only write the status if it changes.
            // NOTE(review): this is a busy-wait poll, deliberate for the demo.
            boolean finish = false;
            while (!finish) {
                for (int i = 0; i < 10; i++) {
                    if (threads[i].getState() != status[i]) {
                        writeThreadInfo(pw, threads[i], status[i]);
                        status[i] = threads[i].getState();
                    }
                }
                finish = true;
                for (int i = 0; i < 10; i++) {
                    finish = finish && (threads[i].getState() == State.TERMINATED);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /**
     * This method writes the state of a thread in a file
     *
     * @param pw
     *            : PrintWriter to write the data
     * @param thread
     *            : Thread whose information will be written
     * @param state
     *            : Old state of the thread
     */
    private static void writeThreadInfo(PrintWriter pw, Thread thread, State state) {
        pw.printf("Main : Id %d - %s\n", thread.getId(), thread.getName());
        pw.printf("Main : Priority: %d\n", thread.getPriority());
        pw.printf("Main : Old State: %s\n", state);
        pw.printf("Main : New State: %s\n", thread.getState());
        pw.printf("Main : ************************************\n");
    }
}
|
# frozen_string_literal: true
RSpec.describe Resque::Plugins::Prioritize::ResqueSchedulerUtilExtension do
  describe '.constantize' do
    subject(:klass) { Resque::Scheduler::Util.constantize(klass_name) }

    context 'without priority' do
      let(:klass_name) { 'TestWorker' }

      it { is_expected.to eq TestWorker }
      it { expect(klass.instance_variable_get(:@resque_prioritize_priority)).to eq nil }
    end

    context 'with priority' do
      let(:klass_name) { 'TestWorker{{priority}:20}' }

      it { is_expected.to eq TestWorker.with_priority(20) }
      it { expect(klass.instance_variable_get(:@resque_prioritize_priority)).to eq 20 }
    end

    context 'with invalid data' do
      # Trailing garbage after the priority suffix must not resolve to a class.
      let(:klass_name) { 'TestWorker{{priority}:20}test' }

      its_block { is_expected.to raise_error NameError }
    end

    context 'when instance of class' do
      let(:klass_name) { TestWorker }

      # Fixed: 'is_expected. to' (stray space after the dot) — valid Ruby, but
      # inconsistent with every other expectation in this file.
      it { is_expected.to eq TestWorker }
    end
  end
end
|
module FML
  # Validations classes have two required methods:
  #
  # initialize(field, data, form)
  #   create the validation. `field` is the FMLField object representing
  #   the field in which the validation was created. `data` is the value
  #   given to in the FML spec. `form` is the FMLForm within which the field
  #   was created.
  #
  #   Given a spec like:
  #     form:
  #       title: "Validation Example"
  #       id: <KEY>
  #       version: "1"
  #       fieldsets:
  #         - fieldset:
  #           - field:
  #               name: "validate"
  #               fieldType: "text"
  #               label: "Dependent"
  #               validations:
  #                 - minLength: 10
  #
  #   `field` would be the object represting the `validate` field, `data` would
  #   be 10, and form would be the object representing the entire form spec.
  #
  # validate()
  #   validate that the condition the validation represents is true. Raises
  #   an FML::ValidationError if it's not, otherwise may return anything
  #
  # conforms!()
  #   ensure that the validation is applied to the appropriate type of field.
  #   raises FML::InvalidSpec if it's not, otherwise may return anything.
  #
  # required?()
  #   return true if the condition the validation depends on is satisfied.
  #   This field is required only for validations in form.rb's
  #   @@conditional_classes
  #
  # ideas: GreaterThan, LessThan, EarlierThan, LaterThan
  class BaseValidation
    attr_accessor :field, :parent, :form

    # Default: no type constraints; subclasses override to enforce them.
    def conforms!
    end
  end

  # Requires this field to be filled in whenever a boolean parent field is
  # true (or false, when the field name is prefixed with "!").
  class RequiredIfBoolean < BaseValidation
    def initialize(field, required_field_name, form)
      @form = form
      @negative = required_field_name.start_with? "!"
      # If the assertion is negative, we require the parent to be true, and
      # vice versa
      @required = !@negative
      # strip the ! if there was one
      required_field_name = required_field_name[1..-1] if @negative
      @field = field
      if !form.fields.has_key? required_field_name
        raise InvalidSpec.new(<<-EOM)
Invalid field name in requiredIf validation: #{required_field_name}
from field: #{field}
        EOM
      end
      @parent = form.fields[required_field_name]
    end

    def conforms!
      if ["yes_no", "checkbox"].index(@parent.type).nil?
        raise InvalidSpec.new(<<-EOM)
Field #{@field.name} depends on field #{@parent.name}, which is not a boolean.
Fields may only depend on "yes_no" or "checkbox" fields, but #{@parent.name} is a
"#{@parent.type}" field.
        EOM
      end
    end

    # The requirement only fires when the parent is visible and matches the
    # wanted boolean value.
    def required?
      @parent.visible? && (@parent.value) == @required
    end

    def valid?
      !(required? && @field.empty?)
    end

    def validate
      # if parent is @required, child must be non-empty. Note that @parent is
      # required to be a boolean element, so we don't need to worry about ""
      # being a truthy value
      if not valid?
        debug_message = <<-EOM
Field #{@field.name}:#{@field.value.inspect} must be present when #{@parent.name}:#{@parent.value.inspect} is #{@required}
        EOM
        user_message = "This field is required"
        err = DependencyError.new(user_message, debug_message, @field.name, @parent.name)
        @field.errors << err
        raise err
      end
    end
  end

  # Requires this field to be filled in whenever a text/select parent field
  # holds one of the configured values.
  class RequiredIfTextEquals < BaseValidation
    def initialize(field, data, form)
      @field = field
      # Accept either a single 'value' or a list under 'values'.
      @wanted_values = Array(data['value'] || data['values'])
      if !form.fields.has_key? data['field']
        raise InvalidSpec.new(<<-EOM)
Invalid field name in requiredIf validation: #{data['field']}
from field: #{field}
        EOM
      end
      @parent = form.fields[data['field']]
    end

    def conforms!
      if ["select", "text"].index(@parent.type).nil?
        raise InvalidSpec.new(<<-EOM)
Field #{@field.name} depends on field #{@parent.name}, which is not a text field.
Fields may only depend on "select" or "text" fields, but #{@parent.name} is a
"#{@parent.type}" field.
        EOM
      end
    end

    def required?
      @wanted_values.include?(@parent.value)
    end

    def valid?
      !(required? && @field.empty?)
    end

    def validate
      if not valid?
        # NOTE(review): this debug message reads as if the *field* must equal
        # the wanted values; it is actually the parent that matched. Consider
        # rewording.
        debug_message = <<-EOM
Field #{@field.name}:#{@field.value.inspect} must be #{@wanted_values}
        EOM
        user_message = "when #{@parent.name} is '#{@wanted_values}', #{@field.name} must be filled in"
        err = ValidationError.new(user_message, debug_message, @field.name)
        @field.errors << err
        raise err
      end
    end
  end

  # Enforces a minimum character count on a (possibly nil) field value.
  class MinLengthValidation < BaseValidation
    def initialize(field, data, form)
      @field = field
      @minlength = data
    end

    def valid?
      # @field must be either nil or have length >= minLength
      @field.value.nil? || @field.value.length >= @minlength
    end

    def validate
      if not valid?
        debug_message = <<-EOM
Field #{@field.name}:#{@field.value.inspect} must be longer than #{@minlength} characters
        EOM
        user_message = "Must be longer than #{@minlength} characters"
        err = ValidationError.new(user_message, debug_message, @field.name)
        @field.errors << err
        raise err
      end
    end
  end
end
|
<filename>internal/modeling/addresstree.go
package modeling
import (
"fmt"
"github.com/lavalamp-/ipv666/internal"
"github.com/lavalamp-/ipv666/internal/addressing"
"github.com/lavalamp-/ipv666/internal/logging"
"github.com/lavalamp-/ipv666/internal/persist"
"net"
)
// AddressTree is the root of a nybble (4-bit) trie over addresses; AddIP
// consumes 32 nybbles per address (128 bits, i.e. IPv6). ChildrenCount on
// the root is the total number of distinct addresses stored.
type AddressTree struct {
	ChildrenCount uint32                     `msgpack:"c"`
	Children      map[uint8]*AddressTreeNode `msgpack:"h"`
}

// AddressTreeNode is an interior or leaf node of the trie. Depth is the
// number of nybbles consumed from the root (a full address leaf sits at
// depth 32); ChildrenCount is the number of addresses under this node.
type AddressTreeNode struct {
	ChildrenCount uint32                     `msgpack:"c"`
	Children      map[uint8]*AddressTreeNode `msgpack:"h"`
	Depth         int                        `msgpack:"d"`
}
// newAddressTree builds an empty root with an initialized child map.
func newAddressTree() *AddressTree {
	tree := &AddressTree{}
	tree.Children = map[uint8]*AddressTreeNode{}
	return tree
}
// newAddressTreeNode builds an empty node at the given trie depth.
func newAddressTreeNode(depth int) *AddressTreeNode {
	node := &AddressTreeNode{Depth: depth}
	node.Children = map[uint8]*AddressTreeNode{}
	return node
}
// CreateFromAddresses builds a tree containing every address in toAdd,
// logging progress every emitFreq insertions.
func CreateFromAddresses(toAdd []*net.IP, emitFreq int) *AddressTree {
	tree := newAddressTree()
	tree.AddIPs(toAdd, emitFreq)
	return tree
}
// Size returns the number of distinct addresses stored in the tree.
func (addrTree *AddressTree) Size() int {
	return int(addrTree.ChildrenCount)
}
// AddIP inserts a single address into the trie. It returns true when the
// address was new and false when it was already present.
func (addrTree *AddressTree) AddIP(toAdd *net.IP) bool {
	nybbles := addressing.GetNybblesFromIP(toAdd, 32)
	if addrTree.containsIPByNybbles(nybbles) {
		return false
	}
	first := nybbles[0]
	child, ok := addrTree.Children[first]
	if !ok {
		child = newAddressTreeNode(1)
		addrTree.Children[first] = child
	}
	child.addNybbles(nybbles[1:])
	addrTree.ChildrenCount++
	return true
}
// AddIPs inserts every address in toAdd, returning (added, skipped)
// counts. Progress is logged every emitFreq insertions.
// BUG FIX: emitFreq <= 0 previously caused a division-by-zero panic in
// the modulo; it now simply disables progress logging.
func (addrTree *AddressTree) AddIPs(toAdd []*net.IP, emitFreq int) (int, int) {
	added, skipped := 0, 0
	for i, curAdd := range toAdd {
		if emitFreq > 0 && i != 0 && i%emitFreq == 0 {
			logging.Infof("Adding IP address %d out of %d to address tree.", i, len(toAdd))
		}
		if addrTree.AddIP(curAdd) {
			added++
		} else {
			skipped++
		}
	}
	return added, skipped
}
// GetAllIPs materializes every address stored in the tree.
func (addrTree *AddressTree) GetAllIPs() []*net.IP {
	if addrTree.ChildrenCount == 0 {
		return []*net.IP{}
	}
	var results []*net.IP
	for nybble, child := range addrTree.Children {
		results = append(results, child.getAllIPs([]uint8{nybble})...)
	}
	return results
}
// seekChildByNybbles walks the trie along nybbles and returns the node at
// the end of that path, or nil when the path is absent. The error result
// is always nil in the current implementation.
// NOTE(review): callers must pass a non-empty slice (nybbles[0] panics on
// an empty one) and must nil-check the returned node before use.
func (addrTree *AddressTree) seekChildByNybbles(nybbles []uint8) (*AddressTreeNode, error) {
	if val, ok := addrTree.Children[nybbles[0]]; !ok {
		return nil, nil
	} else {
		return val.seekNode(nybbles[1:]), nil
	}
}
// getSeekNybbles converts a CIDR range's network bits into trie nybbles.
// It errors when the prefix length is not a multiple of 4, since the trie
// can only be partitioned on nybble boundaries.
func (addrTree *AddressTree) getSeekNybbles(fromRange *net.IPNet) ([]uint8, error) {
	ones, _ := fromRange.Mask.Size()
	if ones%4 != 0 {
		return nil, fmt.Errorf("cannot get IPs from a network range that isn't on a nybble boundary (ie: modulo 4, mask size was %d)", ones)
	}
	return addressing.GetNybblesFromIP(&fromRange.IP, ones/4), nil
}
// GetIPsInRange returns every stored address that falls inside fromRange.
// An error is returned only when the range is not on a nybble boundary.
// BUG FIX: when no stored address matched the range, seekChildByNybbles
// returned a nil node and the subsequent getAllIPs call dereferenced a
// nil receiver (reading Depth panics). Return an empty slice instead.
func (addrTree *AddressTree) GetIPsInRange(fromRange *net.IPNet) ([]*net.IP, error) {
	networkNybbles, err := addrTree.getSeekNybbles(fromRange)
	if err != nil {
		return nil, err
	}
	if len(networkNybbles) == 0 {
		// A /0 range covers the whole tree.
		return addrTree.GetAllIPs(), nil
	}
	child, err := addrTree.seekChildByNybbles(networkNybbles)
	if err != nil {
		return nil, err
	}
	if child == nil {
		return []*net.IP{}, nil
	}
	return child.getAllIPs(networkNybbles), nil
}
// GetIPsInGenRange returns the stored addresses matched by a generation
// range: exact nybbles in AddrNybbles, with the positions listed in
// WildIndices matching any child.
func (addrTree *AddressTree) GetIPsInGenRange(fromRange *GenRange) []*net.IP {
	if _, wild := fromRange.WildIndices[0]; wild {
		// Wildcard in the first position: fan out over every child.
		var results []*net.IP
		for nybble, child := range addrTree.Children {
			results = append(results, child.getIPsInGenRange([]uint8{nybble}, fromRange.AddrNybbles[1:], fromRange.WildIndices)...)
		}
		return results
	}
	child, ok := addrTree.Children[fromRange.AddrNybbles[0]]
	if !ok {
		return []*net.IP{}
	}
	return child.getIPsInGenRange([]uint8{fromRange.AddrNybbles[0]}, fromRange.AddrNybbles[1:], fromRange.WildIndices)
}
// CountIPsInRange counts the stored addresses inside fromRange. An error
// is returned only when the range is not on a nybble boundary.
// BUG FIX: a range with no matching subtree made seekChildByNybbles
// return nil, and reading ChildrenCount off the nil node panicked; such
// ranges now report 0.
func (addrTree *AddressTree) CountIPsInRange(fromRange *net.IPNet) (uint32, error) {
	networkNybbles, err := addrTree.getSeekNybbles(fromRange)
	if err != nil {
		return 0, err
	}
	if len(networkNybbles) == 0 {
		// /0 range covers everything.
		return addrTree.ChildrenCount, nil
	} else if len(networkNybbles) == 32 {
		// Full-length prefix identifies a single address.
		if addrTree.containsIPByNybbles(networkNybbles) {
			return 1, nil
		}
		return 0, nil
	}
	child, err := addrTree.seekChildByNybbles(networkNybbles)
	if err != nil {
		return 0, err
	}
	if child == nil {
		return 0, nil
	}
	return child.ChildrenCount, nil
}
// CountIPsInGenRange counts the stored addresses matched by a generation
// range (wildcard positions in WildIndices match any child).
func (addrTree *AddressTree) CountIPsInGenRange(fromRange *GenRange) int {
	if _, wild := fromRange.WildIndices[0]; wild {
		total := 0
		for _, child := range addrTree.Children {
			total += child.countIPsInGenRange(fromRange.AddrNybbles[1:], fromRange.WildIndices)
		}
		return total
	}
	child, ok := addrTree.Children[fromRange.AddrNybbles[0]]
	if !ok {
		return 0
	}
	return child.countIPsInGenRange(fromRange.AddrNybbles[1:], fromRange.WildIndices)
}
// Save serializes the whole tree to filePath via the persist layer
// (presumably msgpack, per the struct tags — confirm in persist.Save).
func (addrTree *AddressTree) Save(filePath string) error {
	return persist.Save(filePath, addrTree)
}
// LoadAddressTreeFromFile deserializes a tree previously written by Save.
// A non-nil (possibly zero-valued) tree is returned even on error.
// TODO abstract this away
func LoadAddressTreeFromFile(filePath string) (*AddressTree, error) {
	loaded := &AddressTree{}
	err := persist.Load(filePath, loaded)
	return loaded, err
}
// ContainsIP reports whether toCheck has been stored in the tree.
func (addrTree *AddressTree) ContainsIP(toCheck *net.IP) bool {
	return addrTree.containsIPByNybbles(addressing.GetNybblesFromIP(toCheck, 32))
}
// containsIPByNybbles checks membership for an already-nybblized address.
// Callers must pass a non-empty slice.
func (addrTree *AddressTree) containsIPByNybbles(nybbles []uint8) bool {
	child, ok := addrTree.Children[nybbles[0]]
	if !ok {
		return false
	}
	return child.containsNybbles(nybbles[1:])
}
// addNybbles descends the trie along nybbles, creating missing nodes.
// Every node on the path gets ChildrenCount incremented by exactly one;
// this stays an accurate per-subtree address count because AddIP only
// calls in after confirming the address is not already present.
func (addrTreeNode *AddressTreeNode) addNybbles(nybbles []uint8) {
	if len(nybbles) == 0 {
		return
	} else if _, ok := addrTreeNode.Children[nybbles[0]]; !ok {
		addrTreeNode.Children[nybbles[0]] = newAddressTreeNode(addrTreeNode.Depth + 1)
	}
	addrTreeNode.Children[nybbles[0]].addNybbles(nybbles[1:])
	addrTreeNode.ChildrenCount++
}
// containsNybbles reports whether the remaining nybble path exists below
// this node. Callers must pass a non-empty slice.
func (addrTreeNode *AddressTreeNode) containsNybbles(nybbles []uint8) bool {
	child, ok := addrTreeNode.Children[nybbles[0]]
	switch {
	case !ok:
		return false
	case len(nybbles) == 1:
		// Last nybble matched: the full path is present.
		return true
	default:
		return child.containsNybbles(nybbles[1:])
	}
}
// getAllIPs collects every address under this node. parentNybbles is the
// path from the root down to and including this node's nybble.
func (addrTreeNode *AddressTreeNode) getAllIPs(parentNybbles []uint8) []*net.IP {
	if len(addrTreeNode.Children) == 0 && addrTreeNode.Depth != 32 {
		// A childless node above depth 32 means the trie is corrupt.
		logging.Warnf("Ran out of children at depth %d when getting all IPs. This shouldn't happen.", addrTreeNode.Depth)
		return []*net.IP{}
	} else if len(addrTreeNode.Children) == 0 {
		// Depth 32: the accumulated 32 nybbles form a complete address.
		toAdd := addressing.NybblesToIP(parentNybbles)
		return []*net.IP{ toAdd }
	} else {
		var toReturn []*net.IP
		for k, v := range addrTreeNode.Children {
			// NOTE(review): append may reuse parentNybbles' backing array
			// across loop iterations; this is only safe if NybblesToIP
			// copies its input before the next iteration overwrites the
			// shared slot — confirm that assumption holds.
			toReturn = append(toReturn, v.getAllIPs(append(parentNybbles, k))...)
		}
		return toReturn
	}
}
// getIPsInRange descends along searchNybbles, then enumerates every
// address below the node where the search path ends.
func (addrTreeNode *AddressTreeNode) getIPsInRange(parentNybbles []uint8, searchNybbles []uint8) []*net.IP {
	if len(searchNybbles) == 0 {
		return addrTreeNode.getAllIPs(parentNybbles)
	}
	child, ok := addrTreeNode.Children[searchNybbles[0]]
	if !ok {
		return []*net.IP{}
	}
	return child.getIPsInRange(append(parentNybbles, searchNybbles[0]), searchNybbles[1:])
}
// seekNode returns the node reached by following seekNybbles from this
// node, or nil when the path leaves the trie.
func (addrTreeNode *AddressTreeNode) seekNode(seekNybbles []uint8) *AddressTreeNode {
	if len(seekNybbles) == 0 {
		return addrTreeNode
	}
	child, ok := addrTreeNode.Children[seekNybbles[0]]
	if !ok {
		return nil
	}
	return child.seekNode(seekNybbles[1:])
}
// getIPsInGenRange collects addresses under this node matching the
// remaining generation-range nybbles. Depths listed in wildIndices match
// any child; otherwise only the child named by rangeNybbles[0] is
// followed. parentNybbles accumulates the concrete path for leaf output.
func (addrTreeNode *AddressTreeNode) getIPsInGenRange(parentNybbles []uint8, rangeNybbles []uint8, wildIndices map[int]internal.Empty) []*net.IP {
	if len(addrTreeNode.Children) == 0 && addrTreeNode.Depth != 32 {
		// Childless node above depth 32 means the trie is corrupt.
		logging.Warnf("Ran out of children at depth %d when getting all IPs. This shouldn't happen.", addrTreeNode.Depth)
		return []*net.IP{}
	} else if len(addrTreeNode.Children) == 0 {
		// Complete 32-nybble path accumulated: emit the address.
		toAdd := addressing.NybblesToIP(parentNybbles)
		return []*net.IP{ toAdd }
	} else if _, ok := wildIndices[addrTreeNode.Depth]; ok {
		// Wildcard at this depth: recurse into every child.
		var toReturn []*net.IP
		for k, v := range addrTreeNode.Children {
			toReturn = append(toReturn, v.getIPsInGenRange(append(parentNybbles, k), rangeNybbles[1:], wildIndices)...)
		}
		return toReturn
	} else if val, ok := addrTreeNode.Children[rangeNybbles[0]]; !ok {
		return []*net.IP{}
	} else {
		return val.getIPsInGenRange(append(parentNybbles, rangeNybbles[0]), rangeNybbles[1:], wildIndices)
	}
}
func (addrTreeNode *AddressTreeNode) countIPsInGenRange(rangeNybbles []uint8, wildIndices map[int]internal.Empty) int {
if len(rangeNybbles) == 0 {
return 1
} else if _, ok := wildIndices[addrTreeNode.Depth]; ok {
var toReturn = 0
for _, v := range addrTreeNode.Children {
toReturn += v.countIPsInGenRange(rangeNybbles[1:], wildIndices)
}
return toReturn
} else if val, ok := addrTreeNode.Children[rangeNybbles[0]]; !ok {
return 0
} else {
return val.countIPsInGenRange(rangeNybbles[1:], wildIndices)
}
} |
from pymongo import *
from datetime import datetime
# Module-level connection: default localhost MongoDB, database "cbd",
# collection "rest". All functions below operate on this collection.
client = MongoClient()
db = client.cbd
collection = db.rest
def insert_doc(json_doc):
    """Insert one document into the ``rest`` collection.

    Returns the new document's ``_id``, or ``None`` when the insert
    failed (errors are printed rather than raised, matching the rest of
    this script).
    """
    try:
        inserted_id = collection.insert_one(json_doc).inserted_id
        print("JSON FILE inserted")
        # FIX: inserted_id was previously computed but never used;
        # returning it lets callers reference the new document.
        return inserted_id
    except Exception as e:
        print("Error: ", e)
        return None
def change_entry(condition, alteration):
    """Apply ``alteration`` to every document matching ``condition``.

    Returns pymongo's ``UpdateResult``, or ``None`` on error (printed,
    not raised).
    """
    try:
        # FIX: the result was previously stored in an unused local;
        # return it so callers can inspect matched/modified counts.
        return collection.update_many(condition, alteration)
    except Exception as e:
        print("Error: ", e)
        return None
def query_to_search(query):
    """Print every document in the collection matching ``query``."""
    for document in collection.find(query):
        print(document)
#def add_index(index, condition):
# result=collection.create_index(index, condition)
#add_index([('user_id', pymongo.ASCENDING)], unique=True) #does not work
def countLocalidades():
    """Print how many distinct ``localidade`` values exist."""
    groups = list(collection.aggregate([{"$group": {"_id": "$localidade"}}]))
    print("Numero de localidades distintas: {}".format(len(groups)))
def countRestByLocalidade():
    """Print the number of restaurants grouped by locality."""
    print("Numero de restaurantes por localidade:")
    pipeline = [{"$group": {"_id": "$localidade", "Counter": {"$sum": 1}}}]
    for group in collection.aggregate(pipeline):
        print("-> {}: {}".format(group['_id'], group['Counter']))
def countRestByLocalidadeByGastronomia():
    """Print restaurant counts grouped by (locality, cuisine) pairs."""
    print("Numero de restaurantes por localidade e gastronomia:")
    pipeline = [{"$group": {"_id": {"localidade": "$localidade", "gastronomia": "$gastronomia"}, "Counter": {"$sum": 1}}}]
    for group in collection.aggregate(pipeline):
        key = group["_id"]
        print("-> {} | {} : {}".format(key["localidade"], key["gastronomia"], group["Counter"]))
def getRestWithNameCloserTo(name):
    """Print restaurant names containing ``name`` as a literal substring."""
    import re  # local import keeps this fix self-contained

    print("Nome de restaurantes contendo '{}' no nome:".format(name))
    # https://docs.mongodb.com/manual/reference/operator/aggregation/match/
    # FIX: the raw value was passed straight to $regex, so regex
    # metacharacters in `name` ('.', '(', '*', ...) silently changed the
    # query's meaning. Escaping makes it a literal substring search.
    restaurantes = collection.aggregate([{"$match": {"nome": {"$regex": re.escape(name)}}}])
    for i in restaurantes:
        print("-> {}".format(i["nome"]))
# Demo / smoke-test sequence: insert one sample restaurant, show matching
# documents, bulk-update them, then run each aggregation report.
insert_doc({"address": {"building": "123", "coord": [-1.0, -2.0], "rua": "rua", "zipcode": "456"}, "localidade": "Aveiro", "gastronomia": "None", "grades": [{"date": datetime.utcnow(), "grade": "F", "score": 7}], "nome": "WELP", "restaurant_id": "0101010101"})
query_to_search({"gastronomia": "None"})
# NOTE(review): this sets a *top-level* "zipcode" field; the documents
# store zipcode under "address.zipcode" — confirm which was intended.
change_entry({"gastronomia": "None"},{"$set": {"zipcode": "000"}})
query_to_search({"gastronomia": "None"})
countLocalidades()
countRestByLocalidade()
countRestByLocalidadeByGastronomia()
getRestWithNameCloserTo("WE")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.