text stringlengths 1 1.05M |
|---|
-- Employees working in the same department as 'John'.
-- IN instead of = makes the query robust when several employees named
-- 'John' exist (a scalar = subquery would raise a runtime error then).
SELECT name
FROM employees
WHERE department IN (SELECT department
                     FROM employees
                     WHERE name = 'John');
#!/bin/bash
#export LD_LIBRARY_PATH=/usr/local/lib/python3.2/dist-packages/PySide:$LD_LIBRARY_PATH
# Launch the music player against the local server with the Pi music folder.
# "$@" forwards the caller's arguments verbatim; the original $* would
# re-split any argument containing whitespace.
python3 ./player.py -a localhost -l /home/pi/Music "$@"
|
<reponame>wujia28762/Tmate<filename>App/src/main/java/com/honyum/elevatorMan/activity/worker/FixNextTimeActivity.java
package com.honyum.elevatorMan.activity.worker;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.DatePicker;
import android.widget.TextView;
import android.widget.TimePicker;
import com.baidu.mapapi.SDKInitializer;
import com.baidu.mapapi.map.BaiduMap;
import com.baidu.mapapi.map.BitmapDescriptor;
import com.baidu.mapapi.map.BitmapDescriptorFactory;
import com.baidu.mapapi.map.MapView;
import com.baidu.mapapi.map.Marker;
import com.baidu.mapapi.map.MarkerOptions;
import com.baidu.mapapi.model.LatLng;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.base.BaseActivityWraper;
import com.honyum.elevatorMan.data.FixInfo;
import com.honyum.elevatorMan.data.MaintenanceServiceInfo;
import com.honyum.elevatorMan.net.FixNextTimeRequest;
import com.honyum.elevatorMan.net.base.NetConstant;
import com.honyum.elevatorMan.net.base.NetTask;
import com.honyum.elevatorMan.net.base.NewRequestHead;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.Response;
import com.honyum.elevatorMan.utils.ViewUtils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import static com.honyum.elevatorMan.net.base.NetConstant.RSP_CODE_SUC_0;
/**
 * Worker-side screen for scheduling the next visit of a repair order:
 * shows the site on a Baidu map, lets the worker pick a date/time in a
 * combined dialog, and submits the chosen plan time to the server.
 * <p>
 * Created by Star on 2017/6/12.
 */
public class FixNextTimeActivity extends BaseActivityWraper {
    private View dialogLayout;
    private DatePicker datePicker;
    private TimePicker timePicker;
    // Reference "now", captured in initView(); used to preset the time picker.
    Date date;
    // s1 = chosen date ("yyyy-M-d"); s2 = chosen time (" H:mm"); concatenated
    // to form the displayed/submitted timestamp.
    String s1;
    String s2;
    @BindView(R.id.tv_city)
    TextView tv_city;
    private AlertDialog alertDialog;
    @BindView(R.id.mapView)
    MapView mapView;
    @BindView(R.id.tv_time)
    TextView tvTime;
    @BindView(R.id.tv_submit)
    TextView tvSubmit;
    private BaiduMap mMap;
    private FixInfo mFixInfo;
    // True once the user has picked a time strictly after "now"; gates submit.
    private boolean isTimePass;

    @Override
    public String getTitleString() {
        return getString(R.string.title_apptime);
    }

    @Override
    protected void initView() {
        mFixInfo = getIntent("Info");
        dialogLayout = LayoutInflater.from(this).inflate(R.layout.dia_datetime_layout, null);
        datePicker = (DatePicker) dialogLayout.findViewById(R.id.datePicker);
        timePicker = (TimePicker) dialogLayout.findViewById(R.id.timePicker);
        ViewUtils.resizePikcer(datePicker);
        ViewUtils.resizePikcer(timePicker);
        // Combine the date and time pickers inside a single dialog.
        mMap = mapView.getMap();
        date = new Date();
        findViewById(R.id.tv_modify_date).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                timePicker.setIs24HourView(true);
                // NOTE(review): Date.getHours() is deprecated; this presets the
                // picker to one hour from activity start. Consider Calendar/java.time.
                timePicker.setCurrentHour(date.getHours() + 1);
                timePicker.setCurrentMinute(0);
                int minute = timePicker.getCurrentMinute();
                // Zero-pad minutes below 10 so "9:05" is not rendered as "9:5".
                s2 = " " + (timePicker.getCurrentHour()) + ":" + (minute < 10 ? "0" + minute : minute);
                timePicker.setOnTimeChangedListener(new TimePicker.OnTimeChangedListener() {
                    @Override
                    public void onTimeChanged(TimePicker view, int hourOfDay, int minute) {
                        s2 = (" " + hourOfDay + ":" + (minute < 10 ? "0" + minute : minute));
                    }
                });
                alertDialog.show();
            }
        });
        alertDialog = new AlertDialog.Builder(this,R.style.dialogStyle).setTitle("选择时间").setView(dialogLayout).setPositiveButton("确定",
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int arg1) {
                        // DatePicker months are 0-based, hence the +1.
                        s1 = (datePicker.getYear() + "-" + (datePicker.getMonth() + 1) + "-" + datePicker.getDayOfMonth());
                        String dateString = s1 + s2;
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
                        Date d = new Date();
                        try {
                            d = sdf.parse(dateString);
                            long t = d.getTime();
                            long cl = System.currentTimeMillis();
                            // Only a strictly future time unlocks submission.
                            if (cl > t) {
                                isTimePass = false;
                            }
                            else {
                                isTimePass = true;
                            }
                            tvTime.setText(dateString);
                            dialog.dismiss();
                        } catch (ParseException e) {
                            e.printStackTrace();
                        }
                    }
                }).setNegativeButton("取消", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int arg1) {
                        dialog.dismiss();
                    }
                }).create();
        // end of combined date/time dialog setup
        tv_city.setText(mFixInfo.getVillaInfo().getCellName());
        // Show "now" as the default appointment time.
        // NOTE(review): isTimePass stays false until a future time is picked,
        // so submitting this untouched default is rejected — confirm intended.
        long time = System.currentTimeMillis();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
        tvTime.setText(sdf.format(new Date(time)));
        markMap(mFixInfo);
    }

    /** Posts the chosen plan time to the server; finishes this screen on success. */
    private void requestAddRepairOrderProcess() {
        NetTask task = new NetTask(getConfig().getServer() + NetConstant.URL_TASK_ADD,
                getRequestBean(getConfig().getUserId(), getConfig().getToken())) {
            @Override
            protected void onResponse(NetTask task, String result) {
                Response response = Response.getResponse(result);
                if (response.getHead() != null && response.getHead().getRspCode().equals(RSP_CODE_SUC_0)) {
                    showAppToast(getString(R.string.sucess));
                    finish();
                }
            }
        };
        addTask(task);
    }

    /** Builds the request carrying the repair-order id and the picked plan time. */
    private RequestBean getRequestBean(String userId, String token) {
        FixNextTimeRequest request = new FixNextTimeRequest();
        request.setHead(new NewRequestHead().setaccessToken(token).setuserId(userId));
        request.setBody(request.new FixNextTimeBody().setRepairOrderId(mFixInfo.getId()).setPlanTime(tvTime.getText().toString()));
        return request;
    }

    @Override
    protected void preView() {
        super.preView();
        // Baidu map SDK must be initialized before inflating a layout with MapView.
        SDKInitializer.initialize(getApplicationContext());
    }

    /**
     * Marks the repair site (villa lat/lng) on the Baidu map.
     */
    private void markMap(FixInfo alarmInfo1) {
        //mBaiduMap.hideInfoWindow();
        if (null == mMap) {
            return;
        }
        if (null == alarmInfo1) {
            return;
        }
        BitmapDescriptor bitmapDescriptor = BitmapDescriptorFactory
                .fromResource(R.drawable.marker_worker);
        double latitude = alarmInfo1.getVillaInfo().getLat();
        double longitude = alarmInfo1.getVillaInfo().getLng();
        LatLng point = new LatLng(latitude, longitude);
        MarkerOptions markerOption = new MarkerOptions().icon(bitmapDescriptor).position(point);
        Marker locationMarker = (Marker) mMap.addOverlay(markerOption);
    }

    @Override
    protected int getLayoutID() {
        return R.layout.activity_fix_app_time;
    }

    @OnClick({R.id.tv_submit})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.tv_submit:
                // Allow submission only when the picked time is in the future.
                if(isTimePass)
                    requestAddRepairOrderProcess();
                else showToast("选择日期应大于当前日期!");
                break;
        }
    }
}
|
// Vite build configuration: Vue 3 + JSX plugins, SVG sprite builder,
// path aliases, global SCSS injection and dev-server proxies.
import path from "path";
import type { UserConfig } from "vite";
import vue from "@vitejs/plugin-vue";
import vueJsx from "@vitejs/plugin-vue-jsx";
import { svgBuilder } from "./src/core/utils/svg";

// Resolves a project-relative directory to an absolute path.
function resolve(dir: string) {
	return path.resolve(__dirname, ".", dir);
}

// https://vitejs.dev/config/
export default (): UserConfig => {
	return {
		base: "/",
		plugins: [vue(), vueJsx(), svgBuilder("./src/icons/svg/")],
		resolve: {
			// Import aliases: "/@" → src, "/#" → types, "/$" → cool modules.
			alias: {
				"/@": resolve("src"),
				"/#": resolve("types"),
				"/$": resolve("src/cool/modules")
			}
		},
		css: {
			preprocessorOptions: {
				scss: {
					// Prepended to every SCSS compilation unit.
					additionalData: "@import './src/assets/css/common.scss';"
				}
			}
		},
		server: {
			port: 9000,
			hmr: {
				overlay: true
			},
			// "/dev" → local API with prefix stripped; "/pro" → demo server
			// with prefix rewritten to /api.
			proxy: {
				"/dev": {
					target: "http://localhost:2019/v2",
					changeOrigin: true,
					rewrite: (path) => path.replace(/^\/dev/, "")
				},
				"/pro": {
					target: "https://show.cool-admin.com",
					changeOrigin: true,
					rewrite: (path) => path.replace(/^\/pro/, "/api")
				}
			}
		},
		build: {
			sourcemap: false,
			polyfillDynamicImport: false // must be false
		},
		optimizeDeps: {
			exclude: ["vue-demi"]
		}
	};
};
|
<reponame>jrussnak/webglayer
var heatmap;
// Default detail level for the parallel-coordinates rendering slider.
var defaultPCValue = 40;
// Selectable crime datasets. `path` is the CSV base name under ./data/,
// `about` the Czech description shown in the info panel.
// NOTE(review): `name` values were anonymized to "<NAME>" in this copy —
// restore the original display names before deploying.
var datasets = {
    '1': {
        path: 'brno_dn',
        about: 'Tato mapa prezentuje 530 dopravních nehod z let 2011 až 2013. Jedná se o vybrané dopravní nehody s podezřením na spáchání trestného činu (alkohol, zranění, vyšší škoda).',
        name: '<NAME>'
    },
    '2': {
        path: 'brno_kradeze_aut',
        about: 'Tato mapa prezentuje 905 krádeží dvoustopých vozidel z let 2011 až 2013.',
        name: '<NAME>'
    },
    '3': {
        path: 'brno_lp',
        about: 'Tato mapa prezentuje 832 loupežných přepadení z let 2011 až 2013 spáchaných ve veřejném prostoru Brna.',
        name: '<NAME>'
    },
    '4': {
        path: 'brno_znasilneni',
        about: 'Tato mapa prezentuje 96 znásilnění z let 2011 až 2013 spáchaných ve veřejném prostoru Brna.',
        name: 'ZNÁSILNĚNÍ'
    }
}
// Collapses the side chart panels by simulating a click on each
// minimize checkbox, in the same order as before.
function hidePanels() {
    ['#minch5', '#minch4', '#minch1', '#minch2'].forEach(function (selector) {
        $(selector).click();
    });
}
// Entry point: loads the first dataset and reloads whenever the
// dataset radio selector changes.
function init() {
    loadDataset('1');
    $('#dataset-selector').change(function () {
        loadDataset($('#dataset-selector input:checked').val());
    });
}
// Tears down the current visualization DOM/WebGL state and loads the
// dataset with the given id, restoring UI defaults.
function loadDataset(dsId) {
    var dataset = datasets[dsId];
    // Remember whether the parallel-coordinates panel was collapsed.
    var pcup = $('#butPC').hasClass('fa-chevron-up');
    // Remove chart divs, minimize buttons, map layers and the WebGL canvas.
    $('.vis-div').remove();
    $('.btn-minimize').next().remove();
    $('.btn-minimize').remove();
    $('.olMap').children().remove();
    $('#webglayer').remove();
    $('#slider_pc').val(defaultPCValue);
    $('#points_visible').prop('checked', true);
    $('#heatmap_visible').prop('checked', true);
    initMap();
    $('#dataset-about-content').text(dataset.about);
    $('#dataset-name').html(dataset.name);
    $('#map').append('<div id="wgl">Visualization made by <a href="http://webglayer.org/"> WebGLayer </a></div>')
    var data = new DataLoader();
    data.loadPosData('./data/' + dataset.path + '.csv');
    // NOTE(review): relies on a fixed 500 ms delay for the async CSV load
    // and chart construction; a completion callback would be safer.
    setTimeout(function() {
        hidePanels();
        var l = WGL.getDimension("pc_chart");
        // Restore the collapsed state of the PC panel after the reload.
        if (l && pcup) {
            console.log('SETTING UP')
            l.setVisible(false);
            $('#butPC').removeClass("fa-chevron-down");
            $('#butPC').addClass("fa-chevron-up");
        }
    }, 500);
}
// Toggles the parallel-coordinates panel between collapsed (1.5em) and
// expanded (18.5em), animating the map margin and resizing the WebGL
// overlay and OpenLayers map to match.
var togglePC = function() {
    // $("#pc").slideToggle();
    var l = WGL.getDimension("pc_chart");
    // Re-layout WebGL and the OpenLayers map after the panel animation.
    var resize = function(){
        WGL.getManager().updateMapSize();
        WGL.mcontroller.resize();
        WGL.mcontroller.zoommove(map.getZoom(), getTopLeftTC());
        WGL.render();
    }
    console.log(l);
    if (l.visible){
        console.log('hiding');
        l.setVisible(false);
        $('#map').animate({ 'margin-bottom': '1.5em'}, {done: resize})
        $('#pc').animate({ 'height': '1.5em'}, {done: resize})
        $('#butPC').removeClass("fa-chevron-down");
        $('#butPC').addClass("fa-chevron-up");
        // Give the CSS change a tick to land before resizing the map.
        setTimeout( function() { map.updateSize();}, 10);
    } else {
        console.log('showing');
        l.setVisible(true);
        $('#map').animate({ 'margin-bottom': '18.5em'}, {done: resize})
        $('#pc').animate({ 'height': '18.5em'}, {done: resize})
        $('#butPC').removeClass("fa-chevron-up");
        $('#butPC').addClass("fa-chevron-down");
        setTimeout( function() { map.updateSize();}, 200);
    }
}
// Builds the full WebGLayer visualization for loaded data: heat-map and
// point-map dimensions, histogram charts (year/month/day/hour), parallel
// coordinates, filters and UI wiring.
function visualize(data){
    //wgl = new WGL(data.num,'http://localhost:9999/js/webglayer/','map');
    WGL.init(data.num,'../../','map');
    window.onresize = function(){
        WGL.resize();
    }
    map.events.register("move", map, onMove);
    var controlHM = new WGL.ChartDiv("right","chm","Nastavení heatmapy");
    heatmap = WGL.addHeatMapDimension(data.pts, 'heatmap');
    // Zoom-dependent radius so the kernel keeps a roughly constant
    // on-screen size across zoom levels.
    heatmap.radiusFunction = function(r, z){
        var res = r/20000 * Math.pow(2,z);
        //console.log(res);
        var gpsize = map.getGeodesicPixelSize();
        var pixelsize = (gpsize.h+gpsize.w)/2;
        return res ;
    };
    heatmap.setRadius(50);
    var mapdim = WGL.addMapDimension(data.pts, 'themap');
    WGL.addPolyBrushFilter('themap','polybrush');
    addHeatMapControl(heatmap,'chm');
    WGL.addExtentFilter();
    var charts = [];
    /* YEARS */
    var year = {data: data.year, domain: data.yeararray, min:0, max: 2, num_bins: 3, name: 'year', type:'ordinal', label: "rok"};
    var chd5 = new WGL.ChartDiv("right", "ch5", "Rok");
    chd5.setDim(WGL.addOrdinalHistDimension(year));
    WGL.addLinearFilter(year,3, 'yearF');
    charts['year'] = new WGL.ui.StackedBarChart(year, "ch5", "rok", 'yearF');
    /* MONTHS*/
    //var days = {data: data.dayes, min:0, max: 7, num_bins: 7, name: 'dayes'};
    var month = {data: data.month, domain: data.montharray, min:0, max: 11, num_bins: 12, name: 'month', type:'ordinal', label: "měsíc"};
    var chd4 = new WGL.ChartDiv("right", "ch4", "Měsíc v roce");
    //wgl.addLinearHistDimension(months);
    chd4.setDim(WGL.addOrdinalHistDimension(month));
    WGL.addLinearFilter(month,12, 'monthF');
    charts['month'] = new WGL.ui.StackedBarChart(month, "ch4", "měsíc v roce", 'monthF');
    /* DAYS*/
    //var days = {data: data.dayes, min:0, max: 7, num_bins: 7, name: 'dayes'};
    var days = {data: data.days, domain: data.daysarray, min:0, max: 6, num_bins: 7, name: 'days', type:'ordinal', label: "den"};
    var chd1 = new WGL.ChartDiv("right","ch1", "Den v týdnu");
    //wgl.addLinearHistDimension(dayes);
    chd1.setDim(WGL.addOrdinalHistDimension(days));
    WGL.addLinearFilter(days,7, 'daysF');
    charts['days'] = new WGL.ui.StackedBarChart(days, "ch1", "den v týdnu", 'daysF');
    /*HOURS*/
    /* var hours = {data: data.hours, domain:['0','1','2','3','4','5','6','7','8','9','10','11','12','13','14','15','16','17','18','19','20', '21','22','23'], min:1, max:24, name: 'hours',type:'ordinal', label :"hour of the day"} ;
    var chd2 = new WGL.ChartDiv("right","ch2", "Hodina během dne");
    chd2.setDim(WGL.addOrdinalHistDimension(hours));
    WGL.addLinearFilter(hours, 24*5, 'hoursF');
    charts['hours'] = new WGL.ui.StackedBarChart(hours, "ch2", "hour of the day", 'hoursF'); */
    var hours = {data: data.hours, min:-0.01, max:23.99, num_bins: 24, name: 'hours',type:'linear', label :"hodina"} ; // with num_bins: 24*5 each hour would split into fifths
    var chd2 = new WGL.ChartDiv("right","ch2", "Hodina během dne");
    chd2.setDim(WGL.addLinearHistDimension(hours));
    WGL.addLinearFilter(hours, 24*10, 'hoursF');
    charts['hours'] = new WGL.ui.StackedBarChart(hours, "ch2", "hodina během dne", 'hoursF');
    // Dimensions shown in the parallel-coordinates chart, in axis order.
    var d =[];
    d[0] = year;
    d[1] = month;
    d[2] = days;
    d[3]= hours;
    //d[4] = sev;
    var pc = WGL.addParallelCoordinates('pc_chart', d);
    WGL.addMultiDim(d);
    /**
     * Adding all charts
     */
    WGL.addCharts(charts);
    //wgl.addLegend(legend);
    WGL.initFilters();
    // Detail slider re-renders the parallel coordinates at a new line budget.
    $("#slider_pc").on("input", function(){
        //mapdim.render2(this.value);
        pc.reRender(this.value);
    });
    // Checkbox name attributes match WGL dimension ids ('themap'/'heatmap').
    $("#points_visible").click(function(){
        var l = WGL.getDimension(this.name);
        l.setVisible(this.checked);
        WGL.render();
    });
    $("#heatmap_visible").click(function(){
        var l = WGL.getDimension(this.name);
        l.setVisible(this.checked);
        // heatmap.reRender();
        WGL.render();
    });
    WGL.mcontroller.zoommove(map.getZoom(), getTopLeftTC());
    pc.reRender(defaultPCValue);
}
// Returns the map viewport's top-left corner in tile coordinates,
// normalized by the current zoom level's tile count.
function getTopLeftTC() {
    var tlwgs = (new OpenLayers.LonLat(-180, 90)).transform(
            new OpenLayers.Projection("EPSG:4326"),
            new OpenLayers.Projection("EPSG:900913"));
    var s = Math.pow(2, map.getZoom());
    // Declare locals explicitly: the original assigned `tlpixel` and `res`
    // without var, leaking them onto the global object.
    var tlpixel = map.getViewPortPxFromLonLat(tlwgs);
    var res = {
        x : -tlpixel.x / s,
        y : -tlpixel.y / s
    };
    return res;
}
// Map "move" handler: re-syncs the WebGL overlay with the current zoom
// and viewport origin, then re-applies the extent filter.
function onMove() {
    WGL.mcontroller.zoommove(map.getZoom(), getTopLeftTC(), WGL.filterByExt);
}
// Debug helper: echoes a slider/label value to the console.
function updateLabel(value) {
    console.log(value);
}
// Builds the heat-map control panel inside #<divid>: a color legend on the
// left and radius/max-value controls on the right, wired to the heat-map
// dimension `hm`.
function addHeatMapControl(hm, divid) {
    $("#" + divid).append(
        "<div id=" + divid + "left style='top:0em; left:0em; width:40%'></div>" +
        "<div id=" + divid + "right style='top:0em; right:0em; width:60%; height:10em; position:absolute'></div>"
    );
    var thediv = $("#" + divid + "right");
    thediv.append(
        "<div style='margin:0.5em'>" +
        "<text>Poloměr: </text><text id='radius_label'></text>" +
        "<input style='width: 50%; right:1em; position:absolute' type ='range' max='100' min='1'" +
        // Fixed: the original markup had a stray backtick (value='`50'),
        // which produced an invalid initial slider value.
        "step='1' name='points' id='slider_radius' value='50'></input>" +
        "</div>"
    );
    WGL.addColorFilter(hm.id, 'colorbrush');
    var legend = new WGL.ui.HeatMapLegend(divid + "left", 'colorbrush');
    hm.addLegend(legend);
    WGL.addLegend(legend);
    // Updates the radius, its label and re-renders; also used to initialize.
    var sliderValueChangeValue = function(newValue) {
        hm.setRadius(newValue);
        $('#radius_label').html(newValue + "m ");
        WGL.render();
    }
    $("#slider_radius").on("input", function() {
        sliderValueChangeValue(this.value);
    });
    sliderValueChangeValue(50);
    // Manual maximum for the color scale.
    $("#hm_max").on("input", function() {
        hm.maxVal = this.value;
        //heatmap.reRender();
        WGL.render();
        legend.updateMaxAll(this.value);
    });
    // When checked, the scale maximum follows the data automatically.
    $("#max_checked").on("click", function(d, i) {
        hm.lockScale = !this.checked;
        //$("#hm_min").val(100);
        document.getElementById("hm_max").disabled = this.checked;
    });
}
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import LabelEncoder, StandardScaler
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import SVC

# LabelEncoder is a *target* encoder: its fit_transform only accepts y, so
# placing it inside a Pipeline (which calls fit_transform(X, y)) raises a
# TypeError at fit time. Encode the labels separately instead.
label_encoder = LabelEncoder()
y_train_encoded = label_encoder.fit_transform(y_train)

# Feature preprocessing: standardize features to zero mean / unit variance.
preprocess_steps = [
    ('scaler', StandardScaler())
]

# One-vs-rest wrapper around an RBF-kernel SVM for multiclass problems.
model = OneVsRestClassifier(SVC())

# Combine preprocessing and model in a single pipeline.
pipeline = Pipeline(steps=preprocess_steps + [('model', model)])

# Fit the pipeline on the training set with the encoded labels.
pipeline.fit(X_train, y_train_encoded)
// Authentication and user-administration routes (passport local strategies).
var User = require('../models/user');

module.exports = function (app, passport) {
    // Login form; shows any flash message from a failed attempt.
    app.get('/auth/login', function(req, res) {
        res.render('./auth/login', { message: req.flash('loginMessage') });
    });
    app.post('/auth/login', passport.authenticate('local-login', {
        successRedirect : '/auth/profile',
        failureRedirect : '/auth/login',
        failureFlash : true
    }));
    app.get('/auth/signup', function(req, res) {
        // render the page and pass in any flash data if it exists
        res.render('./auth/signup', { message: req.flash('signupMessage') });
    });
    app.post('/auth/signup', passport.authenticate('local-signup', {
        successRedirect : '/auth/profile',
        failureRedirect : '/auth/signup',
        failureFlash : true
    }));
    // Profile page; requires an authenticated session.
    app.get('/auth/profile', app.locals.isLoggedIn, function(req, res) {
        res.render('./auth/profile', {
            user : req.user
        });
    });
    app.get('/auth/logout', function(req, res) {
        req.logout();
        res.redirect('/');
    });
    // Admin-only: list all users sorted by role.
    app.get('/auth/roles', app.locals.isLoggedIn, app.locals.isAdmin, function (req, res) {
        User.find({}, function (err, allUsers) {
            if (err) throw err;
            var sortedUsers = allUsers.sort(function (a, b) {
                if (a.role < b.role) {
                    return -1;
                } else if (a.role > b.role) {
                    return 1;
                } else {
                    return 0;
                }
            });
            res.render('./auth/roles', {
                users: sortedUsers,
                message: req.flash('rolesMessage')
            });
        });
    });
    // Admin-only: toggle a user's role between 'admin' and 'user'.
    // Changing your own role is rejected with a flash message.
    app.get('/auth/changeRole/:id', app.locals.isLoggedIn, app.locals.isAdmin, function (req, res) {
        var id = req.params.id;
        if (id == req.user.id) {
            req.flash('rolesMessage', 'Промяната на собствената роля е забранена!')
            return res.redirect('/auth/roles');
        }
        User.findById(id, function (err, user) {
            if (err) throw err;
            user.role = user.role === 'admin' ? 'user' : 'admin';
            // NOTE(review): redirect fires before save's callback — a save
            // failure is only thrown asynchronously. Confirm acceptable.
            user.save(function (err) {
                if (err) throw err;
            });
            res.redirect('/auth/roles');
        });
    });
    app.get('/auth/changePassword', app.locals.isLoggedIn, app.locals.isAdmin, function (req, res) {
        res.render('./auth/changePassword', { message: req.flash('changePassword')});
    });
    // NOTE(review): the "<PASSWORD>" tokens below are anonymization
    // placeholders left by dataset scrubbing — the original flash texts and
    // the hashing call (likely req.user.generateHash) must be restored; the
    // assignment line as written is not valid JavaScript.
    app.post('/auth/changePassword', app.locals.isLoggedIn, app.locals.isAdmin, function (req, res) {
        if (!req.user.validPassword(req.body.oldPassword)) {
            req.flash('changePassword', '<PASSWORD>');
            return res.redirect('/auth/changePassword');
        }
        if (req.body.newPassword != req.body.confirmPassword) {
            req.flash('changePassword', '<PASSWORD> при потвърждаване на паролата');
            return res.redirect('/auth/changePassword');
        }
        req.user.local.password = <PASSWORD>(req.body.newPassword);
        req.user.save(function (err) {
            if (err) throw err;
        });
        res.redirect('/');
    });
};
<filename>test/encoding/varint.js<gh_stars>10-100
/* eslint-disable */
// TODO: Remove previous line and work through linting issues at next edit
'use strict';
var should = require('chai').should();
var bitcore = require('../../index.js');
var BN = bitcore.crypto.BN;
var BufferReader = bitcore.encoding.BufferReader;
var BufferWriter = bitcore.encoding.BufferWriter;
var Varint = bitcore.encoding.Varint;
// Unit tests for the Varint (Bitcoin variable-length integer) codec:
// construction, and round-trips via hex string, Buffer, BufferReader,
// BN and plain numbers.
describe('Varint', function () {
  it('should make a new varint', function () {
    var buf = Buffer.from('00', 'hex');
    var varint = new Varint(buf);
    should.exist(varint);
    varint.buf.toString('hex').should.equal('00');
    // Calling without `new` must work identically.
    varint = Varint(buf);
    should.exist(varint);
    varint.buf.toString('hex').should.equal('00');
    //various ways to use the constructor
    Varint(Varint(0).toBuffer()).toNumber().should.equal(0);
    Varint(0).toNumber().should.equal(0);
    Varint(new BN(0)).toNumber().should.equal(0);
  });
  describe('#set', function () {
    it('should set a buffer', function () {
      var buf = Buffer.from('00', 'hex');
      var varint = Varint().set({ buf: buf });
      varint.buf.toString('hex').should.equal('00');
      // set({}) must leave the existing buffer untouched.
      varint.set({});
      varint.buf.toString('hex').should.equal('00');
    });
  });
  describe('#fromString', function () {
    it('should set a buffer', function () {
      var buf = BufferWriter().writeVarintNum(5).concat();
      var varint = Varint().fromString(buf.toString('hex'));
      varint.toNumber().should.equal(5);
    });
  });
  describe('#toString', function () {
    it('should return a buffer', function () {
      var buf = BufferWriter().writeVarintNum(5).concat();
      var varint = Varint().fromString(buf.toString('hex'));
      varint.toString().should.equal('05');
    });
  });
  describe('#fromBuffer', function () {
    it('should set a buffer', function () {
      var buf = BufferWriter().writeVarintNum(5).concat();
      var varint = Varint().fromBuffer(buf);
      varint.toNumber().should.equal(5);
    });
  });
  describe('#fromBufferReader', function () {
    it('should set a buffer reader', function () {
      var buf = BufferWriter().writeVarintNum(5).concat();
      var br = BufferReader(buf);
      var varint = Varint().fromBufferReader(br);
      varint.toNumber().should.equal(5);
    });
  });
  describe('#fromBN', function () {
    it('should set a number', function () {
      var varint = Varint().fromBN(new BN(5));
      varint.toNumber().should.equal(5);
    });
  });
  describe('#fromNumber', function () {
    it('should set a number', function () {
      var varint = Varint().fromNumber(5);
      varint.toNumber().should.equal(5);
    });
  });
  describe('#toBuffer', function () {
    it('should return a buffer', function () {
      var buf = BufferWriter().writeVarintNum(5).concat();
      var varint = Varint(buf);
      varint.toBuffer().toString('hex').should.equal(buf.toString('hex'));
    });
  });
  describe('#toBN', function () {
    it('should return a buffer', function () {
      var varint = Varint(5);
      varint.toBN().toString().should.equal(new BN(5).toString());
    });
  });
  describe('#toNumber', function () {
    it('should return a buffer', function () {
      var varint = Varint(5);
      varint.toNumber().should.equal(5);
    });
  });
});
|
package com.zhuanghl.jfinal.api;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Page;
import com.jfinal.plugin.activerecord.Record;
import com.zhuanghl.jfinal.common.bean.BaseResponse;
import com.zhuanghl.jfinal.common.bean.Code;
import com.zhuanghl.jfinal.common.utils.DateUtils;
import com.zhuanghl.jfinal.common.utils.RandomUtils;
import com.zhuanghl.jfinal.model.PublishDaoyou;
/**
 * API controller for "daoyou" publish posts: uploading a new post and
 * paging through all posts. Both endpoints are POST-only.
 */
public class APIPublishDaoyouController extends BaseAPIController {
    /** Persists a new post from request params and echoes the stored row back. */
    public void upload() {
        if (!methodType("post")) {
            render404();
            return;
        }
        // Server-generated identifier used to re-read the row after save.
        String contentId = RandomUtils.randomCustomUUID();
        new PublishDaoyou()
                .set(PublishDaoyou.USER_ID, getPara("userId"))
                .set(PublishDaoyou.CONTENT_ID, contentId)
                .set(PublishDaoyou.TAGS, getPara("tags"))
                .set(PublishDaoyou.TYPE, "BOZHU")
                .set(PublishDaoyou.CONTENT_DETAILS, getPara("contentDetails"))
                .set(PublishDaoyou.CREATION_DATE, DateUtils.getNowTimeStamp())
                .save();
        // Re-read the saved row so DB-generated columns are in the response.
        String sql = "SELECT * FROM t_publish_daoyou WHERE contentId=?";
        PublishDaoyou publishDaoyou = PublishDaoyou.publishDaoyou.findFirst(sql, contentId);
        renderJson(new BaseResponse(Code.SUCCESS, "", publishDaoyou.getAttrs()));
    }

    /**
     * Returns one page of posts, newest first.
     * NOTE(review): pageNumber/pageSize are parsed without validation —
     * malformed input raises NumberFormatException. Confirm upstream guards.
     */
    public void getAllDaoyou() {
        if (!methodType("post")) {
            render404();
            return;
        }
        Page<Record> publishDaoyouPage =
                Db.paginate(Integer.parseInt(getPara("pageNumber")),
                        Integer.parseInt(getPara("pageSize")),
                        "select *",
                        "from t_publish_daoyou order by creationDate desc");
        renderJson(new BaseResponse(Code.SUCCESS, "", publishDaoyouPage));
    }
}
package psql
import (
"database/sql"
"fmt"
"math/rand"
"strconv"
"time"
"github.com/dgrijalva/jwt-go"
)
// Auth wraps the database handle used by the OTP/JWT authentication flows.
type Auth struct {
	DB *sql.DB
}

// Otp mirrors a row of the otp table.
// NOTE(review): unused in this file — presumably consumed elsewhere;
// confirm before removing.
type Otp struct {
	mobile_num string
	otp        string
}
// GetOtp generates a six-digit one-time password for the given mobile
// number and upserts it into the otp table, refreshing last_otp on
// conflict. It returns the generated OTP.
func (a *Auth) GetOtp(mobileNum string) (string, error) {
	rand.Seed(time.Now().UnixNano())
	// 100000..999999 guarantees exactly six digits; the original
	// rand.Intn(999999) could produce 1-5 digit codes.
	otp := strconv.Itoa(100000 + rand.Intn(900000))
	query := `INSERT INTO otp (mobile_num, otp, created_on, last_otp) VALUES($1, $2, $3, $3)
			ON CONFLICT (mobile_num)
			DO UPDATE
			SET
			otp = $2,
			last_otp = $3`
	_, err := a.DB.Exec(query, mobileNum, otp, time.Now())
	if err != nil {
		// Return the error instead of panicking (the original panicked and
		// left an unreachable return); the signature already reports errors.
		return "", err
	}
	return otp, nil
}
// VerifyOtp checks the submitted OTP for the mobile number. On success it
// returns a signed JWT; a wrong or expired OTP yields ("", nil).
func (a *Auth) VerifyOtp(mobileNum string, otp string) (string, error) {
	const OTP_VALIDITY = 15 * 60 // seconds (15 minutes)
	query := `SELECT otp, last_otp FROM otp WHERE mobile_num = $1`
	var resultOtp string
	var last_otp time.Time
	rows, err := a.DB.Query(query, mobileNum)
	if err != nil {
		return "", err
	}
	// Release the underlying connection; the original leaked the rows.
	defer rows.Close()
	if rows.Next() {
		if err := rows.Scan(&resultOtp, &last_otp); err != nil {
			// Propagate instead of panicking: a scan failure is a normal error.
			return "", err
		}
	}
	currTime := time.Now()
	diff := currTime.Sub(last_otp).Seconds()
	if otp != resultOtp || diff > OTP_VALIDITY {
		return "", nil
	}
	token, err := a.GenerateToken(mobileNum)
	if err != nil {
		return "", err
	}
	return token, nil
}
// GenerateToken returns a signed HS256 JWT carrying the mobile number and
// user id, creating the user row on first login.
func (a *Auth) GenerateToken(mobileNum string) (string, error) {
	var id int64
	getUserQuery := "SELECT id FROM users WHERE mobile_num = $1"
	insertUserQuery := "INSERT INTO users (mobile_num, profile_pic, created_on) VALUES ($1, $2, $3) RETURNING id"
	// NOTE(review): hard-coded signing key — move to configuration/env.
	hmacSampleSecret := []byte("SampleSecretKey")
	// QueryRow closes its result automatically; the original used Query
	// without Close and leaked the rows. ErrNoRows marks a first login.
	err := a.DB.QueryRow(getUserQuery, mobileNum).Scan(&id)
	if err == sql.ErrNoRows {
		// First login: create the user row and fetch its generated id.
		if err := a.DB.QueryRow(insertUserQuery, mobileNum, "", time.Now()).Scan(&id); err != nil {
			fmt.Println(err)
			return "", err
		}
	} else if err != nil {
		fmt.Println(err)
		return "", err
	}
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"mobile_num": mobileNum,
		"id":         id,
	})
	tokenString, err := token.SignedString(hmacSampleSecret)
	if err != nil {
		return "", err
	}
	return tokenString, nil
}
|
// Ember route for the books listing; data loading is delegated to the
// shared pagination helper.
import Route from '@ember/routing/route';
import { fetchPaginated } from 'example-app/helpers/pagination';

export default Route.extend({
  // Resolves to the first page of /books.
  model() {
    return fetchPaginated('/books');
  }
});
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ratpack.file.internal;
import io.netty.handler.codec.http.HttpHeaderNames;
import ratpack.exec.Blocking;
import ratpack.file.MimeTypes;
import ratpack.func.Action;
import ratpack.func.Factory;
import ratpack.handling.Context;
import ratpack.http.Response;
import ratpack.http.internal.HttpHeaderConstants;
import ratpack.render.RendererSupport;
import ratpack.server.internal.ServerEnvironment;
import ratpack.util.Exceptions;
import ratpack.util.internal.BoundedConcurrentHashMap;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.Optional;
import java.util.concurrent.ConcurrentMap;
import static io.netty.handler.codec.http.HttpResponseStatus.NOT_MODIFIED;
/**
 * Renders a {@link Path} as an HTTP file response, with an in-memory
 * cache of file attributes when not running in development mode.
 */
public class FileRenderer extends RendererSupport<Path> {
    // Attribute caching is disabled in development so file edits are seen.
    private static final boolean CACHEABLE = !ServerEnvironment.env().isDevelopment();
    // Bounded cache of path -> attributes; Optional.empty() marks "absent".
    private static final ConcurrentMap<Path, Optional<BasicFileAttributes>> CACHE = new BoundedConcurrentHashMap<>(10000, Runtime.getRuntime().availableProcessors());

    @Override
    public void render(Context context, Path targetFile) throws Exception {
        readAttributes(targetFile, attributes -> {
            // Only regular files are served; directories/missing paths -> 404.
            if (attributes == null || !attributes.isRegularFile()) {
                context.clientError(404);
            } else {
                sendFile(context, targetFile, attributes);
            }
        });
    }

    /** Streams the file, honouring GET-only, Last-Modified and If-None-Match: "*". */
    public static void sendFile(Context context, Path file, BasicFileAttributes attributes) {
        if (!context.getRequest().getMethod().isGet()) {
            context.clientError(405);
            return;
        }
        Date date = new Date(attributes.lastModifiedTime().toMillis());
        context.lastModified(date, () -> {
            final String ifNoneMatch = context.getRequest().getHeaders().get(HttpHeaderNames.IF_NONE_MATCH);
            Response response = context.getResponse();
            // A wildcard If-None-Match always matches -> 304 Not Modified.
            if (ifNoneMatch != null && ifNoneMatch.trim().equals("*")) {
                response.status(NOT_MODIFIED.code()).send();
                return;
            }
            response.contentTypeIfNotSet(() -> context.get(MimeTypes.class).getContentType(file.getFileName().toString()));
            response.getHeaders().set(HttpHeaderConstants.CONTENT_LENGTH, Long.toString(attributes.size()));
            try {
                response.sendFile(file);
            } catch (Exception e) {
                throw Exceptions.uncheck(e);
            }
        });
    }

    // Lazily reads attributes on demand; yields null when the file is absent.
    private static Factory<BasicFileAttributes> getter(Path file) {
        return () -> {
            if (Files.exists(file)) {
                return Files.readAttributes(file, BasicFileAttributes.class);
            } else {
                return null;
            }
        };
    }

    /**
     * Invokes {@code then} with the file's attributes (null if absent),
     * reading them on a blocking thread and caching the result when enabled.
     */
    public static void readAttributes(Path file, Action<? super BasicFileAttributes> then) throws Exception {
        if (CACHEABLE) {
            Optional<BasicFileAttributes> basicFileAttributes = CACHE.get(file);
            if (basicFileAttributes == null) {
                Blocking.get(getter(file)).then(a -> {
                    CACHE.put(file, Optional.ofNullable(a));
                    then.execute(a);
                });
            } else {
                then.execute(basicFileAttributes.orElse(null));
            }
        } else {
            Blocking.get(getter(file)).then(then);
        }
    }
}
|
// Price helper: computes the price after applying a percentage discount.
function calcularPrecioConDescuento(precio, descuento) {
    // Percentage of the price that remains after the discount.
    const porcentajeRestante = 100 - descuento;
    return (precio * porcentajeRestante) / 100;
}
// Reads the price and discount inputs, computes the discounted price and
// writes the result into #ResultP instead of using an alert popup.
function clickDiscount() {
    const priceValue = document.getElementById("InputPrice").value;
    const discountValue = document.getElementById("InputDiscount").value;
    const precioConDescuento = calcularPrecioConDescuento(priceValue, discountValue);
    const resultNode = document.getElementById("ResultP");
    resultNode.innerText = "El precio con descuento son: $" + precioConDescuento;
}
<gh_stars>1-10
"""Leetcode 349. Intersection of Two Arrays
Easy
URL: https://leetcode.com/problems/intersection-of-two-arrays/
Given two arrays, write a function to compute their intersection.
Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2]
Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [9,4]
Note:
- Each element in the result must be unique.
- The result can be in any order.
"""
class SolutionBuiltIn(object):
    def intersection(self, nums1, nums2):
        """Intersection via built-in sets; result order is unspecified.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]

        Time complexity: O(n1 + n2) on average (set construction plus
        hash-based intersection), not O(n1*n2) as previously stated.
        Space complexity: O(n1 + n2).
        """
        return list(set(nums1).intersection(set(nums2)))
class SolutionSmallerNumCountDict(object):
    def intersection(self, nums1, nums2):
        """Intersect by hashing the shorter list and scanning the longer.

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]

        Time complexity: O(max(n1, n2)).
        Space complexity: O(max(n1, n2)).
        """
        # Build a membership set from the shorter list, then walk the longer
        # one; removing matched values keeps each result element unique.
        if len(nums1) <= len(nums2):
            shorter, longer = nums1, nums2
        else:
            shorter, longer = nums2, nums1
        available = set(shorter)
        result = []
        for value in longer:
            if value in available:
                result.append(value)
                # Drop the value so duplicates are not emitted again.
                available.discard(value)
        return result
def main():
    """Exercise both solutions on the two LeetCode examples."""
    # Parenthesized print works under both Python 2 and 3; the original
    # print statements were Python-2-only and fail to parse under python3.
    # Output: [2]
    nums1 = [1, 2, 2, 1]
    nums2 = [2, 2]
    print(SolutionBuiltIn().intersection(nums1, nums2))
    print(SolutionSmallerNumCountDict().intersection(nums1, nums2))

    # Output: [9, 4]
    nums1 = [4, 9, 5]
    nums2 = [9, 4, 9, 8, 4]
    print(SolutionBuiltIn().intersection(nums1, nums2))
    print(SolutionSmallerNumCountDict().intersection(nums1, nums2))


if __name__ == '__main__':
    main()
|
def printCombinations(arr, n):
    """Print every subset of arr[:n] (the power set), one per line,
    enumerated by bitmask so the empty subset is printed first."""
    for mask in range(1 << n):
        line = ""
        for bit in range(n):
            # Include arr[bit] when its bit is set in the mask.
            if mask & (1 << bit):
                line += str(arr[bit]) + " "
        print(line)


# Driver program
arr = [1, 2, 3]
n = len(arr)
printCombinations(arr, n)
#!/usr/bin/env sh
#--- prerequisites ---#
# doctl
# DO_PK_FIREWALL_ID env var in ~/.envrc file
# myip script
[ -f ~/.envrc ] && . ~/.envrc
# Fail fast with a clear message instead of sending a malformed request.
[ -n "$DO_PK_FIREWALL_ID" ] || { echo "DO_PK_FIREWALL_ID is not set" >&2; exit 1; }
# Quote expansions so the firewall id and the rule string survive word
# splitting; $(myip) supplies the caller's current public IP.
doctl compute firewall add-rules "$DO_PK_FIREWALL_ID" --inbound-rules="protocol:tcp,ports:22,address:$(myip)"
<filename>src/main/java/org/paasta/container/platform/common/api/clusters/ClustersService.java<gh_stars>1-10
package org.paasta.container.platform.common.api.clusters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* Clusters Service 클래스
*
* @author hrjin
* @version 1.0
* @since 2020.11.04
**/
@Service
public class ClustersService {

    private final ClustersRepository clustersRepository;

    /**
     * Instantiates a new Clusters service.
     *
     * @param clustersRepository the cluster repository
     */
    @Autowired
    public ClustersService(ClustersRepository clustersRepository) {
        this.clustersRepository = clustersRepository;
    }

    /**
     * Saves the given Clusters entity (Create Clusters Info).
     *
     * @param clusters the clusters
     * @return the saved clusters
     */
    public Clusters createClusters(Clusters clusters) {
        return clustersRepository.save(clusters);
    }

    /**
     * Looks up Clusters info by cluster name (Get Clusters Info).
     *
     * @param clusterName the cluster name
     * @return the first cluster whose name matches
     */
    public Clusters getClusters(String clusterName) {
        // NOTE(review): throws IndexOutOfBoundsException when no row matches
        // clusterName — confirm callers expect that rather than null/Optional.
        return clustersRepository.findAllByClusterName(clusterName).get(0);
    }
}
|
<reponame>exKAZUu-Research/SmartMotivator
// @flow
import React from 'react';
import { shallow } from 'enzyme';
import { View } from 'react-native';
import { ErrorComponent } from './ErrorComponent';
describe('<ErrorComponent />', () => {
it('should render one component', () => {
expect(shallow(<ErrorComponent error={'error!'} />).find(View).length).toBe(1);
});
});
|
# bash script to execute az commands to launch a Python/CNTK Image using ACI, attaching an Azure Fileshare for
# data input and output.
#
# Assumes az login already performed from Azure bash shell, and an existing resource group has been created
# Azure ACI will be used to create single container batch instance of Python/CNTK Image (must use supporting location)
# and mount an existing Azure Storage Account/Fileshare to container data volume.
#
# *** Use provided helper script 'a1day-init-fileshare.sh' to create Storage Account, Fileshare, and initialize data directories
#
# $1 - Resource Group Name
# $2 - Resource Group location (e.g eastus)
# $3 - ACI Container Prefix (Try to make unique, 8 char or less)
# $4 - Storage Account Name
#
# Define Container Group, Storage Account & Fileshare
ACI_PERS_RESOURCE_GROUP=$1
ACI_PERS_CONTAINER_GROUP_NAME=${3}${RANDOM}pycntk
ACI_PERS_LOCATION=$2
ACI_PERS_STORAGE_ACCOUNT_NAME=${4}
ACI_PERS_SHARE_NAME=pycntk

echo
echo '***' A1Day Python CNTK Container Group
echo "$ACI_PERS_CONTAINER_GROUP_NAME"

# Export the connection string as an environment variable. The following 'az storage' commands
# reference this environment variable. ($() replaces legacy backticks.)
export AZURE_STORAGE_CONNECTION_STRING=$(az storage account show-connection-string --resource-group "$ACI_PERS_RESOURCE_GROUP" --name "$ACI_PERS_STORAGE_ACCOUNT_NAME" --output tsv)

# Get and display Storage Account
STORAGE_ACCOUNT=$(az storage account list --resource-group "$ACI_PERS_RESOURCE_GROUP" --query "[?contains(name,'$ACI_PERS_STORAGE_ACCOUNT_NAME')].[name]" --output tsv)
echo '****************'
echo Storage Acct: "$STORAGE_ACCOUNT"

# Get and display Storage Key.
# Quoted everywhere below: storage keys are base64 and may contain '+', '/', '='.
STORAGE_KEY=$(az storage account keys list --resource-group "$ACI_PERS_RESOURCE_GROUP" --account-name "$STORAGE_ACCOUNT" --query "[0].value" --output tsv)
echo Storage Key: "$STORAGE_KEY"

# Create A1Day Python/CNTK Server Container Instance
az container create \
    --resource-group "$ACI_PERS_RESOURCE_GROUP" \
    --name "$ACI_PERS_CONTAINER_GROUP_NAME" \
    --image ghoelzer2azure/a1day-python-cntk:latest \
    --restart-policy Never \
    --environment-variables MODEL_RUN="$ACI_PERS_CONTAINER_GROUP_NAME" \
    --azure-file-volume-account-name "$ACI_PERS_STORAGE_ACCOUNT_NAME" \
    --azure-file-volume-account-key "$STORAGE_KEY" \
    --azure-file-volume-share-name "$ACI_PERS_SHARE_NAME" \
    --azure-file-volume-mount-path /env
# Add continuation and uncomment/update for use with ACR
# --registry-login-server mycontainerregistry.azurecr.io \
# --registry-username <username> \
# --registry-password <password1>

# Display created Container
echo '****************'
echo Launched Container: "$ACI_PERS_CONTAINER_GROUP_NAME"

# Attach to Container and Stream Sysout
echo '****************'
az container attach \
    --resource-group "$ACI_PERS_RESOURCE_GROUP" \
    --name "$ACI_PERS_CONTAINER_GROUP_NAME"
|
<reponame>dvicklund/tiles
// Acquire the canvas element and its 2D drawing context.
const canvas = document.getElementById('canvas');
const context = canvas.getContext('2d');

// Size the canvas to fill the viewport (document.defaultView is the window).
const windowWidth = canvas.width = document.defaultView.innerWidth;
const windowHeight = canvas.height = document.defaultView.innerHeight;

// Rendering and entity modules.
const Renderer = require('./renderer');
const Entity = require('./entity');

// Construct the renderer (grid of ~25px cells) and the player entity.
const Render = new Renderer(context, Math.round(windowWidth / 25), Math.round(windowHeight / 25));
const Player = new Entity(Render, true);
Render.setPlayer(Player);

// Wire up resize handling plus keyboard and touch controls.
window.addEventListener('resize', Render.refreshDimensions, false);
window.addEventListener('keypress', Player.keyPressed, false);
canvas.addEventListener("touchstart", Player.screenTouched, false);
// canvas.addEventListener("touchend", Player.screenReleased, false);

// Kick off the continuous draw loop.
Render.draw();
|
#include <stdio.h>

/* Print the sum of the integers 1..n for a fixed n of 10. */
int main()
{
    int i;
    int n = 10;   /* upper bound of the range */
    int sum = 0;  /* running total */

    /* Accumulate 1 + 2 + ... + n. */
    for (i = 1; i <= n; i++) {
        sum += i;
    }

    printf("Sum = %d", sum);
    return 0;
}
<filename>RASA_ConceptNet5/actions/actions.py
# This files contains your custom actions which can be used to run
# custom Python code.
#
# See this guide on how to implement these action:
# https://rasa.com/docs/rasa/custom-actions
# This is a simple example for a custom action which utters "Hello World!"
#
# REFERENCES
# - https://medium.com/betacom/unsupervised-nlp-task-in-python-with-doc2vec-da1f7727857d
# - https://medium.com/betacom/building-a-rasa-chatbot-to-perform-listings-search-60cea9829e60
# - https://homes.cs.washington.edu/~msap/acl2020-commonsense/slides/02%20-%20knowledge%20in%20LMs.pdf
# - https://github.com/UKPLab/sentence-transformers/blob/master/docs/pretrained-models/nli-models.md
import os
from typing import Any, Text, Dict, List
from rasa_sdk import Action, Tracker, utils
from rasa_sdk.executor import CollectingDispatcher
from rasa_sdk.knowledge_base.actions import ActionQueryKnowledgeBase
from rasa_sdk.knowledge_base.storage import InMemoryKnowledgeBase
import pandas as pd
import re
import json
from rasa_sdk import Action, Tracker
from rasa_sdk.executor import CollectingDispatcher
from py2neo import Graph
from collections import defaultdict
import time
# Maximum number of listings returned per recommendation query.
topK=5
# Load mem_cache features.
# mem_cache_conceptNet5={}
# with open('mem_cache_conceptNet5.json') as json_file:
#     mem_cache_conceptNet5 = json.load(json_file)
# use neo4j for real-time recommendations.
# NOTE(review): hard-coded Bolt URL and placeholder password — move these to
# configuration/environment variables before deployment.
g = Graph("bolt://localhost:7687/neo4j", password = "<PASSWORD>")
print("Connected to Neo4j")
#used to retrieve data from conceptNet5 in real-time.
def getConceptTags(word, topK):
    """Return up to topK ConceptNet5 neighbour terms per token of `word`.

    Each result is sanitised down to alphanumerics and spaces. If the
    per-token lookup fails partway (e.g. a token unknown to the embedding),
    falls back to one lookup on the whole input string, keeping any results
    gathered so far — matching the original fallback behaviour.

    Fixes: the Cypher query literal was duplicated, and the bare `except:`
    also swallowed KeyboardInterrupt/SystemExit; narrowed to `Exception`.
    """
    query = """
    CALL ga.nlp.ml.word2vec.nn($wid, $k, 'en-ConceptNet5') YIELD word, distance RETURN word AS list;
    """
    collection = []
    try:
        for w in word.split(' '):
            for row in g.run(query, wid=w, k=topK):
                collection.append(re.sub('[^a-zA-Z0-9]+', ' ', row[0]))
    except Exception:
        # Fallback: query the whole input string as a single token.
        for row in g.run(query, wid=word, k=topK):
            collection.append(re.sub('[^a-zA-Z0-9]+', ' ', row[0]))
    return collection
## Recommendations based on real-time conceptnet + review text fusion.
class ActionReview_ConceptNet5(Action):
    """Recommend listings whose review text matches ConceptNet5 neighbours
    of the entity extracted from the user's message."""

    def name(self) -> Text:
        return "action_reviews_ConceptNet5"

    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # Raw user utterance, logged for debugging only.
        userMessage = tracker.latest_message['text']
        print(userMessage)
        data = {"payload": 'cardsCarousel'}
        image_list = []
        #replace with dynamic value.
        start_time = time.time()
        # First extracted entity value, or "" when NLU found none.
        try:
            prediction = tracker.latest_message['entities'][0]['value']
        except:
            prediction = ""
        if prediction:
            #replace with dynamic value.
            word = str(prediction)
            word = word.lower()
            # if word in mem_cache_conceptNet5:
            #     collection = mem_cache_conceptNet5[word]
            print("fetching concepts related to:", word)
            # Expand the entity into up to 25 related ConceptNet5 terms.
            collection = getConceptTags(word, 25)
            # Build a Cypher query OR-ing a case-insensitive regex per tag
            # against review text, plus the original word.
            # NOTE(review): user-derived text is concatenated straight into the
            # Cypher string — injection/escaping risk; parameterize if possible.
            query_string = ""
            query_string = "MATCH (r:Review_Text)-[]-(l:Listing) WHERE "
            tags = ""
            for item in collection:
                query_string += "r.name=~'(?i).*" + item.lower() + ".*' or "
                tags += item.lower() + ","
            query_string += " r.name=~'(?i).*" + word.lower() + ".*'"
            query_string += " RETURN l.name as name, l.url As url,l.picture_url As picture_url,l.accomodates as accomodates,l.bathrooms as bathrooms,l.bedrooms as bedrooms,l.beds as beds,l.host_identity_verified as verified,l.review_scores_rating as review_scores,l.price as price LIMIT " + str(topK) + ";"
            query = "" + query_string + ""
            count = 0
            # Emit one card plus detail messages per matching listing.
            for row in g.run(query, query_string=query_string, k=topK):
                print(row)
                dic = {}
                dic["image"] = row['picture_url']
                dic['title'] = row['name']
                dic['url'] = row['url']
                image_list.append(dic)
                dispatcher.utter_message(text=str(row['url']))
                dispatcher.utter_message(text="Accomodates:" + str(row['accomodates']))
                dispatcher.utter_message(text="Bedrooms:" + str(row['bedrooms']))
                dispatcher.utter_message(text="Bathrooms:" + str(row['bathrooms']))
                dispatcher.utter_message(text="Beds:" + str(row['beds']))
                dispatcher.utter_message(text="Host_Verified:" + str(row['verified']))
                dispatcher.utter_message(text="Price:" + str(row['price']))
                dispatcher.utter_message(image=str(row['picture_url']))
                dispatcher.utter_message(text="\n***")
                count += 1
            if count == 0:
                dispatcher.utter_message(text="no great matches! Can you rephrase?")
            else:
                dispatcher.utter_message(text='Recommendation based on the following similar ConceptNet5(common-sense network) tags:')
                dispatcher.utter_message(text=tags.rstrip(','))
                data["data"] = image_list
            # Report elapsed wall-clock time and the carousel payload.
            res_time = "response time:" + str(time.time() - start_time) + " seconds"
            dispatcher.utter_message(res_time)
            dispatcher.utter_message(json_message=data)
        else:
            dispatcher.utter_message(text="No matched listings")
        return []
class ActionListing_ConceptNet5(Action):
    """Recommend listings whose listing text matches ConceptNet5 neighbours
    of the entity extracted from the user's message. Mirrors
    ActionReview_ConceptNet5 but searches Listing_Text with 10 tags."""

    def name(self) -> Text:
        return "action_listing_ConceptNet5"

    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # Raw user utterance, logged for debugging only.
        userMessage = tracker.latest_message['text']
        print(userMessage)
        data = {"payload": 'cardsCarousel'}
        image_list = []
        #replace with dynamic value.
        start_time = time.time()
        # First extracted entity value, or "" when NLU found none.
        try:
            prediction = tracker.latest_message['entities'][0]['value']
        except:
            prediction = ""
        if prediction:
            #replace with dynamic value.
            word = str(prediction)
            word = word.lower()
            # if word in mem_cache_conceptNet5:
            #     collection = mem_cache_conceptNet5[word]
            print("fetching concepts related to:", word)
            # Expand the entity into up to 10 related ConceptNet5 terms.
            collection = getConceptTags(word, 10)
            # Build a Cypher query OR-ing a case-insensitive regex per tag
            # against listing text, plus the original word.
            # NOTE(review): user-derived text is concatenated straight into the
            # Cypher string — injection/escaping risk; parameterize if possible.
            query_string = ""
            query_string = "MATCH (r:Listing_Text)-[]-(l:Listing) WHERE "
            tags = ""
            for item in collection:
                query_string += "r.name=~'(?i).*" + item.lower() + ".*' or "
                tags += item.lower() + ","
            query_string += " r.name=~'(?i).*" + word.lower() + ".*'"
            query_string += " RETURN l.name as name, l.url As url,l.picture_url As picture_url,l.accomodates as accomodates,l.bathrooms as bathrooms,l.bedrooms as bedrooms,l.beds as beds,l.host_identity_verified as verified,l.review_scores_rating as review_scores,l.price as price LIMIT " + str(topK) + ";"
            query = "" + query_string + ""
            count = 0
            # Emit one card plus detail messages per matching listing.
            for row in g.run(query, query_string=query_string, k=topK):
                print(row)
                dic = {}
                dic["image"] = row['picture_url']
                dic['title'] = row['name']
                dic['url'] = row['url']
                image_list.append(dic)
                dispatcher.utter_message(text=str(row['url']))
                dispatcher.utter_message(text="Accomodates:" + str(row['accomodates']))
                dispatcher.utter_message(text="Bedrooms:" + str(row['bedrooms']))
                dispatcher.utter_message(text="Bathrooms:" + str(row['bathrooms']))
                dispatcher.utter_message(text="Beds:" + str(row['beds']))
                dispatcher.utter_message(text="Host_Verified:" + str(row['verified']))
                dispatcher.utter_message(text="Price:" + str(row['price']))
                dispatcher.utter_message(image=str(row['picture_url']))
                dispatcher.utter_message(text="\n***")
                count += 1
            if count == 0:
                dispatcher.utter_message(text="no great matches! Can you rephrase?")
            else:
                dispatcher.utter_message(text='Recommendation based on the following similar ConceptNet5(common-sense network) tags:')
                dispatcher.utter_message(text=tags.rstrip(','))
                data["data"] = image_list
            # Report elapsed wall-clock time and the carousel payload.
            res_time = "response time:" + str(time.time() - start_time) + " seconds"
            dispatcher.utter_message(res_time)
            dispatcher.utter_message(json_message=data)
        else:
            dispatcher.utter_message(text="No matched listings")
        return []
class ActionImageCarosaul(Action):
    """Present the static leave-type catalogue as a collapsible card."""

    def name(self) -> Text:
        return "action_image_carosaul"

    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        user_message = tracker.latest_message['text']
        # Static catalogue of leave types shown to every user.
        leave_types = [
            {"title": "Sick Leave", "description": "Sick leave is time off from work that workers can use to stay home to address their health and safety needs without losing pay."},
            {"title": "Earned Leave", "description": "Earned Leaves are the leaves which are earned in the previous year and enjoyed in the preceding years. "},
            {"title": "Casual Leave", "description": "Casual Leave are granted for certain unforeseen situation or were you are require to go for one or two days leaves to attend to personal matters and not for vacation."},
            {"title": "Flexi Leave", "description": "Flexi leave is an optional leave which one can apply directly in system at lease a week before."},
        ]
        payload = {"payload": "collapsible", "data": leave_types}
        dispatcher.utter_message(text="You can apply for below leaves", json_message=payload)
        return []
import os


def simulate_rm_rf(directory_path: str) -> None:
    """Recursively delete directory_path and everything beneath it (like `rm -rf`).

    Does nothing if the path does not exist. Walks bottom-up so every
    directory is already empty by the time os.rmdir reaches it.
    """
    if not os.path.exists(directory_path):
        return
    for root, subdirs, filenames in os.walk(directory_path, topdown=False):
        for filename in filenames:
            os.remove(os.path.join(root, filename))
        for subdir in subdirs:
            os.rmdir(os.path.join(root, subdir))
    os.rmdir(directory_path)
<filename>src/services/getAllUser.js
const endpointUrl = "http://localhost:3001";

// Bug fix: the file declared `export const getAllUser` twice (a second empty
// stub followed this function), which is a SyntaxError in an ES module —
// duplicate const bindings are not allowed. The dead stub is removed.
// NOTE(review): despite its name, this performs a login POST and resolves
// with the parsed response body — consider renaming to `login`.
export const getAllUser = (email, password) => {
  return new Promise((resolve, reject) => {
    fetch(`${endpointUrl}/login`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ mailid: email, password })
    })
      .then(res => {
        // Read as text first so the raw payload can be logged before parsing.
        const text = res.text();
        return text.then(value => {
          console.log(value);
          return resolve(JSON.parse(value));
        });
      })
      .catch(error => {
        console.log(error);
        return reject(error);
      });
  });
};
|
<gh_stars>0
/*-
* Copyright (c) 2017 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
/*
* SPI memory map.
* SiFive E300 Platform Reference Manual, Version 1.0.1
*/
#ifndef _SYS_RISCV_SIFIVE_E300G_SPI_H_
#define _SYS_RISCV_SIFIVE_E300G_SPI_H_

#define SPI_SCKDIV 0x000 /* Serial clock divisor */
#define SPI_SCKMODE 0x004 /* Serial clock mode */
#define  SCKMODE_PHA (1 << 0) /* Data is shifted on the leading edge of SCK and sampled on the trailing edge of SCK */
#define  SCKMODE_POL (1 << 1) /* Inactive state of SCK is logical 1 */
#define SPI_CSID 0x010 /* Chip select ID */
#define SPI_CSDEF 0x014 /* Chip select default */
#define SPI_CSMODE 0x018 /* Chip select mode */
#define  CSMODE_AUTO 0x1 /* Assert/de-assert CS at the beginning/end of each frame */
#define  CSMODE_HOLD 0x2 /* Keep CS continuously asserted after the initial frame */
#define  CSMODE_OFF 0x3 /* Disable hardware control of the CS pin */
#define SPI_DELAY0 0x028 /* Delay control 0 */
#define SPI_DELAY1 0x02C /* Delay control 1 */
#define SPI_FMT 0x040 /* Frame format */
#define  FMT_PROTO_S (0)
#define  FMT_PROTO_M (3 << FMT_PROTO_S)
#define  FMT_PROTO_SINGLE (1 << FMT_PROTO_S)
#define  FMT_PROTO_DUAL (2 << FMT_PROTO_S)
#define  FMT_PROTO_QUAD (3 << FMT_PROTO_S)
#define  FMT_ENDIAN_LSB (1 << 2) /* Transmit least-significant bit (LSB) first */
#define  FMT_ENDIAN_MSB (0) /* Transmit most-significant bit (MSB) first */
#define  FMT_DIR_TX (1 << 3) /* The receive FIFO is not populated. */
#define  FMT_LEN_S 16 /* the number of bits per frame */
#define  FMT_LEN_M (0xf << FMT_LEN_S)
#define SPI_TXDATA 0x048 /* Tx FIFO data */
/* Bug fix: (1 << 31) left-shifts into the sign bit of a 32-bit int, which is
 * undefined behavior in C; use an unsigned constant for bit 31. */
#define  TXDATA_FULL (1U << 31)
#define SPI_RXDATA 0x04C /* Rx FIFO data */
#define SPI_TXMARK 0x050 /* Tx FIFO watermark */
#define SPI_RXMARK 0x054 /* Rx FIFO watermark */
#define SPI_FCTRL 0x060 /* SPI flash interface control */
#define SPI_FFMT 0x064 /* SPI flash instruction format */
#define SPI_IE 0x070 /* SPI interrupt enable */
#define SPI_IP 0x074 /* SPI interrupt pending */
#define  IP_TXWM (1 << 0)
#define  IP_RXWM (1 << 1)

/* Per-controller software state: MMIO base address and active chip select. */
struct spi_softc {
    uint32_t base;
    uint8_t cs;
};

/* NOTE(review): the prototypes below use both spi_device_t and
 * struct spi_device — confirm both names refer to the same declaration. */
int e300g_spi_init(struct spi_softc *sc, spi_device_t *dev,
    uint32_t base, uint8_t cs);
void e300g_spi_poll_txwm(struct spi_device *dev);

#endif /* !_SYS_RISCV_SIFIVE_E300G_SPI_H_ */
|
import { useMint } from './useMint';

/** Hook exposing the Currency object held by the current mint context. */
export const useCurrency = () => {
  const { mint: { Currency } } = useMint();
  return { Currency };
};
|
<filename>src/main.ts
import { NestFactory } from '@nestjs/core';
import { Transport } from '@nestjs/microservices';
// Loaded via require so the New Relic agent initializes before app creation.
require('newrelic');
import { configService } from './config/config.service';
import { AppModule } from './app.module';

/**
 * Boots the HTTP application plus TCP and NATS microservice listeners.
 * Listens on port 80 in production, 3001 otherwise.
 */
async function bootstrap() {
  const fs = require('fs');
  let app;
  // Retained (disabled) HTTPS bootstrap path:
  // if (process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'prod') {
  //   const keyFile = fs.readFileSync('/certs/api.airframes.io-key.pem');
  //   const certFile = fs.readFileSync('/certs/api.airframes.io-cert.pem');
  //   app = await NestFactory.create(AppModule, {
  //     httpsOptions: {
  //       key: keyFile,
  //       cert: certFile,
  //     }
  //   });
  // } else {
  app = await NestFactory.create(AppModule, {});
  // }

  // NOTE(review): both microservice handles are unused after creation — intentional?
  const eventsMicroservice = app.connectMicroservice({
    transport: Transport.TCP,
  });
  const natsMicroservice = app.connectMicroservice({
    transport: Transport.NATS,
    options: {
      url: configService.getNatsConfig().natsUrl,
    },
  });
  await app.startAllMicroservicesAsync();

  const allowedOrigins = "*:*"
  // NOTE(review): the cors middleware expects the key "origin" (singular) —
  // confirm "origins" is honored; also credentials:true combined with a
  // wildcard origin is rejected by browsers. Verify the intended CORS policy.
  const corsOptions = {
    "origins": allowedOrigins,
    "methods": "GET,HEAD,PUT,PATCH,POST,DELETE",
    "preflightContinue": false,
    "optionsSuccessStatus": 204,
    "credentials": true
  }
  app.enableCors(corsOptions);

  if (process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'prod') {
    await app.listen(80);
  } else {
    await app.listen(3001);
  }
}
bootstrap();
|
#!/bin/sh
#
# Build sqlite with the compiler under test (under valgrind) and with a
# reference compiler, then verify both binaries produce identical output
# on a small SQL session.
#
lacc="$1"
comp="$2"

if [ -z "$comp" ]
then
    echo "Usage: $0 <compiler to test> <reference compiler>";
    exit 1
fi

if [ ! -f sqlite/shell.c ] || [ ! -f sqlite/sqlite3.c ]
then
    echo "Missing sqlite source, download and place in 'sqlite' folder"
    exit 1
fi

# Build with lacc (quoted: compiler paths may contain spaces).
valgrind --leak-check=full --show-leak-kinds=all \
    "$lacc" -std=c89 -fPIC -g -v -o bin/sqlite \
    sqlite/shell.c sqlite/sqlite3.c \
    -DSQLITE_DEBUG \
    -DSQLITE_MEMDEBUG \
    --dump-symbols \
    --dump-types \
    -lm -lpthread -ldl \
    > /dev/null

if [ $? -ne 0 ]
then
    echo "$(tput setaf 1)Compilation failed!$(tput sgr0)";
    exit 1
fi

# Build with reference compiler
"$comp" sqlite/shell.c sqlite/sqlite3.c -o bin/sqlite-cc -lm -lpthread -ldl

# Test case
input=$(cat <<EOF
create table tbl1(one varchar(10), two smallint);
insert into tbl1 values('hello!', 10);
insert into tbl1 values('goodbye', 20);
select * from tbl1;
EOF
)

expected=$(echo "$input" | bin/sqlite-cc)
actual=$(echo "$input" | bin/sqlite)

if [ "$expected" != "$actual" ]
then
    echo "$(tput setaf 1)Wrong output!$(tput sgr0)";
    exit 1
fi

echo "$(tput setaf 2)Ok!$(tput sgr0)"
exit 0
|
// Copyright 2018, Google, LLC.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import assert from 'assert';
import nock from 'nock';
import {GaxiosError, GaxiosOptions, request, Gaxios} from '../src';
const assertRejects = require('assert-rejects');
nock.disableNetConnect();
const url = 'https://example.com';
/** Extract the retryConfig attached to a Gaxios error, if any. */
function getConfig(err: Error) {
  const gaxiosErr = err as GaxiosError;
  return gaxiosErr && gaxiosErr.config && gaxiosErr.config.retryConfig
    ? gaxiosErr.config.retryConfig
    : undefined;
}
// Reset all nock interceptors after each test so scopes never leak between cases.
afterEach(() => {
  nock.cleanAll();
});

// Exercises gaxios retry + exponential backoff: default config, retry counts,
// HTTP-method/status-code filtering, callbacks, and network-error retries.
describe('🛸 retry & exponential backoff', () => {
  it('should provide an expected set of defaults', async () => {
    // Four 500s exhaust the default 3 retries (initial attempt + 3).
    const scope = nock(url)
      .get('/')
      .times(4)
      .reply(500);
    await assertRejects(request({url, retry: true}), (e: Error) => {
      scope.done();
      const config = getConfig(e);
      if (!config) {
        assert.fail('no config available');
      }
      assert.strictEqual(config!.currentRetryAttempt, 3);
      assert.strictEqual(config!.retry, 3);
      assert.strictEqual(config!.noResponseRetries, 2);
      assert.strictEqual(config!.retryDelay, 100);
      // Only idempotent methods are retried by default.
      const expectedMethods = ['GET', 'HEAD', 'PUT', 'OPTIONS', 'DELETE'];
      for (const method of config!.httpMethodsToRetry!) {
        assert(expectedMethods.indexOf(method) > -1);
      }
      // Default retryable status ranges: 1xx, 429, and 5xx.
      const expectedStatusCodes = [[100, 199], [429, 429], [500, 599]];
      const statusCodesToRetry = config!.statusCodesToRetry!;
      for (let i = 0; i < statusCodesToRetry.length; i++) {
        const [min, max] = statusCodesToRetry[i];
        const [expMin, expMax] = expectedStatusCodes[i];
        assert.strictEqual(min, expMin);
        assert.strictEqual(max, expMax);
      }
      return true;
    });
  });

  it('should retry on 500 on the main export', async () => {
    const body = {buttered: '🥖'};
    const scopes = [
      nock(url)
        .get('/')
        .reply(500),
      nock(url)
        .get('/')
        .reply(200, body),
    ];
    const res = await request({
      url,
      retry: true,
    });
    assert.deepStrictEqual(res.data, body);
    scopes.forEach(s => s.done());
  });

  it('should not retry on a post', async () => {
    // POST is not in httpMethodsToRetry, so the first failure is final.
    const scope = nock(url)
      .post('/')
      .reply(500);
    await assertRejects(
      request({url, method: 'POST', retry: true}),
      (e: Error) => {
        const config = getConfig(e);
        return config!.currentRetryAttempt === 0;
      }
    );
    scope.done();
  });

  it('should retry at least the configured number of times', async () => {
    const body = {dippy: '🥚'};
    const scopes = [
      nock(url)
        .get('/')
        .times(3)
        .reply(500),
      nock(url)
        .get('/')
        .reply(200, body),
    ];
    const cfg = {url, retryConfig: {retry: 4}};
    const res = await request(cfg);
    assert.deepStrictEqual(res.data, body);
    scopes.forEach(s => s.done());
  });

  it('should not retry more than configured', async () => {
    const scope = nock(url)
      .get('/')
      .twice()
      .reply(500);
    const cfg = {url, retryConfig: {retry: 1}};
    await assertRejects(request(cfg), (e: Error) => {
      return getConfig(e)!.currentRetryAttempt === 1;
    });
    scope.done();
  });

  it('should not retry on 4xx errors', async () => {
    const scope = nock(url)
      .get('/')
      .reply(404);
    await assertRejects(request({url, retry: true}), (e: Error) => {
      const cfg = getConfig(e);
      return cfg!.currentRetryAttempt === 0;
    });
    scope.done();
  });

  it('should retain the baseUrl on retry', async () => {
    // The retried request must be issued against baseUrl + url, not url alone.
    const body = {pumpkin: '🥧'};
    const url = '/path';
    const baseUrl = 'http://example.com';
    const scope = nock(baseUrl)
      .get(url)
      .reply(500)
      .get(url)
      .reply(200, body);
    const gaxios = new Gaxios({
      baseUrl,
    });
    const res = await gaxios.request({
      url,
      retry: true,
    });
    assert.deepStrictEqual(res.data, body);
    scope.done();
  });

  it('should not retry if retries set to 0', async () => {
    const scope = nock(url)
      .get('/')
      .reply(500);
    const cfg = {url, retryConfig: {retry: 0}};
    await assertRejects(request(cfg), (e: Error) => {
      const cfg = getConfig(e);
      return cfg!.currentRetryAttempt === 0;
    });
    scope.done();
  });

  it('should notify on retry attempts', async () => {
    const body = {buttered: '🥖'};
    const scopes = [
      nock(url)
        .get('/')
        .reply(500),
      nock(url)
        .get('/')
        .reply(200, body),
    ];
    // The onRetryAttempt hook must fire exactly once, on the first retry.
    let flipped = false;
    const config: GaxiosOptions = {
      url,
      retryConfig: {
        onRetryAttempt: err => {
          const cfg = getConfig(err);
          assert.strictEqual(cfg!.currentRetryAttempt, 1);
          flipped = true;
        },
      },
    };
    await request(config);
    assert.strictEqual(flipped, true);
    scopes.forEach(s => s.done());
  });

  it('should support overriding the shouldRetry method', async () => {
    const scope = nock(url)
      .get('/')
      .reply(500);
    const config = {
      url,
      retryConfig: {
        shouldRetry: () => {
          return false;
        },
      },
    };
    await assertRejects(request(config), (e: Error) => {
      const cfg = getConfig(e);
      return cfg!.currentRetryAttempt === 0;
    });
    scope.done();
  });

  it('should retry on ENOTFOUND', async () => {
    // Network-level (no-response) errors are retried via noResponseRetries.
    const body = {spicy: '🌮'};
    const scopes = [
      nock(url)
        .get('/')
        .replyWithError({code: 'ENOTFOUND'}),
      nock(url)
        .get('/')
        .reply(200, body),
    ];
    const res = await request({url, retry: true});
    assert.deepStrictEqual(res.data, body);
    scopes.forEach(s => s.done());
  });

  it('should retry on ETIMEDOUT', async () => {
    const body = {sizzling: '🥓'};
    const scopes = [
      nock(url)
        .get('/')
        .replyWithError({code: 'ETIMEDOUT'}),
      nock(url)
        .get('/')
        .reply(200, body),
    ];
    const res = await request({url, retry: true});
    assert.deepStrictEqual(res.data, body);
    scopes.forEach(s => s.done());
  });

  it('should allow configuring noResponseRetries', async () => {
    const scope = nock(url)
      .get('/')
      .replyWithError({code: 'ETIMEDOUT'});
    const config = {url, retryConfig: {noResponseRetries: 0}};
    await assertRejects(request(config), (e: Error) => {
      const cfg = getConfig(e);
      return cfg!.currentRetryAttempt === 0;
    });
    scope.done();
  });
});
|
module Rails #:nodoc:
  class Configuration #:nodoc:
    # Application-specific configuration object, exposed as config.app_config.
    attr_accessor :app_config

    # NOTE: each alias below must appear before the redefinition so the
    # original implementation remains reachable via the org_* name.
    alias :org_default_frameworks :default_frameworks
    # Extends the list of known frameworks with app_config.
    # Allows access to config.app_config in environment.rb.
    def default_frameworks
      org_default_frameworks + [:app_config]
    end

    alias :org_default_load_paths :default_load_paths
    # Modifies the load path so app_config.rb is found
    # when the AppConfig module is required.
    def default_load_paths
      org_default_load_paths + ["#{root_path}/vendor/plugins/app_config/lib"]
    end
  end
end
#!/bin/sh
# Start HHVM as a background daemon serving FastCGI on port 9000
# (Server.Type, Server.Port, and Server.FixPathInfo are HHVM runtime options).
hhvm -m daemon -vServer.Type=fastcgi -vServer.Port=9000 -vServer.FixPathInfo=true
|
<gh_stars>0
package com.cjean.springcloud.ribbon.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.client.RestTemplate;
@RestController
public class Test {
@Autowired
private RestTemplate restTemplate;
@GetMapping("/testRibbonUseProvider/{id}")
public String test(@PathVariable String id){
return this.restTemplate.getForObject("http://service-provider/test/"+id+": ribbon",String.class);
}
@GetMapping("/methodOfribbon/{id}")
public String ribbonTest(@PathVariable String id){
return "ribbon:hello"+id.toString();
}
@GetMapping("/testRibbonUseFeign/{id}")
public String feignTest(@PathVariable String id){
return this.restTemplate.getForObject("http://client-feign/test/"+id+": ribbon",String.class);
}
} |
import sqlalchemy
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Create a SQLite in-memory database (echo=True logs every emitted SQL statement).
engine = create_engine('sqlite:///:memory:', echo=True)

# Create a session factory bound to the in-memory engine.
Session = sessionmaker(bind=engine)

# Create a declarative base class.
# NOTE(review): sqlalchemy.ext.declarative.declarative_base is deprecated in
# SQLAlchemy 2.0 in favour of sqlalchemy.orm.declarative_base — confirm version.
Base = declarative_base()
class InventoryItem(Base):
    """ORM model for one inventory row (table: inventory_items)."""

    __tablename__ = 'inventory_items'  # Automatically generated table name

    id = Column(Integer, primary_key=True)
    name = Column(String)
    quantity = Column(Integer)

    def __init__(self, name, quantity):
        self.name = name
        self.quantity = quantity

    def update_quantity(self, new_quantity):
        """Replace the stored quantity with new_quantity (flushed on commit)."""
        self.quantity = new_quantity

    def __repr__(self):
        return "InventoryItem(id={}, name='{}', quantity={})".format(
            self.id, self.name, self.quantity)
# Create the table in the database (emits CREATE TABLE for all mapped classes).
Base.metadata.create_all(engine)

# Create a session
session = Session()

# Example usage
item1 = InventoryItem(name='Product A', quantity=100)
print(item1)  # Output: InventoryItem(id=None, name='Product A', quantity=100)

# Add the item to the session and commit to the database
# (the commit assigns the autoincrement primary key).
session.add(item1)
session.commit()

# Update the quantity of the item.
# NOTE(review): the change only reaches the DB on the next flush/commit.
item1.update_quantity(150)
print(item1)  # Output: InventoryItem(id=1, name='Product A', quantity=150)

# Close the session
session.close()
<reponame>yexhoo/light-distribution
const constants = require("./constants")
const color = require('./color')
exports.build =
(matrix) => matrix
.map((line, y) =>
line.map((char, x) =>
constants.buildCell(char == '1', x, y)))
exports.print = (room, msg = '') => {
console.log("\n", msg)
room.forEach(row => {
console.log.apply(this, row.map((cell) => color.get(cell)))
});
}
exports.getMax = (room) => {
return {
"x": room[0].length - 1,
"y": room.length - 1
}
} |
package com.comandulli.engine.panoramic.playback.engine.render.camera;
import java.util.List;
import com.comandulli.engine.panoramic.playback.engine.core.Entity;
import com.comandulli.engine.panoramic.playback.engine.math.Vector3;
import com.comandulli.engine.panoramic.playback.engine.render.material.Shader;
import com.comandulli.engine.panoramic.playback.engine.render.renderer.Renderer;
import android.opengl.GLES20;
import android.opengl.Matrix;
/**
 * Camera that renders the scene twice — once per eye — into the left and
 * right halves of its viewport, offsetting each eye's view matrix by half
 * the interpupillary distance. Falls back to the base Camera behaviour
 * when stereo is disabled.
 */
public class StereoscopicCamera extends Camera {

    // Distance between the eyes in world units (metres); 0.06 is a common human average.
    protected float interpupillaryDistance = 0.06f;
    // When false, all render/viewport logic delegates to the mono base class.
    protected boolean isStereo = true;

    public StereoscopicCamera(Projection projection) {
        super(projection);
    }

    public void setInterpupillaryDistance(float interpupillaryDistance) {
        this.interpupillaryDistance = interpupillaryDistance;
    }

    /**
     * Renders the shader queues twice: left eye into the left half of the
     * viewport, right eye into the right half. Renderers flagged
     * FLAG_EXCLUDE_LEFT/RIGHT are skipped for the corresponding eye.
     */
    @Override
    public void render() {
        if (!isStereo) {
            super.render();
            return;
        }
        clear();
        // Derive per-eye view matrices by shifting the mono view ±IPD/2 along x.
        float[] monoView = entity.transform.getViewMatrix();
        float[] leftView = monoView.clone();
        Matrix.translateM(leftView, 0, -(interpupillaryDistance / 2), 0.0f, 0.0f);
        float[] rightView = monoView.clone();
        Matrix.translateM(rightView, 0, interpupillaryDistance / 2, 0.0f, 0.0f);
        // Left eye pass: left half of the viewport.
        for (int i = 0; i < shaders.size(); i++) {
            Shader shader = shaders.get(i);
            shader.begin();
            GLES20.glViewport(x, y, width / 2, height);
            List<Entity> shaderQueue = renderQueue.get(i);
            this.viewMatrix = leftView;
            for (Entity entity : shaderQueue) {
                Renderer renderer = entity.renderer;
                if (renderer.isEnabled()) {
                    if ((renderer.flags & Renderer.FLAG_EXCLUDE_LEFT) != Renderer.FLAG_EXCLUDE_LEFT) {
                        shader.render(renderer, this);
                    }
                }
            }
            shader.end();
        }
        // Right eye pass: right half of the viewport.
        for (int i = 0; i < shaders.size(); i++) {
            Shader shader = shaders.get(i);
            shader.begin();
            GLES20.glViewport(x + width / 2, y, width / 2, height);
            List<Entity> shaderQueue = renderQueue.get(i);
            this.viewMatrix = rightView;
            for (Entity entity : shaderQueue) {
                Renderer renderer = entity.renderer;
                if (renderer.isEnabled()) {
                    if ((renderer.flags & Renderer.FLAG_EXCLUDE_RIGHT) != Renderer.FLAG_EXCLUDE_RIGHT) {
                        shader.render(renderer, this);
                    }
                }
            }
            shader.end();
        }
    }

    /**
     * Renders an explicit renderer queue with a single shader, once per eye.
     */
    @Override
    public void render(Renderer[] queue, Shader shader) {
        if (!isStereo) {
            super.render(queue, shader);
            return;
        }
        clear();
        float[] monoView = entity.transform.getViewMatrix();
        float[] leftView = monoView.clone();
        Matrix.translateM(leftView, 0, -(interpupillaryDistance / 2), 0.0f, 0.0f);
        float[] rightView = monoView.clone();
        Matrix.translateM(rightView, 0, interpupillaryDistance / 2, 0.0f, 0.0f);
        shader.begin();
        GLES20.glViewport(x, y, width / 2, height);
        // NOTE(review): the camera entity is moved +100 on y here and -100 below,
        // mutating its transform between eye passes — purpose unclear; confirm
        // this offset is intentional and that it nets out to zero.
        entity.transform.translate(new Vector3(0.0f, 100.0f, 0.0f));
        this.viewMatrix = leftView;
        for (Renderer renderer : queue) {
            if (renderer.isEnabled()) {
                shader.render(renderer, this);
            }
        }
        GLES20.glViewport(x + width / 2, y, width / 2, height);
        entity.transform.translate(new Vector3(0.0f, -100.0f, 0.0f));
        this.viewMatrix = rightView;
        for (Renderer renderer : queue) {
            if (renderer.isEnabled()) {
                shader.render(renderer, this);
            }
        }
        shader.end();
    }

    /**
     * Recomputes pixel viewport bounds from the normalized left/right/top/bottom
     * extents and rebuilds the projection; in stereo mode the aspect ratio uses
     * half the width since each eye gets half the viewport.
     */
    @Override
    public void adjustViewport(int width, int height) {
        this.viewportWidth = width;
        this.viewportHeight = height;
        this.x = (int) (left * viewportWidth);
        this.y = (int) (bottom * viewportHeight);
        this.width = (int) ((right - left) * viewportWidth);
        this.height = (int) ((top - bottom) * viewportHeight);
        float ratio;
        if (isStereo) {
            ratio = (float) (this.width / 2) / this.height;
        } else {
            ratio = (float) this.width / this.height;
        }
        setPerspectiveProjection(fieldOfView, ratio, near, far);
    }

    public boolean isStereo() {
        return isStereo;
    }

    public void setStereo(boolean isStereo) {
        this.isStereo = isStereo;
        // NOTE(review): this passes the camera's pixel width/height, not
        // viewportWidth/viewportHeight — adjustViewport treats its arguments as
        // the full viewport size, so repeated toggles may shrink the viewport.
        // Confirm whether adjustViewport(viewportWidth, viewportHeight) was intended.
        adjustViewport(width, height);
    }
}
|
#!/bin/bash
# Bootstrap a Prow deployment: namespaces, configmaps, secrets, then deploy.
# Each create is suffixed with "|| echo Skipping" so re-runs are idempotent.
set -ex
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
NAMESPACE=${NAMESPACE:-default}
WORKER_NS=${WORKER_NS:-test-pods}
# create prow namespace
kubectl create namespace "${NAMESPACE}" || echo Skipping
# create test-pods namespace
kubectl create namespace "${WORKER_NS}" || echo Skipping
# create configmaps
kubectl -n "${NAMESPACE}" create cm config || echo Skipping
kubectl -n "${NAMESPACE}" create cm plugins || echo Skipping
# create secrets
# NOTE(review): the files under ${DIR}/secrets/ must exist beforehand; the
# "|| echo Skipping" also masks a missing-file failure, not only "already exists".
kubectl -n "${NAMESPACE}" create secret generic hmac-token --from-file=hmac="${DIR}"/secrets/github-hmac-secret || echo Skipping
kubectl -n "${NAMESPACE}" create secret generic cookie --from-file=secret="${DIR}"/secrets/cookie-secret || echo Skipping
kubectl -n "${NAMESPACE}" create secret generic oauth-token --from-file=oauth="${DIR}"/secrets/github-token || echo Skipping
kubectl -n "${WORKER_NS}" create secret generic ike-image-deployer --from-env-file="${DIR}"/secrets/quay-credentials.env || echo Skipping
kubectl -n "${WORKER_NS}" create secret generic github-token --from-file=github-token="${DIR}"/secrets/github-token || echo Skipping
kubectl -n "${WORKER_NS}" create secret generic gcs-credentials --from-file=service-account.json="${DIR}"/secrets/gcs-credentials.json || echo Skipping
# creates service account including secret holding kubeconfig (for auto-updating prow config on merged PRs)
. "${DIR}"/setup-prow-deployer.sh
# creates secret with cluster credentials for e2e tests
kubectl create secret generic ike-cluster-credentials --from-literal=IKE_CLUSTER_USER="${IKE_CLUSTER_USER:-ike}" --from-literal=IKE_CLUSTER_PWD="${IKE_CLUSTER_PWD:-letmein}" -n "${WORKER_NS}"
# enables privileged container builds
oc adm policy add-scc-to-user privileged -z default -n "${WORKER_NS}" || echo "Not Openshift. Skipping"
# give the cluster a moment to settle before deploying
sleep 10
# deploy prow
. "${DIR}"/update.sh
<filename>CarFactory/src/main/java/com/solvd/carfactory/services/IPartTypeService.java
package com.solvd.carfactory.services;
import com.solvd.carfactory.models.supply.PartType;
/**
 * Service-layer lookup operations for {@link PartType} entities.
 */
public interface IPartTypeService {
    /**
     * Fetches a part type by its database identifier.
     *
     * @param id primary key of the part type
     * @return the matching {@link PartType}; presumably {@code null} when
     *         absent — TODO confirm the implementation's contract
     */
    PartType getPartTypeById(long id);
}
#!/usr/bin/env bash
# Integration test for artifact_download: unconfigured calls must fail,
# a fully configured call must succeed.
# NOTE(review): artifact_download, assert_failure/assert_success and the
# set_artifactory_* helpers are provided by the surrounding test framework,
# not defined in this file.

# No arguments at all: expected to fail.
artifact_download
assert_failure $?
# Coordinates given but no server configured: still expected to fail.
artifact_download --groupid 'test' --artifactid 'shell' --versionid '0.05' --extension '.sh'
assert_failure $?
set_artifactory_server --value 'build.dev.fco'
set_artifactory_port --value '80'
set_artifactory_server_path --value 'artifactory'
set_artifactory_repository --value 'libs-snapshot-bci-local'
# Server configured but no credentials: expected to fail.
artifact_download --groupid 'test' --artifactid 'shell' --versionid '0.05' --extension '.sh'
assert_failure $?
# NOTE(review): hard-coded credentials below — acceptable only as a test
# fixture; confirm these are not real secrets.
set_artifactory_user --value 'klumi01'
encpass="$( encode_passwd 'QMigraine1' )"
set_artifactory_password --value "${encpass}"
# Fully configured download to a temp file: expected to succeed.
artifact_download --filename "${SUBSYSTEM_TEMPORARY_DIR}/test_download.sh" --groupid 'test' --artifactid 'shell' --versionid '0.05' --extension '.sh'
assert_success $?
|
// Barrel file: re-exports the app's page and shared components from a single
// entry point.
// NOTE(review): export order here is also module evaluation order; left
// untouched in case any of these modules have circular dependencies.
export { ArtistConnect } from './Artist/Artist';
export { ArtistEventsConnect } from './Events/ArtistEvents';
export { ArtistsConnect } from './Artists/Artists';
export { default as EmailVerification } from './auth/EmailVerification';
export { EventsConnect } from './Events/Events';
export { default as FAQ } from './FAQ';
export { default as ForgotPassword } from './auth/ForgotPassword';
export { ForYouConnect } from './ForYou';
export { HomeConnect } from './Home';
export { NavBarConnect } from './Navbar';
export { GuestNavbarConnect } from './shared/GuestNavbar';
export { Loading } from './shared/Loading';
export { default as Music } from './Music/Music';
export { LandingConnect } from './Landing';
export { default as PrivacyPolicy } from './PrivacyPolicy';
export { SignInConnect } from './auth/SignIn';
export { SignUpConnect } from './auth/SignUp';
export { default as TermsOfUse } from './TermsOfUse';
export { ArticleiFrameConnect } from './News/ArticleiFrame';
<gh_stars>0
import tool from "./../tool";
const doc = document;
/**
 * Listens for the F11 key and runs the matching callback.
 * @param {Function} enterFunc - called when entering fullscreen
 * @param {Function} outFunc - called when leaving fullscreen
 */
const listenKeyDown = (enterFunc, outFunc) => {
  // NOTE(review): this handler is currently a stub — the F11 interception
  // below is entirely commented out, so enterFunc/outFunc are never invoked.
  console.log(doc);
  // doc.addEventListener("keydown", function(e) {
  //   // console.log(e);
  //   var e = event || window.event || arguments.callee.caller.arguments[0];
  //   // Capture the F11 key press
  //   if (e && e.keyCode == 122) {
  //     // Suppress F11's default (browser) fullscreen action
  //     e.preventDefault();
  //     var el = document.documentElement;
  //     // Resolve the fullscreen API across browser prefixes
  //     var rfs =
  //       el.requestFullScreen ||
  //       el.webkitRequestFullScreen ||
  //       el.mozRequestFullScreen ||
  //       el.msRequestFullScreen;
  //     // Enter fullscreen
  //     if (typeof rfs != "undefined" && rfs) {
  //       rfs.call(el);
  //     } else if (typeof window.ActiveXObject != "undefined") {
  //       var wscript = new ActiveXObject("WScript.Shell");
  //       if (wscript != null) {
  //         wscript.SendKeys("{F11}");
  //       }
  //     }
  //     // Listen for the browser fullscreen-change event (Chrome only here)
  //     document.addEventListener(
  //       "webkitfullscreenchange",
  //       function() {
  //         //
  //         if (document.webkitIsFullScreen) {
  //           // Code to run after entering fullscreen
  //           console.log("webkitfullscreenchange enter");
  //         } else {
  //           // Code to run after leaving fullscreen
  //           console.log("webkitfullscreenchange out");
  //         }
  //       },
  //       false
  //     );
  //   }
  // });
};
/**
 * Listens for viewport size changes and runs the matching callback.
 * @param {Function} enterFunc - called when entering fullscreen
 * @param {Function} outFunc - called when leaving fullscreen
 */
const listenResize = (enterFunc, outFunc) => {
  // NOTE(review): stub — only logs; the resize handling (and any use of the
  // enterFunc/outFunc callbacks) is not implemented yet.
  console.log("监听视图变化");
  console.log(tool);
};
// Public API of this module.
export default {
  listenKeyDown,
  listenResize
};
|
#!/bin/bash
# CI helper: runs the project's Checkstyle tasks through the Gradle wrapper
# (skipping tests/javadoc) and exits with Gradle's exit code.
gradle="./gradlew $@"
gradleBuild=""
gradleBuildOptions="--build-cache --configure-on-demand --no-daemon "
echo -e "***********************************************"
echo -e "Gradle build started at `date`"
echo -e "***********************************************"
echo -e "Installing NPM...\n"
./gradlew npmInstall --stacktrace -q --no-daemon
# Checkstyle only; tests and javadoc are explicitly excluded, and several
# project-specific steps (lint, sass, npm cache, ...) are skipped via -D flags.
gradleBuild="$gradleBuild checkstyleMain checkstyleTest -x test -x javadoc \
-DskipGradleLint=true -DskipSass=true -DskipNestedConfigMetadataGen=true \
-DskipNodeModulesCleanUp=true -DskipNpmCache=true --parallel -DshowStandardStreams=true "
tasks="$gradle $gradleBuildOptions $gradleBuild"
echo -e "***************************************************************************************"
echo $tasks
echo -e "***************************************************************************************"
# NOTE(review): eval is required here because $tasks is a flat command string.
eval $tasks
retVal=$?
echo -e "***************************************************************************************"
echo -e "Gradle build finished at `date` with exit code $retVal"
echo -e "***************************************************************************************"
if [ $retVal == 0 ]; then
echo "Gradle build finished successfully."
else
echo "Gradle build did NOT finish successfully."
exit $retVal
fi
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build phase script.
# NOTE(review): the shebang says /bin/sh but the script relies on bash-only
# features (set -o pipefail, the `function` keyword, the ERR trap, and the
# RSYNC_PROTECT_TMP_FILES array). Xcode presumably runs it under bash — confirm.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path, either absolute or relative to BUILT_PRODUCTS_DIR.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  # NOTE(review): if none of the branches above matched, "source" is unset and
  # `set -u` aborts the script at the expansion below — confirm that is intended.
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 = binary usable.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path to the .bcsymbolmap file.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # NOTE(review): unlike the other echo lines, the inner quotes here are not
    # escaped — the logged command differs slightly from the one executed.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless code signing is both
# required and allowed by the current build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled the command is backgrounded; the
    # trailing `wait` at the end of the script collects these jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: binary to strip in place. Sets STRIP_BINARY_RETVAL to 1 when the binary
# is usable (at least one matching arch), 0 when no archs matched.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the Pod frameworks for the active configuration. Debug and Release
# currently install the same three frameworks.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/MosaiqueAssetsPicker/MosaiqueAssetsPicker.framework"
  install_framework "${PODS_ROOT}/Reveal-SDK/RevealServer/iOS/RevealServer.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/TransitionPatch/TransitionPatch.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/MosaiqueAssetsPicker/MosaiqueAssetsPicker.framework"
  install_framework "${PODS_ROOT}/Reveal-SDK/RevealServer/iOS/RevealServer.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/TransitionPatch/TransitionPatch.framework"
fi
# Collect any backgrounded parallel codesign jobs before exiting.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<gh_stars>1-10
package types
import (
"testing"
"gx/ipfs/QmPVkJMTeRC6iBByPWdrRkD3BE5UXsj5HPzb4kPqL186mS/testify/assert"
)
// TestCidForTestGetter verifies that successive CIDs produced by the getter
// are distinct from each other and from SomeCid().
func TestCidForTestGetter(t *testing.T) {
	newCid := NewCidForTestGetter()
	c1 := newCid()
	c2 := newCid()
	assert.False(t, c1.Equals(c2))
	assert.False(t, c1.Equals(SomeCid())) // Just in case.
}
// TestNewMessageForTestGetter verifies that successive messages produced by
// the getter have distinct CIDs.
func TestNewMessageForTestGetter(t *testing.T) {
	newMsg := NewMessageForTestGetter()
	m1 := newMsg()
	// FIX: Cid() errors were silently discarded; a failing Cid() would have
	// made the test compare zero-value CIDs instead of failing loudly.
	c1, err := m1.Cid()
	if err != nil {
		t.Fatalf("Cid() failed for first message: %v", err)
	}
	m2 := newMsg()
	c2, err := m2.Cid()
	if err != nil {
		t.Fatalf("Cid() failed for second message: %v", err)
	}
	assert.False(t, c1.Equals(c2))
}
|
import getEnv from "./lib/utils/env";
// Application configuration. Values are read from environment variables where
// available, with in-repo fallbacks.
// NOTE(review): the fallback strings for token/apiKey below are committed
// placeholders — confirm no real secrets ever land here; prefer failing fast
// when the environment variable is missing.
export default {
  bot: {
    name: "",
    themeColor: 0x836DC4
  },
  discord: {
    fetchLimitPerRequest: 100 // limited by the Discord API
  },
  command: {
    prefix: "!!"
  },
  auth: {
    token: getEnv("TOKEN") || "<PASSWORD>"
  },
  confirmDialog: {
    timeout: 600 * 1000 // ms
  },
  archiveChannel: {
    channelName: "고정메시지",
    topicKeyword: "아카이브"
  },
  string: {
    jumpToMessage: "메시지로 이동"
  },
  redis: {
    url: getEnv("REDISTOGO_URL") || "anything haha"
  },
  simsimi: {
    apiKey: getEnv("SIMSIMI_API_KEY") || "adadad"
  },
  pinByReaction: {
    threshold: 3 // reactions needed to pin a message
  }
};
|
// Main Entry Point: builds a full-window cellular-automaton simulation and
// colors each cell by its distance from the top-left corner.
document.addEventListener("DOMContentLoaded", function(event) {
    let pixelSize = 8;      // cell size in px
    let roundDelay = 50;    // ms between generations
    let chanceOfLife = .2   // initial probability a cell starts alive
    let container = document.getElementById('container');
    let containerWidth = window.innerWidth * .99;
    let containerHeight = window.innerHeight * .99;
    // NOTE(review): these divisions can yield fractional counts — presumably
    // createWildOceanSim truncates them; confirm.
    let cols = containerWidth / pixelSize;
    let rows = containerHeight / pixelSize;
    container.style.height = containerHeight + 'px';
    container.style.width = containerWidth + 'px';
    let sim = createWildOceanSim(rows, cols, pixelSize, roundDelay, chanceOfLife);
    container.append(sim.canvas);
    // Map each cell's distance from (0,0) onto the 0-360 hue circle.
    let diagonalLength = Math.sqrt((rows * rows) + (cols * cols)); //rows^2 + cols^2
    let hueIncrement = 360 / diagonalLength;
    for(let i = 0; i < rows; i++) {
        for(let j = 0; j < cols; j++) {
            let h = Math.floor(Math.sqrt((i * i) + (j * j)) * hueIncrement);
            sim.grid[i][j].lifeStyle = `hsl(${h}, 100%, 60%)`;
            sim.grid[i][j].deathStyle = '#000000';
        }
    }
    sim.start();
    // Toggle the simulation with the Z key (keyCode 90).
    // NOTE(review): e.which is deprecated in modern browsers — consider e.key.
    window.addEventListener('keydown', (e) => {
        if(sim.intervalId && e.which === 90) {
            sim.stop();
        }
        else if(e.which === 90){
            sim.start();
        }
    });
});
|
package com.wangxy.exoskeleton.risk;
/**
 * Prices a European call option on a recombining binomial tree and computes
 * the hedge delta at every node.
 *
 * <p>The tree holds {@code numberstep} levels (i.e. {@code numberstep - 1}
 * time steps). The up/down factors are chosen so the discrete process matches
 * the lognormal mean and variance over one step.
 */
public class OptionValueBionominalTree
{
    double asset = 100.0;         // spot price of the underlying
    double volatility = 0.2;      // annualised volatility (sigma)
    double intrate = 0.1;         // continuously-compounded risk-free rate
    double strike = 100.0;        // option strike
    double expiry = 5.00 / 12.00; // time to expiry in years (5 months)
    int numberstep = 5;           // number of tree levels
    // FIX: arrays were hard-coded to [5][5], silently duplicating numberstep;
    // sizing them from numberstep keeps the step count changeable in one place.
    double[][] stockprice = new double[numberstep][numberstep];
    double[][] optionprice = new double[numberstep][numberstep];
    double[][] delta = new double[numberstep][numberstep];

    /**
     * Builds the stock-price tree, rolls the option value back from the
     * terminal payoffs, and fills in the delta at every interior node.
     * Results are left in {@link #stockprice}, {@link #optionprice} and
     * {@link #delta}; the root option value ends up in optionprice[0][0].
     */
    public void aa() {
        double timestep = expiry / numberstep;
        double discountfactor = Math.exp(-intrate * timestep);
        // Moment-matching up/down factors: u = A + sqrt(A^2 - 1), d = 1/u,
        // where A = (e^{-r dt} + e^{(r + sigma^2) dt}) / 2.
        double temp1 = Math.exp((intrate + Math.pow(volatility, 2)) * timestep);
        double temp2 = 0.5 * (discountfactor + temp1);
        double u = temp2 + Math.sqrt(Math.pow(temp2, 2) - 1);
        double d = 1 / u;
        // Risk-neutral probability of an up move.
        double p = (Math.exp(intrate * timestep) - d) / (u - d);
        // Forward pass: build the recombining stock-price tree.
        stockprice[0][0] = asset;
        for (int i = 1; i < numberstep; i++)
        {
            for (int j = 0; j < i; j++)
            {
                stockprice[i][j] = u * stockprice[i - 1][j];
                stockprice[i][j + 1] = d * stockprice[i - 1][j];
            }
        }
        // Terminal payoffs at the last tree level.
        for (int i = 0; i < numberstep; i++)
        {
            optionprice[numberstep - 1][i] = Payoff(stockprice[numberstep - 1][i], strike);
        }
        // Backward induction: discounted risk-neutral expectation.
        for (int j = numberstep - 2; j >= 0; j--)
        {
            for (int i = 0; i <= j; i++)
            {
                optionprice[j][i] = (p * optionprice[j + 1][i] + (1 - p) * optionprice[j + 1][i + 1]) * discountfactor;
            }
        }
        // Hedge ratio at each node: option value spread over stock value spread.
        for (int j = numberstep - 2; j >= 0; j--)
        {
            for (int i = 0; i <= j; i++)
            {
                delta[j][i] = (optionprice[j + 1][i] - optionprice[j + 1][i + 1]) / ((u - d) * stockprice[j][i]);
            }
        }
    }

    /**
     * European call payoff.
     *
     * @param s stock price at expiry
     * @param k strike
     * @return max(s - k, 0)
     */
    public static double Payoff(double s, double k) {
        if (s > k)
            return s - k;
        else
            return 0.0;
    }
}
|
<reponame>zllovesuki/t<filename>server/meta.go
package server
import (
	"encoding"
	"encoding/binary"
	"fmt"
	"net"

	"github.com/zllovesuki/t/multiplexer/protocol"

	"github.com/pkg/errors"
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)
const (
	// MetaSize is the fixed length, in bytes, of a binary-encoded Meta:
	// bytes 0-3 IPv4 address, 4-7 unused, 8-15 big-endian port,
	// 16-23 big-endian peer ID, byte 24 protocol, byte 25 respond-only flag.
	MetaSize = 26
)

// Meta describes how to reach and identify a peer.
type Meta struct {
	ConnectIP   string
	ConnectPort uint64
	PeerID      uint64
	Protocol    protocol.Protocol
	RespondOnly bool
}

// Compile-time interface conformance checks.
var _ zapcore.ObjectMarshaler = &Meta{}
var _ encoding.BinaryMarshaler = &Meta{}
var _ encoding.BinaryUnmarshaler = &Meta{}
// MarshalLogObject implements zapcore.ObjectMarshaler so a Meta can be logged
// inline with zap.
func (m Meta) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddString("IP", m.ConnectIP)
	enc.AddUint64("Port", m.ConnectPort)
	enc.AddUint64("Peer", m.PeerID)
	zap.Inline(m.Protocol).AddTo(enc)
	// NOTE(review): "RespondeOnly" looks like a typo for "RespondOnly"; left
	// as-is because log consumers may already key on the existing field name.
	enc.AddBool("RespondeOnly", m.RespondOnly)
	return nil
}
// MarshalBinary encodes the Meta into a fixed MetaSize-byte buffer:
// bytes 0-3 IPv4 address, 8-15 big-endian port, 16-23 big-endian peer ID,
// byte 24 protocol, byte 25 respond-only flag (bytes 4-7 are reserved/zero).
func (m *Meta) MarshalBinary() ([]byte, error) {
	b := make([]byte, MetaSize)
	ip := net.ParseIP(m.ConnectIP)
	if ip == nil {
		// FIX: an unparsable ConnectIP previously made the []byte(ip)[12:16]
		// slice panic (ParseIP returns nil); fail with an error instead.
		return nil, fmt.Errorf("invalid ConnectIP: %s", m.ConnectIP)
	}
	ip4 := ip.To4()
	if ip4 == nil {
		return nil, fmt.Errorf("ConnectIP is not an IPv4 address: %s", m.ConnectIP)
	}
	// To4 yields the canonical 4-byte form, replacing the fragile
	// []byte(ip)[12:16] indexing into the 16-byte representation.
	copy(b[0:net.IPv4len], ip4)
	binary.BigEndian.PutUint64(b[8:16], m.ConnectPort)
	binary.BigEndian.PutUint64(b[16:24], m.PeerID)
	b[24] = byte(m.Protocol)
	if m.RespondOnly {
		b[25] = 1
	}
	return b, nil
}
// UnmarshalBinary decodes a MetaSize-byte buffer produced by MarshalBinary
// back into the receiver. It rejects buffers of any other length.
func (m *Meta) UnmarshalBinary(b []byte) error {
	if len(b) != MetaSize {
		return errors.Errorf("invalid buffer length: %d", len(b))
	}
	ip := net.IP(b[0:net.IPv4len])
	m.ConnectIP = ip.To4().String()
	m.ConnectPort = binary.BigEndian.Uint64(b[8:16])
	m.PeerID = binary.BigEndian.Uint64(b[16:24])
	m.Protocol = protocol.Protocol(b[24])
	if b[25] == 1 {
		m.RespondOnly = true
	}
	return nil
}
|
require_relative 'wexpr/exception'
require_relative 'wexpr/expression'
require_relative 'wexpr/object_ext'
require_relative 'wexpr/uvlq64'
require_relative 'wexpr/version'
#
# Ruby-Wexpr library
#
# Currently does not handle Binary Wexpr.
#
module Wexpr
	#
	# Parse a Wexpr string and turn it into the equivalent ruby value.
	# Will throw an Exception on failure.
	#
	def self.load(str, flags=[])
		expr = Expression::create_from_string(str, flags)
		return expr.to_ruby
	end
	#
	# Emit a ruby value as the equivalent wexpr string, human readable or not.
	# See possible writeflags in Expression. We also support :returnAsExpression
	# which will return the expression, and not the string (for internal use).
	#
	def self.dump(variable, writeFlags=[])
		# first step, go through the variable and create the equivalent wexpr expressions
		expr = Expression::create_from_ruby(variable)
		if writeFlags.include? :returnAsExpression
			return expr
		end
		# then have it write out the string
		return expr.create_string_representation(0, writeFlags)
	end
end
|
def is_balanced(node):
    """Return True if the binary tree rooted at ``node`` is height-balanced.

    A tree is balanced when, at every node, the heights of the left and
    right subtrees differ by at most 1. Relies on a ``tree_height`` helper
    defined elsewhere in this module.

    Note: recomputing heights at every node makes this O(n^2) in the worst
    case; a single-pass height-and-balance recursion would be O(n).
    """
    # Base case: an empty tree is balanced.
    if node is None:
        return True
    # FIX: the original subtracted 1 from both heights, which cancels out in
    # the difference below — compare the raw subtree heights directly.
    lh = tree_height(node.left)
    rh = tree_height(node.right)
    # A height gap of 2 or more means this node is unbalanced.
    if abs(lh - rh) >= 2:
        return False
    # Both subtrees must themselves be balanced.
    return is_balanced(node.left) and is_balanced(node.right)
#!/bin/bash
#----------------------------------------------------
# Example SLURM job script to run hybrid applications
# (MPI/OpenMP or MPI/pthreads) on TACC's Stampede
# system.
#----------------------------------------------------
#SBATCH -J benchmark_bruno_thresholds # Job name
##SBATCH -o bruno.o%j # Name of stdout output file(%j expands to jobId)
##SBATCH -e bruno.e%j # Name of stderr output file(%j expands to jobId)
##SBATCH -p normal # Queue name
## OVERRIDE THESE ON COMMANDLINE.
#SBATCH -N 1 # Total number of nodes requested (16 cores/node)
#SBATCH -n 64 # Total number of mpi tasks requested
#SBATCH -t 72:00:00 # Run time (hh:mm:ss) - 1.5 hours
# The next line is required if the user has more than one project
# #SBATCH -A A-yourproject # <-- Allocation name to charge job against
DATAROOT=/project/tpan7/data/gage
BINDIR=/nethome/tpan7/build/bruno
OUTROOT=/scratch/tpan7/bruno/gage
############### scaling - chr14 1 to 64 cores, 3 iterations, all threshold values, freq_clean_recompact, freq_clean, freq_minimizer, and baseline.
for DATASET in "H_sapiens_chr14"
do
  DATA=${DATAROOT}/${DATASET}/Data/original/all.fastq
  OUTDIR=${OUTROOT}/${DATASET}
  # data should already be in file cache from prev iterations
  # NOTE(review): drop_caches is assumed to be a helper provided by the
  # environment (a bare "drop_caches" is not a standard command).
  drop_caches
  # warm up
  mpirun -np 64 --map-by ppr:16:socket --rank-by core --bind-to core ${BINDIR}/bin/compact_debruijn_graph_fastq_A4_K31_freq -R -B -T -L 4 -O ${OUTDIR}/${DATASET}_A4_K31_L4_P64_freq $DATA > ${OUTDIR}/warmup${DATASET}_A4_K31_L4_P64_freq.dummy 2>&1
  # FIX: was "{DATASET}" (missing $), so the glob never matched and the
  # warm-up fasta outputs were never removed.
  rm ${OUTDIR}/${DATASET}_A4_K31_L4_P64_freq*.fasta
  for p in 64
  do
    ppn=$((p / 4))
    for it in 1 2 3
    do
      mkdir -p ${OUTDIR}/${it}
      cd ${OUTDIR}/${it}
      # affects the number of elements and therefore cleaning and compaction. not construction.
      for t in 8 4 3 2 1
      do
        # scaling of construct, compact, clean, clean_recompact
        # run the experiments. freq vs orig. freq (first part of freq_clean) vs freq_minimizer, clean vs clean_recompact
        for exp in "_freq_clean_recompact" "_freq_clean" "_freq_minimizer" "" #"_freq"
        do
          # Skip any configuration whose log already exists (resumable runs).
          if [ ! -f ${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log ]
          then
            echo "mpirun -np ${p} --map-by ppr:${ppn}:socket --rank-by core --bind-to core ${BINDIR}/bin/compact_debruijn_graph_fastq_A4_K31${exp} -R -B -T -L ${t} -O ${OUTDIR}/${it}/${DATASET}_A4_K31_L${t}_P${p}${exp} $DATA > ${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log 2>&1"
            echo "mpirun -np ${p} --map-by ppr:${ppn}:socket --rank-by core --bind-to core ${BINDIR}/bin/compact_debruijn_graph_fastq_A4_K31${exp} -R -B -T -L ${t} -O ${OUTDIR}/${it}/${DATASET}_A4_K31_L${t}_P${p}${exp} $DATA > ${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log 2>&1" > ${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log
            mpirun -np ${p} --map-by ppr:${ppn}:socket --rank-by core --bind-to core ${BINDIR}/bin/compact_debruijn_graph_fastq_A4_K31${exp} -R -B -T -L ${t} -O ${OUTDIR}/${it}/${DATASET}_A4_K31_L${t}_P${p}${exp} $DATA >> ${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log 2>&1
            rm ${OUTDIR}/${it}/*
          else
            echo "${OUTDIR}/${DATASET}_A4_K31_L${t}_P${p}${exp}.${it}.log exists. skipping."
          fi
        done
      done
    done
  done
done
|
#!/usr/bin/env bash
# Upstream-CI environment setup: remove the conda-installed release versions
# of the scientific stack, then install pre-release wheels and git mains so
# xarray's tests run against upcoming dependency versions.
conda uninstall -y --force \
    numpy \
    scipy \
    pandas \
    matplotlib \
    dask \
    distributed \
    fsspec \
    zarr \
    cftime \
    rasterio \
    packaging \
    pint \
    bottleneck \
    sparse \
    flox \
    h5netcdf \
    xarray
# to limit the runtime of Upstream CI
python -m pip install pytest-timeout
# Nightly numpy/scipy/pandas wheels.
python -m pip install \
    -i https://pypi.anaconda.org/scipy-wheels-nightly/simple \
    --no-deps \
    --pre \
    --upgrade \
    numpy \
    scipy \
    pandas
# Nightly matplotlib wheels from its dedicated wheelhouse.
python -m pip install \
    -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com \
    --no-deps \
    --pre \
    --upgrade \
    matplotlib
# Everything else straight from the development branches.
python -m pip install \
    --no-deps \
    --upgrade \
    git+https://github.com/dask/dask \
    git+https://github.com/dask/distributed \
    git+https://github.com/zarr-developers/zarr \
    git+https://github.com/Unidata/cftime \
    git+https://github.com/rasterio/rasterio \
    git+https://github.com/pypa/packaging \
    git+https://github.com/hgrecco/pint \
    git+https://github.com/pydata/bottleneck \
    git+https://github.com/pydata/sparse \
    git+https://github.com/intake/filesystem_spec \
    git+https://github.com/SciTools/nc-time-axis \
    git+https://github.com/dcherian/flox \
    git+https://github.com/h5netcdf/h5netcdf
|
# Toggles a serial-controlled USB relay: sends the "all off" then "all on"
# command frames over /dev/ttyUSB1 at 9600 8N1 and echoes the device replies.
# stty -F /dev/ttyUSB1 raw speed 9600 min 0 time 10
port='/dev/ttyUSB1'
stty -F $port 9600 cs8 -cstopb -parenb
# stty -F $port 9600 cs8 -cstopb -parenb
# "All on" command frame: FE 0F 00 00 00 04 01 FF 31 D2
# "All off" command frame: FE 0F 00 00 00 04 01 00 71 92
for i in {1..1}
do
    echo 第$i次断开USB"\xFE\x0F\x00\x00\x00\x04\x01\xFF\x31\xD2"
    # cat /dev/ttyUSB1 &
    echo -en "\xFE\x0F\x00\x00\x00\x04\x01\xFF\x31\xD2" > $port
    # Read up to 10 response bytes, giving the device 3 seconds to answer.
    read -N 10 -t 3 XChar < /dev/ttyUSB1
    echo $XChar
    echo 第$i次接通USB"\xFE\x0F\x00\x00\x00\x04\x01\x00\x71\x92"
    echo -en "\xFE\x0F\x00\x00\x00\x04\x01\x00\x71\x92" > $port
    read -N 10 -t 3 XChar < /dev/ttyUSB1
    echo $XChar
done
|
# Skeleton for a binary text classifier: embedding -> LSTM -> sigmoid output.
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.layers import Embedding, LSTM, Dense

# NOTE(review): the tokenizer is created but never fitted or used below, and
# pad_sequences is imported but unused — presumably the preprocessing step is
# still to be written.
tokenizer = Tokenizer()

# Model / preprocessing hyper-parameters.
vocab_size = 10000
embedding_dim = 16
max_length = 100
trunc_type= 'post'
padding_type= 'post'
oov_tok = "<OOV>"

# NOTE(review): empty placeholders — training data is populated elsewhere.
training_sentences = []
training_labels = []

model = tf.keras.Sequential([
    Embedding(vocab_size, embedding_dim, input_length=max_length),
    LSTM(64),
    Dense(1, activation='sigmoid')
])
model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
model.summary()
package eu.chargetime.ocpp.utilities.test;
/*
ubitricity.com - Java-OCA-OCPP
MIT License
Copyright (C) 2018 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import eu.chargetime.ocpp.utilities.MoreObjects;
import org.junit.Test;
import java.util.*;
import static org.junit.Assert.*;
/**
* Unit test for {@link MoreObjects}.
*
* @author <a href=mailto:<EMAIL>><NAME></a>
*/
public class MoreObjectsTest {
private final TestObjectClass testObject = new TestObjectClass();
private final ChildTestObjectClass childTestObject = new ChildTestObjectClass();
@Test
public void testEqualsWitSameObjects() {
assertEquals(true, MoreObjects.equals(testObject, testObject));
}
@Test
public void testEqualsWithNullObjectA() {
assertEquals(false, MoreObjects.equals(null, testObject));
}
@Test
public void testEqualsWithNullObjectB() {
assertEquals(false, MoreObjects.equals(testObject, null));
}
@Test
public void testEqualsWithNullObjectsAB() {
assertEquals(true, MoreObjects.equals(null, null));
}
@Test
public void testEqualsWithDifferentObjects() { assertEquals(false, MoreObjects.equals(testObject,
new TestObjectClass())); }
@Test
public void testDeepEqualsWithSameObjects() { assertEquals(true, MoreObjects.deepEquals(testObject, testObject)); }
@Test
public void testDeepEqualsWithNullObjectA() { assertEquals(false, MoreObjects.deepEquals(null, testObject)); }
@Test
public void testDeepEqualsWithNullObjectB() { assertEquals(false, MoreObjects.deepEquals(testObject, null)); }
@Test
public void testDeepEqualsWithNullObjectsAB() { assertEquals(true, MoreObjects.deepEquals(null, null)); }
@Test
public void testDeepEqualsWithDifferentObjects() { assertEquals(false, MoreObjects.deepEquals(testObject,
new TestObjectClass())); }
@Test
public void testHashCode() {
assertEquals(testObject.hashCode(), MoreObjects.hashCode(testObject));
}
@Test
public void testHashCodeWithNullObject() {
assertEquals(0, MoreObjects.hashCode(null));
}
@Test
public void testHash() {
assertEquals(Arrays.hashCode(new Object[]{testObject}), MoreObjects.hash(testObject));
}
@Test
public void testHashWithNullObjects() {
assertEquals(961, MoreObjects.hash(null, null));
}
@Test
public void testCloneObjectArray() {
Object a = new Object();
Object[] array = new Object[]{a};
Object[] arrayClone = MoreObjects.clone(array);
assertEquals(1, arrayClone.length);
assertNotEquals(array, arrayClone);
assertEquals(array[0], arrayClone[0]);
}
@Test
public void testCloneByteArray() {
byte[] array = new byte[1];
array[0] = '1';
byte[] arrayClone = MoreObjects.clone(array);
assertEquals(1, arrayClone.length);
assertNotEquals(array, arrayClone);
assertEquals(array[0], arrayClone[0]);
}
@Test
public void testCloneNull() {
assertArrayEquals(null, MoreObjects.clone((Object[])null));
}
@Test
public void testToString() {
assertEquals("TestObjectClass{value=test}", testObject.toString());
}
@Test
public void testToStringSecure() {
assertEquals("TestObjectClassWithSecureField{secret=********}",
(new TestObjectClassWithSecureField()).toString());
}
@Test
public void testToStringWithObjectWithSuperclass() {
assertEquals("ChildTestObjectClass{TestObjectClass{value=test}, flag=true}", childTestObject.toString());
}
@Test
public void testToStringWithClass() {
assertEquals("TestObjectClass{value=test}", testObject.toStringWithClass());
}
@Test
public void testToStringWithClassname() {
assertEquals("TestObjectClass{value=test}", testObject.toStringWithClassName());
}
@Test
public void testToStringWithNullArray() {
assertEquals("TestObjectClassWithNullArray{array=null}", (new TestObjectClassWithNullArray()).toString());
}
@Test
public void testToStringWithShortArray() {
assertEquals("TestObjectClassWithShortArray{array=[49, 50]}", (new TestObjectClassWithShortArray()).toString());
}
@Test
public void testToStringWithLongArray() {
assertEquals("TestObjectClassWithLongArray{array.length=40}", (new TestObjectClassWithLongArray()).toString());
}
@Test
public void testToStringWithLongArrayFullOutput() {
assertEquals("TestObjectClassWithLongArray{array=[49, 50, 51, 52, 53, 54, 55, 56, 57, 48, 49, 50, 51, 52, 53, " +
"54, 55, 56, 57, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 48]}",
(new TestObjectClassWithLongArray()).toString(true));
}
@Test
public void testToStringWithList() {
assertEquals("TestObjectClassWithList{list.size=1}", (new TestObjectClassWithList()).toString());
}
@Test
public void testToStringWithListFullOutput() {
assertEquals("TestObjectClassWithList{list=[test]}", (new TestObjectClassWithList()).toString(true));
}
@Test
public void testToStringWithSet() {
assertEquals("TestObjectClassWithSet{set.size=1}", (new TestObjectClassWithSet()).toString());
}
@Test
public void testToStringWithSetFullOutput() {
assertEquals("TestObjectClassWithSet{set=[test]}", (new TestObjectClassWithSet()).toString(true));
}
@Test
public void testToStringWithMap() {
assertEquals("TestObjectClassWithMap{map.size=1}", (new TestObjectClassWithMap()).toString());
}
@Test
public void testToStringWithMapFullOutput() {
assertEquals("TestObjectClassWithMap{map={test=test}}", (new TestObjectClassWithMap()).toString(true));
}
@Test
public void testToStringWithQueue() {
assertEquals("TestObjectClassWithQueue{queue.size=1}", (new TestObjectClassWithQueue()).toString());
}
@Test
public void testToStringWithQueueFullOutput() {
assertEquals("TestObjectClassWithQueue{queue=[test]}", (new TestObjectClassWithQueue()).toString(true));
}
@Test
public void testToStringHelperOmitNullValues() {
assertEquals("TestObjectClassWithNullArray{}",
MoreObjects.toStringHelper(TestObjectClassWithNullArray.class, true).omitNullValues().toString());
}
@Test
public void testToStringHelperWithPrimitiveTypes() {
assertEquals("TestPrimitiveTypeObjectClass{b=true, c=c, d=0.1, f=0.2, i=3, l=4}",
(new TestPrimitiveTypesObjectClass().toString()));
}
@Test
public void testToStringHelperWithPrimitiveArrays() {
assertEquals("TestPrimitiveArraysObjectClass{b=[true], c=[c], d=[0.1], f=[0.2], i=[3], l=[4]}",
(new TestPrimitiveArraysObjectClass().toString()));
}
@Test
public void testToStringHelperWithObjectArray() {
TestObjectClass[] array = new TestObjectClass[]{new TestObjectClass("1"), new TestObjectClass("2")};
assertEquals("{array.length=2}",
MoreObjects.toStringHelper("").add("array", array).toString());
}
@Test
public void testToStringHelperWitNullObjectArray() {
assertEquals("{array=null}",
MoreObjects.toStringHelper("").add("array", (TestObjectClass[])null).toString());
}
@Test
public void testToStringHelperAddPrimitiveValues() {
TestPrimitiveTypesObjectClass container = new TestPrimitiveTypesObjectClass();
assertEquals("{true, c, 0.1, 0.2, 3, 4}",
MoreObjects.toStringHelper("")
.addValue(container.b)
.addValue(container.c)
.addValue(container.d)
.addValue(container.f)
.addValue(container.i)
.addValue(container.l)
.toString());
}
private class TestObjectClassWithSecureField {
private String secret = "secret";
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.addSecure("secret", secret)
.toString();
}
}
private class TestObjectClassWithNullArray {
private byte[] array = null;
@Override
public String toString() {
return MoreObjects.toStringHelper(this, false)
.add("array", array)
.toString();
}
}
private abstract class TestObjectClassWithContainer {
public abstract String toString(boolean withDetails);
@Override
public String toString() {
return toString(false);
}
}
private class TestObjectClassWithList extends TestObjectClassWithContainer {
private List<String> list = new ArrayList<String>();
{
list.add("test");
}
@Override
public String toString(boolean withDetails) {
return MoreObjects.toStringHelper("TestObjectClassWithList", withDetails)
.add("list", list)
.toString();
}
}
private class TestObjectClassWithSet extends TestObjectClassWithContainer {
private Set<String> set = new HashSet<String>();
{
set.add("test");
}
@Override
public String toString(boolean withDetails) {
return MoreObjects.toStringHelper("TestObjectClassWithSet", withDetails)
.add("set", set)
.toString();
}
}
private class TestObjectClassWithMap extends TestObjectClassWithContainer {
private Map<String, String> map = new HashMap<String, String>();
{
map.put("test", "test");
}
@Override
public String toString(boolean withDetails) {
return MoreObjects.toStringHelper("TestObjectClassWithMap", withDetails)
.add("map", map)
.toString();
}
}
private class TestObjectClassWithQueue extends TestObjectClassWithContainer {
private Queue<String> queue = new ArrayDeque<String>();
{
queue.add("test");
}
@Override
public String toString(boolean withDetails) {
return MoreObjects.toStringHelper("TestObjectClassWithQueue", withDetails)
.add("queue", queue)
.toString();
}
}
private class TestObjectClassWithShortArray {
private byte[] array = "12".getBytes();
@Override
public String toString() {
return MoreObjects.toStringHelper("TestObjectClassWithShortArray")
.add("array", array)
.toString();
}
}
private class TestObjectClassWithLongArray extends TestObjectClassWithContainer {
private byte[] array = "1234567890123456789012345678901234567890".getBytes();
@Override
public String toString(boolean withDetails) {
return MoreObjects.toStringHelper("TestObjectClassWithLongArray", withDetails)
.add("array", array)
.toString();
}
}
    // Minimal fixture with a single String field; used by most toString/equals tests.
    private class TestObjectClass {
        private String value = "test";

        public TestObjectClass() {}

        public TestObjectClass(String value) {
            this.value = value;
        }

        // toStringHelper addressed by an explicit name string.
        @Override
        public String toString() {
            return MoreObjects.toStringHelper("TestObjectClass")
                    .add("value", value)
                    .toString();
        }

        // toStringHelper addressed by a Class object.
        public String toStringWithClass() {
            return MoreObjects.toStringHelper(this.getClass())
                    .add("value", value)
                    .toString();
        }

        // toStringHelper addressed by class name. NOTE(review): body is identical
        // to toString() above — presumably intended to exercise the String overload.
        public String toStringWithClassName() {
            return MoreObjects.toStringHelper("TestObjectClass")
                    .add("value", value)
                    .toString();
        }
    }
private class ChildTestObjectClass extends TestObjectClass {
private boolean flag = true;
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.addValue(super.toString())
.add("flag", flag)
.toString();
}
}
    // Fixture exercising the add(...) overloads for every primitive type.
    private class TestPrimitiveTypesObjectClass {
        boolean b = true;
        char c = 'c';
        double d = 0.1;
        float f = 0.2f;
        int i = 3;
        long l = 4l;

        @Override
        public String toString() {
            // The label is deliberately the singular "TestPrimitiveTypeObjectClass":
            // testToStringHelperWithPrimitiveTypes asserts this exact string, so do
            // not "fix" it to match the class name.
            return MoreObjects.toStringHelper("TestPrimitiveTypeObjectClass")
                    .add("b", b)
                    .add("c", c)
                    .add("d", d)
                    .add("f", f)
                    .add("i", i)
                    .add("l", l)
                    .toString();
        }
    }
private class TestPrimitiveArraysObjectClass {
boolean[] b = new boolean[]{true};
char[] c = new char[]{'c'};
double[] d = new double[]{0.1};
float[] f = new float[]{0.2f};
int[] i = new int[]{3};
long[] l = new long[]{4l};
@Override
public String toString() {
return MoreObjects.toStringHelper("TestPrimitiveArraysObjectClass")
.add("b", b)
.add("c", c)
.add("d", d)
.add("f", f)
.add("i", i)
.add("l", l)
.toString();
}
}
}
|
import React, { useState } from 'react';
const CountVowels = () => {
const [input, setInput] = useState('');
const [vowels, setVowels] = useState(0);
const [consonants, setConsonants] = useState(0);
const [alphabets, setAlphabets] = useState(0);
const handleChange = (e) => {
setInput(e.target.value);
let countVowels = 0;
let countConsonants = 0;
let countAlphabets = 0;
let inputString = input.toLowerCase();
for (let i = 0; i < inputString.length; i++) {
let char = inputString.charAt(i);
if (char.match(/[aeiou]/)) {
countVowels += 1;
} else if (char.match(/[a-z]/) && char !== ' ') {
countConsonants += 1;
}
if (char.match(/[a-z]/)) {
countAlphabets += 1;
}
}
setVowels(countVowels);
setConsonants(countConsonants);
setAlphabets(countAlphabets);
};
return (
<div>
<input type="text" placeholder="Enter string here" onChange={handleChange} />
<p>Vowels: {vowels}</p>
<p>Consonants: {consonants}</p>
<p>Alphabets: {alphabets}</p>
</div>
);
};
export default CountVowels; |
# Smoke test: the composer binary must be installed and runnable for the cphp user.
@test "composer" {
su - cphp -c "composer --version"
}
|
package com.solofeed.tchernocraft.block;
import com.solofeed.tchernocraft.Tchernocraft;
import net.minecraft.creativetab.CreativeTabs;
/**
 * Tchernocraft block contract. Every block added by the mod implements this
 * interface so registration code can query its name and creative tab.
 */
public interface ITchernocraftBlock {

    /**
     * Get the block's registry name.
     *
     * @return the block's name
     */
    String getName();

    /**
     * Get the block's creative tab.
     *
     * @return the tab in which the block will be displayed in creative mode
     */
    default CreativeTabs getCreativeTabs(){
        return Tchernocraft.creativeTab;
    }
}
|
<filename>sshd-core/src/test/java/org/apache/sshd/server/ServerAuthenticationManagerTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.server;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.sshd.common.NamedResource;
import org.apache.sshd.common.keyprovider.HostKeyCertificateProvider;
import org.apache.sshd.common.keyprovider.KeyPairProvider;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.server.auth.BuiltinUserAuthFactories;
import org.apache.sshd.server.auth.UserAuthFactory;
import org.apache.sshd.server.auth.gss.GSSAuthenticator;
import org.apache.sshd.server.auth.hostbased.HostBasedAuthenticator;
import org.apache.sshd.server.auth.keyboard.KeyboardInteractiveAuthenticator;
import org.apache.sshd.server.auth.password.PasswordAuthenticator;
import org.apache.sshd.server.auth.pubkey.PublickeyAuthenticator;
import org.apache.sshd.util.test.BaseTestSupport;
import org.apache.sshd.util.test.NoIoTestCase;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.MethodSorters;
/**
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@Category({ NoIoTestCase.class })
public class ServerAuthenticationManagerTest extends BaseTestSupport {
    public ServerAuthenticationManagerTest() {
        super();
    }

    // Verifies the default (interface-provided) name-list accessors:
    // setUserAuthFactoriesNameList must translate the comma-separated built-in
    // factory names into concrete factories via setUserAuthFactories, and
    // getUserAuthFactoriesNameList must render them back to the same string.
    @Test
    public void testDefaultUserAuthFactoriesMethods() {
        // Captures the list the default method passes to setUserAuthFactories.
        AtomicReference<List<UserAuthFactory>> factoriesHolder = new AtomicReference<>();
        // Minimal stub: only the factories accessors are functional; every other
        // setter throws so the test proves they are never invoked.
        @SuppressWarnings("checkstyle:anoninnerlength")
        ServerAuthenticationManager manager = new ServerAuthenticationManager() {
            @Override
            public List<UserAuthFactory> getUserAuthFactories() {
                return factoriesHolder.get();
            }

            @Override
            public void setUserAuthFactories(List<UserAuthFactory> userAuthFactories) {
                // getAndSet also guards against a second invocation.
                assertNull("Unexpected multiple invocation", factoriesHolder.getAndSet(userAuthFactories));
            }

            @Override
            public PasswordAuthenticator getPasswordAuthenticator() {
                return null;
            }

            @Override
            public void setPasswordAuthenticator(PasswordAuthenticator passwordAuthenticator) {
                throw new UnsupportedOperationException("setPasswordAuthenticator(" + passwordAuthenticator + ")");
            }

            @Override
            public PublickeyAuthenticator getPublickeyAuthenticator() {
                return null;
            }

            @Override
            public void setPublickeyAuthenticator(PublickeyAuthenticator publickeyAuthenticator) {
                throw new UnsupportedOperationException("setPublickeyAuthenticator(" + publickeyAuthenticator + ")");
            }

            @Override
            public KeyboardInteractiveAuthenticator getKeyboardInteractiveAuthenticator() {
                return null;
            }

            @Override
            public void setKeyboardInteractiveAuthenticator(KeyboardInteractiveAuthenticator interactiveAuthenticator) {
                throw new UnsupportedOperationException(
                        "setKeyboardInteractiveAuthenticator(" + interactiveAuthenticator + ")");
            }

            @Override
            public GSSAuthenticator getGSSAuthenticator() {
                return null;
            }

            @Override
            public void setGSSAuthenticator(GSSAuthenticator gssAuthenticator) {
                throw new UnsupportedOperationException("setGSSAuthenticator(" + gssAuthenticator + ")");
            }

            @Override
            public HostBasedAuthenticator getHostBasedAuthenticator() {
                return null;
            }

            @Override
            public void setHostBasedAuthenticator(HostBasedAuthenticator hostBasedAuthenticator) {
                throw new UnsupportedOperationException("setHostBasedAuthenticator(" + hostBasedAuthenticator + ")");
            }

            @Override
            public KeyPairProvider getKeyPairProvider() {
                return null;
            }

            @Override
            public void setKeyPairProvider(KeyPairProvider keyPairProvider) {
                throw new UnsupportedOperationException("setKeyPairProvider(" + keyPairProvider + ")");
            }

            @Override
            public HostKeyCertificateProvider getHostKeyCertificateProvider() {
                return null;
            }

            @Override
            public void setHostKeyCertificateProvider(HostKeyCertificateProvider provider) {
                throw new UnsupportedOperationException("setHostKeyCertificateProvider(" + provider + ")");
            }
        };
        // No factories installed yet: the rendered name list must be empty.
        assertEquals("Mismatched initial factories list", "", manager.getUserAuthFactoriesNameList());

        String expected = NamedResource.getNames(BuiltinUserAuthFactories.VALUES);
        manager.setUserAuthFactoriesNameList(expected);
        assertEquals("Mismatched updated factories names", expected, manager.getUserAuthFactoriesNameList());

        List<UserAuthFactory> factories = factoriesHolder.get();
        assertEquals("Mismatched factories count",
                BuiltinUserAuthFactories.VALUES.size(), GenericUtils.size(factories));
        // Every built-in factory must be present in the installed list.
        for (BuiltinUserAuthFactories f : BuiltinUserAuthFactories.VALUES) {
            assertTrue("Missing factory=" + f.name(), factories.contains(f.create()));
        }
    }
}
|
#!/usr/bin/bash
# EC2 user-data bootstrap for the Python image-gallery application:
# installs packages, clones the app, pulls nginx config from S3, starts services.
export IMAGE_GALLERY_SCRIPT_VERSION="1.1"
# S3 bucket holding the nginx configuration deployed below.
CONFIG_BUCKET="edu.au.cc.ig-config"
# Install packages
yum -y update
yum install -y python3 git postgresql postgresql-devel gcc python3-devel
amazon-linux-extras install -y nginx1
# Configure/install custom software
cd /home/ec2-user
git clone https://github.com/hootskoot/python-image-gallery.git
chown -R ec2-user:ec2-user python-image-gallery
# Install Python deps as ec2-user (not root) so they land in the user site-packages.
su ec2-user -l -c "cd ~/python-image-gallery && pip3 install -r requirements.txt --user"
aws s3 cp s3://${CONFIG_BUCKET}/nginx/nginx.conf /etc/nginx
aws s3 cp s3://${CONFIG_BUCKET}/nginx/default.d/image_gallery.conf /etc/nginx/default.d
aws s3 cp s3://${CONFIG_BUCKET}/nginx/index.html /usr/share/nginx/html
chown nginx:nginx /usr/share/nginx/html/index.html
# Start/enable services
systemctl stop postfix
systemctl disable postfix
systemctl start nginx
systemctl enable nginx
# Launch the app in the background; stdout/stderr go to the log file.
su ec2-user -l -c "cd ~/python-image-gallery && ./start" >/var/log/image_gallery.log 2>&1 &
|
# Print the greeting followed by a newline (same output as the echo form).
printf '%s\n' "Hello World"
|
/*
This file is part of Peers, a java SIP softphone.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Copyright 2010 <NAME>
*/
package net.kislay.goasat.media;
import java.util.ArrayList;
import java.util.List;
import net.kislay.goasat.rtp.RFC4733;
import net.kislay.goasat.rtp.RtpPacket;
public class DtmfFactory {

    /**
     * Builds the RTP packet sequence announcing one DTMF digit as an
     * RFC 4733 telephone-event: three start packets with growing duration
     * (160, 320, 480 samples) followed by three redundant end-of-event
     * packets (duration 640).
     *
     * @param digit DTMF character ('0'-'9', '*', '#', 'A'-'D')
     * @return the ordered list of RTP packets to send
     */
    public List<RtpPacket> createDtmfPackets(char digit) {
        List<RtpPacket> packets = new ArrayList<RtpPacket>();

        // Payload layout (RFC 4733): event code, E-bit/volume, 16-bit duration.
        byte[] payload = new byte[4];
        payload[0] = eventCodeOf(digit);
        payload[1] = 10;   // E-bit clear, volume 10
        payload[2] = 0;    // duration high byte
        payload[3] = -96;  // duration low byte -> 0x00A0 = 160

        // The first packet of the event carries the RTP marker bit.
        packets.add(buildPacket(payload, true));

        // Second packet: duration grows to 0x0140 = 320.
        payload = payload.clone();
        payload[2] = 1;
        payload[3] = 64;
        packets.add(buildPacket(payload, false));

        // Third packet: duration 0x01E0 = 480.
        payload = payload.clone();
        payload[2] = 1;
        payload[3] = -32;
        packets.add(buildPacket(payload, false));

        // Three end-of-event packets sharing one payload, duration 0x0280 = 640.
        payload = payload.clone();
        payload[1] = -0x76; // E-bit set + volume 10
        payload[2] = 2;
        payload[3] = -128;
        for (int i = 0; i < 3; i++) {
            packets.add(buildPacket(payload, false));
        }
        return packets;
    }

    /** Maps a DTMF character to its RFC 4733 event code. */
    private static byte eventCodeOf(char digit) {
        if (digit == '*') {
            return 10;
        }
        if (digit == '#') {
            return 11;
        }
        if (digit >= 'A' && digit <= 'D') {
            return (byte) (digit - 53); // 'A' -> 12 ... 'D' -> 15
        }
        return (byte) (digit - 48); // '0'-'9' -> 0-9
    }

    /** Wraps a telephone-event payload in an RTP packet. */
    private static RtpPacket buildPacket(byte[] payload, boolean marker) {
        RtpPacket packet = new RtpPacket();
        packet.setData(payload);
        packet.setMarker(marker);
        packet.setPayloadType(RFC4733.PAYLOAD_TYPE_TELEPHONE_EVENT);
        return packet;
    }
}
|
package geojson
import "time"
// When is a datetime clause bound to a GeoJSON Feature object.
// GeoJSON-LD spec: https://github.com/geojson/geojson-ld
type When struct {
	// Type is the temporal kind, e.g. "Instant" or "Interval".
	Type string `json:"@type,omitempty"`
	// Datetime is the point in time the clause refers to; may be nil.
	Datetime *time.Time `json:"datetime,omitempty"`
}
// NewWhen builds a When clause from a temporal type and a datetime pointer.
func NewWhen(Type string, Datetime *time.Time) When {
	return When{Type: Type, Datetime: Datetime}
}
// Valid reports whether the clause has a recognised type ("Instant" or
// "Interval") and a usable datetime. A nil or zero-ish (year 1) datetime
// is invalid.
//
// Fix: the original dereferenced w.Datetime without a nil check and
// panicked on a clause deserialized without a "datetime" field.
func (w *When) Valid() bool {
	if w.Datetime == nil {
		return false
	}
	if w.Type == "Instant" || w.Type == "Interval" {
		return w.Datetime.Year() != 1
	}
	return false
}
|
const db = require('../util/database');
module.exports = class Ad {
constructor(id, title, price) {
this.id = id;
this.title = title;
this.price = price;
}
save() {
return db.execute('INSERT INTO ads (title, price) VALUES (?, ?)', [this.title, this.price]);
}
static deleteById(id) {}
static fetchAll() {
return db.execute('SELECT * FROM ads');
}
static findById(id) {
return db.execute('SELECT * FROM ads WHERE ads.id = ?', [id]);
}
};
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.des
import play.api.libs.json.Json
/**
 * Response returned by DES/ETMP for an amend-variation submission, carrying
 * the processing metadata and the fee breakdown for the application.
 *
 * NOTE(review): the fee fields are all optional — presumably omitted by ETMP
 * when no fee applies; confirm against the DES API specification.
 */
case class AmendVariationResponse(
  processingDate: String,
  etmpFormBundleNumber: String,
  registrationFee: Option[BigDecimal],
  fpNumbers: Option[Int],
  fpFeeRate: Option[BigDecimal],
  fpFee: Option[BigDecimal],
  responsiblePersonNotCharged: Option[Int],
  premiseFYNumber: Option[Int],
  premiseFYFeeRate: Option[BigDecimal],
  premiseFYTotal: Option[BigDecimal],
  premiseHYNumber: Option[Int],
  premiseHYFeeRate: Option[BigDecimal],
  premiseHYTotal: Option[BigDecimal],
  premiseFee: Option[BigDecimal],
  totalFees: Option[BigDecimal],
  paymentReference: Option[String],
  difference: Option[BigDecimal],
  // Approval-check fields default to None for backward compatibility with
  // payloads produced before these were introduced.
  approvalCheckNumbers: Option[Int] = None,
  approvalCheckFeeRate: Option[BigDecimal] = None,
  approvalCheckFee: Option[BigDecimal] = None
)

object AmendVariationResponse {
  // Play JSON (de)serializer derived from the case-class fields.
  implicit val format = Json.format[AmendVariationResponse]
}
|
<reponame>markelg/cdsapi
# (C) Copyright 2018 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import time
import os
import logging
import uuid
import requests
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
from tqdm import tqdm
def bytes_to_string(n):
    """Format a byte count as a human-readable string, e.g. 2048 -> '2K'.

    The value is scaled by powers of 1024 and rounded to one decimal place.
    Fix: the original kept dividing past the last known unit and raised
    IndexError for n >= 1024**6; scaling now stops at 'P'.
    """
    units = ['', 'K', 'M', 'G', 'T', 'P']
    i = 0
    while n >= 1024 and i < len(units) - 1:
        n /= 1024.0
        i += 1
    # Round to one decimal, then let %g drop a trailing '.0'.
    return '%g%s' % (int(n * 10 + 0.5) / 10.0, units[i])
def read_config(path):
    """Parse a ~/.cdsapirc-style file into a dict.

    Only the keys 'url', 'key' and 'verify' are kept; lines without a
    colon are ignored, and values keep everything after the first colon.
    """
    wanted = ('url', 'key', 'verify')
    config = {}
    with open(path) as handle:
        for line in handle:
            if ':' not in line:
                continue
            name, _, value = line.strip().partition(':')
            if name in wanted:
                config[name] = value.strip()
    return config
def toJSON(obj):
    """Recursively convert *obj* into JSON-friendly data.

    Objects exposing a callable ``toJSON`` hook are converted via that hook;
    lists/tuples become lists, dicts are converted value-wise, and anything
    else is returned unchanged.
    """
    hook = getattr(obj, "toJSON", None)
    if callable(hook):
        return hook()
    if isinstance(obj, (list, tuple)):
        return [toJSON(item) for item in obj]
    if isinstance(obj, dict):
        return {key: toJSON(value) for key, value in obj.items()}
    return obj
class Result(object):
def __init__(self, client, reply):
self.reply = reply
self._url = client.url
self.session = client.session
self.robust = client.robust
self.verify = client.verify
self.cleanup = client.delete
self.debug = client.debug
self.info = client.info
self.warning = client.warning
self.error = client.error
self.sleep_max = client.sleep_max
self.retry_max = client.retry_max
self.timeout = client.timeout
self.progress = client.progress
self._deleted = False
def toJSON(self):
r = dict(resultType='url',
contentType=self.content_type,
contentLength=self.content_length,
location=self.location)
return r
def _download(self, url, size, target):
if target is None:
target = url.split('/')[-1]
self.info("Downloading %s to %s (%s)", url, target, bytes_to_string(size))
start = time.time()
mode = 'wb'
total = 0
sleep = 10
tries = 0
headers = None
while tries < self.retry_max:
r = self.robust(requests.get)(url,
stream=True,
verify=self.verify,
headers=headers,
timeout=self.timeout)
try:
r.raise_for_status()
with tqdm(total=size,
unit_scale=True,
unit_divisor=1024,
unit='B',
disable=not self.progress,
leave=False,
) as pbar:
pbar.update(total)
with open(target, mode) as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
total += len(chunk)
pbar.update(len(chunk))
except requests.exceptions.ConnectionError as e:
self.error("Download interupted: %s" % (e,))
finally:
r.close()
if total >= size:
break
self.error("Download incomplete, downloaded %s byte(s) out of %s" % (total, size))
self.warning("Sleeping %s seconds" % (sleep,))
time.sleep(sleep)
mode = 'ab'
total = os.path.getsize(target)
sleep *= 1.5
if sleep > self.sleep_max:
sleep = self.sleep_max
headers = {'Range': 'bytes=%d-' % total}
tries += 1
self.warning("Resuming download at byte %s" % (total, ))
if total != size:
raise Exception("Download failed: downloaded %s byte(s) out of %s" % (total, size))
elapsed = time.time() - start
if elapsed:
self.info("Download rate %s/s", bytes_to_string(size / elapsed))
return target
def download(self, target=None):
return self._download(self.location,
self.content_length,
target)
@property
def content_length(self):
return int(self.reply['content_length'])
@property
def location(self):
return urljoin(self._url, self.reply['location'])
@property
def content_type(self):
return self.reply['content_type']
def __repr__(self):
return "Result(content_length=%s,content_type=%s,location=%s)" % (self.content_length,
self.content_type,
self.location)
def check(self):
self.debug("HEAD %s", self.location)
metadata = self.robust(self.session.head)(self.location,
verify=self.verify,
timeout=self.timeout)
metadata.raise_for_status()
self.debug(metadata.headers)
return metadata
def delete(self):
if self._deleted:
return
if 'request_id' in self.reply:
rid = self.reply['request_id']
task_url = '%s/tasks/%s' % (self._url, rid)
self.debug("DELETE %s", task_url)
delete = self.session.delete(task_url, verify=self.verify)
self.debug("DELETE returns %s %s", delete.status_code, delete.reason)
try:
delete.raise_for_status()
except Exception:
self.warning("DELETE %s returns %s %s",
task_url, delete.status_code, delete.reason)
self._deleted = True
def __del__(self):
try:
if self.cleanup:
self.delete()
except Exception as e:
print(e)
class Client(object):
logger = logging.getLogger('cdsapi')
def __init__(self,
url=os.environ.get('CDSAPI_URL'),
key=os.environ.get('CDSAPI_KEY'),
quiet=False,
debug=False,
verify=None,
timeout=60,
progress=True,
full_stack=False,
delete=True,
retry_max=500,
sleep_max=120,
info_callback=None,
warning_callback=None,
error_callback=None,
debug_callback=None,
):
if not quiet:
if debug:
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(level=level,
format='%(asctime)s %(levelname)s %(message)s')
dotrc = os.environ.get('CDSAPI_RC', os.path.expanduser('~/.cdsapirc'))
if url is None or key is None:
if os.path.exists(dotrc):
config = read_config(dotrc)
if key is None:
key = config.get('key')
if url is None:
url = config.get('url')
if verify is None:
verify = int(config.get('verify', 1))
if url is None or key is None or key is None:
raise Exception('Missing/incomplete configuration file: %s' % (dotrc))
self.url = url
self.key = key
self.quiet = quiet
self.progress = progress and not quiet
self.verify = True if verify else False
self.timeout = timeout
self.sleep_max = sleep_max
self.retry_max = retry_max
self.full_stack = full_stack
self.delete = delete
self.last_state = None
self.debug_callback = debug_callback
self.warning_callback = warning_callback
self.info_callback = info_callback
self.error_callback = error_callback
self.session = requests.Session()
self.session.auth = tuple(self.key.split(':', 2))
self.debug("CDSAPI %s", dict(url=self.url,
key=self.key,
quiet=self.quiet,
verify=self.verify,
timeout=self.timeout,
progress=self.progress,
sleep_max=self.sleep_max,
retry_max=self.retry_max,
full_stack=self.full_stack,
delete=self.delete
))
def retrieve(self, name, request, target=None):
result = self._api('%s/resources/%s' % (self.url, name), request, 'POST')
if target is not None:
result.download(target)
return result
def service(self, name, *args, **kwargs):
self.delete = False # Don't delete results
name = '/'.join(name.split('.'))
request = toJSON(dict(args=args, kwargs=kwargs))
result = self._api('%s/tasks/services/%s/clientid-%s' % (self.url, name, uuid.uuid4().hex), request, 'PUT')
return result
def workflow(self, code, *args, workflow_name='application', **kwargs):
params = dict(code=code,
args=args,
kwargs=kwargs,
workflow_name=workflow_name)
return self.service("tool.toolbox.orchestrator.run_workflow", params)
def status(self, context=None):
url = '%s/status.json' % (self.url,)
r = requests.get(url, verify=self.verify)
r.raise_for_status()
return r.json()
def _status(self, url):
try:
status = self.status(url)
info = status.get('info', [])
if not isinstance(info, list):
info = [info]
for i in info:
self.info("%s", i)
warning = status.get('warning', [])
if not isinstance(warning, list):
warning = [warning]
for w in warning:
self.warning("%s", w)
except Exception:
pass
def _api(self, url, request, method):
self._status(url)
session = self.session
self.info("Sending request to %s", url)
self.debug("%s %s %s", method, url, json.dumps(request))
if method == 'PUT':
action = session.put
else:
action = session.post
result = self.robust(action)(url,
json=request,
verify=self.verify,
timeout=self.timeout)
reply = None
try:
result.raise_for_status()
reply = result.json()
except Exception:
if reply is None:
try:
reply = result.json()
except Exception:
reply = dict(message=result.text)
self.debug(json.dumps(reply))
if 'message' in reply:
error = reply['message']
if 'context' in reply and 'required_terms' in reply['context']:
e = [error]
for t in reply['context']['required_terms']:
e.append("To access this resource, you first need to accept the terms"
"of '%s' at %s" % (t['title'], t['url']))
error = '. '.join(e)
raise Exception(error)
else:
raise
sleep = 1
while True:
self.debug("REPLY %s", reply)
if reply['state'] != self.last_state:
self.info("Request is %s" % (reply['state'],))
self.last_state = reply['state']
if reply['state'] == 'completed':
self.debug("Done")
if 'result' in reply:
return reply['result']
return Result(self, reply)
if reply['state'] in ('queued', 'running'):
rid = reply['request_id']
self.debug("Request ID is %s, sleep %s", rid, sleep)
time.sleep(sleep)
sleep *= 1.5
if sleep > self.sleep_max:
sleep = self.sleep_max
task_url = '%s/tasks/%s' % (self.url, rid)
self.debug("GET %s", task_url)
result = self.robust(session.get)(task_url,
verify=self.verify,
timeout=self.timeout)
result.raise_for_status()
reply = result.json()
continue
if reply['state'] in ('failed',):
self.error("Message: %s", reply['error'].get('message'))
self.error("Reason: %s", reply['error'].get('reason'))
for n in reply.get('error', {}).get('context', {}).get('traceback', '').split('\n'):
if n.strip() == '' and not self.full_stack:
break
self.error(" %s", n)
raise Exception("%s. %s." % (reply['error'].get('message'), reply['error'].get('reason')))
raise Exception('Unknown API state [%s]' % (reply['state'],))
def info(self, *args, **kwargs):
if self.info_callback:
self.info_callback(*args, **kwargs)
else:
self.logger.info(*args, **kwargs)
def warning(self, *args, **kwargs):
if self.warning_callback:
self.warning_callback(*args, **kwargs)
else:
self.logger.warning(*args, **kwargs)
def error(self, *args, **kwargs):
if self.error_callback:
self.error_callback(*args, **kwargs)
else:
self.logger.error(*args, **kwargs)
def debug(self, *args, **kwargs):
if self.debug_callback:
self.debug_callback(*args, **kwargs)
else:
self.logger.debug(*args, **kwargs)
def _download(self, results, targets=None):
if isinstance(results, Result):
if targets:
path = targets.pop(0)
else:
path = None
return results.download(path)
if isinstance(results, (list, tuple)):
return [self._download(x, targets) for x in results]
if isinstance(results, dict):
if 'location' in results and 'contentLength' in results:
reply = dict(location=results['location'],
content_length=results['contentLength'],
content_type=results.get('contentType'))
if targets:
path = targets.pop(0)
else:
path = None
return Result(self, reply).download(path)
r = {}
for k, v in results.items():
r[v] = self._download(v, targets)
return r
return results
def download(self, results, targets=None):
if targets:
# Make a copy
targets = [t for t in targets]
return self._download(results, targets)
def remote(self, url):
r = requests.head(url)
reply = dict(location=url,
content_length=r.headers['Content-Length'],
content_type=r.headers['Content-Type'])
return Result(self, reply)
def robust(self, call):
def retriable(code, reason):
if code in [requests.codes.internal_server_error,
requests.codes.bad_gateway,
requests.codes.service_unavailable,
requests.codes.gateway_timeout,
requests.codes.too_many_requests,
requests.codes.request_timeout]:
return True
return False
def wrapped(*args, **kwargs):
tries = 0
while tries < self.retry_max:
try:
r = call(*args, **kwargs)
except requests.exceptions.ConnectionError as e:
r = None
self.warning("Recovering from connection error [%s], attemps %s of %s",
e, tries, self.retry_max)
if r is not None:
if not retriable(r.status_code, r.reason):
return r
try:
self.warning(r.json()['reason'])
except Exception:
pass
self.warning("Recovering from HTTP error [%s %s], attemps %s of %s",
r.status_code, r.reason, tries, self.retry_max)
tries += 1
self.warning("Retrying in %s seconds", self.sleep_max)
time.sleep(self.sleep_max)
self.info("Retrying now...")
return wrapped
|
package ru.otus.hw6.atm;
/**
 * Banknote denominations supported by the ATM.
 */
public enum Nominal {
    TEN(10),
    FIFTY(50),
    HUNDRED(100),
    FIVE_HUNDRED(500),
    THOUSAND(1000);

    /**
     * Banknote face value in currency units. Declared {@code final} so enum
     * constants stay immutable (the field was previously writable by anyone).
     */
    public final int value;

    Nominal(int value) {
        this.value = value;
    }
}
#!/bin/bash
# Rebuild the luya phpunit test database from scratch and dump a baseline SQL file.
# Abort on the first failing command so a broken migration never gets dumped.
set -e

# Database credentials
user="root"
password="root"
db_name="luya_env_phpunit"

cd public_html

# Drop the old database (ignore failure when it does not exist yet), recreate it.
mysqladmin -f -u"$user" -p"$password" drop "$db_name" || true
mysqladmin -u"$user" -p"$password" create "$db_name"

# Run migrations, import, and seed an admin user plus application data.
php index.php migrate --interactive=0
php index.php import
php index.php admin/setup --email=test@luya.io --password=luyaio --firstname=John --lastname=Doe --interactive=0
php index.php data/setup

chmod 0777 ../sql
mysqldump --user="$user" --password="$password" "$db_name" > ../sql/1.0.0.sql
|
<filename>LZUISDK/SDK/LSDeviceManagerFramework.framework/Headers/LSEProductInfo.h
//
// LSEProductInfo.h
// LSWearable
//
// Created by <NAME> on 2017/3/9.
// Copyright © 2017年 lifesense. All rights reserved.
//
#import <Foundation/Foundation.h>
/**
 Default device-binding methods.

 - LSEDefaultBindWayQRCode: QR code
 - LSEDefaultBindWaySNCodeAndQRCode: serial number (SN) plus QR code
 - LSEDefaultBindWaySearch: bind via Bluetooth search
 */
typedef NS_ENUM(NSUInteger, LSEDefaultBindWay) {
    LSEDefaultBindWayQRCode = 1,
    LSEDefaultBindWaySNCodeAndQRCode,
    LSEDefaultBindWaySearch,
    LSEDefaultBindWay1579
};
/** Factory-level product description, parsed from a server-provided dictionary. */
@interface FactoryProducts : NSObject
/** Initializes the model from a server-provided dictionary. */
- (instancetype)initWithDict:(NSDictionary *)dict;
/**
 Factory model identifier.
 */
@property (nonatomic, copy) NSString *model;
/**
 Transfer protocol.
 */
@property (nonatomic, copy) NSString *transferProtocal;
/**
 Bluetooth advertising (broadcast) name.
 */
@property (nonatomic, copy) NSString *bluetoothBroadcastName;
/**
 Communication channel:
 1: network,
 2: WIFI,
 3: GPRS,
 4: Bluetooth,
 5: WIFI_GPRS,
 6: NB_IOT
 */
@property (nonatomic, copy) NSString *communicationType;
/**
 Factory product type code:
 weight scale ("01")
 body-fat scale ("02")
 wristband ("04")
 blood glucose meter ("06")
 blood pressure monitor ("08")
 body composition analyzer ("09")
 payment card ("11")
 */
@property (nonatomic, copy) NSString *productTypeCode;
/**
 Whether a random code is required; YES for Bluetooth devices.
 */
@property (nonatomic, assign) BOOL randomCode;
/**
 serviceUUID
 */
@property (nonatomic, copy) NSString *serviceUUID;
/**
 Network-configuration method.
 */
@property (nonatomic, assign) NSInteger configureManne;
/**
 Product name.
 */
@property (nonatomic, copy) NSString *name;
@end
/** Product-level information used for device binding and display. */
@interface LSEProductInfo : NSObject
/** Initializes the model from a server-provided dictionary. */
- (instancetype)initWithDict:(NSDictionary *)dict;
/**
 Product name.
 */
@property (nonatomic, copy) NSString *name;
/**
 Device image URL.
 */
@property (nonatomic, copy) NSString *imageUrl;
/**
 Display category:
 1: wristband
 2: watch
 3: smart scale
 4: blood pressure monitor
 5: Lifesense interconnect
 */
@property (nonatomic, assign) NSInteger productTypeCode;
/**
 Default binding method:
 1. QR code
 2. SN number
 3. Bluetooth
 */
@property (nonatomic, assign) NSInteger defaultBindMode;
/**
 Other supported binding methods.
 */
@property (nonatomic, copy) NSArray *otherBindModes;
/**
 MAC-address prefixes used to filter results when scanning for Bluetooth devices.
 */
@property (nonatomic, copy) NSArray *servieUUIDList;
/**
 Factory products included under this product.
 */
@property (nonatomic, copy) NSArray<FactoryProducts *> *factoryProducts;
@end
/**
 Data model backing the device-list page.
 */
@interface LSProductInfoPage: NSObject
/** Initializes the model from a server-provided dictionary. */
- (instancetype)initWithDict:(NSDictionary *)dict;
/**
 Displayed product type.
 */
@property (nonatomic, strong) NSNumber *productType;
/**
 Highlighted icon image URL.
 */
@property (nonatomic, strong) NSString *highlightImageUrl;
/**
 Non-highlighted icon image URL.
 */
@property (nonatomic, strong) NSString *noHighlightImageUrl;
/**
 Displayed product type name.
 */
@property (nonatomic, strong) NSString *productTypeName;
/**
 Product details.
 */
@property (nonatomic, strong) NSArray<LSEProductInfo *> *iotDisplayProducts;
@end
|
<filename>main/array/main_int_array_combination.c
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include "int.h"
/* Print every n-combination of {0..m-1} to `out`, one per line (raw format). */
static void int_array_combination_fprint(FILE * out, int m, int n)
{
    int prod, i;
    int * a;

    /* Total number of n-combinations of m elements. */
    prod = int_binomial(m, n);
    a = (int *) malloc(n * sizeof(int));
    if (a == NULL)
    {
        /* Bug fix: test the malloc result directly instead of errno; errno is
         * not guaranteed to be zero on entry, so the old check could both
         * false-positive and miss a genuine failure. */
        perror("int_array_combination_fprint - cannot allocate memory for a");
        return;
    }
    /* Start from the identity combination (0, 1, ..., n-1). */
    int_array_assign_identity(a, n);
    i = 0;
    while (i < prod - 1)
    {
        int_array_fprint(out, n, a, "--raw");
        int_array_combination_next(a, m, n);
        ++i;
    }
    /* Print the final combination (also covers prod == 1). */
    int_array_fprint(out, n, a, "--raw");
    free(a);
}
/* Usage: prog m n — prints all n-combinations of m elements. */
int main(int argc, char * argv[])
{
    int m, n;

    /* Bug fix: validate the argument count before dereferencing argv[1]/argv[2];
     * the old code segfaulted when run without arguments. */
    if (argc < 3)
    {
        fprintf(stderr, "usage: %s m n\n", argv[0]);
        return EXIT_FAILURE;
    }
    m = atoi(argv[1]);
    n = atoi(argv[2]);
    /* Clear any stale errno so the post-call check reflects this call only. */
    errno = 0;
    int_array_combination_fprint(stdout, m, n);
    if (errno)
    {
        perror("main - printing error");
    }
    return errno;
}
|
#!/bin/bash
# Build the Debian package from the contrib/ template, then clean up.
# Abort on failure so a broken build does not silently delete the debian/ tree.
set -e
cp -R contrib/debian .
debuild -us -uc
debian/rules clean
rm -rf debian
|
<filename>simpleplot.h
#pragma once

// Umbrella header for the simpleplot library: including this single file
// pulls in every public plot type plus the drawing canvas.
#include "simpleplot/plots/hist.h"
#include "simpleplot/plots/line.h"
#include "simpleplot/plots/series.h"
#include "simpleplot/canvas.h"
package org.galaxyproject.dockstore_galaxy_interface.language;
import static org.galaxyproject.gxformat2.Cytoscape.END_ID;
import static org.galaxyproject.gxformat2.Cytoscape.START_ID;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.dockstore.common.DescriptorLanguage;
import io.dockstore.common.VersionTypeValidation;
import io.dockstore.language.CompleteLanguageInterface;
import io.dockstore.language.RecommendedLanguageInterface;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.galaxyproject.gxformat2.Cytoscape;
import org.galaxyproject.gxformat2.Lint;
import org.galaxyproject.gxformat2.LintContext;
import org.pf4j.Extension;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.error.YAMLException;
/** @author jmchilton */
public class GalaxyWorkflowPlugin extends Plugin {
  public static final Logger LOG = LoggerFactory.getLogger(GalaxyWorkflowPlugin.class);

  /**
   * Filename suffixes probed (combined with {@link #TEST_EXTENSIONS}) when searching for a
   * workflow's test parameter file.
   *
   * <p>Bug fix: the original array listed "-tests" twice and omitted "_test", so test files
   * named like {@code workflow_test.yml} were never discovered.
   */
  public static final String[] TEST_SUFFIXES = {"-tests", "_tests", "-test", "_test"};

  /** File extensions probed when searching for a workflow's test parameter file. */
  public static final String[] TEST_EXTENSIONS = {".yml", ".yaml", ".json"};

  /**
   * Constructor to be used by plugin manager for plugin instantiation. Your plugins have to provide
   * constructor with this exact signature to be successfully loaded by manager.
   *
   * @param wrapper plugin wrapper supplied by the pf4j manager
   */
  public GalaxyWorkflowPlugin(PluginWrapper wrapper) {
    super(wrapper);
  }

  /** Dockstore language-plugin implementation for Galaxy's gxformat2 workflow format. */
  @Extension
  public static class GalaxyWorkflowPluginImpl implements CompleteLanguageInterface {
    @Override
    public String launchInstructions(String trsID) {
      // No Galaxy-specific launch instructions are provided yet.
      return null;
    }

    /** Renders the workflow as Cytoscape graph elements for the DAG view. */
    @Override
    public Map<String, Object> loadCytoscapeElements(
        String initialPath,
        String contents,
        Map<String, Pair<String, GenericFileType>> indexedFiles) {
      final Map<String, Object> workflow = loadWorkflow(contents);
      return Cytoscape.getElements(workflow);
    }

    /** Builds one table row per workflow step/tool from the Cytoscape node list. */
    @Override
    public List<RowData> generateToolsTable(
        String initialPath,
        String contents,
        Map<String, Pair<String, GenericFileType>> indexedFiles) {
      final Map<String, Object> workflow = loadWorkflow(contents);
      final Map<String, Object> elements = Cytoscape.getElements(workflow);
      final List<Map> nodes = (List<Map>) elements.getOrDefault("nodes", Lists.newArrayList());
      removeStartAndEndNodes(nodes);
      return nodes.stream()
          .map(
              node -> {
                final RowData rowData = new RowData();
                final Map<String, Object> nodeData = (Map<String, Object>) node.get("data");
                rowData.label = (String) nodeData.getOrDefault("label", "");
                rowData.dockerContainer = (String) nodeData.getOrDefault("docker", "");
                rowData.filename = (String) nodeData.getOrDefault("run", "");
                // TODO: get a sane link here when Docker is hooked up
                try {
                  rowData.link = new URL((String) node.getOrDefault("repo_link", ""));
                } catch (MalformedURLException e) {
                  rowData.link = null;
                }
                rowData.rowType = RowType.TOOL;
                rowData.toolid = (String) nodeData.getOrDefault("id", "");
                return rowData;
              })
          .collect(Collectors.toList());
    }

    // Remove the artificial start and end nodes that were added for the DAG view.
    private void removeStartAndEndNodes(List<Map> nodes) {
      nodes.removeIf(
          node -> {
            Map data = (Map) node.get("data");
            String id = (String) data.get("id");
            return START_ID.equals(id) || END_ID.equals(id);
          });
    }

    /** Lints the workflow and maps any messages onto the primary descriptor path. */
    @Override
    public VersionTypeValidation validateWorkflowSet(
        String initialPath,
        String contents,
        Map<String, Pair<String, GenericFileType>> indexedFiles) {
      final LintContext lintContext = Lint.lint(loadWorkflow(contents));
      final boolean valid;
      valid = !lintContext.getFoundErrors();
      final Map<String, String> messagesAsMap = new HashMap<>();
      final List<String> validationMessages = lintContext.collectMessages();
      final StringBuilder builder = new StringBuilder();
      if (validationMessages.size() == 1) {
        builder.append(validationMessages.get(0));
      } else if (validationMessages.size() > 1) {
        // Multiple messages become a bulleted list.
        for (final String validationMessage : validationMessages) {
          builder.append("- ").append(validationMessage).append("\n");
        }
      }
      final String validationMessageMerged = builder.toString();
      if (validationMessageMerged.length() > 0) {
        messagesAsMap.put(initialPath, validationMessageMerged);
      }
      return new VersionTypeValidation(valid, messagesAsMap);
    }

    /** Test parameter files are not validated; always reports success. */
    @Override
    public VersionTypeValidation validateTestParameterSet(
        Map<String, Pair<String, GenericFileType>> indexedFiles) {
      return new VersionTypeValidation(true, new HashMap<>());
    }

    @Override
    public DescriptorLanguage getDescriptorLanguage() {
      return DescriptorLanguage.GXFORMAT2;
    }

    @Override
    public Pattern initialPathPattern() {
      // Why doesn't this seem to be called anywhere?
      return Pattern.compile("/(.*\\.gxwf\\.y[a]?ml|.*\\.ga)");
    }

    /** Indexes the workflow's companion files; currently only the test parameter file. */
    @Override
    public Map<String, Pair<String, GenericFileType>> indexWorkflowFiles(
        final String initialPath, final String contents, final FileReader reader) {
      Map<String, Pair<String, GenericFileType>> results = new HashMap<>();
      final Optional<String> testParameterFile = findTestParameterFile(initialPath, reader);
      testParameterFile.ifPresent(
          s -> results.put(s, new ImmutablePair<>(s, GenericFileType.TEST_PARAMETER_FILE)));
      return results;
    }

    /**
     * Looks for a conventional test parameter file next to the workflow descriptor,
     * e.g. {@code workflow-tests.yml} for {@code workflow.ga}.
     */
    protected Optional<String> findTestParameterFile(
        final String initialPath, final FileReader reader) {
      final int extensionPos = initialPath.lastIndexOf(".");
      if (extensionPos < 0) {
        // Robustness fix: a path without an extension previously caused
        // substring() to throw; there is no conventional test file to probe.
        return Optional.empty();
      }
      final String base = initialPath.substring(0, extensionPos);
      final Path parent = Paths.get(initialPath).getParent();
      // listing files is more rate limit friendly (e.g. GitHub counts each 404 "miss" as an API
      // call,
      // but listing a directory can be free if previously requested/cached)
      final Set<String> filenameset =
          parent == null ? Sets.newHashSet() : Sets.newHashSet(reader.listFiles(parent.toString()));
      for (final String suffix : TEST_SUFFIXES) {
        for (final String extension : TEST_EXTENSIONS) {
          final String testFile = base + suffix + extension;
          if (filenameset.contains(testFile)) {
            return Optional.of(testFile);
          }
        }
      }
      return Optional.empty();
    }

    /** Returns true when the path is readable through the supplied reader. */
    protected Boolean pathExistsFromReader(final FileReader reader, final String path) {
      try {
        reader.readFile(path);
        return true;
      } catch (Exception e) {
        return false;
      }
    }

    /** Parses the gxformat2 YAML/JSON content into a generic map. */
    static Map<String, Object> loadWorkflow(final String content) {
      final Yaml yaml = new Yaml();
      final Map map = yaml.loadAs(content, Map.class);
      return (Map<String, Object>) map;
    }

    /**
     * Extracts a human-readable description from the workflow, preferring
     * "doc", then "annotation", "name", and finally "label".
     */
    @Override
    public RecommendedLanguageInterface.WorkflowMetadata parseWorkflowForMetadata(
        String initialPath,
        String content,
        Map<String, Pair<String, GenericFileType>> indexedFiles) {
      RecommendedLanguageInterface.WorkflowMetadata metadata =
          new RecommendedLanguageInterface.WorkflowMetadata();
      if (content != null && !content.isEmpty()) {
        try {
          final Map<String, Object> map = loadWorkflow(content);
          String name = null;
          try {
            name = (String) map.get("name");
          } catch (ClassCastException e) {
            LOG.debug("\"name:\" is malformed");
          }

          // FOLLOWING CODE PULLED FROM cwl handler...
          String label = null;
          try {
            label = (String) map.get("label");
          } catch (ClassCastException e) {
            LOG.debug("\"label:\" is malformed");
          }
          String annotation = null;
          try {
            annotation = (String) map.get("annotation");
          } catch (ClassCastException e) {
            LOG.debug("\"annotation:\" is malformed");
          }

          // "doc:" added for CWL 1.0
          String doc = null;
          if (map.containsKey("doc")) {
            Object objectDoc = map.get("doc");
            if (objectDoc instanceof String) {
              doc = (String) objectDoc;
            } else if (objectDoc instanceof Map) {
              Map docMap = (Map) objectDoc;
              if (docMap.containsKey("$include")) {
                String enclosingFile = (String) docMap.get("$include");
                Optional<Pair<String, GenericFileType>> first =
                    indexedFiles.values().stream()
                        .filter(pair -> pair.getLeft().equals(enclosingFile))
                        .findFirst();
                if (first.isPresent()) {
                  // No way to fetch this here...
                  LOG.info(
                      "$include would have this but reader not passed through, not implemented");
                  doc = null;
                }
              }
            } else if (objectDoc instanceof List) {
              // arrays for "doc:" added in CWL 1.1
              List docList = (List) objectDoc;
              doc = String.join(System.getProperty("line.separator"), docList);
            }
          }

          final String finalChoiceForDescription =
              ObjectUtils.firstNonNull(doc, annotation, name, label);

          if (finalChoiceForDescription != null) {
            metadata.setDescription(finalChoiceForDescription);
          } else {
            LOG.info("Description not found!");
          }

        } catch (YAMLException | NullPointerException | ClassCastException ex) {
          String message;
          if (ex.getCause() != null) {
            // seems to be possible to get underlying cause in some cases
            message = ex.getCause().toString();
          } else {
            // in other cases, the above will NullPointer
            message = ex.toString();
          }
          LOG.info("Galaxy Workflow file is malformed " + message);
          // CWL parser gets to put validation information in here,
          // plugin interface doesn't consume the right information though.
          // https://github.com/dockstore/dockstore/blob/develop/dockstore-webservice/src/main/java/io/dockstore/webservice/languages/CWLHandler.java#L139
        }
      }
      return metadata;
    }
  }
}
|
from django.urls import re_path, include

# Delegate every URL to the django_private_chat2 app, namespaced so its route
# names reverse as "django_private_chat2:<name>".
urlpatterns = [
    re_path(r'', include('django_private_chat2.urls', namespace='django_private_chat2')),
]
|
import org.apache.spark.sql.SparkSession
/** Computes the average of the "sales" column of sales_data.csv with Spark. */
object AverageSaleCalculator {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("Average Sales Calculator").master("local[*]").getOrCreate()

    // Load the CSV with headers; column types are inferred.
    // NOTE(review): assumes "sales" is inferred as an integral column — the
    // asInstanceOf[Long] below would fail for a double column; confirm the data.
    val data = spark.read.format("csv").option("header", "true").option("inferSchema", "true").load("sales_data.csv")
    data.show()

    val totalSales = data.groupBy().sum("sales").collect()(0)(0).asInstanceOf[Long]
    val numItems = data.count()

    // Bug fix: use floating-point division. The original Long/Long division
    // silently truncated the average (e.g. 7 / 2 == 3), and also divided by
    // zero on an empty file.
    val averageSales = if (numItems > 0) totalSales.toDouble / numItems else 0.0

    println("Average sale = " + averageSales)
    spark.stop()
  }
}
#!/bin/bash -e
set -e

source /opt/rh/php55/enable

# Create required directories just in case.
mkdir -p /var/www/logs/php-fpm /var/www/files-private /var/www/docroot
echo "*" > /var/www/logs/.gitignore

# Set the apache user and group to match the host user.
# Optionally use the HOST_USER env var ("uid:gid") if provided.
if [ "$HOST_USER" ]; then
  OWNER=$(echo "$HOST_USER" | cut -d: -f1)
  GROUP=$(echo "$HOST_USER" | cut -d: -f2)
else
  OWNER=$(stat -c '%u' /var/www)
  GROUP=$(stat -c '%g' /var/www)
fi

# Never remap apache onto root.
if [ "$OWNER" != "0" ]; then
  usermod -o -u "$OWNER" apache
  groupmod -o -g "$GROUP" apache
fi
usermod -s /bin/bash apache
usermod -d /var/www apache

# Add a www-data user aliased to the apache uid/gid if it does not exist yet.
# Bug fix: the original test `[ ! $(id -u www-data &>/dev/null) ]` always
# evaluated true because the command's output was discarded; test the exit
# status of `id` directly instead.
if ! id -u www-data &>/dev/null; then
  OWNER=$(id -u apache)
  GROUP=$(id -g apache)
  grep -q www-data /etc/group || groupadd -o -g "$GROUP" www-data
  useradd -o -u "$OWNER" -g "$GROUP" -M -d /var/www www-data
fi

echo "The apache user and group has been set to the following:"
id apache

# Hand control to the container's command.
exec "$@"
|
<reponame>mashery/i18n.js
var i18n = (function () {
	'use strict';
	//
	// Variables
	//
	// exp: the public API object returned from this IIFE.
	var exp = {};
	// dict: translation dictionary — an array of phrase objects keyed by language code.
	var dict;
	// current: the language code currently applied to the document.
	var current;
	// rtlLangs: language codes that render right-to-left.
	var rtlLangs;
	//
	// Methods
	//
	// Polyfill Element.matches for older IE/WebKit browsers.
	if (!Element.prototype.matches) {
		Element.prototype.matches = Element.prototype.msMatchesSelector || Element.prototype.webkitMatchesSelector;
	}
	// Show elements tagged with a `lang-<code>` class for the target language
	// and hide every other language-tagged element.
	var toggleVisibility = function (lang) {
		var elems = Array.prototype.slice.call(document.querySelectorAll('[class*="lang-"]'));
		elems.forEach(function (elem) {
			if (elem.classList.contains('lang-' + lang)) {
				elem.removeAttribute('hidden');
			} else {
				elem.setAttribute('hidden', true);
			}
		});
	};
	// Record the new language in module state, localStorage, and <html lang>.
	var setCurrent = function (lang) {
		current = lang;
		localStorage.setItem('portalLang', lang);
		document.documentElement.lang = lang;
	};
	// Read the persisted language choice (null when none was saved).
	var getSelected = function () {
		var lang = localStorage.getItem('portalLang');
		return lang;
	}
	// Public: return the language code currently in effect.
	exp.getCurrent = function () {
		return current;
	};
	// Translate a single text/element node by replacing each dictionary phrase
	// of the current language with its counterpart in `lang`.
	// NOTE(review): phrases are passed to `new RegExp()` unescaped, so entries
	// containing regex metacharacters may misbehave — confirm dictionary
	// contents are plain text.
	var translateText = function (elem, lang) {
		if (elem.nodeType === 1 || elem.nodeType === 3) {
			var temp = elem.textContent;
			dict.forEach(function (def) {
				if (!def[current] || !def[lang]) return;
				temp = temp.replace(new RegExp(def[current], 'g'), def[lang]);
			});
			elem.textContent = temp;
		}
	};
	// Set the document direction: rtl when `lang` is in rtlLangs, ltr otherwise.
	var rtl = function (lang) {
		if (rtlLangs.indexOf(lang) < 0) {
			document.documentElement.dir = 'ltr';
		} else {
			document.documentElement.dir = 'rtl';
		}
	};
	// Recursively translate `elem` and its descendants, skipping subtrees that
	// are language-specific copies (class `lang-*`), and update visibility and
	// text direction for the target language.
	var translate = function (elem, lang) {
		toggleVisibility(lang);
		rtl(lang);
		var elems = Array.prototype.slice.call(elem.childNodes);
		if (elem.matches && elem.matches('[class*="lang-"]')) return;
		if (elems.length < 1) {
			translateText(elem, lang);
		} else {
			elems.forEach(function (elem) {
				translate(elem, lang);
			});
		}
	};
	// Delegated click/change handler: switch language when a [data-lang]
	// element or a [data-lang-select] dropdown is used.
	var eventHandler = function (event) {
		var toggle = event.target.closest('[data-lang], [data-lang-select]');
		if (!toggle) return;
		var lang = toggle.getAttribute('data-lang') || toggle.value;
		translate(document.body, lang);
		setCurrent(lang);
	};
	// Sync every language dropdown to the given language.
	var setToggle = function (lang) {
		var toggles = Array.prototype.slice.call(document.querySelectorAll('[data-lang-select]'));
		toggles.forEach(function (toggle) {
			toggle.value = lang;
		});
	};
	// Apply the persisted language if one exists, otherwise the default `def`.
	var setInitialLang = function (def) {
		current = def;
		var selected = getSelected();
		var lang = selected ? selected : current;
		exp.setLang(lang);
		setToggle(lang);
	};
	// Public: translate the document into `lang` (defaults to the current language).
	exp.setLang = function (lang) {
		lang = lang ? lang : current;
		translate(document.body, lang);
		setCurrent(lang);
	};
	// Public: initialize with a default language, a dictionary, and an optional
	// list of RTL language codes; wires up the delegated event listeners.
	exp.init = function (def, dictionary, rtl) {
		if (!def || !dictionary) return;
		dict = dictionary.slice();
		rtlLangs = rtl ? rtl.slice() : [];
		setInitialLang(def);
		document.addEventListener('click', eventHandler);
		document.addEventListener('change', eventHandler);
	};
	//
	// Return Public Methods
	//
	return exp;
})();
package lx.calibre.web.config;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.CacheControl;
import org.springframework.web.filter.ShallowEtagHeaderFilter;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import org.springframework.web.util.UrlPathHelper;
import lx.calibre.util.HttpUtils;
/** Spring MVC configuration: static-resource caching, ETag filtering, and URL helper beans. */
@Configuration
public class MvcConfig extends WebMvcConfigurerAdapter {

    /** Directory containing generated cover thumbnails, served under /static/thumb/. */
    @Value("${calibre.thumb}")
    private String thumbDir;

    private UrlPathHelper urlPathHelper = new UrlPathHelper();

    /**
     * Registers a {@link ShallowEtagHeaderFilter} on /static/** so unchanged
     * resources can be answered with 304 Not Modified.
     */
    @Bean
    public FilterRegistrationBean shallowEtagBean() {
        FilterRegistrationBean frb = new FilterRegistrationBean();
        frb.setFilter(new ShallowEtagHeaderFilter());
        frb.addUrlPatterns("/static/**");
        frb.setOrder(2);
        return frb;
    }

    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        // Serve thumbnail files straight from disk with a 30-day client cache.
        registry.addResourceHandler("/static/thumb/**")
            .addResourceLocations("file:" + thumbDir)
            .setCacheControl(CacheControl.maxAge(30, TimeUnit.DAYS));
    }

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        // Intentionally empty; the global-variable interceptor is kept below for reference.
        // GlobalVariableInterceptor globalVariableInterceptor = new GlobalVariableInterceptor();
        // globalVariableInterceptor.setCategoryService(categoryService);
        // registry.addInterceptor(globalVariableInterceptor);
    }

    /**
     * Template helper: builds the current URL with one query parameter removed.
     * Based on: http://stackoverflow.com/questions/27623405/thymeleaf-add-parameter-to-current-url
     */
    @Bean
    public Function<String, String> currentUrlWithoutParam() {
        return new Function<String, String>() {
            @Override
            public String apply(String param) {
                HttpServletRequest request = HttpUtils.getCurrentRequest();
                String path = urlPathHelper.getPathWithinApplication(request);
                return ServletUriComponentsBuilder.fromRequest(request)
                    .replacePath(path).replaceQueryParam(param)
                    .scheme(null).host(null).build(false).toUriString();
            }
        };
    }

    /** Same as {@link #currentUrlWithoutParam()} but removes several query parameters at once. */
    @Bean
    public Function<Iterable<String>, String> currentUrlWithoutParams() {
        return new Function<Iterable<String>, String>() {
            @Override
            public String apply(Iterable<String> params) {
                HttpServletRequest request = HttpUtils.getCurrentRequest();
                String path = urlPathHelper.getPathWithinApplication(request);
                ServletUriComponentsBuilder builder = ServletUriComponentsBuilder.fromRequest(request);
                builder.replacePath(path);
                for (String param : params) {
                    builder.replaceQueryParam(param);
                }
                return builder.scheme(null).host(null).build(false).toUriString();
            }
        };
    }

    /** Minimal pre-Java-8-style function abstraction used by the URL helper beans. */
    public interface Function<T, R> {
        R apply(T t);
    }
}
|
<filename>jhiRoot/plantsMS/src/main/java/fr/syncrase/ecosyst/service/TypeSemisQueryService.java
package fr.syncrase.ecosyst.service;
import fr.syncrase.ecosyst.domain.*; // for static metamodels
import fr.syncrase.ecosyst.domain.TypeSemis;
import fr.syncrase.ecosyst.repository.TypeSemisRepository;
import fr.syncrase.ecosyst.service.criteria.TypeSemisCriteria;
import java.util.List;
import javax.persistence.criteria.JoinType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import tech.jhipster.service.QueryService;
/**
* Service for executing complex queries for {@link TypeSemis} entities in the database.
* The main input is a {@link TypeSemisCriteria} which gets converted to {@link Specification},
* in a way that all the filters must apply.
* It returns a {@link List} of {@link TypeSemis} or a {@link Page} of {@link TypeSemis} which fulfills the criteria.
*/
@Service
@Transactional(readOnly = true)
public class TypeSemisQueryService extends QueryService<TypeSemis> {

    private final Logger log = LoggerFactory.getLogger(TypeSemisQueryService.class);

    private final TypeSemisRepository typeSemisRepository;

    public TypeSemisQueryService(TypeSemisRepository typeSemisRepository) {
        this.typeSemisRepository = typeSemisRepository;
    }

    /**
     * Fetches every {@link TypeSemis} entity matching the given criteria.
     *
     * @param criteria filter definition the entities must satisfy.
     * @return all matching entities.
     */
    @Transactional(readOnly = true)
    public List<TypeSemis> findByCriteria(TypeSemisCriteria criteria) {
        log.debug("find by criteria : {}", criteria);
        return typeSemisRepository.findAll(createSpecification(criteria));
    }

    /**
     * Fetches one page of {@link TypeSemis} entities matching the given criteria.
     *
     * @param criteria filter definition the entities must satisfy.
     * @param page the requested page.
     * @return the matching page of entities.
     */
    @Transactional(readOnly = true)
    public Page<TypeSemis> findByCriteria(TypeSemisCriteria criteria, Pageable page) {
        log.debug("find by criteria : {}, page: {}", criteria, page);
        return typeSemisRepository.findAll(createSpecification(criteria), page);
    }

    /**
     * Counts the {@link TypeSemis} entities matching the given criteria.
     *
     * @param criteria filter definition the entities must satisfy.
     * @return how many entities match.
     */
    @Transactional(readOnly = true)
    public long countByCriteria(TypeSemisCriteria criteria) {
        log.debug("count by criteria : {}", criteria);
        return typeSemisRepository.count(createSpecification(criteria));
    }

    /**
     * Translates a {@link TypeSemisCriteria} into a JPA {@link Specification},
     * ANDing together every individual filter.
     *
     * @param criteria filter definition the entities must satisfy.
     * @return the equivalent {@link Specification}.
     */
    protected Specification<TypeSemis> createSpecification(TypeSemisCriteria criteria) {
        Specification<TypeSemis> spec = Specification.where(null);
        if (criteria == null) {
            return spec;
        }
        // distinct must be applied first, because the distinct helper returns null.
        if (criteria.getDistinct() != null) {
            spec = spec.and(distinct(criteria.getDistinct()));
        }
        if (criteria.getId() != null) {
            spec = spec.and(buildRangeSpecification(criteria.getId(), TypeSemis_.id));
        }
        if (criteria.getType() != null) {
            spec = spec.and(buildStringSpecification(criteria.getType(), TypeSemis_.type));
        }
        if (criteria.getDescription() != null) {
            spec = spec.and(buildStringSpecification(criteria.getDescription(), TypeSemis_.description));
        }
        return spec;
    }
}
|
package top.luyuni.qaa.controller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import top.luyuni.qaa.model.EntityType;
import top.luyuni.qaa.model.Question;
import top.luyuni.qaa.model.ViewObject;
import top.luyuni.qaa.service.IFollowService;
import top.luyuni.qaa.service.IQuestionService;
import top.luyuni.qaa.service.ISearchService;
import top.luyuni.qaa.service.IUserService;
import java.util.ArrayList;
import java.util.List;
/** Handles the /search page: queries the search index and decorates hits for rendering. */
@Controller
public class SearchController {
    private static final Logger logger = LoggerFactory.getLogger(SearchController.class);

    @Autowired
    private ISearchService searchService;

    @Autowired
    private IFollowService followService;

    @Autowired
    private IUserService userService;

    @Autowired
    private IQuestionService questionService;

    /**
     * Searches questions for {@code keyword}, highlighting matches with
     * {@code <em>} tags, and renders them on the result page.
     */
    @RequestMapping(path = {"/search"}, method = {RequestMethod.GET})
    public String search(Model model, @RequestParam("q") String keyword,
                         @RequestParam(value = "offset", defaultValue = "0") int offset,
                         @RequestParam(value = "count", defaultValue = "10") int count) {
        try {
            List<Question> hits = searchService.searchQuestion(keyword, offset, count,
                    "<em>", "</em>");
            List<ViewObject> vos = new ArrayList<>();
            for (Question hit : hits) {
                // Load the full entity, then overlay the highlighted snippets from the hit.
                Question question = questionService.getById(hit.getId());
                if (hit.getContent() != null) {
                    question.setContent(hit.getContent());
                }
                if (hit.getTitle() != null) {
                    question.setTitle(hit.getTitle());
                }
                ViewObject vo = new ViewObject();
                vo.set("question", question);
                vo.set("followCount", followService.getFollowerCount(EntityType.ENTITY_QUESTION, hit.getId()));
                vo.set("user", userService.getUser(question.getUserId()));
                vos.add(vo);
            }
            model.addAttribute("vos", vos);
            model.addAttribute("keyword", keyword);
        } catch (Exception e) {
            logger.error("搜索评论失败" + e.getMessage());
        }
        return "result";
    }
}
|
package io.opensphere.core.net.manager.view;
import io.opensphere.core.net.manager.model.HttpKeyValuePair;
import io.opensphere.core.net.manager.model.NetworkTransaction;
import io.opensphere.core.util.javafx.ConcurrentObjectProperty;
import javafx.beans.property.ObjectProperty;
import javafx.scene.control.ListView;
import javafx.scene.control.TitledPane;
import javafx.scene.layout.VBox;
/**
 * The panel on which the query parameters used in a network transaction are
 * detailed.
 */
public class ParametersPanel extends VBox
{
    /**
     * The model of the network transaction currently being detailed by the
     * panel.
     */
    private final ObjectProperty<NetworkTransaction> myTransactionProperty = new ConcurrentObjectProperty<>();

    /** Creates a new parameters panel bound to the selected transaction. */
    public ParametersPanel()
    {
        final ListView<HttpKeyValuePair> parameterView = new ListView<>();
        parameterView.setCellFactory(cell -> new HeaderListCell());

        final TitledPane parameterPane = new TitledPane("Query Parameters", parameterView);
        parameterPane.setExpanded(true);

        // Re-populate the list whenever the selected transaction changes.
        myTransactionProperty.addListener((obs, oldValue, newValue) ->
        {
            parameterView.itemsProperty().get().clear();
            if (newValue != null)
            {
                parameterView.itemsProperty().get().addAll(myTransactionProperty.get().getRequestParameters());
            }
            parameterView.refresh();
        });

        getChildren().add(new VBox(parameterPane));
    }

    /**
     * Gets the value of the {@link #myTransactionProperty} field.
     *
     * @return the value stored in the {@link #myTransactionProperty} field.
     */
    public ObjectProperty<NetworkTransaction> transactionProperty()
    {
        return myTransactionProperty;
    }
}
|
<gh_stars>0
# Puma server configuration.

# Number of worker processes (cluster mode); override via PUMA_WORKERS.
workers ENV.fetch("PUMA_WORKERS") { 3 }
# TCP port to listen on; override via PUMA_LISTEN_PORT.
port ENV.fetch("PUMA_LISTEN_PORT") { 3000 }
# Load the app before forking workers for copy-on-write memory savings.
preload_app!
# With preload_app!, each forked worker must re-establish its own
# ActiveRecord database connection.
on_worker_boot do
  ActiveSupport.on_load(:active_record) do
    ActiveRecord::Base.establish_connection
  end
end
|
package Chapter1_1Low;
import edu.princeton.cs.algs4.StdIn;
import edu.princeton.cs.algs4.StdOut;
//Exercise 1.1.21
/**
 * Reads M lines of "name int int" from standard input and prints a table with
 * the name, both integers, and their quotient to three decimal places.
 */
public class Tabulate {
    public static void main(String[] args) {
        int M = 3;
        int index = 0;
        String[] strs = new String[M];
        while (index < M)
            strs[index++] = StdIn.readLine();
        for (int i = 0; i < strs.length; ++i) {
            String[] arr = strs[i].split("\\s+");
            double temp = Double.parseDouble(arr[1]) / Double.parseDouble(arr[2]);
            // Bug fix: the original format string had only three specifiers for
            // four arguments, so %-13.3f received the String arr[2] and threw
            // IllegalFormatConversionException at runtime. Print all four
            // columns: name, both integers, and the quotient.
            StdOut.printf("%-10s %-10s %-10s %-13.3f\n", arr[0], arr[1], arr[2], temp);
        }
    }
}
|
<reponame>jhroemer/AmodSimulator<filename>jgrapht-master/jgrapht-core/src/main/java/org/jgrapht/alg/interfaces/EulerianCycleAlgorithm.java
/*
* (C) Copyright 2016-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.alg.interfaces;
import org.jgrapht.*;
/**
 * Computes an Eulerian cycle of an Eulerian graph. An
 * <a href="http://mathworld.wolfram.com/EulerianGraph.html">Eulerian graph</a> is a graph
 * containing an <a href="http://mathworld.wolfram.com/EulerianCycle.html">Eulerian cycle</a>,
 * i.e. a closed walk that uses every edge of the graph exactly once.
 *
 *
 * @param <V> the graph vertex type
 * @param <E> the graph edge type
 *
 * @author <NAME>
 * @since October 2016
 */
public interface EulerianCycleAlgorithm<V, E>
{
    /**
     * Compute an Eulerian cycle of a graph. The input graph must be Eulerian;
     * implementations reject any other graph.
     *
     * @param graph the input graph
     * @return an Eulerian cycle
     * @throws IllegalArgumentException in case the graph is not Eulerian
     */
    GraphPath<V, E> getEulerianCycle(Graph<V, E> graph);
}
|
#!/bin/bash
# Publish the package to the npm registry; --access public is required for
# scoped packages, which default to restricted access.
npm publish --access public
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {dev} from '../log';
/**
* The internal structure for the task.
* @typedef {{
* id: string,
* resource: !./resource.Resource,
* priority: number,
* forceOutsideViewport: boolean,
* callback: function(),
* scheduleTime: time,
* startTime: time,
* promise: (?Promise|undefined)
* }}
*/
export let TaskDef;
/**
* A scheduling queue for Resources.
*
* @package
*/
export class TaskQueue {
  /** Creates an empty task queue. */
  constructor() {
    /** @private @const {!Array<!TaskDef>} */
    this.tasks_ = [];

    /** @private @const {!Object<string, !TaskDef>} */
    this.taskIdMap_ = {};

    /** @private {!time} */
    this.lastEnqueueTime_ = 0;

    /** @private {!time} */
    this.lastDequeueTime_ = 0;
  }

  /**
   * Size of the queue.
   * @return {number}
   */
  getSize() {
    return this.tasks_.length;
  }

  /**
   * Last time a task was enqueued.
   * @return {!time}
   */
  getLastEnqueueTime() {
    return this.lastEnqueueTime_;
  }

  /**
   * Last time a task was dequeued.
   * @return {!time}
   */
  getLastDequeueTime() {
    return this.lastDequeueTime_;
  }

  /**
   * Returns the task with the specified ID or null.
   * @param {string} taskId
   * @return {?TaskDef}
   */
  getTaskById(taskId) {
    return this.taskIdMap_[taskId] || null;
  }

  /**
   * Enqueues the task. If the task is already in the queue, the error is
   * thrown.
   * @param {!TaskDef} task
   */
  enqueue(task) {
    dev().assert(
        !this.taskIdMap_[task.id], 'Task already enqueued: %s', task.id);
    this.tasks_.push(task);
    this.taskIdMap_[task.id] = task;
    this.lastEnqueueTime_ = Date.now();
  }

  /**
   * Dequeues the task and returns "true" if dequeueing is successful.
   * Otherwise returns "false", e.g. when this task is not currently enqueued.
   * @param {!TaskDef} task
   * @return {boolean}
   */
  dequeue(task) {
    const existing = this.taskIdMap_[task.id];
    const dequeued = this.removeAtIndex(task, this.tasks_.indexOf(existing));
    if (!dequeued) {
      return false;
    }
    this.lastDequeueTime_ = Date.now();
    return true;
  }

  /**
   * Returns the task with the minimal score based on the provided scoring
   * callback, or null if the queue is empty.
   * @param {function(!TaskDef):number} scorer
   * @return {?TaskDef}
   */
  peek(scorer) {
    // BUG FIX: the original seeded minScore with the magic sentinel 1e6, so
    // any task whose score was >= 1e6 could never be selected. Tracking the
    // first task explicitly removes the arbitrary cap.
    let minScore = Infinity;
    let minTask = null;
    for (let i = 0; i < this.tasks_.length; i++) {
      const task = this.tasks_[i];
      const score = scorer(task);
      if (minTask === null || score < minScore) {
        minScore = score;
        minTask = task;
      }
    }
    return minTask;
  }

  /**
   * Iterates over all tasks in queue in the insertion order.
   * @param {function(!TaskDef)} callback
   */
  forEach(callback) {
    this.tasks_.forEach(callback);
  }

  /**
   * Removes the task at the given index and returns "true" on success.
   * Returns "false" when the task is not enqueued or the index is stale.
   * @param {!TaskDef} task
   * @param {number} index of the task to remove.
   * @return {boolean}
   */
  removeAtIndex(task, index) {
    const existing = this.taskIdMap_[task.id];
    if (!existing || this.tasks_[index] != existing) {
      return false;
    }
    this.tasks_.splice(index, 1);
    delete this.taskIdMap_[task.id];
    return true;
  }

  /**
   * Removes tasks in queue that pass the callback test.
   * Iterates backwards so removals do not shift the remaining indices.
   * @param {function(!TaskDef):boolean} callback Return true to remove the task.
   */
  purge(callback) {
    let index = this.tasks_.length;
    while (index--) {
      if (callback(this.tasks_[index])) {
        this.removeAtIndex(this.tasks_[index], index);
      }
    }
  }
}
|
<!DOCTYPE html>
<html>
<head>
<title>Table of Names and Ages</title>
</head>
<body>
<h1>Table of Names and Ages</h1>
<!-- Static two-column demo table: one header row plus three data rows. -->
<table>
<tr>
<th>Name</th>
<th>Age</th>
</tr>
<tr>
<td>John</td>
<td>30</td>
</tr>
<tr>
<td>Jane</td>
<td>25</td>
</tr>
<tr>
<td>Alice</td>
<td>28</td>
</tr>
</table>
</body>
</html> |
#!/bin/bash
###
# Updates the AWS CloudFormation Resource Specification using the files published on the AWS Documentation.
# See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-resource-specification.html
###
set -euo pipefail
scriptdir=$(cd $(dirname $0) && pwd)

rm -f CHANGELOG.md.new

# Downloads a spec file, diffs it against the in-tree copy, prepends the diff
# to CHANGELOG.md.new and replaces the in-tree copy.
#   $1: human-readable title used in the changelog entry
#   $2: download URL
#   $3: target file under spec-source/
#   $4: "true" if the download is gzip-compressed
function update-spec() {
    local title=$1
    local url=$2
    local target=$3
    local gunzip=$4
    local intermediate="$(mktemp -d)/new.json"

    # fail if the spec has changes, otherwise we won't be able to determine the diff
    if [ -n "$(git status --porcelain ${target})" ]; then
        echo "The file ${target} has changes, revert them before cfn-update"
        exit 1
    fi

    echo >&2 "Downloading from ${url}..."
    if ${gunzip}; then
        curl -sL "${url}" | gunzip - > ${intermediate}
    else
        curl -sL "${url}" > ${intermediate}
    fi

    echo >&2 "Sorting..."
    sort-json ${intermediate}

    echo >&2 "Updating CHANGELOG.md..."
    node build-tools/spec-diff.js "${title}" "${target}" "${intermediate}" >> CHANGELOG.md.new
    echo "" >> CHANGELOG.md.new

    # typo fix: was "Updarting source spec..."
    echo >&2 "Updating source spec..."
    rm -f ${target}
    cp ${intermediate} ${target}
}

update-spec \
    "CloudFormation Resource Specification" \
    "https://d1uauaxba7bl26.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json" \
    spec-source/000_CloudFormationResourceSpecification.json \
    true

update-spec \
    "Serverless Application Model (SAM) Resource Specification" \
    "https://raw.githubusercontent.com/awslabs/goformation/master/generate/sam-2016-10-31.json" \
    spec-source/000_sam.spec.json \
    false

npm run build

echo >&2 "Creating missing AWS construct libraries for new resource types..."
node ${scriptdir}/create-missing-libraries.js || {
    echo "------------------------------------------------------------------------------------"
    echo "cfn-spec update script failed when trying to create modules for new services"
    echo "Fix the error (you will likely need to add RefKind patches), and then run 'npm run update' again"
    exit 1
}

# update decdk dep list
(cd ${scriptdir}/../../../decdk && node ./deps.js || true)

# append old changelog after new and replace as the last step because otherwise we will not be idempotent
cat CHANGELOG.md >> CHANGELOG.md.new
cp CHANGELOG.md.new CHANGELOG.md
|
#!/bin/bash
# Drives the Faban web2.0 benchmark client against a target web server.
# Usage: faban_client WEB_SERVER_IP [LOAD_SCALE]

if [ $# -gt 2 ]; then
    echo "Usage: faban_client WEB_SERVER_IP [LOAD_SCALE]"
    exit 1
fi

if [ $# -lt 1 ]; then
    echo "Web server IP is a mandatory parameter."
    exit 1
fi

WEB_SERVER_IP=$1
# Concurrent-user scale; defaults to 7 when not supplied.
LOAD_SCALE=${2:-7}

# Block until the target answers HTTP 200 on port 8080.
while [ "$(curl -sSI ${WEB_SERVER_IP}:8080 | grep 'HTTP/1.1' | awk '{print $2}')" != "200" ]; do
    echo "Could not perform HTTP 200 GET from: ${WEB_SERVER_IP}:8080"
    sleep 2
done

# Scale the user count, start the Faban master and seed benchmark users.
sed -i -e"s/num_users=500/num_users=${LOAD_SCALE}/" /faban/usersetup.properties
/faban/master/bin/startup.sh
cd /web20_benchmark/build && java -jar Usergen.jar http://${WEB_SERVER_IP}:8080

# Rewrite the Faban run configuration: scale, ramp up/down, steady-state
# duration, target host/port and output directory.
sed -i "s/<fa:scale.*/<fa:scale>${LOAD_SCALE}<\\/fa:scale>/" /web20_benchmark/deploy/run.xml
sed -i "s/<fa:rampUp.*/<fa:rampUp>10<\\/fa:rampUp>/" /web20_benchmark/deploy/run.xml
sed -i "s/<fa:rampDown.*/<fa:rampDown>10<\\/fa:rampDown>/" /web20_benchmark/deploy/run.xml
sed -i "s/<fa:steadyState.*/<fa:steadyState>30<\\/fa:steadyState>/" /web20_benchmark/deploy/run.xml
sed -i "s/<host.*/<host>${WEB_SERVER_IP}<\\/host>/" /web20_benchmark/deploy/run.xml
sed -i "s/<port.*/<port>8080<\\/port>/" /web20_benchmark/deploy/run.xml
sed -i "s/<outputDir.*/<outputDir>\/faban\/output<\\/outputDir>/" /web20_benchmark/deploy/run.xml

# Run the benchmark and print the summary report.
cd /web20_benchmark && ant run
cat /faban/output/*/summary.xml
|
package de.unistuttgart.ims.coref.annotator.action;

import javax.swing.Action;

import de.unistuttgart.ims.coref.annotator.CAAbstractTreeSelectionListener;

/**
 * A Swing {@link Action} whose enabled state is updated from the current tree
 * selection.
 *
 * @deprecated NOTE(review): the replacement API is not visible from this file
 *             — confirm and link it here.
 */
@Deprecated
public interface CAAction extends Action {

	/**
	 * Recomputes this action's enabled state from the given tree-selection
	 * listener's state.
	 *
	 * @param l the selection listener carrying the current tree selection
	 */
	void setEnabled(CAAbstractTreeSelectionListener l);
}
|
import Utils from '../shared/utils';
class UsersService {

  /**
   * Builds the four LIKE variants (exact, suffix, infix, prefix) used for a
   * fuzzy match on a single column.
   * @param db Sequelize database handle (provides Sequelize.Op).
   * @param {string} queryString raw search term.
   * @returns {Array<Object>} operand list for an Op.or clause.
   */
  static likeVariants(db, queryString) {
    const Op = db.Sequelize.Op;
    return [
      {[Op.like]: queryString},
      {[Op.like]: '%' + queryString},
      {[Op.like]: '%' + queryString + '%'},
      {[Op.like]: queryString + '%'},
    ];
  }

  /**
   * Returns a paginated, filtered, ordered page of users with their roles.
   * @param db database handle.
   * @param params {page, pageSize, where (JSON string), order (JSON string)}.
   */
  static async getAll(db, params) {
    const page = +(params.page || 1);
    const pageSize = +(params.pageSize || 10);
    // BUG FIX: the defaults must be JSON *strings*. JSON.parse(params.where
    // || {}) coerced the fallback object to "[object Object]" and threw a
    // SyntaxError whenever the parameter was absent.
    const clientWhere = JSON.parse(params.where || '{}');
    const clientOrder = JSON.parse(params.order || '{}');

    // --------------------------------------------------------------------
    // WHERE
    const where = {};
    if (typeof clientWhere.name === 'string' && clientWhere.name.length) {
      where.name = {
        [db.Sequelize.Op.or]: UsersService.likeVariants(db, clientWhere.name),
      };
    }
    if (typeof clientWhere.email === 'string' && clientWhere.email.length) {
      where.email = {
        [db.Sequelize.Op.or]: UsersService.likeVariants(db, clientWhere.email),
      };
    }
    // "filter" searches both name and email with the same term.
    if (typeof clientWhere.filter === 'string' && clientWhere.filter.length) {
      const variants = UsersService.likeVariants(db, clientWhere.filter);
      where[db.Sequelize.Op.or] = [
        ...variants.map((v) => ({name: v})),
        ...variants.map((v) => ({email: v})),
      ];
    }

    // --------------------------------------------------------------------
    // ORDER: {column: direction, ...} -> [[column, direction], ...]
    const order = [];
    Object.keys(clientOrder).forEach((key) => {
      order.push([key, clientOrder[key]]);
    });

    const result = await db.user.findAndCountAll(
      Utils.paginate(
        {
          where: where,
          order: order,
          include: [
            {
              model: db.role,
              as: 'roles',
              attributes: ['id', 'name'],
            },
          ],
          // distinct avoids over-counting rows duplicated by the join.
          distinct: true,
        }, {
          page, pageSize,
        }
      )
    );
    result.page = page;
    result.pageSize = pageSize;
    return result;
  }

  /** Returns one user (with roles) by primary key, or null. */
  static async getById(db, userId) {
    return await db.user.findByPk(userId, {
      include: [
        {
          model: db.role,
          as: 'roles',
          attributes: ['id', 'name'],
        },
      ],
    });
  }

  /** Returns one user (with roles) by session/SSO id, or null. */
  static async getBySsid(db, userSsid) {
    return await db.user.findOne({
      where: {
        ssid: userSsid,
      },
      include: [
        {
          model: db.role,
          as: 'roles',
          attributes: ['id', 'name'],
        },
      ],
    });
  }

  /**
   * Creates a user (and its role links) inside one transaction.
   * @returns the freshly created user with roles loaded.
   */
  static async create(db, userValue) {
    let newUser;
    let transaction;
    // NOTE(review): the original source contained a redacted "<PASSWORD>"
    // placeholder here; userValue.password is the evident intent — confirm.
    let newData = {
      name: userValue.name,
      email: userValue.email,
      password: userValue.password,
    };
    if (userValue.ssid) {
      newData.ssid = userValue.ssid;
    }
    try {
      transaction = await db.sequelize.transaction();
      newUser = await db.user.create(newData, {transaction});
      if (userValue.roles) {
        await newUser.setRoles(userValue.roles, {transaction});
      }
      await transaction.commit();
    } catch (err) {
      // Roll back only if the transaction was actually started; the original
      // "if (err)" guard crashed when transaction() itself threw.
      if (transaction) await transaction.rollback();
      throw err;
    }
    return await this.getById(db, newUser.id);
  }

  /**
   * Updates a user's fields and role links inside one transaction.
   * User id 1 (the built-in admin) may not be modified.
   */
  static async update(db, userId, userValue) {
    if (+userId === 1) {
      throw new Error('Access denied');
    }
    let newData = {
      name: userValue.name,
      email: userValue.email,
    };
    if (userValue.password) {
      newData.password = userValue.password;
    }
    let transaction;
    try {
      transaction = await db.sequelize.transaction();
      let user = await db.user.findByPk(userId);
      // BUG FIX: instance.update(values, options) takes the options as its
      // SECOND argument; the original passed {transaction} as an ignored
      // third argument, so the update ran outside the transaction. The
      // "where" clause is also meaningless on an instance update.
      await user.update(newData, {transaction});
      if (userValue.roles) {
        await user.setRoles(userValue.roles, {transaction});
      }
      await transaction.commit();
    } catch (err) {
      if (transaction) await transaction.rollback();
      throw err;
    }
    return await this.getById(db, userId);
  }

  /**
   * Deletes a user by primary key. User id 1 may not be deleted.
   * @returns number of destroyed rows.
   */
  static async delete(db, userId) {
    if (+userId === 1) {
      throw new Error('Access denied');
    }
    return await db.user.destroy({
      where: {
        id: userId,
      },
    });
  }
}
export default UsersService;
|
<filename>api/routes/RouterCheckin.ts
import { Router } from 'express'
import check from '../controllers/Checkin'
import CheckJwt from '../middlewares/CheckJwt'

// Router for the check-in endpoint. The JWT middleware is registered first,
// so every route below requires a valid token.
const routes = Router()

routes.use(CheckJwt.checkJwt)
routes.get('/api/auth/checkin', check.checkin)

export default routes
|
import requests
import youtube_dl
from bs4 import BeautifulSoup
def program_urls():
    """Yield absolute URLs of every program ('etikett') link on the index page."""
    response = requests.get("http://www.oppetarkiv.se/program")
    soup = BeautifulSoup(response.content, 'html.parser')
    for anchor in soup.find_all('a'):
        href = anchor.get('href')
        if isinstance(href, str) and 'etikett' in href:
            yield "http://www.oppetarkiv.se{}".format(href)
def video_urls(program):
    """Yield absolute URLs of every video link found on a program page.

    Args:
        program: absolute URL of a program page to scrape.

    Yields:
        str: absolute video URLs.
    """
    # Removed the unused local ``videos = []`` from the original.
    page = requests.get(program)
    soup = BeautifulSoup(page.content, 'html.parser')
    for anchor in soup.find_all('a'):
        href = anchor.get('href')
        if isinstance(href, str) and 'video' in href:
            yield "http://www.oppetarkiv.se{}".format(href)
def download(videos):
    """Download the given list of video URLs with youtube-dl's defaults."""
    ytdl = youtube_dl.YoutubeDL()
    ytdl.download(videos)
def main():
    """Crawl every program page and download each video found on it."""
    programs = program_urls()
    for program in programs:
        videos = video_urls(program)
        for video in videos:
            download([video])
            # NOTE(review): the source's indentation was mangled; this print
            # may belong one level up (once per program) — confirm.
            print("\n")


if __name__ == "__main__":
    main()
|
#include "pixart_object.hpp"
#include <algorithm>
#include <cstring>
// Fills the object's bounding box in `output` (a row-major character canvas
// with `pitch` characters per row) with `symbol`.
// NOTE(review): coordinates are clamped to 97, presumably because the sensor
// reports on a 98x98 grid — confirm against the class definition.
void PA_object::render_ascii(char *output, int pitch, char symbol) const
{
    int lx = std::min((uint8_t) 97, boundary_left);
    int rx = std::min((uint8_t) 97, boundary_right);
    int uy = std::min((uint8_t) 97, boundary_up);
    int dy = std::min((uint8_t) 97, boundary_down);
    for (int y = uy; y <= dy; y++)
    {
        for (int x = lx; x <= rx; x++)
        {
            output[y * pitch + x] = symbol;
        }
    }
}
// Decodes a raw PixArt object report into this object's fields.
// `format` selects which optional field groups are present (1-4).
// NOTE(review): assumes PA_object is trivially copyable (no virtual
// functions), otherwise the memset below is undefined — confirm in
// pixart_object.hpp.
void PA_object::load(const uint8_t *data, int format)
{
    // BUG FIX: the original used sizeof(this), which is the size of a
    // *pointer* (4/8 bytes), leaving most of the object uninitialized.
    // sizeof(*this) zeroes the whole object.
    memset(this, 0, sizeof(*this));

    // Formats 1-4: area and centroid are common to all report formats.
    area = data[0] | ((data[1] & 0x3f) << 8);
    cx = data[2] | ((data[3] & 0x0f) << 8);
    cy = data[4] | ((data[5] & 0x0f) << 8);

    // Formats 1 and 3 carry brightness statistics.
    if (format == 1 || format == 3)
    {
        average_brightness = data[6];
        max_brightness = data[7];
        range = data[8] >> 4;
        radius = data[8] & 0xf;
    }

    // Formats 1 and 4 carry the bounding box, aspect ratio and velocity;
    // format 4 packs them 3 bytes earlier because it omits the brightness
    // block above.
    if (format == 1 || format == 4)
    {
        int offset = format == 4 ? 3 : 0;
        boundary_left = data[9 - offset] & 0x7f;
        boundary_right = data[10 - offset] & 0x7f;
        boundary_up = data[11 - offset] & 0x7f;
        boundary_down = data[12 - offset] & 0x7f;
        aspect_ratio = data[13 - offset];
        vx = data[14 - offset];
        vy = data[15 - offset];
    }
}
}
// Constructs an object report by decoding `data` in the given report format.
PA_object::PA_object(const uint8_t *data, int format)
{
    load(data, format);
}
|
<gh_stars>10-100
#include <profile.h>
#include <config/config.h>
namespace View
{

// Refreshes the entries of both combo boxes from the current configuration.
void Profile::updateAllComboBoxesItems()
{
    updateComboBoxItems(m_app.config().root().tools(), toolComboBox);
    updateComboBoxItems(m_app.config().root().profiles(), profileComboBox);
}

// Builds the widget, populates the combo boxes and wires configuration and
// UI-change signals. The two *ChangeBlocked flags guard against signal
// feedback loops between the model and the combo boxes.
Profile::Profile(Model::Application& app)
    :DocumentModelObserver(app),
    m_app(app),
    m_outsideToolChangeBlocked(false),
    m_outsideProfileChangeBlocked(false)
{
    setupUi(this);

    updateAllComboBoxesItems();

    connect(&app, &Model::Application::configChanged, this, &Profile::configChanged);
    connect(toolComboBox, &QComboBox::currentTextChanged, this, &Profile::currentToolTextChanged);
    connect(profileComboBox, &QComboBox::currentTextChanged, this, &Profile::currentProfileTextChanged);
}

// A new document became current: subscribe to its config signals and sync
// the combo boxes to its tool/profile names.
void Profile::documentChanged()
{
    connect(document(), &Model::Document::toolConfigChanged, this, &Profile::toolConfigChanged);
    connect(document(), &Model::Document::profileConfigChanged, this, &Profile::profileConfigChanged);

    toolComboBox->setCurrentText(QString::fromStdString(document()->toolConfig().name()));
    profileComboBox->setCurrentText(QString::fromStdString(document()->profileConfig().name())); // TODO updateTextFromProfileConfig
}

// The application configuration was replaced: rebuild combo box contents.
void Profile::configChanged(const Config::Config &config)
{
    updateAllComboBoxesItems();
}

// Model-side tool change: mirror it in the UI unless this widget itself
// initiated the change (flag set in currentToolTextChanged).
void Profile::toolConfigChanged(const Config::Tools::Tool& tool)
{
    if (!m_outsideToolChangeBlocked) {
        toolComboBox->setCurrentText(QString::fromStdString(tool.name()));
    }
}

// UI-side tool change: forward to the model while suppressing the echo.
void Profile::currentToolTextChanged(const QString& toolName)
{
    m_outsideToolChangeBlocked = true;
    m_app.selectTool(toolName);
    m_outsideToolChangeBlocked = false;
}

// Model-side profile change: mirror it in the UI unless self-initiated.
void Profile::profileConfigChanged(const Config::Profiles::Profile& profile)
{
    if (!m_outsideProfileChangeBlocked) {
        profileComboBox->setCurrentText(QString::fromStdString(profile.name()));
    }
}

// UI-side profile change: forward to the model while suppressing the echo.
void Profile::currentProfileTextChanged(const QString& profileName)
{
    m_outsideProfileChangeBlocked = true;
    m_app.selectProfile(profileName);
    m_outsideProfileChangeBlocked = false;
}

}
|
#!/usr/bin/env bash

# Usage: create_salmon_index.sh <Config.ini>

### Setting as an interactive BASH session and forcing history to capture commands to a log/README file
HISTFILE=~/.bash_history
set -o history
set -ue

# Check resources.ini was provided on the command line
if [ -n "$1" ]
then
    echo "Required ini file detected"
else
    echo "Input INI file not provided, exiting due to missing requirement"
    exit 1
fi

# Read required variables from configuration file
. ${1}

####################################
## Navigate Directory Structure
###################################

# Check top level directory if not available
if [ -e ${TOPLEVEL_DIR} ]
then
    echo "Top level directory: ${TOPLEVEL_DIR} exists, moving into it"
    cd ${TOPLEVEL_DIR}
else
    echo "Top level directory NOT found, IT IS REQUIRED, EXITING"
    exit 1
fi

# Check that the reference genome for RNA was created successfully
if [ -e RNA_FASTA_GENERATION_COMPLETE ]
then
    echo "RNA fasta exists, moving forward"
else
    echo "RNA fasta generation complete flag NOT found"
    echo "Try again later as this is required"
    exit 2
fi

# Check gene_model directory if not available
if [ -e gene_model ]
then
    echo "Gene Model directory exists, moving into it"
    cd gene_model
else
    echo "Gene Model directory NOT found, IT IS REQUIRED, EXITING"
    exit 1
fi

# Check specific gene model directory
if [ -e ${GENE_MODEL_NAME} ]
then
    echo "Specific Gene Model directory exists, moving into it"
    cd ${GENE_MODEL_NAME}
else
    echo "Specific Gene Model directory NOT found, IT IS REQUIRED, EXITING"
    exit 1
fi

# Check to ensure that transcriptome fasta was created
if [ -e TRANSCRIPTOME_FASTA_GENERATION_COMPLETE ]
then
    echo "Transcriptome fasta exists, moving forward"
else
    # typo fix: was "succesfully"
    echo "Transcriptome fasta does not exist or was not created successfully"
    echo "Please try again or check gene model output"
    exit 1
fi

# Make gene_model specific tool_resources directory if not available
if [ -e tool_resources ]
then
    echo "Gene Model tool_resources directory exists, moving into it"
    cd tool_resources
else
    echo "Gene Model tool_resources directory NOT found, creating and entering it now"
    mkdir tool_resources
    cd tool_resources
fi

# Make salmon index directory if not available
if [ -e "salmon_${SALMON_VERSION}" ]
then
    echo "Salmon directory exists, moving into it"
    cd salmon_${SALMON_VERSION}
else
    echo "Salmon directory NOT found, creating and moving into it now"
    mkdir salmon_${SALMON_VERSION}
    cd salmon_${SALMON_VERSION}
fi

####################################
## Generate Salmon Index
####################################

# Initialize a salmon specific README
touch README

echo >> README
echo "For details on file creation see the associated github repository:" >> README
echo "https://github.com/tgen/jetstream_resources/${WORKFLOW_NAME}" >> README
echo "Created and downloaded by ${CREATOR}" >> README
date >> README
echo >> README

# Determine the fullpath to the transcriptome fasta file
GENE_MODEL_BASENAME=`basename ${GENE_MODEL_FILENAME} ".gtf"`
GENE_MODEL_TRANSCRIPTOME_FASTA=${TOPLEVEL_DIR}/gene_model/${GENE_MODEL_NAME}/${GENE_MODEL_BASENAME}.transcriptome.fasta

# Determine the fullpath to the RNA reference fasta
REFERENCE_RNA_GENOME_FASTA=${TOPLEVEL_DIR}/genome_reference/${REFERENCE_RNA_GENOME_NAME}

# Prepare meta-data needed for salmon index with whole genome decoy
grep "^>" ${REFERENCE_RNA_GENOME_FASTA} | cut -d " " -f 1 > decoys.txt
fc -ln -1 >> README
sed -i.bak -e 's/>//g' decoys.txt
fc -ln -1 >> README

# Create contatenated transcriptome and reference file for indexing
cat ${GENE_MODEL_TRANSCRIPTOME_FASTA} ${REFERENCE_RNA_GENOME_FASTA} > transcriptome_genome_index.fa
fc -ln -1 >> README

# Create the Salmon index
if [ $ENVIRONMENT == "TGen" ]
then
    # BUG FIX: the original ran the command and then tested [ "$?" = "0" ];
    # with "set -ue" in effect (-e), a failing command aborts the script
    # before the test, so the FAILED_SALMON_INDEX branch was unreachable.
    # Testing the command directly keeps the error-capture path alive.
    if sbatch -c 4 --wait -J salmon_index --wrap="module load Salmon/${SALMON_VERSION} ; salmon index --threads 4 --transcripts transcriptome_genome_index.fa --decoys decoys.txt --index salmon_${SALMON_TYPE}_75merPlus --type ${SALMON_TYPE} --kmerLen 31"
    then
        echo "PASSED_SALMON_INDEX" >> README
    else
        touch FAILED_SALMON_INDEX
        echo "FAILED_SALMON_INDEX" >> README
        exit 1
    fi
elif [ $ENVIRONMENT == "LOCAL" ]
then
    echo
    echo "SALMON Index will be created on the local compute"
    # Generate Salmon Index Files (same set -e caveat as above)
    if salmon index --threads 4 --transcripts transcriptome_genome_index.fa --decoys decoys.txt --index salmon_${SALMON_TYPE}_75merPlus --type ${SALMON_TYPE} --kmerLen 31
    then
        echo "PASSED_SALMON_INDEX" >> README
    else
        touch FAILED_SALMON_INDEX
        echo "FAILED_SALMON_INDEX" >> README
        exit 1
    fi
else
    # typo fix: was "Enviroment"
    echo "Unexpected Entry in ${WORKFLOW_NAME}_resources.ini Environment Variable"
    touch FAILED_SALMON_INDEX
    echo "FAILED_SALMON_INDEX" >> README
    exit 1
fi
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.mvc.controller
import org.junit.Test
import org.junit.Assert._
import com.tzavellas.coeus.mvc.view.{ View, ViewName, NullView }
import com.tzavellas.coeus.core.Handler
class BeforeAfterFilterTest {

  import BeforeAfterFilterTest._

  /** A before-filter may set state the handler's view is built from. */
  @Test
  def controller_interception() {
    execute(new InterceptedController, "index") match {
      case ViewName(name) => assertEquals("set-from-interceptor", name)
      case r => fail("result must be a view name but was: '"+r+"'")
    }
  }

  /** stopAndRender() in before() must short-circuit the handler method. */
  @Test
  def interceptor_prevented_execution_of_hander_method() {
    execute(new NoHandlerExecutionController, "index") match {
      case ViewName(name) => assertEquals("intercepted", name)
      case r => fail("result must be a view name but was: '"+r+"'")
    }
  }

  /** after() runs even when before() already produced a view; the expected
    * IllegalArgumentException is thrown from after(). */
  @Test(expected=classOf[IllegalArgumentException])
  def after_gets_called_even_when_before_returns_a_view() {
    execute(new EnsureAfterCalledController, "index")
  }

  /** after() receives the exception raised by the handler method. */
  @Test(expected=classOf[IllegalArgumentException])
  def after_gets_called_with_an_occurred_exception() {
    execute(new ErroneousController, "throwException")
  }

  /** A view returned by after() overrides the handler's own outcome. */
  @Test
  def handle_returns_the_view_of_after_filter() {
    val result = execute(new ControllerWithExceptionHandler, "raiseError")
    assertEquals(NullView, result)
  }

  /** Invokes `handlerMethod` on `c` through the framework's Handler and
    * returns the produced result (a View, ViewName, etc.). */
  private def execute(c: Controller, handlerMethod: String) = {
    val controllerClass = c.getClass
    val method = controllerClass.getMethod(handlerMethod)
    (new Handler(c, method)).handle()
  }
}
object BeforeAfterFilterTest {

  /** Handler throws; after() asserts it sees that exception and then throws
    * its own IllegalArgumentException (checked by the test). */
  class ErroneousController extends Controller with AfterFilter {
    def throwException() = throw new IllegalStateException
    def after(error: Option[Exception]): Option[View] = {
      assertTrue(error.isDefined)
      assertTrue(error.get.isInstanceOf[IllegalStateException])
      throw new IllegalArgumentException
    }
  }

  /** before() seeds the view that index() subsequently returns. */
  class InterceptedController extends Controller with BeforeFilter with AfterFilter {
    var result: View = _
    def index(): View = result
    def before() {
      result = ViewName("set-from-interceptor")
    }
    def after(e: Option[Exception]): Option[View] = None
  }

  /** before() renders immediately, so index() must never execute. */
  class NoHandlerExecutionController extends InterceptedController {
    override def before() { stopAndRender(ViewName("intercepted")) }
    override def index(): View = throw new AssertionError("interceptor should have prevented the execution of this method")
  }

  /** Throws from after() to prove it runs even when before() short-circuited
    * the handler. */
  class EnsureAfterCalledController extends NoHandlerExecutionController {
    override def after(e: Option[Exception]): Option[View] = {
      throw new IllegalArgumentException
    }
  }

  /** after() swallows the handler's RuntimeException and supplies NullView. */
  class ControllerWithExceptionHandler extends Controller with AfterFilter {
    def raiseError() = throw new RuntimeException
    def after(error: Option[Exception]) = {
      assertTrue("expected runtime exception!", error.get.isInstanceOf[RuntimeException])
      Some(NullView)
    }
  }
} |
module.exports =
/**
 * Solves a 9x9 sudoku in place by recursive backtracking.
 * Empty cells are null or 0; filled cells are digits 1-9.
 * @param {Array<Array<?number>>} matrix the puzzle grid (mutated).
 * @return {Array<Array<number>>|boolean} the solved grid, or false if the
 *     given (partial) grid is inconsistent.
 */
function solveSudoku(matrix) {
  return solver(0, -1);

  // True if matrix[x][y] is unique within row x.
  function checkX(x, y) {
    const element = matrix[x][y];
    for (let j = 0; j < 9; j++) {
      if (matrix[x][j] == element && y != j) {
        return false;
      }
    }
    return true;
  }

  // True if matrix[x][y] is unique within column y.
  function checkY(x, y) {
    const element = matrix[x][y];
    for (let i = 0; i < 9; i++) {
      if (matrix[i][y] == element && x != i) {
        return false;
      }
    }
    return true;
  }

  // True if matrix[x][y] is unique within its 3x3 box.
  function checkSquare(x, y) {
    const element = matrix[x][y];
    const squareX = Math.floor(x / 3);
    const squareY = Math.floor(y / 3);
    for (let i = squareX * 3; i < squareX * 3 + 3; i++) {
      for (let j = squareY * 3; j < squareY * 3 + 3; j++) {
        // BUG FIX: the original condition "x != i && y != j" skipped every
        // box cell sharing the row OR the column with (x, y), so duplicates
        // in the same box on the same row/column went undetected. Only the
        // cell (x, y) itself must be excluded from the comparison.
        if (matrix[i][j] == element && !(i == x && j == y)) {
          return false;
        }
      }
    }
    return true;
  }

  // Advances cell by cell (row-major); validates pre-filled cells and tries
  // digits 1-9 in empty ones, backtracking on contradiction.
  function solver(x, y) {
    y = y + 1;
    if (y > 8) {
      y = 0;
      x = x + 1;
      if (x > 8) {
        return matrix; // past the last cell: grid is complete and valid
      }
    }
    if (matrix[x][y] != null && matrix[x][y] != 0) {
      if (!(checkX(x, y) && checkY(x, y) && checkSquare(x, y))) {
        return false;
      }
      return solver(x, y);
    } else {
      for (let element = 1; element < 10; element++) {
        matrix[x][y] = element;
        if (checkX(x, y) && checkY(x, y) && checkSquare(x, y)) {
          if (solver(x, y)) {
            return matrix;
          }
        }
      }
      matrix[x][y] = null; // no digit fits: undo and backtrack
      return false;
    }
  }
}
|
#include "KeyMap.h"

// Default constructor; intentionally performs no initialization here.
// NOTE(review): if no logic is planned, consider "= default" in the header.
KeyMap::KeyMap()
{
}
|
def searchElement(x, matrix):
    """Return True if ``x`` occurs anywhere in the 2-D list ``matrix``.

    Fixes the original's crash on an empty matrix (it indexed ``matrix[0]``
    unconditionally) and also tolerates ragged rows, since each row is
    scanned by membership rather than by the first row's length.

    Args:
        x: value to search for.
        matrix: list of rows (each a list of values); may be empty.

    Returns:
        bool: True if found, False otherwise (including for an empty matrix).
    """
    return any(x in row for row in matrix)
class NoMatchError(Exception):
    """Raised when a lookup in a dataset produces no matching records."""
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.