text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
# CHANGES CONSOLE TITLE
# Sets the terminal window/tab title using the OSC 0 escape sequence
# (ESC ] 0 ; <title> BEL); -n suppresses the newline, -e enables escapes.
echo -ne "\033]0;Anwar CLI IDE setup\007"
|
package com.github.chen0040.leetcode.day06.medium;
import java.util.*;
/**
* Created by xschen on 1/8/2017.
*
* summary:
* Given an array of strings, group anagrams together.
*
* link: https://leetcode.com/problems/group-anagrams/description/
*/
public class GroupAnagrams {
public class Solution {
private String sort(String str) {
Character[] chars = new Character[str.length()];
for (int i = 0; i < chars.length; i++)
chars[i] = str.charAt(i);
Arrays.sort(chars);
StringBuilder sb = new StringBuilder(chars.length);
for (char c : chars) sb.append(c);
return sb.toString();
}
public List<List<String>> groupAnagrams(String[] strs) {
Map<String, List<String>> group = new HashMap<String, List<String>>();
for(int i=0; i < strs.length; ++i) {
String str = sort(strs[i]);
if(group.containsKey(str)){
group.get(str).add(strs[i]);
} else {
List<String> g = new ArrayList<String>();
g.add(strs[i]);
group.put(str, g);
}
}
return new ArrayList<List<String>>(group.values());
}
}
}
|
<gh_stars>0
/*
* Copyright © 2019 <NAME>.
*/
package internal
import (
"fmt"
"github.com/hedzr/cmdr"
"github.com/hedzr/cmdr/plugin/daemon"
"github.com/hedzr/voxr-common/vxconf"
"github.com/hedzr/voxr-lite/internal/restful"
"github.com/sirupsen/logrus"
"golang.org/x/crypto/acme/autocert"
"net/http"
"os"
)
// daemonImpl adapts this application to the cmdr daemon plugin lifecycle:
// it carries the callbacks and HTTP routing hooks supplied via NewDaemon.
type (
daemonImpl struct {
// appTag caches the application name (set in OnRun) for log prefixes.
appTag string
// certManager handles ACME autocert certificates.
certManager *autocert.Manager
mux *http.ServeMux
// doDeregister is invoked from OnStop to run caller-supplied shutdown work.
doDeregister func()
// doStart is invoked from OnRun just before the server starts.
doStart func()
handlers restful.Handlers
// buildH2Routes populates the HTTP/2 mux; passed to restful.NewH2 in OnRun.
buildH2Routes func(mux *http.ServeMux) (err error)
}
)
// chStop and chDone are exposed via AppStopCh/AppDoneCh.
// NOTE(review): neither channel is initialized anywhere in this file —
// confirm they are assigned elsewhere, otherwise callers receive nil.
var chStop, chDone chan struct{}
//
//
//
// NewDaemon creates a `daemon.Daemon` object backed by daemonImpl.
//
// doStart runs right before the server starts (see OnRun), doDeregister runs
// on stop (see OnStop); h supplies the REST handlers and buildRoutes
// populates the HTTP/2 mux when the xs-server is disabled.
func NewDaemon(doStart func(), doDeregister func(), h restful.Handlers, buildRoutes func(mux *http.ServeMux) (err error)) daemon.Daemon {
return &daemonImpl{
doStart: doStart,
doDeregister: doDeregister,
handlers: h,
buildH2Routes: buildRoutes,
}
}
// AppStopCh returns the package-level stop channel.
// NOTE(review): chStop is never assigned in this file; confirm it is
// initialized elsewhere before relying on the returned channel.
func AppStopCh() chan struct{} {
return chStop
}
// AppDoneCh returns the package-level done channel.
// NOTE(review): chDone is never assigned in this file; confirm it is
// initialized elsewhere before relying on the returned channel.
func AppDoneCh() chan struct{} {
return chDone
}
//
//
//
// OnInstall is the daemon lifecycle hook for service installation;
// this implementation only logs the event.
func (d *daemonImpl) OnInstall(cxt *daemon.Context, cmd *cmdr.Command, args []string) (err error) {
logrus.Debugf("%s daemon OnInstall", cmd.GetRoot().AppName) // panic("implement me")
return
}
// OnUninstall is the daemon lifecycle hook for service removal;
// this implementation only logs the event.
func (d *daemonImpl) OnUninstall(cxt *daemon.Context, cmd *cmdr.Command, args []string) (err error) {
logrus.Debugf("%s daemon OnUninstall", cmd.GetRoot().AppName) // panic("implement me")
return
}
// OnStatus prints the app name/version plus the daemon's pid-file and
// log-file paths when the user queries service status.
func (d *daemonImpl) OnStatus(cxt *daemon.Context, cmd *cmdr.Command, p *os.Process) (err error) {
fmt.Printf("%s v%v\n", cmd.GetRoot().AppName, cmd.GetRoot().Version)
fmt.Printf("PID=%v\nLOG=%v\n", cxt.PidFileName, cxt.LogFileName)
// panic("implement me")
return
}
// OnReload is invoked on a reload request; currently a logging no-op.
func (d *daemonImpl) OnReload() {
logrus.Debugf("%s daemon OnReload", d.appTag) // panic("implement me")
}
// OnStop runs the caller-supplied doDeregister callback when the daemon
// is asked to stop.
func (d *daemonImpl) OnStop(cmd *cmdr.Command, args []string) (err error) {
logrus.Debugf("%s daemon OnStop", cmd.GetRoot().AppName)
d.doDeregister()
return
}
// OnRun is the daemon's main entry: it reads the configured port, then
// starts either the xs-server (when server.xs-server.enabled, default true)
// or a plain HTTP/2 server built from buildH2Routes.
func (d *daemonImpl) OnRun(cmd *cmdr.Command, args []string, stopCh, doneCh chan struct{}) (err error) {
d.appTag = cmd.GetRoot().AppName
logrus.Debugf("%s daemon OnRun, pid = %v, ppid = %v", d.appTag, os.Getpid(), os.Getppid())
// server.port falls back to 2300; an explicit 0 in config is fatal.
port := vxconf.GetIntR("server.port", 2300)
if port == 0 {
logrus.Fatal("port not defined")
}
if vxconf.GetBoolR("server.xs-server.enabled", true) {
d.doStart()
return restful.NewXsServer(cmd, args, stopCh, doneCh, d.handlers)
}
return restful.NewH2(cmd, args, stopCh, doneCh, port, d.buildH2Routes)
}
|
/* eslint-env jest */
import { visit } from './testUtils'
// Smoke tests for the /letterhead page. `visit` drives a browser session
// (presumably a Nightmare-style API, given .evaluate(...).end() — confirm
// in ./testUtils) and each test asserts on the rendered DOM.
describe('Letterhead page', () => {
// Page renders without the 404 fallback content.
it('loads in /letterhead', async () => {
const page = visit('/letterhead')
const text = await page.evaluate(() => document.body.textContent).end()
expect(text).not.toContain('Page not found')
})
// Exactly four letterhead cards are rendered.
it('has 4 letterheads', async () => {
const page = visit('/letterhead')
const letterheadCount = await page
.evaluate(() => document.querySelectorAll('.letterhead-card').length)
.end()
expect(letterheadCount).toEqual(4)
})
// Every card image is sourced from placeimg.com.
it('has 4 images from placeimg.com', async () => {
const page = visit('/letterhead')
const selector = '.letterhead-card img[src*="placeimg.com"]'
const letterheadCount = await page
.evaluate(sel => document.querySelectorAll(sel).length, selector)
.end()
expect(letterheadCount).toEqual(4)
})
// At least one card exposes a visible button.
it('has at least one letterhead with a button', async () => {
const page = visit('/letterhead')
const buttonIsVisible = await page.visible('.letterhead-card button').end()
expect(buttonIsVisible).toEqual(true)
})
// Introductory copy appears above the card grid.
it('has some text before the letterheads', async () => {
const page = visit('/letterhead')
const textIsVisible = await page
.visible('.container > .row:first-child p')
.end()
expect(textIsVisible).toEqual(true)
})
})
|
package sort;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* Baekjoon no. 20949: Hyojeong and the New Monitor (백준 20949번: 효정과 새 모니터)
*
* @see https://www.acmicpc.net/problem/20949
*
*/
public class Boj20949 {
private static final String NEW_LINE = "\n";
private static class Monitor implements Comparable<Monitor>{
int size;
int index;
public Monitor(int size, int index) {
this.size = size;
this.index = index;
}
@Override
public int compareTo(Monitor m) {
if(this.size > m.size) return -1;
else if(this.size < m.size) return 1;
else return this.index - m.index;
}
}
public static void main(String[] args) throws Exception{
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
int N = Integer.parseInt(br.readLine());
PriorityQueue<Monitor> pq = new PriorityQueue<>();
for(int i = 1; i <= N; i++){
StringTokenizer st = new StringTokenizer(br.readLine());
int H = Integer.parseInt(st.nextToken());
int W = Integer.parseInt(st.nextToken());
pq.offer(new Monitor((H * H + W * W), i));
}
StringBuilder sb = new StringBuilder();
while(!pq.isEmpty()) {
sb.append(pq.poll().index).append(NEW_LINE);
}
System.out.println(sb.toString());
}
}
|
using System;
/// <summary>
/// Validation extensions for <see cref="Param{T}"/> wrapping a double.
/// </summary>
public static class EnsureDoubleExtensions
{
    /// <summary>
    /// Ensures the wrapped value is strictly lower than <paramref name="limit"/>.
    /// </summary>
    /// <param name="param">The parameter under validation.</param>
    /// <param name="limit">Exclusive upper bound.</param>
    /// <returns>The same <paramref name="param"/>, enabling call chaining.</returns>
    /// <exception cref="ArgumentOutOfRangeException">When the value is not lower than the limit.</exception>
    public static Param<double> IsLowerThan(this Param<double> param, double limit)
    {
        if (param.Value >= limit)
        {
            // BUG FIX: the single-string ArgumentOutOfRangeException ctor
            // treats its argument as ParamName, not Message, burying the
            // text. Use the (paramName, actualValue, message) overload.
            throw new ArgumentOutOfRangeException(
                nameof(param),
                param.Value,
                $"The value {param.Value} is not lower than {limit}.");
        }
        return param;
    }
}
/// <summary>Immutable wrapper pairing a value with fluent validation extensions.</summary>
public class Param<T>
{
    /// <summary>The wrapped value; fixed at construction.</summary>
    public T Value { get; }

    /// <summary>Wraps <paramref name="value"/> for validation.</summary>
    public Param(T value) => Value = value;
}
/// <summary>Resource strings for Ensure-style validation messages.</summary>
public static class EnsureRes
{
// Format: {0} = actual value, {1} = limit.
// NOTE(review): declared but not referenced by IsLowerThan in this file —
// confirm the intended consumer before removing.
public static string Ensure_IsNotLt = "Value {0} is not lower than {1}.";
}
/// <summary>Central factory for exceptions raised by failed parameter validation.</summary>
public static class ExceptionFactory
{
    /// <summary>
    /// Creates the exception for a failed validation of <paramref name="param"/>.
    /// The param argument is currently unused but kept for a stable signature.
    /// </summary>
    public static Exception CreateForParamValidation<T>(Param<T> param, string message) =>
        new ArgumentException(message);
}
/// <summary>Demo entry point: validates a sample value against an upper limit.</summary>
public class Program
{
    public static void Main()
    {
        const double valueToValidate = 5.0;
        const double limit = 10.0;
        try
        {
            // IsLowerThan throws when the value is not strictly below the limit.
            _ = new Param<double>(valueToValidate).IsLowerThan(limit);
            Console.WriteLine("Validation successful. Value is lower than the limit.");
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Validation failed: {ex.Message}");
        }
    }
}
|
<reponame>Jordan-Gilliam/SocialNews
// Express sub-app that scrapes r/learnprogramming and stores the links via
// the models in ../models (presumably Mongoose, given .find/.create — confirm).
var express = require("express");
var app = express();
// var router = express.Router();
var axios = require("axios");
var cheerio = require("cheerio");
var db = require("../models");
// Home page.
app.get("/", function(req, res) {
res.render("index");
});
// Scrape the subreddit, persist every <p class="title"> link, then redirect
// to /articles exactly once.
//
// BUG FIX: the original sent res.send("Scrape Complete") inside the .each()
// loop (once per article) AND res.redirect("/articles") right after merely
// scheduling the inserts — multiple responses per request (headers-already-
// sent errors) and a redirect issued before any database write finished.
app.get("/scrape", function(req, res) {
  axios.get("https://www.reddit.com/r/learnprogramming/").then(function(response) {
    var $ = cheerio.load(response.data);
    var pendingSaves = [];
    $("p.title").each(function(i, element) {
      // Add the text and href of every link, and save them as properties of the result object
      var result = {};
      result.title = $(this).text();
      result.link = $(this)
        .children("a")
        .attr("href");
      // Create a new Link using "result object"
      pendingSaves.push(db.Article.create(result));
    });
    // Respond only after every insert has settled.
    return Promise.all(pendingSaves);
  }).then(function() {
    res.redirect("/articles");
  }).catch(function(err) {
    res.json(err);
  });
});
// Route for getting saved Reddit links from the db
// Route for getting saved Reddit links from the db: returns every stored
// Article as JSON, or the error as JSON on failure.
app.get("/articles", function(req, res) {
  db.Article
    .find({})
    .then(function(found) {
      res.json(found);
    })
    .catch(function(error) {
      res.json(error);
    });
});
// Renders the "saved" view after querying for saved articles.
// NOTE(review): registered as POST and the query result (dbArticle) is never
// handed to the template — confirm whether this should be a GET and whether
// the saved articles should be passed to res.render.
app.post("/saved/", function(req, res) {
db.Article
.find({ saved: true })
.then(function(dbArticle) {
res.render("saved");
})
.catch(function(err) {
res.json(err);
});
});
module.exports = app;
|
<reponame>bodymovin/skia-buildbot
/**
* @module modules/debug-view-sk
* @description Container and manager of the wasm-linked main canvas for the debugger.
* Contains several CSS resizing buttons that do not alter the surface size.
*
* @evt move-cursor: Emitted when the user has moved the cursor by clicking or hovering.
*/
import { define } from 'elements-sk/define';
import { html } from 'lit-html';
import { ElementDocSk } from '../element-doc-sk/element-doc-sk';
import {
DebuggerPageSkLightDarkEventDetail,
DebuggerPageSkCursorEventDetail,
Point,
} from '../debugger-page-sk/debugger-page-sk';
export type FitStyle = 'natural' | 'fit' | 'right' | 'bottom';
// Custom element hosting the debugger's main drawing canvas plus a crosshair
// overlay canvas. The fit buttons only change CSS classes; the surface's
// native pixel size is changed via resize().
export class DebugViewSk extends ElementDocSk {
private static template = (ele: DebugViewSk) =>
html`
<div class="horizontal-flex">
<button title="Original size." @click=${() => ele.fitStyle = 'natural'}>
<img src="/dist/image.png" />
</button>
<button title="Fit in page." @click=${() => ele.fitStyle = 'fit'}>
<img src="/dist/both.png" />
</button>
<button title="Fit to width." @click=${() => ele.fitStyle = 'right'}>
<img src="/dist/right.png" />
</button>
<button title="Fit to height." @click=${() => ele.fitStyle = 'bottom'}>
<img src="/dist/bottom.png" />
</button>
</div>
<div id="backdrop" class="${ele._backdropStyle} grid">
${ ele._renderCanvas
? html`<canvas id="main-canvas" class=${ele._fitStyle}
width=${ele._width} height=${ele._height}></canvas>`
: '' }
<canvas id="crosshair-canvas" class=${ele._fitStyle}
width=${ele._width} height=${ele._height}
@click=${ele._canvasClicked}
@mousemove=${ele._canvasMouseMove}></canvas>
</div>`;
// the native width and height of the main canvas, before css is applied
private _width: number = 400;
private _height: number = 400;
// the css class used to size the canvas.
private _fitStyle: FitStyle = 'fit';
// CSS class for the backdrop behind the canvas; switched by 'light-dark'.
private _backdropStyle = 'light-checkerboard';
private _crossHairActive = false;
// When false the main canvas is omitted from the template; resize() toggles
// this to force a brand-new element (and a fresh rendering context).
private _renderCanvas = true;
get crosshairActive(): boolean {
return this._crossHairActive;
}
constructor() {
super(DebugViewSk.template);
}
connectedCallback() {
super.connectedCallback();
this._render();
// Redraw the crosshair when another module reports a cursor move.
this.addDocumentEventListener('render-cursor', (e) => {
const detail = (e as CustomEvent<DebuggerPageSkCursorEventDetail>).detail;
if (!this._crossHairActive || detail.onlyData) {
return;
}
this._drawCrossHairAt(detail.position);
});
// Switch the backdrop between light and dark modes.
this.addDocumentEventListener('light-dark', (e) => {
this._backdropStyle = (e as CustomEvent<DebuggerPageSkLightDarkEventDetail>).detail.mode;
this._render();
});
}
// Pass one of the CSS classes for sizing the debug view canvas.
// It doesn't change the pixel size of the SkSurface, use resize for that.
set fitStyle(fs: FitStyle) {
this._fitStyle = fs;
this._render();
}
// The main canvas element; re-renders first so the element exists.
get canvas(): HTMLCanvasElement {
this._render();
return this.querySelector<HTMLCanvasElement>('#main-canvas')!
}
// Replace the main canvas element, changing its native size
resize(width = 400, height = 400): HTMLCanvasElement {
this._width = width;
this._height = height;
this._renderCanvas = false;
this._render(); // delete it to clear it's rendering context.
this._renderCanvas = true;
this._render(); // template makes a fresh one.
return this.querySelector('canvas')!;
}
// Size of the main canvas in CSS pixels, as [width, height].
private _visibleSize(): Point {
const element = this.querySelector<HTMLCanvasElement>('#main-canvas')!;
var strW = window.getComputedStyle(element, null).width;
var strH = window.getComputedStyle(element, null).height;
// Trim 'px' off the end of the style string and convert to a number.
const visibleWidth = parseFloat(strW.substring(0, strW.length-2));
const visibleHeight = parseFloat(strH.substring(0, strH.length-2));
return [visibleWidth, visibleHeight];
}
// Convert a mouse event's CSS-pixel offset to native canvas coordinates.
private _mouseOffsetToCanvasPoint(e: MouseEvent): Point {
// The element changes size occasionally, compute visible size just before use.
const size = this._visibleSize();
return [
Math.round(e.offsetX / size[0] * this._width),
Math.round(e.offsetY / size[1] * this._height),
];
}
// Broadcast a bubbling 'move-cursor' event carrying the new position.
private _sendCursorMove(p: Point) {
this.dispatchEvent(
new CustomEvent<DebuggerPageSkCursorEventDetail>(
'move-cursor', {
detail: {position: p, onlyData: false},
bubbles: true,
}));
}
// Draw a full-span red crosshair centered on p (native coordinates).
private _drawCrossHairAt(p: Point) {
const chCanvas = this.querySelector<HTMLCanvasElement>('#crosshair-canvas')!;
const chx = chCanvas.getContext('2d')!;
chx.clearRect(0, 0, chCanvas.width, chCanvas.height);
// Scale the stroke so it appears ~1 CSS pixel wide regardless of fit mode.
chx.lineWidth = this._width / this._visibleSize()[0];
chx.strokeStyle = '#F00';
chx.beginPath();
chx.moveTo(0, p[1]-0.5);
chx.lineTo(chCanvas.width+1, p[1]-0.5);
chx.moveTo(p[0]-0.5, 0);
chx.lineTo(p[0]-0.5, chCanvas.height+1);
chx.stroke();
}
// Clicking toggles the pinned crosshair on/off at the clicked point.
private _canvasClicked(e: MouseEvent) {
if (e.offsetX < 0) { return; } // border
const coords = this._mouseOffsetToCanvasPoint(e);
if (this._crossHairActive) {
this._crossHairActive = false;
this._drawCrossHairAt([-5, -5]); // lazy clear
this._sendCursorMove(coords);
} else {
this._crossHairActive = true;
this._sendCursorMove(coords);
}
}
// Hover moves the cursor only while the crosshair is not pinned.
private _canvasMouseMove(e: MouseEvent) {
if (e.offsetX < 0) { return; } // border
if (this._crossHairActive) { return; }
this._sendCursorMove(this._mouseOffsetToCanvasPoint(e));
}
};
define('debug-view-sk', DebugViewSk);
|
<filename>librbr/include/utilities/a_star.h
/**
* The MIT License (MIT)
*
* Copyright (c) 2014 <NAME>, University of Massachusetts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef A_STAR_H
#define A_STAR_H
#include <vector>
#include <map>
/**
* An implementation of the A* search algorithm.
*/
template <typename T>
class AStar {
public:
/**
* The default constructor for the AStar class. It requires the specification of all
* relevant variables.
* @param heuristic The heuristic function estimating the distance from a node to the goal.
* @param cost The cost from the immediate transition from one node to another.
* @param successors Generate the list of successors nodes.
*
* NOTE: solutions are optimal only when the heuristic is admissible —
* confirm against the implementation in a_star.tpp.
*/
AStar(double (*heuristic)(T node, T goal), double (*cost)(T n1, T n2), std::vector<T> (*successors)(T node));
/**
* The deconstructor for the AStar class.
*/
virtual ~AStar();
/**
* Solve the search problem given a starting node.
* The resulting path is stored internally; retrieve it with get_path().
* @param start The initial node.
* @param goal The goal node.
* @throw UtilityException Either there is no path from the start to goal, or path reconstruction failed.
*/
void solve(T start, T goal);
/**
* Get the solution path which was computed from the last call of solve.
* @return The solution path from the start node to the goal node.
*/
const std::vector<T> &get_path();
/**
* Get the number of nodes expanded from the last call of solve.
* @return The number of nodes expanded.
*
* NOTE(review): returns int while the internal counter is unsigned int —
* consider aligning the types in a future API revision.
*/
int get_num_nodes_expanded();
private:
/**
* Reconstruct the path given a collection of maps nodes to parents and store it internally.
* @param start The initial node.
* @param goal The goal node.
* @param trace The trace of all expanded nodes to their parents.
* @throw UtilityException The trace of the route was corrupt.
*/
void reconstruct_path(T start, T goal, std::map<T, T> &trace);
/**
* The heuristic function estimating the distance from a node to the goal.
*/
double (*heuristic)(T node, T goal);
/**
* The cost from the immediate transition from one node to another.
*/
double (*cost)(T n1, T n2);
/**
* Generate the list of successors nodes.
*/
std::vector<T> (*successors)(T node);
/**
* The optimal path from the last call of solve.
*/
std::vector<T> path;
/**
* The number of nodes expanded from the last call of solve.
*/
unsigned int numNodesExpanded;
};
#include "../../src/utilities/a_star.tpp"
#endif // A_STAR_H
|
package de.wwu.music2rdf.core;
/**
 * A staff within a score, identified by an id and owned by a ScorePart.
 */
public class Staff {
    private String id;
    private ScorePart part;

    /** Creates a staff with a fresh, empty default ScorePart. */
    public Staff() {
        this.part = new ScorePart();
    }

    /** @return the staff identifier (null until {@link #setId} is called) */
    public String getId() {
        return id;
    }

    /** @param id the staff identifier */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the score part this staff belongs to (non-null after construction) */
    public ScorePart getPart() {
        return part;
    }

    /**
     * Sets the score part this staff belongs to. Added for consistency with
     * the id getter/setter pair, which previously had no counterpart here.
     *
     * @param part the score part; callers should not pass null
     */
    public void setPart(ScorePart part) {
        this.part = part;
    }
}
|
/// Standard gravity in ft/s²; dividing a weight in pounds-force by it
/// yields mass in slugs. Named to replace the former magic number.
const STANDARD_GRAVITY_FT_S2: f64 = 32.174;

/// Converts a weight in pounds-force to mass in slugs.
///
/// Returns `None` for negative input, since a negative weight is invalid.
fn convert_to_slugs(weight: f64) -> Option<f64> {
    if weight < 0.0 {
        return None;
    }
    Some(weight / STANDARD_GRAVITY_FT_S2)
}
/// Demo entry point: converts a sample weight and reports the result.
fn main() {
    let weight_in_pounds = 100.0;
    let message = match convert_to_slugs(weight_in_pounds) {
        Some(slugs) => format!("Equivalent weight in slugs: {:.3}", slugs),
        None => String::from("Invalid input: Weight cannot be negative"),
    };
    println!("{}", message);
}
|
<filename>sgalgorithm/permutation_combination.go
package sgalgorithm
// GenPermutation appends, into *resultList, every string formed by
// concatenating num elements chosen from srcList, where each output
// position ranges independently over all of srcList (an n^num enumeration
// with repetition, despite the name). num is clamped to len(srcList);
// num <= 0 produces nothing. resultList must point to a slice.
func GenPermutation(srcList []string, num int, resultList *[]string) {
	if num <= 0 {
		return
	}
	if num > len(srcList) {
		num = len(srcList)
	}
	// make() already zero-fills, so the original's follow-up loop
	// (for _, n := range flags { flags[n] = 0 } — which indexed by VALUE,
	// a latent bug) was a no-op and is removed, along with the
	// pointless num*2 capacity.
	flags := make([]int, num)
	genPermutationItem(srcList, flags, len(srcList), resultList)
}
// genPermutationItem emits the string selected by flags (one srcList index
// per output position), then advances flags like an odometer over
// [0, srcLength) and recurses until every combination has been emitted.
func genPermutationItem(srcList []string, flags []int, srcLength int, resultList *[]string) {
str := ""
for _, n := range flags {
str += srcList[n]
}
*resultList = append(*resultList, str)
if flags[len(flags)-1] < (srcLength - 1) { // advance the last position
flags[len(flags)-1]++
genPermutationItem(srcList, flags, srcLength, resultList)
} else { // last position exhausted: carry backwards to the first advanceable slot
flags[len(flags)-1] = 0
if len(flags)-2 < 0 {
return
}
for i := len(flags) - 2; i >= 0; i-- {
if flags[i] < srcLength-1 {
flags[i]++
genPermutationItem(srcList, flags, srcLength, resultList)
return
} else {
flags[i] = 0
}
}
return
}
}
|
# Deploys migration 3 to Rinkeby and migrations 4-5 to the SKALE side chain,
# running the ERC20 deposit script in between.
npx truffle migrate --f 3 --to 3 --network rinkeby
npx truffle migrate --f 4 --to 4 --network skaleSide
# $1: private key, forwarded to the deposit script via the KEY env var.
KEY=$1 node ./scripts/depositErc20FromMain.js
npx truffle migrate --f 5 --to 5 --network skaleSide
|
<filename>routes/wechat.js
// Express router that replies to WeChat messages via the `wechat` middleware.
var express = require('express');
var wechat = require('wechat');
var router = express.Router();
// SECURITY(review): appID/appsecret/token are live credentials hard-coded in
// source control — move them to environment variables/config and rotate the
// secret.
var config = {
"appID": "wx9964adbcb6c21bd9",
"appsecret": "91687f6314257936c98269113e3ed2df",
"token": "950815x"
}
router.use(express.query());
router.use('/', wechat(config, function(req, res, next) {
console.log(req.weixin);
var message = req.weixin;
// Text messages: one hard-coded special reply, otherwise echo the content.
if (message.Content === '爱') {
res.reply('我最爱我家晶晶~~~');
}else{
res.reply(message.Content);
}
}));
module.exports = router;
|
package seedu.address.logic.parser;
import static seedu.address.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.address.logic.commands.CommandTestUtil.ADDRESS_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.ADDRESS_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.BIRTHDAY_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.BIRTHDAY_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.EMAIL_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.EMAIL_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.GENDER_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.GENDER_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_ADDRESS_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_BIRTHDAY_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_EMAIL_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_GENDER_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_PHONE_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_TAG_DESC;
import static seedu.address.logic.commands.CommandTestUtil.INVALID_VOLUNTEER_NAME_DESC;
import static seedu.address.logic.commands.CommandTestUtil.NAME_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.NAME_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.PHONE_DESC_AMY;
import static seedu.address.logic.commands.CommandTestUtil.PHONE_DESC_BOB;
import static seedu.address.logic.commands.CommandTestUtil.PREAMBLE_NON_EMPTY;
import static seedu.address.logic.commands.CommandTestUtil.PREAMBLE_WHITESPACE;
import static seedu.address.logic.commands.CommandTestUtil.TAG_DESC_DRIVER;
import static seedu.address.logic.commands.CommandTestUtil.TAG_DESC_STUDENT;
import static seedu.address.logic.commands.CommandTestUtil.VALID_ADDRESS_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_BIRTHDAY_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_EMAIL_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_GENDER_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_NAME_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_PHONE_BOB;
import static seedu.address.logic.commands.CommandTestUtil.VALID_TAG_DRIVER;
import static seedu.address.logic.commands.CommandTestUtil.VALID_TAG_STUDENT;
import static seedu.address.logic.parser.CommandParserTestUtil.assertParseFailure;
import static seedu.address.logic.parser.CommandParserTestUtil.assertParseSuccess;
import static seedu.address.testutil.TypicalVolunteers.AMY;
import static seedu.address.testutil.TypicalVolunteers.BOB;
import org.junit.Test;
import seedu.address.logic.commands.AddCommand;
import seedu.address.model.tag.Tag;
import seedu.address.model.volunteer.Address;
import seedu.address.model.volunteer.Birthday;
import seedu.address.model.volunteer.Email;
import seedu.address.model.volunteer.Gender;
import seedu.address.model.volunteer.Name;
import seedu.address.model.volunteer.Phone;
import seedu.address.model.volunteer.Volunteer;
import seedu.address.testutil.VolunteerBuilder;
/**
 * Unit tests for {@code AddCommandParser}: successful parses (including
 * duplicated fields, where the last occurrence wins, and omitted optional
 * tags), missing compulsory prefixes, and invalid field values.
 */
public class AddCommandParserTest {
private AddCommandParser parser = new AddCommandParser();
// All compulsory fields present; repeated fields keep the last occurrence.
@Test
public void parse_allFieldsPresent_success() {
Volunteer expectedVolunteer = new VolunteerBuilder(BOB).withTags(VALID_TAG_STUDENT).build();
// whitespace only preamble
assertParseSuccess(parser, PREAMBLE_WHITESPACE + NAME_DESC_BOB
+ GENDER_DESC_BOB + BIRTHDAY_DESC_BOB + PHONE_DESC_BOB
+ EMAIL_DESC_BOB + ADDRESS_DESC_BOB + TAG_DESC_STUDENT,
new AddCommand(expectedVolunteer));
// multiple names - last name accepted
assertParseSuccess(parser, NAME_DESC_AMY + NAME_DESC_BOB
+ GENDER_DESC_BOB + BIRTHDAY_DESC_BOB + PHONE_DESC_BOB
+ EMAIL_DESC_BOB + ADDRESS_DESC_BOB + TAG_DESC_STUDENT,
new AddCommand(expectedVolunteer));
// multiple genders - last gender accepted
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_AMY + GENDER_DESC_BOB
+ BIRTHDAY_DESC_BOB + PHONE_DESC_BOB + EMAIL_DESC_BOB
+ ADDRESS_DESC_BOB + TAG_DESC_STUDENT, new AddCommand(expectedVolunteer));
// multiple birthdays - last birthday accepted
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_AMY
+ BIRTHDAY_DESC_BOB + PHONE_DESC_BOB + EMAIL_DESC_BOB
+ ADDRESS_DESC_BOB + TAG_DESC_STUDENT, new AddCommand(expectedVolunteer));
// multiple phones - last phone accepted
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_AMY + PHONE_DESC_BOB + EMAIL_DESC_BOB
+ ADDRESS_DESC_BOB + TAG_DESC_STUDENT, new AddCommand(expectedVolunteer));
// multiple emails - last email accepted
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_AMY + EMAIL_DESC_BOB
+ ADDRESS_DESC_BOB + TAG_DESC_STUDENT, new AddCommand(expectedVolunteer));
// multiple addresses - last address accepted
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_AMY
+ ADDRESS_DESC_BOB + TAG_DESC_STUDENT, new AddCommand(expectedVolunteer));
// multiple tags - all accepted
Volunteer expectedVolunteerMultipleTags = new VolunteerBuilder(BOB)
.withTags(VALID_TAG_STUDENT, VALID_TAG_DRIVER).build();
assertParseSuccess(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT,
new AddCommand(expectedVolunteerMultipleTags));
}
// Tags are the only optional field; omitting them still parses.
@Test
public void parse_optionalFieldsMissing_success() {
// zero tags
Volunteer expectedVolunteer = new VolunteerBuilder(AMY).withTags().build();
assertParseSuccess(parser, NAME_DESC_AMY + GENDER_DESC_AMY + BIRTHDAY_DESC_AMY
+ PHONE_DESC_AMY + EMAIL_DESC_AMY + ADDRESS_DESC_AMY,
new AddCommand(expectedVolunteer));
}
// Each compulsory prefix omitted in turn must fail with the usage message.
@Test
public void parse_compulsoryFieldMissing_failure() {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE);
// missing name prefix
assertParseFailure(parser, VALID_NAME_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB,
expectedMessage);
// missing gender prefix
assertParseFailure(parser, NAME_DESC_BOB + VALID_GENDER_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB,
expectedMessage);
// missing birthday prefix
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + VALID_BIRTHDAY_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB,
expectedMessage);
// missing phone prefix
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ VALID_PHONE_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB,
expectedMessage);
// missing email prefix
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + VALID_EMAIL_BOB + ADDRESS_DESC_BOB,
expectedMessage);
// missing address prefix
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + VALID_ADDRESS_BOB,
expectedMessage);
// all prefixes missing
assertParseFailure(parser, VALID_NAME_BOB + VALID_GENDER_BOB + VALID_BIRTHDAY_BOB
+ VALID_PHONE_BOB + VALID_EMAIL_BOB + VALID_ADDRESS_BOB,
expectedMessage);
}
// Each invalid field value must surface that field's constraint message;
// when several are invalid only the first is reported.
@Test
public void parse_invalidValue_failure() {
// invalid name
assertParseFailure(parser, INVALID_VOLUNTEER_NAME_DESC + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Name.MESSAGE_NAME_CONSTRAINTS);
// invalid gender
assertParseFailure(parser, NAME_DESC_BOB + INVALID_GENDER_DESC + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Gender.MESSAGE_GENDER_CONSTRAINTS);
// invalid birthday
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + INVALID_BIRTHDAY_DESC
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Birthday.MESSAGE_BIRTHDAY_CONSTRAINTS);
// invalid phone
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ INVALID_PHONE_DESC + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Phone.MESSAGE_PHONE_CONSTRAINTS);
// invalid email
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + INVALID_EMAIL_DESC + ADDRESS_DESC_BOB
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Email.MESSAGE_EMAIL_CONSTRAINTS);
// invalid address
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + INVALID_ADDRESS_DESC
+ TAG_DESC_DRIVER + TAG_DESC_STUDENT, Address.MESSAGE_ADDRESS_CONSTRAINTS);
// invalid tag
assertParseFailure(parser, NAME_DESC_BOB + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + ADDRESS_DESC_BOB
+ INVALID_TAG_DESC + VALID_TAG_STUDENT, Tag.MESSAGE_TAG_CONSTRAINTS);
// two invalid values, only first invalid value reported
assertParseFailure(parser, INVALID_VOLUNTEER_NAME_DESC + GENDER_DESC_BOB + BIRTHDAY_DESC_BOB
+ PHONE_DESC_BOB + EMAIL_DESC_BOB + INVALID_ADDRESS_DESC,
Name.MESSAGE_NAME_CONSTRAINTS);
// non-empty preamble
assertParseFailure(parser, PREAMBLE_NON_EMPTY + NAME_DESC_BOB + GENDER_DESC_BOB
+ BIRTHDAY_DESC_BOB + PHONE_DESC_BOB + EMAIL_DESC_BOB
+ ADDRESS_DESC_BOB + TAG_DESC_DRIVER + TAG_DESC_STUDENT,
String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
}
}
|
import React, { useEffect, useState } from "react";
import "./style.css";
import GoogleMapReact from "google-map-react";
import MapPinIcon from "../Icons/MapPinIcon";
import MapPinNewIcon from "../Icons/MapPinNewIcon";
import XIcon from "../Icons/XIcon";
import MriIcon from "../Icons/MriIcon";
import HospitalIcon from "../Icons/HospitalIcon";
// Euclidean distance between a map marker (pt) and the mouse pointer (mp),
// presumably passed to GoogleMapReact's distanceToMouse prop for hover
// hit-testing — confirm where SimpleMap wires it up. Returns undefined when
// either point is missing, matching the original implicit behavior.
const distanceToMouse = (pt, mp) => {
  if (!pt || !mp) {
    return undefined;
  }
  const dx = pt.x - mp.x;
  const dy = pt.y - mp.y;
  return Math.sqrt(dx * dx + dy * dy);
};
// Map pin data: one entry per city (lat/lng plus the MRI installation
// projects at that location), grouped by country in the comments below.
// NOTE(review): `id` and `country` appear only on some entries — confirm
// whether any consumer relies on them before normalizing the schema.
const places = [
// serbia
{
country: 'Srbija', title: "Beograd", lat: 44.8125, lng: 20.4612, projects: [
{ facility: 'Dr Konjović', mri: 'Philips' },
{ facility: 'Bolnica "Sveti Sava"', mri: 'GE' },
{ facility: 'Univerzitetska dečija bolnica', mri: 'Philips' },
{ facility: 'KBC Zemun', mri: 'Canon' },
{ facility: 'Klinika za digestivne bolesti KCS', mri: 'Philips' },
{ facility: 'Institut za neurologiju', mri: 'Philips' },
]
},
{
country: 'Srbija', title: "Čačak", lat: 43.8914, lng: 20.3506, projects: [
{ facility: 'Hipokrat MR', mri: 'Philips' }
]
},
{
country: 'Srbija', title: "Sremska Mitrovica", lat: 44.9798, lng: 19.6102, projects: [
{ facility: 'Sirmium Medic', mri: 'Philips' }
]
},
{
country: 'Srbija', title: "Užice", lat: 43.8556, lng: 19.8425, projects: [
{ facility: 'Zdravstveni Centar Užice', mri: 'GE' }
]
},
{
country: 'Srbija', title: "<NAME>", lat: 45.24704, lng: 19.84669, projects: [
{ facility: 'Klinički centar Vojvodine', mri: 'GE' },
{ facility: 'Zdravlje plus', mri: 'Philips' },
]
},
{
country: 'Srbija', title: "Sombor", lat: 45.7733, lng: 19.1151, projects: [
{ facility: 'Poliklinika Consilium', mri: 'Philips' }
]
},
// bosnia
{
id: 3, title: "Zenica", lat: 44.2034, lng: 17.9077, projects: [
{ facility: 'Poliklinika "DR Strika"', mri: 'GE' }
]
},
{
id: 3, title: "Bijeljina", lat: 44.7570, lng: 19.2150, projects: [
{ facility: 'Medik - T', mri: 'Philips' }
]
},
{
id: 3, title: "Doboj", lat: 44.7349, lng: 18.0843, projects: [
{ facility: 'Dijagnostički centar Dr Brkic', mri: 'Philips' }
]
},
{
id: 3, title: "Brčko", lat: 44.8727, lng: 18.8106, projects: [
{ facility: 'Spec. Ordinacija ALFA', mri: 'Philips' }
]
},
{
id: 3, title: "Zvornik", lat: 44.3865, lng: 19.1048, projects: [
{ facility: 'Opšta bolnica Zvornik', mri: 'GE' }
]
},
// macedonia
{
id: 3, title: "Tetovo", lat: 42.0069, lng: 20.9715, projects: [
{ facility: 'Klinička bolnica Tetovo', mri: 'GE' }
]
},
{
title: "Skopje", lat: 41.9981, lng: 21.4254, projects: [
{ facility: 'Klinička bolnica "Sistina"', mri: 'GE' },
{ facility: 'Gradska bolnica Skopje', mri: 'GE' },
{ facility: 'Klinička bolnica "Sistina"', mri: 'GE 3T' },
]
},
{
title: "Štip", lat: 41.7464, lng: 22.1997, projects: [
{ facility: 'Klinička bolnica Štip', mri: 'GE' },
]
},
{
title: "Petrovec", lat: 41.9402, lng: 21.6094, projects: [
{ facility: 'Euroitalia', mri: 'GE' }
]
},
// moldavia
{
id: 3, title: "Balti", lat: 47.7540, lng: 27.9184, projects: [
{ facility: 'Incomed', mri: 'Philips' }
]
},
// poland
{
id: 3, title: "Bielsko-biala", lat: 49.8224, lng: 19.0584, projects: [
{ facility: 'Klinika Św. Łukasza', mri: 'Philips' }
]
},
// italy
{ id: 3, title: "Trapani", lat: 38.0174, lng: 12.5365, projects: [
{facility: 'Multimedica Trapanese', mri: 'Neusoft'}
] },
// india
{ title: "Karur", lng: 78.0766, lat: 10.9601, projects: [
{facility: 'Cura Healthcare', mri: 'Philips'}
] },
{ title: "Jaipur", lng: 75.7873, lat: 26.9124, projects: [
{facility: 'Cura Healthcare', mri: 'Philips'}
] },
// belarus
{ id: 2, title: "Minsk", lat: 53.9006, lng: 27.5590, projects: [
{facility: 'Avicenna Medical', mri: 'Philips'},
] },
// montenegro
{ id: 3, title: "Berane", lat: 42.8379, lng: 19.8604, projects: [
{facility: 'Poliklinika Stojanović', mri: 'GE'},
] },
{ id: 3, title: "Podgorica", lat: 42.4304, lng: 19.2594, projects: [
{facility: 'Hotel Lovćen', mri: 'GE'},
] },
// bulgaria
{ id: 3, title: "Sofija", lat: 42.6977, lng: 23.3219, projects: [
{facility: 'Neo Clinic', mri: 'GE'},
// CHECK THIS AND UPDATE (translated from Serbian: "OVO PROVERI I PROMENI")
] },
// germany //saarbrucken t
{ id: 3, title: "Saarbrücken", lat: 49.2402, lng: 6.9969, projects: [
{facility: 'Joint Orthopedic Clinic', mri: 'Philips'},
{facility: 'Joint Orthopedic Clinic (zamena magneta)', mri: 'Philips'},
] },
{ id: 3, title: "Schweinfurt", lat: 50.0492, lng: 10.2194, projects: [
{facility: 'Leopoldina', mri: 'Philips'},
] },
{ id: 3, title: "Frankfurt", lat: 50.1109, lng: 8.6821, projects: [
{facility: 'Main Clinic', mri: 'Philips'},
] },
{ id: 3, title: "Föhren", lat: 49.8588, lng: 6.7657, projects: [
{facility: 'Promed', mri: 'Philips 3T'},
{facility: 'InHealth mobile magnet', mri: 'Philips'},
{facility: 'Calumet mobile magnet', mri: 'Philips'},
{facility: 'Smith mobile magnet', mri: 'Philips'},
{facility: 'Smith relocatable magnet', mri: 'Philips'},
{facility: 'Promed', mri: 'Philips'},
] },
{ id: 3, title: "<NAME>", lat: 49.7858, lng: 7.6515, projects: [
{facility: 'Sanomed', mri: 'Philips'},
] },
{ id: 3, title: "Mannheim", lat: 49.4875, lng: 8.4660, projects: [
{facility: 'Praxis Dr J.J. Kirsch & Kollegen', mri: 'Philips'}
] },
];
// job type (tip posla): cabin construction / servicing / modification
const SimpleMap = () => {
const [close, setClose] = useState(0);
const InfoWindow = ({ title, active, projects }) => {
if (active) {
return (
<div className={`pin-info-wrapper ${projects.length > 1 ? 'larger' : ''}`}>
<div className="pin-info-header">
<div style={{ width: '18px' }}></div>
<div style={{ display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
<MapPinIcon fill='#444' marginBottom={0} width={15} />
<p>{title}</p>
</div>
<XIcon width={22} fill='#666' />
</div>
<div className="pin-info-inner-wrapper">
{projects?.map((e, index) => (
<>
{projects.length > 1 && <div className="projects-counter">{index + 1}. Projekat:</div>}
<div className="pin-info-body">
<div className="pin-info-row">
<div className="pin-info-desc">
<p>MRI:</p>
</div>
<div className="pin-info-label">
<MriIcon fill='#444' width={22} />
<p>{e.mri}</p>
</div>
</div>
<div className="pin-info-row">
<div className="pin-info-desc">
<p>Ustanova:</p>
</div>
<div className="pin-info-label">
<HospitalIcon fill='#444' width={22} />
<p>{e.facility}</p>
</div>
</div>
</div>
</>
))}
</div>
</div>
);
}
else {
return null;
}
}
const Marker = ({ title, setCurState, lat, lng, projects }) => {
const handleClick = () => {
setCurState({
lat: lat,
lng: lng,
title: title,
active: true,
projects: projects,
})
}
return (
<div onClick={handleClick} className="marker-wrapper">
<MapPinNewIcon width={18} />
</div>
);
};
const [center, setCenter] = useState([46.506, 24.169949]);
const [windowWidth, setWindowWidth] = useState(window.innerWidth);
const [curState, setCurState] = useState({
lat: 37.506,
lng: 20.169949,
title: '',
active: false,
projects: null,
});
useEffect(() => {
setTimeout(() => {
if (windowWidth < 1100) {
setCurState({
lat: 45.506,
lng: 20.169949,
title: '',
active: false,
projects: null,
})
}
}, 500)
}, []);
useEffect(() => {
setCenter([curState.lat, curState.lng])
}, [curState])
let infoBox = <InfoWindow
lat={curState.lat}
lng={curState.lng}
title={curState.title}
active={curState.active}
projects={curState.projects}
/>
return (
<div className="map">
<GoogleMapReact
onClick={() => setCurState({ ...curState, active: false })}
bootstrapURLKeys={{
key: '<KEY>',
language: "en",
region: "US"
}}
defaultCenter={[46.506, 24.169949]}
center={center}
defaultZoom={3.8}
distanceToMouse={distanceToMouse}
>
{places.map(({ lat, lng, title, projects }) => {
return (
<Marker
close={close}
lat={lat}
lng={lng}
title={title}
setCurState={setCurState}
projects={projects}
/>
);
})}
{infoBox}
</GoogleMapReact>
</div>
);
}
export default SimpleMap;
|
<filename>internal/operator/networking/kinds/orb/labels.go
package orb
import "github.com/caos/orbos/pkg/labels"
// mustDatabaseOperator builds the operator labels for this operator,
// falling back to the string "unknown" when no binary version is supplied.
// NOTE(review): the name says "database" but the API group passed is
// "networking.caos.ch" and the package lives under networking — looks like a
// copy-paste from the database operator; confirm before renaming (callers
// outside this view use the current name).
func mustDatabaseOperator(binaryVersion *string) *labels.Operator {
	// Default used when the caller cannot report a concrete binary version.
	version := "unknown"
	if binaryVersion != nil {
		version = *binaryVersion
	}
	return labels.MustForOperator("ORBOS", "networking.caos.ch", version)
}
|
#!/bin/bash
# Stops the demo container for the Domino demo.
# NOTE(review): `docker stop` exits non-zero when the container does not
# exist or is already stopped; confirm callers tolerate a failing exit
# status before adding `|| true`.
docker stop demo-docker-domino
|
<reponame>zhouxiang93123/openair-cn<gh_stars>1-10
/*
* Copyright (c) 2015, EURECOM (www.eurecom.fr)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project.
*/
#include <stdio.h>
#include "buffers.h"
#include "ui_main_screen.h"
#include "ui_interface.h"
#ifndef TYPES_H_
#define TYPES_H_
/* Activate to display the type at the beginning of the line (debug option) */
#define ENABLE_DISPLAY_TYPE 0
/* Activate to display the parse information before processing each item (debug option) */
#define ENABLE_DISPLAY_PARSE_INFO 0
/* Activate to show braces, in increase the number of displayed lines (formating option)*/
#define ENABLE_DISPLAY_BRACE 1
#if (ENABLE_DISPLAY_TYPE != 0)
# define DISPLAY_TYPE(tYPE) ui_set_signal_text_cb(user_data, tYPE" ", strlen(tYPE) + 1);
#else
# define DISPLAY_TYPE(tYPE)
#endif
#define DISPLAY_TAB_SIZE (2)
#if (ENABLE_DISPLAY_PARSE_INFO != 0)
/* Emits a C-comment-style debug line (type, name, offsets) to the UI before
 * an item is processed.
 * NOTE(review): sprintf writes into a fixed 200-byte buffer — a very long
 * nAME would overflow it; consider snprintf if this debug path is enabled. */
# define DISPLAY_PARSE_INFO(tYPE, nAME, oFFSET, pARENToFFSET)            \
{                                                                        \
    char buf[200];                                                       \
    sprintf(buf, "/* %s \"%s\" %d %d */\n", tYPE, nAME, oFFSET, pARENToFFSET); \
    ui_set_signal_text_cb(user_data, buf, strlen(buf));                  \
}
#else
# define DISPLAY_PARSE_INFO(tYPE, nAME, oFFSET, pARENToFFSET)
#endif
#if (ENABLE_DISPLAY_BRACE != 0)
# define DISPLAY_BRACE(cODE) if (ui_main_data.display_brace) {cODE}
#else
# define DISPLAY_BRACE(cODE)
#endif
/* Tag identifying the kind of a parsed type description
 * (stored in the `type` field of struct types_s). */
enum type_e {
  TYPE_ENUMERATION,       /* an enum definition */
  TYPE_ENUMERATION_VALUE, /* a single enumerator value within an enum */
  TYPE_STRUCT,            /* a struct definition */
  TYPE_UNION,             /* a union definition */
  TYPE_FUNDAMENTAL,       /* a built-in fundamental type */
  TYPE_TYPEDEF,           /* a typedef alias */
  TYPE_ARRAY,             /* an array type (bounds in min/max) */
  TYPE_REFERENCE,         /* a reference to another type */
  TYPE_FIELD,             /* a member field of a struct/union */
  TYPE_FUNCTION,          /* a function type */
  TYPE_ARGUMENT,          /* a function argument */
  TYPE_POINTER,           /* a pointer type */
  TYPE_FILE,              /* a source file entry */
};
/* Forward declarations */
struct types_s;
typedef int (*type_hr_display_t)(struct types_s *type, int indent);
typedef int (*type_ui_display_t)(struct types_s *type, int indent);
typedef int (*type_file_print_t)(struct types_s *type, int indent, FILE *file);
/**
* type_dissect_from_buffer_t
* @param type The current type
* @param ui_set_signal_text_cb GUI display function
* @param user_data Transparent data to pass to the GUI display function
* @param buffer The buffer containing data to dissect
* @param offset offset of field from the beginning of the parent
* @param parent_offset offset of the parent from begining
**/
typedef int (*type_dissect_from_buffer_t)(
struct types_s *type, ui_set_signal_text_cb_t ui_set_signal_text_cb, gpointer user_data,
buffer_t *buffer, uint32_t offset, uint32_t parent_offset, int indent, gboolean new_line);
/* Node of the parsed type tree. Nodes are linked both as a sibling list
 * (previous/next) and as a parent/child hierarchy, with `head` pointing
 * back to the list head. */
typedef struct types_s {
  /* The type of the current description */
  enum type_e type;
  /* Printable name for the current type */
  char *name;
  int type_xml;   /* NOTE(review): presumably the type id read from the XML input — confirm */
  int size;       /* size of the type (unit not evident from this header — TODO confirm bits vs bytes) */
  int align;      /* alignment requirement */
  int bits;       /* bit width, e.g. for bit-fields — TODO confirm */
  /* Used only for arrays */
  int min;
  int max;
  int context;    /* NOTE(review): semantics not evident from this header */
  /* Init value for enumerations */
  int init_value;
  int incomplete; /* non-zero when the definition is incomplete — TODO confirm */
  /* Id of the type as defined in XML file */
  int id;
  int artificial; /* NOTE(review): presumably compiler-generated flag from the XML — confirm */
  char *mangled;   /* mangled symbol name */
  char *demangled; /* demangled symbol name */
  /* List of members in constructed types */
  char *members;
  /* The file containing the definition */
  char *file;
  /* Line number of the current definition */
  int line;
  /* offset of the field in the parent type
   * -1 means no parent
   */
  int offset;
  struct types_s *previous; /* previous sibling in the list */
  struct types_s *next;     /* next sibling in the list */
  struct types_s *parent;   /* enclosing type, if any */
  struct types_s *child;    /* first contained type, if any */
  struct types_s *file_ref; /* node describing the defining file */
  /* Reference to the head */
  struct types_s *head;
  /* For structures or union */
  int nb_members;
  struct types_s **members_child; /* array of nb_members member nodes */
  /* Some procedures to display the type on terminal */
  type_hr_display_t type_hr_display;
  /* Some procedures to display the type on UI */
  type_ui_display_t type_ui_display;
  /* Procedure to display the type to a file */
  type_file_print_t type_file_print;
  /* Dissect the type */
  type_dissect_from_buffer_t type_dissect_from_buffer;
} types_t;
types_t *type_new(enum type_e type);
int types_insert_tail(types_t **head, types_t *to_insert);
void types_hr_display(types_t *head);
#define INDENTED(fILE, x, y) \
do { \
int indentation = x; \
while(indentation--) fprintf(fILE, " "); \
y; \
} while(0)
#define INDENTED_STRING(sTR, x, y) \
do { \
int indentation = x; \
while(indentation--) ui_set_signal_text_cb(user_data, " ", 1); \
y; \
} while(0)
#endif /* TYPES_H_ */
|
package seedu.address.model.person;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code LastVisit}: equality semantics, datetime-format
 * validation, and display formatting.
 */
public class LastVisitTest {

    @Test
    public void equals() {
        LastVisit lastVisit = new LastVisit("2021-11-11 12:00");

        // same object -> returns true
        assertTrue(lastVisit.equals(lastVisit));

        // same values -> returns true
        LastVisit visitCopy = new LastVisit(lastVisit.value);
        assertTrue(lastVisit.equals(visitCopy));

        // different types -> returns false
        assertFalse(lastVisit.equals(1));

        // null -> returns false
        assertFalse(lastVisit.equals(null));

        // different visit -> returns false
        LastVisit differentVisit = new LastVisit("2021-10-01 12:00");
        assertFalse(lastVisit.equals(differentVisit));
    }

    @Test
    public void isValidLastVisit() {
        // correct time regex -> returns true
        assertTrue(LastVisit.isValidLastVisit("2021-01-02 12:00"));
        // correct time regex for 24hr HH -> returns true
        assertTrue(LastVisit.isValidLastVisit("2021-01-02 23:59"));

        // incorrect date format -> returns false
        assertFalse(LastVisit.isValidLastVisit("20210304 12:34"));
        // incorrect time format -> returns false
        assertFalse(LastVisit.isValidLastVisit("2021-04-05 0654"));
        // incorrect datetime format -> returns false
        assertFalse(LastVisit.isValidLastVisit("2021-04-0515:43"));
        // incorrect regex no time -> returns false
        assertFalse(LastVisit.isValidLastVisit("2021-01-02"));
        // incorrect regex no date -> returns false
        assertFalse(LastVisit.isValidLastVisit("12:01"));

        // incorrect year range regex -> return false
        // (fix: this exact assertion was previously duplicated on two
        // consecutive lines; the redundant copy has been removed)
        assertFalse(LastVisit.isValidLastVisit("20211-01-02 12:00"));
        // incorrect month range regex -> return false
        assertFalse(LastVisit.isValidLastVisit("2021-13-02 12:00"));
        // incorrect day range regex -> return false
        assertFalse(LastVisit.isValidLastVisit("2021-01-32 12:00"));
        // incorrect hour range regex -> return false
        assertFalse(LastVisit.isValidLastVisit("2021-01-02 25:00"));
        // incorrect minute range regex -> return false
        assertFalse(LastVisit.isValidLastVisit("2021-01-02 12:99"));
    }

    @Test
    public void getFormatted() {
        // format displayed date
        String expectedDate = "01 Feb 2021 23:59";
        assertEquals(expectedDate, new LastVisit("2021-02-01 23:59").getFormatted());

        // format displayed date for empty visit
        assertEquals("", new LastVisit("").getFormatted());
    }
}
|
<filename>src/db/migrations/20210315033256-create-userverification.js
'use strict';
module.exports = {
up: async (queryInterface, Sequelize) => {
/**
* Add altering commands here.
*
* Example:
* await queryInterface.createTable('users', { id: Sequelize.INTEGER });
*/
return queryInterface.sequelize.transaction(t => {
return Promise.all([
queryInterface.createTable('UserVerifications', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER,
},
userAccountId: {
allowNull: false,
primaryKey: true,
type: Sequelize.INTEGER,
references: {
model: 'UserAccounts',
key: 'id',
},
},
sendTime: {
type: Sequelize.DATE,
},
purpose: {
type: Sequelize.INTEGER,
},
type: {
type: Sequelize.CHAR(10),
},
code: {
type: Sequelize.STRING,
},
time: {
type: Sequelize.DATE,
},
errorCount: {
type: Sequelize.INTEGER,
},
hasVerified: {
type: Sequelize.CHAR(10),
},
createdUser: {
type: Sequelize.INTEGER,
},
createdAt: {
allowNull: false,
type: Sequelize.DATE,
},
updatedUser: {
type: Sequelize.INTEGER,
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE,
},
}),
]);
});
},
down: async (queryInterface, Sequelize) => {
try {
await queryInterface.dropTable('UserVerifications');
} catch (error) {
throw error;
}
},
};
|
<filename>flinkx-core/src/main/java/com/dtstack/flinkx/conf/BaseFileConf.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtstack.flinkx.conf;
import com.dtstack.flinkx.constants.ConstantValue;
import com.dtstack.flinkx.sink.WriteMode;
import java.nio.charset.StandardCharsets;
/**
* Date: 2021/06/08 Company: www.dtstack.com
*
* @author tudou
*/
public class BaseFileConf extends FlinkxCommonConf {

    // Directory the files are written to / read from.
    private String path;

    // Base name of the produced file.
    private String fileName;

    // Write mode; defaults to append.
    private String writeMode = WriteMode.APPEND.name();

    // Compression codec name; null means uncompressed.
    private String compress;

    // Character encoding, UTF-8 by default.
    private String encoding = StandardCharsets.UTF_8.name();

    // Maximum size of a single file before rolling over (ConstantValue.STORE_SIZE_G).
    private long maxFileSize = ConstantValue.STORE_SIZE_G;

    // Number of rows to write between file-size checks.
    private long nextCheckRows = 5000;

    public String getPath() {
        return this.path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public String getFileName() {
        return this.fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getWriteMode() {
        return this.writeMode;
    }

    public void setWriteMode(String writeMode) {
        this.writeMode = writeMode;
    }

    public String getCompress() {
        return this.compress;
    }

    public void setCompress(String compress) {
        this.compress = compress;
    }

    public String getEncoding() {
        return this.encoding;
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    public long getMaxFileSize() {
        return this.maxFileSize;
    }

    public void setMaxFileSize(long maxFileSize) {
        this.maxFileSize = maxFileSize;
    }

    public long getNextCheckRows() {
        return this.nextCheckRows;
    }

    public void setNextCheckRows(long nextCheckRows) {
        this.nextCheckRows = nextCheckRows;
    }

    @Override
    public String toString() {
        // Built with StringBuilder; output is byte-identical to the previous
        // concatenation-based implementation.
        StringBuilder sb = new StringBuilder("BaseFileConf{");
        sb.append("path='").append(path).append('\'');
        sb.append(", fileName='").append(fileName).append('\'');
        sb.append(", writeMode='").append(writeMode).append('\'');
        sb.append(", compress='").append(compress).append('\'');
        sb.append(", encoding='").append(encoding).append('\'');
        sb.append(", maxFileSize=").append(maxFileSize);
        sb.append(", nextCheckRows=").append(nextCheckRows);
        sb.append('}');
        return sb.toString();
    }
}
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build-phase script (preamble).
# NOTE(review): the shebang is /bin/sh but the script uses bash-only features
# (`set -o pipefail`, the `function` keyword, `trap ... ERR`, `[[ ]]`,
# arrays). It works when Xcode's /bin/sh is bash in POSIX mode — confirm
# before running under a strict POSIX shell.
set -e
set -u
set -o pipefail

# Report the script path and failing line number on any error.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: prefer the built-products dir, then its
  # basename there, then the path as given.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked source so rsync copies the real framework.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the embedded binary inside the copied framework bundle.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 when stripping ran.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Same rsync filter set as the framework copy; see install_framework.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is expanded and signing is both required and allowed.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # When parallel signing is enabled, run codesign in the background; the
    # trailing top-level `wait` collects these jobs before the phase ends.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Sets STRIP_BINARY_RETVAL: 1 when stripping was performed (or nothing needed
# stripping), 0 when the binary shares no architecture with the build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the vendored MCPlayer framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/MCPlayer/MCPlayer.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/MCPlayer/MCPlayer.framework"
fi
# With parallel code signing the codesign invocations run in the background;
# wait for all of them to finish before the script phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<reponame>shin-kinoshita/dbflute-core
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.logic.jdbc.metadata.basic;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import org.dbflute.exception.DfJDBCException;
import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.logic.jdbc.metadata.info.DfColumnMeta;
import org.dbflute.logic.jdbc.metadata.info.DfTableMeta;
import org.dbflute.util.Srl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The handler of auto increment.
* @author jflute
*/
public class DfAutoIncrementExtractor extends DfAbstractMetaDataBasicExtractor {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    private static final Logger _log = LoggerFactory.getLogger(DfAutoIncrementExtractor.class);

    // ===================================================================================
    //                                                                Column Determination
    //                                                                ====================
    /**
     * Is the column auto-increment?
     * @param conn database connection for meta data. (NotNull)
     * @param tableInfo The meta information of table from which to retrieve PK information. (NotNull)
     * @param primaryKeyColumnInfo The meta information of primary-key column. (NotNull)
     * @return The determination, true or false.
     */
    public boolean isAutoIncrementColumn(Connection conn, DfTableMeta tableInfo, DfColumnMeta primaryKeyColumnInfo) throws SQLException {
        // database-dependent meta data (e.g. Sybase) can answer without a query
        if (analyzeByDatabaseDependencyMeta(tableInfo, primaryKeyColumnInfo)) {
            return true;
        }
        final String primaryKeyColumnName = primaryKeyColumnInfo.getColumnName();
        return isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName);
    }

    /**
     * Is the column auto-increment?
     * @param conn database connection for meta data. (NotNull)
     * @param tableInfo The meta information of table from which to retrieve PK information. (NotNull)
     * @param primaryKeyColumnName The name of primary-key column. (NotNull)
     * @return The determination, true or false.
     */
    public boolean isAutoIncrementColumn(Connection conn, DfTableMeta tableInfo, String primaryKeyColumnName) throws SQLException {
        return analyzeByResultSetMeta(conn, tableInfo, primaryKeyColumnName);
    }

    // ===================================================================================
    //                                                       Analyze by DatabaseDependency
    //                                                       =============================
    /**
     * Determines auto-increment from database-dependent meta data when possible
     * (currently only Sybase exposes this directly).
     * @return true when the dependent meta data proves auto-increment.
     */
    protected boolean analyzeByDatabaseDependencyMeta(DfTableMeta tableInfo, DfColumnMeta primaryKeyColumnInfo) {
        if (isDatabaseSybase()) {
            return primaryKeyColumnInfo.isSybaseAutoIncrement();
        } else {
            return false;
        }
    }

    // ===================================================================================
    //                                                                Analyze by ResultSet
    //                                                                ====================
    /**
     * Determines auto-increment by issuing an always-empty query ("where 0 = 1")
     * and inspecting the ResultSetMetaData of the PK column.
     */
    protected boolean analyzeByResultSetMeta(Connection conn, DfTableMeta tableInfo, String primaryKeyColumnName) throws SQLException {
        final String tableSqlName = tableInfo.getTableSqlName();
        final String sql = buildMetaDataSql(primaryKeyColumnName, tableSqlName);
        return executeAutoIncrementQuery(conn, tableInfo, primaryKeyColumnName, tableSqlName, sql);
    }

    /**
     * Executes the meta-data query, retrying with a double-quoted table name on
     * PostgreSQL (where e.g. upper-case table names need quoting).
     * @throws DfJDBCException When the query fails even after the retry.
     */
    protected boolean executeAutoIncrementQuery(Connection conn, DfTableMeta tableInfo, String primaryKeyColumnName, String tableSqlName,
            String sql) throws DfJDBCException {
        try {
            return doExecuteAutoIncrementQuery(conn, tableInfo, primaryKeyColumnName, sql);
        } catch (SQLException e) {
            if (isDatabasePostgreSQL()) { // the table name needs quote e.g. upper case
                final String retrySql = buildMetaDataSql(primaryKeyColumnName, Srl.quoteDouble(tableSqlName));
                try {
                    return doExecuteAutoIncrementQuery(conn, tableInfo, primaryKeyColumnName, retrySql);
                } catch (SQLException continued) {
                    _log.info("Failed to retry auto-increment query: sql=" + retrySql + ", msg=" + continued.getMessage());
                }
            }
            throwAutoIncrementDeterminationFailureException(tableInfo, primaryKeyColumnName, sql, e);
            return false; // unreachable
        }
    }

    /** Builds an always-empty select of only the PK column, used for meta data. */
    protected String buildMetaDataSql(String pkName, String tableName) {
        return "select " + quoteColumnNameIfNeedsDirectUse(pkName) + " from " + tableName + " where 0 = 1";
    }

    /** Quotes the column name when littleAdjustmentMap.dfprop requires it (e.g. reserved words). */
    protected String quoteColumnNameIfNeedsDirectUse(String pkName) {
        return getProperties().getLittleAdjustmentProperties().quoteColumnNameIfNeedsDirectUse(pkName);
    }

    /**
     * Runs the meta-data query and reads isAutoIncrement() for the PK column.
     * Fix: the previous manual finally block closed the Statement BEFORE the
     * ResultSet (closing a Statement invalidates its ResultSet); resources are
     * now managed by try-with-resources, which closes them in correct reverse
     * order even when an exception occurs.
     */
    protected boolean doExecuteAutoIncrementQuery(Connection conn, DfTableMeta tableInfo, String primaryKeyColumnName, String sql)
            throws SQLException {
        try (Statement st = conn.createStatement(); ResultSet rs = st.executeQuery(sql)) {
            final ResultSetMetaData md = rs.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                final String currentColumnName = md.getColumnName(i);
                if (primaryKeyColumnName.equals(currentColumnName)) {
                    return md.isAutoIncrement(i);
                }
            }
            throwPrimaryKeyColumnNotFoundException(primaryKeyColumnName, tableInfo);
            return false; // unreachable
        }
    }

    /** Thrown when the PK column does not appear in the result meta data (should not happen). */
    protected void throwPrimaryKeyColumnNotFoundException(String primaryKeyColumnName, DfTableMeta tableMeta) {
        String msg = "The primaryKeyColumnName is not found in the table: " + tableMeta.getTableDbName() + "." + primaryKeyColumnName;
        throw new IllegalStateException(msg); // unreachable
    }

    /** Builds and throws a rich exception message describing the failed meta-data query. */
    protected void throwAutoIncrementDeterminationFailureException(DfTableMeta tableInfo, String primaryKeyColumnName, String sql,
            SQLException e) throws DfJDBCException {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Failed to execute the SQL for getting auto-increment");
        br.addItem("Advice");
        br.addElement("DBFlute executes the SQL to get auto-increment meta data.");
        br.addElement("The table might not exist on your schema. Or the schema");
        br.addElement("to be set at 'dfprop' might be mistake in the first place.");
        br.addElement("");
        br.addElement("And other points can be causes");
        br.addElement(" e.g. reservation word, authentication, ...");
        br.addElement("If your primary key of the table is reservation word in the DBMS,");
        br.addElement("set quatation settings 'quoteColumnNameList' of littleAdjustmentMap.dfprop.");
        br.addElement("");
        br.addElement("So check your settings and environments.");
        br.addItem("Table");
        br.addElement(tableInfo.getTableFullQualifiedName());
        br.addItem("PrimaryKey");
        br.addElement(primaryKeyColumnName);
        br.addItem("SQL for getting");
        br.addElement(sql);
        final String msg = br.buildExceptionMessage();
        throw new DfJDBCException(msg, e);
    }
}
|
#!/bin/sh
#PBS -N 1x060
#PBS -q regular
#PBS -l mppwidth=60
#PBS -l mppnppn=4
#PBS -l mppdepth=1
#PBS -l walltime=04:00:00
#PBS -A m106
#PBS -j eo
# Batch job: fits a supernova spectrum with synapps on 60 MPI ranks (4 per node).
# The run prefix names the cache, log, and generated YAML config.
prefix=1x060
cd $PBS_O_WORKDIR
# Start clean: drop any stale cache and log from a previous submission.
rm -rf $prefix.cache
log=$prefix.log
rm -f $log
touch $log
# Regenerate the synapps input config from scratch on every run.
yaml=$prefix.yaml
rm -f $yaml
# NOTE: the here-doc below IS the synapps input; it is overwritten each run,
# so edits must be made here, not in the generated $yaml file.
cat << EOF > $yaml
---
grid :
 bin_width : 0.30
 v_size : 100
 v_outer_max : 30.00
opacity :
 line_dir : /project/projectdirs/snfactry/rthomas/local/share/es/lines
 ref_file : /project/projectdirs/snfactry/rthomas/local/share/es/refs.dat
 form : exp
 v_ref : 10.00
 log_tau_min : -2.00
source :
 mu_size : 10
spectrum :
 p_size : 60
 flatten : No
evaluator :
 target_file : ../../data/SNF20080514-002_M000075_restframe.dat
 vector_norm : 2.00
 regions :
 apply : [ Yes, Yes, Yes ]
 weight : [ 0.00, 1.00, 1.00 ]
 lower : [ 0, 3200, 7400 ]
 upper : [ 10000, 6400, 8800 ]
config :
 fit_file : SNF20080514-002_M000075_restframe.fit
 cache_file : SNF20080514-002_M000075_restframe.cache
 a0 : { fixed: No, start: 1.00, lower: 0.00, upper: 10.00, scale: 1.00 }
 a1 : { fixed: No, start: -1.04, lower: -10.00, upper: 10.00, scale: 1.00 }
 a2 : { fixed: No, start: 1.07, lower: -10.00, upper: 10.00, scale: 1.00 }
 v_phot : { fixed: No, start: 11.50, lower: 5.00, upper: 15.00, scale: 1.00 }
 v_outer : { fixed: Yes, start: 30.00, lower: 15.00, upper: 30.00, scale: 1.00 }
 t_phot : { fixed: No, start: 8.90, lower: 5.00, upper: 10.00, scale: 1.00 }
 ions : [ 601, 800, 1100, 1201, 1401, 1402, 1601, 2001, 2601, 2602, 2701, 2801 ]
 active : [ Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes ]
 detach : [ No, No, No, No, No, No, No, No, No, No, No, No ]
 log_tau :
 fixed : [ No, No, No, No, No, No, No, No, No, No, No, No ]
 start : [ 0.73, 0.94, -0.57, 0.45, 2.38, 0.98, 2.00, 4.00, -2.96, 1.88, 0.34, 0.06 ]
 lower : [ -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00, -3.00 ]
 upper : [ 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00 ]
 scale : [ 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00 ]
 v_min :
 fixed : [ No, No, No, No, No, No, No, No, No, No, No, No ]
 start : [ 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50, 11.50 ]
 lower : [ 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00 ]
 upper : [ 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00 ]
 scale : [ 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00 ]
 v_max :
 fixed : [ Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes, Yes ]
 start : [ 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00 ]
 lower : [ 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00, 15.00 ]
 upper : [ 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00, 30.00 ]
 scale : [ 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00 ]
 aux :
 fixed : [ No, No, No, No, No, No, No, No, No, No, No, No ]
 start : [ 0.37, 1.91, 6.86, 1.51, 0.91, 0.80, 0.55, 0.63, 0.34, 0.53, 1.08, 0.76 ]
 lower : [ 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10, 0.10 ]
 upper : [ 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00, 10.00 ]
 scale : [ 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00 ]
 temp :
 fixed : [ No, No, No, No, No, No, No, No, No, No, No, No ]
 start : [ 12.50, 12.90, 10.00, 22.40, 7.10, 5.00, 5.00, 11.30, 10.00, 13.20, 10.20, 13.70 ]
 lower : [ 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00, 5.00 ]
 upper : [ 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00, 25.00 ]
 scale : [ 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00, 1.00 ]
EOF
# One OpenMP thread per rank; all parallelism comes from MPI (aprun -n 60).
export OMP_NUM_THREADS=1
aprun -n 60 -N 4 -d 1 synapps $yaml | tee -a $log
|
/// Get the cursor position based on the current platform.
#[cfg(unix)]
pub fn get_cursor_position() -> (u16, u16) {
    // The terminal can only be queried while raw mode is active; otherwise
    // (or when the query itself fails) fall back to the origin.
    match unsafe { RAW_MODE_ENABLED } {
        true => pos_raw().unwrap_or((0, 0)),
        false => (0, 0),
    }
}
|
<reponame>virta-jasonmay/tilt<gh_stars>1-10
package logstore
import (
"fmt"
"strings"
"github.com/windmilleng/tilt/pkg/model"
)
// SourcePrefix renders the fixed-width "  name │ " gutter placed before log
// lines. The Tiltfile (and an empty name) get no prefix; longer names are
// truncated with an ellipsis so the gutter stays 13 characters wide.
func SourcePrefix(n model.ManifestName) string {
	if n == "" || n == model.TiltfileManifestName {
		return ""
	}
	const width = 13
	name, pad := n, ""
	if len(name) > width {
		// NOTE(review): byte-based truncation; a multi-byte rune at the cut
		// point would be split — confirm manifest names are ASCII.
		name = name[:width-1] + "…"
	} else {
		pad = strings.Repeat(" ", width-len(name))
	}
	return fmt.Sprintf("%s%s │ ", pad, name)
}
|
#!/usr/bin/env bash
# Smoke-test the PageRank provider endpoint: ask for the top 10 ranks seeded
# from node 611, capped at 20 iterations; trailing `echo` adds a newline.
curl -X POST -H "Content-Type: application/json" -d '{"k": 10, "state": {"nodeIds": [611]}, "maxIterations": 20}' localhost:8000/provider/pagerank/; echo
|
package libsys;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.io.File;
/**
* Test Settings related operations
*/
public class SettingsTest extends TestCase
{
    // Fixture shared by the sub-tests invoked (in order) from testApp().
    private Settings settings;

    /**
     * Test Settings related operations
     * @param testName name of the test case
     */
    public SettingsTest(String testName)
    {
        super(testName);
    }

    /**
     * @return suite of tests being tested
     */
    public static Test suite()
    {
        return new TestSuite(SettingsTest.class);
    }

    /**
     * Run all tests in a fixed order; later steps reuse the settings field
     * populated by constructorTest().
     */
    public void testApp()
    {
        constructorTest();
        setUsersFilenameTest();
        setBooksFilenameTest();
    }

    /**
     * Test the constructor: saving against a directory path must not create a
     * regular file, and a missing file must be created with default entries.
     */
    private void constructorTest()
    {
        settings = new Settings(".");
        settings.setTitle("Failed to save");
        File file = new File(".");
        // FIX: the original used the `assert` keyword (`assert false` /
        // `assert true`), which is a silent no-op unless the JVM runs with
        // -ea — the check could never fail. JUnit's fail() always reports.
        if (file.exists() && !file.isDirectory())
        {
            fail("Saving to a directory path must not create a regular file");
        }
        String filename = "noSuchFile.txt";
        file = new File(filename);
        if (file.exists())
        {
            file.delete();
        }
        settings = new Settings(filename);
        assertEquals("Default size is 3", settings.size(), 3);
        assertEquals("Default title", settings.get(settings.getKey(0)), "Welcome to the library");
        assertEquals("Default users filename", settings.get(settings.getKey(1)), "users.json");
        assertEquals("Default books filename", settings.get(settings.getKey(2)), "books.json");
        // Setting the title persists the file, and a reload sees the new value.
        settings.setTitle("noSuchFile");
        assertTrue("noSuchFile.txt is created", file.exists());
        settings = new Settings(filename);
        assertEquals("Default title", settings.get(settings.getKey(0)), "noSuchFile");
    }

    /**
     * Test the setUsersFilename function
     */
    private void setUsersFilenameTest()
    {
        String newUsersFilename = "newUsersFilename";
        settings.setUsersFilename(newUsersFilename);
        assertEquals("Set settings usersFilename to \"newUsersFilename\"", settings.get(settings.getKey(1)), newUsersFilename);
    }

    /**
     * Test the setBooksFilename function
     */
    private void setBooksFilenameTest()
    {
        String newBooksFilename = "newBooksFilename";
        settings.setBooksFilename(newBooksFilename);
        assertEquals("Set settings booksFilename to \"newBooksFilename\"", settings.get(settings.getKey(2)), newBooksFilename);
    }

    /**
     * Delete test files (best effort; a leftover file only logs a warning).
     * @throws Exception Exception
     */
    public void tearDown() throws Exception
    {
        super.tearDown();
        File file = new File("noSuchFile.txt");
        if (!file.delete())
            System.out.println("noSuchFile.txt was not deleted");
    }
}
|
/**
 * Minimal generic value holder: stores a single value of type T with
 * explicit accessor methods and a null-ing reset.
 */
export class CtrlBase<T> {
  value: T;

  /** Replace the stored value. */
  setValue(v: T): void {
    this.value = v;
  }

  /** Read back the stored value. */
  getValue(): T {
    return this.value;
  }

  /** Reset the stored value to null. */
  clearValue(): void {
    this.value = null;
  }
}
|
import React from 'react';
import { CalendarMonth } from '@patternfly/react-core';
// Minimal CalendarMonth example: renders the current month with today selected.
export const CalendarMonthDefault: React.FunctionComponent = () => <CalendarMonth date={new Date()} />;
|
<filename>func-futurestream/src/test/java/cyclops/async/reactive/futurestream/react/lazy/LazySeqLazyTest.java<gh_stars>0
package cyclops.async.reactive.futurestream.react.lazy;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.async.reactive.futurestream.FutureStream;
import cyclops.async.reactive.futurestream.LazyReact;
import cyclops.async.reactive.futurestream.react.base.BaseSeqLazyTest;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
import org.junit.Test;
public class LazySeqLazyTest extends BaseSeqLazyTest {

    /**
     * Zipping streams of different lengths truncates to the shorter stream;
     * each pair's sides must come from the respective source collections.
     */
    @Test
    public void testZipDifferingLength() {
        List<Tuple2<Integer, String>> zipped = of(1, 2).zip(of("a", "b", "c", "d"))
                                                       .foldLazy(s -> s.toList())
                                                       .get();

        assertEquals(2, zipped.size());
        assertTrue(asList(1, 2).contains(zipped.get(0)._1()));
        assertTrue("" + zipped.get(1)._2(), asList(1, 2).contains(zipped.get(1)._1()));
        assertTrue(asList("a", "b", "c", "d").contains(zipped.get(0)._2()));
        assertTrue(asList("a", "b", "c", "d").contains(zipped.get(1)._2()));
    }

    /** Sequential stream (isolated builder) over the given values. */
    @Override
    protected <U> FutureStream<U> of(U... array) {
        return LazyReact.sequentialBuilder().of(array);
    }

    /** Sequential stream on the common executor. */
    @Override
    protected <U> FutureStream<U> ofThread(U... array) {
        return LazyReact.sequentialCommonBuilder().of(array);
    }

    /** Stream built from deferred suppliers on the common executor. */
    @Override
    protected <U> FutureStream<U> react(Supplier<U>... array) {
        return LazyReact.sequentialCommonBuilder().react(Arrays.asList(array));
    }
}
|
// @flow
import { Button, ButtonProps } from 'reactstrap';
import { useFormContext } from 'react-hook-form';
import { LOCALES_NAMESPACE, useTranslation } from '@server/i18n';
import { FunctionComponent } from 'react';
/**
 * Submit button wired to react-hook-form: disabled until the form is dirty,
 * and shows a localized "processing" label while a submit is in flight.
 */
export const HButton: FunctionComponent<ButtonProps> = (props: ButtonProps): any => {
  // FIX: pull saveTitle out of the spread so it is not forwarded to the
  // underlying Button/DOM element (React warns on unknown host attributes).
  const { saveTitle, disabled, children, ...buttonProps } = props;
  const { t } = useTranslation(LOCALES_NAMESPACE.COMMON);
  const { formState } = useFormContext();
  const { isDirty, isSubmitting } = formState;
  return (
    <Button id="btnHButton" color="success" {...buttonProps} disabled={!isDirty || isSubmitting || disabled}>
      {isSubmitting && `${t('PROCESSING')}...`}
      {!isSubmitting && (children || saveTitle)}
    </Button>
  );
};
// Fallback label when neither children nor an explicit saveTitle is given.
// NOTE(review): defaultProps on function components is deprecated in newer
// React versions — consider a default parameter value instead (confirm React version).
HButton.defaultProps = {
  saveTitle: 'Save',
};
|
// Builds a human-readable summary like "3 unique domains" from a list of URLs.
func countUniqueDomains(in imageURLs: [String]) -> String {
    // Set de-duplicates; URLs that fail to parse contribute nothing.
    let domains = Set(imageURLs.compactMap { extractDomain(from: $0) })
    let noun = domains.count == 1 ? "domain" : "domains"
    return "\(domains.count) unique \(noun)"
}
// Parses the host component out of a URL string; nil when the string is not
// a parseable URL or has no host (e.g. a relative path).
func extractDomain(from url: String) -> String? {
    return URL(string: url)?.host
}
|
#include <bits/stdc++.h>
using namespace std;
class Solution
{
public:
    // Returns the permutation of s in which the character s[i] is moved to
    // position indices[i]. Assumes indices is a permutation of [0, s.size()).
    std::string restoreString(std::string s, std::vector<int> &indices)
    {
        // FIX: the original declared an empty string and wrote through
        // operator[], which is undefined behavior (no storage allocated).
        // Pre-size the result to s.size() before scattering characters.
        std::string ans(s.size(), '\0');
        for (std::size_t i = 0; i < indices.size(); ++i)
        {
            ans[indices[i]] = s[i];
        }
        return ans;
    }
};
|
<reponame>talenguyen/Counter
package vn.tale.counter.ui.settime;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import java.util.ArrayList;
import java.util.List;
import vn.tale.counter.R;
import vn.tale.counter.ui.component.radio.RadioGroupController;
import vn.tale.counter.ui.component.radio.RadioItem;
import vn.tale.counter.ui.component.radio.TextRadioItem;
import vn.tale.counter.util.SimpleCountable;
import vn.tale.counter.util.TimeBuilder;
import vn.tale.counter.widget.numberkeyboardlayout.NumberKeyboardLayout;
import vn.tale.counterapi.Countable;
public class SetTimeActivity extends AppCompatActivity {
  private static final String TAG = "SetTimeActivity";

  // Exclusive selection between the minutes and seconds fields.
  private RadioGroupController radioGroupController;
  // Accumulates every confirmed duration; never read in this class —
  // presumably consumed elsewhere (TODO confirm).
  private List<Countable> countableList = new ArrayList<>();
  @BindView(R.id.tvMinutes) TextView tvMinutes;
  @BindView(R.id.tvSeconds) TextView tvSeconds;
  @BindView(R.id.vNumberKeyboardLayout) NumberKeyboardLayout vNumberKeyboardLayout;

  @Override protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // NOTE(review): inflates activity_main rather than a set-time-specific
    // layout — confirm the resource name is intentional.
    setContentView(R.layout.activity_main);
    ButterKnife.bind(this);
    setupRadioGroupController();
    // Start with the seconds field selected and the OK action installed.
    onTapSecondView();
  }

  // Converts the entered minutes/seconds to a total-seconds Countable and
  // stores it. Integer.parseInt throws NumberFormatException on empty or
  // non-numeric text — assumes the keyboard always leaves a digit (TODO confirm).
  private void addTime(String minutes, String seconds) {
    Log.d(TAG,
        "addTime() called with: " + "minutes = [" + minutes + "], seconds = [" + seconds + "]");
    final int mins = Integer.parseInt(minutes);
    final int secs = Integer.parseInt(seconds);
    final int resultSeconds = new TimeBuilder().addMinutes(mins).addSeconds(secs).getSeconds();
    final Countable countable = new SimpleCountable(resultSeconds);
    countableList.add(countable);
  }

  // Registers both labels as a two-item radio group; selecting one routes
  // number-keyboard input into that TextView.
  private void setupRadioGroupController() {
    radioGroupController = new RadioGroupController();
    radioGroupController.addItem(new TextRadioItem(tvMinutes));
    radioGroupController.addItem(new TextRadioItem(tvSeconds));
    radioGroupController.setOnItemSelectedListener(
        new RadioGroupController.OnItemSelectedListener() {
          @Override public void onItemSelected(RadioItem radioItem) {
            final TextView textView = ((TextRadioItem) radioItem).getTextView();
            vNumberKeyboardLayout.setTarget(textView);
          }
        });
  }

  // Minutes field tapped: the CTA becomes "next", which moves on to seconds.
  @OnClick(R.id.tvMinutes) public void onTapMinuteView() {
    radioGroupController.setSelection(0);
    vNumberKeyboardLayout.setupCta(getString(R.string.next), new View.OnClickListener() {
      @Override public void onClick(View v) {
        onTapSecondView();
      }
    });
  }

  // Seconds field tapped (or initial state): the CTA becomes "ok", which
  // commits the currently displayed minutes/seconds via addTime().
  @OnClick(R.id.tvSeconds) public void onTapSecondView() {
    radioGroupController.setSelection(1);
    vNumberKeyboardLayout.setupCta(getString(R.string.ok), new View.OnClickListener() {
      @Override public void onClick(View v) {
        final String minutes = tvMinutes.getText().toString();
        final String seconds = tvSeconds.getText().toString();
        addTime(minutes, seconds);
      }
    });
  }
}
|
// Fetches the server's /data endpoint and resolves with its parsed JSON body.
const getDataFromServer = () =>
  fetch('/data').then((response) => response.json());
// Refreshes the page: pull the latest data, then render it into the DOM.
const updateUI = () => getDataFromServer().then(updateHtml);
// Renders the title and subtitle into the #data-container element.
// NOTE(review): assigning server-provided text via innerHTML is an XSS risk
// if the payload is not trusted — confirm it is sanitized upstream.
const updateHtml = (data) => {
  const markup = `<h1>${data.title}</h1>` + `<h2>${data.subtitle}</h2>`;
  document.querySelector('#data-container').innerHTML = markup;
};
|
#!/bin/bash
# Build pipeline: ensure PyBuilder is installed, build + publish, run tox,
# then stage the built distributions into bin/ under version-independent names.
cd "$(dirname "$0")"

# Install pybuilder only when pip does not already list it.
# (grep -c replaces `grep | wc -l`; quoting the count avoids word-splitting,
# and -eq 0 no longer mis-triggers a reinstall when several lines match.)
pybuilderInstalled=$(pip freeze | grep -c 'pybuilder')
if [ "$pybuilderInstalled" -eq 0 ]
then
    echo "Installing pybuilder"
    pip install pybuilder
fi

pyb install_dependencies clean publish
tox

# Collect artifacts under bin/ with stable filenames.
if [ ! -d "bin" ]; then
    mkdir 'bin'
fi
cp target/dist/dataproducts*/dist/* bin/
mv bin/dataproducts-*.tar.gz bin/dataproducts.tar.gz
mv bin/dataproducts-*.whl bin/dataproducts.whl
rm -rf target
|
#!/bin/bash
# First-boot wrapper for mongod: start without auth, create the application
# user (and optionally replay a dump) from background jobs, then restart the
# server with --auth enabled.
if test -z "$COFFEE_SHOP_PASSWORD"; then
  echo "COFFEE_SHOP_PASSWORD not defined"
  exit 1
fi
auth="-u user -p $COFFEE_SHOP_PASSWORD"
# MONGODB USER CREATION
# Background job: retries until the (not-yet-started) mongod below accepts the
# command, creates 'onlyu' if missing, then kills mongod so this script falls
# through to the authenticated restart.
(
echo "setup mongodb auth"
create_user="if (!db.getUser('onlyu')) { db.createUser({ user: 'onlyu', pwd: '$COFFEE_SHOP_PASSWORD', roles: [ {role:'readWrite', db:'coffee-shop'} ]}) }"
until mongo coffee-shop --eval "$create_user" || mongo coffee-shop $auth --eval "$create_user"; do sleep 5; done
killall mongod
sleep 1
killall -9 mongod
) &
# INIT DUMP EXECUTION
# Background job: replays an optional dump script once the server answers.
(
if test -n "$INIT_DUMP"; then
  echo "execute dump file"
  until mongo coffee-shop $auth $INIT_DUMP; do sleep 5; done
fi
) &
echo "start mongodb without auth"
chown -R mongodb /data/db
# First (blocking) start without auth; exits when the user-creation job kills it.
gosu mongodb mongod --bind_ip_all "$@"
echo "restarting with auth on"
sleep 5
# Replace this shell with the authenticated server process.
exec gosu mongodb mongod --bind_ip_all --auth "$@"
|
// Vue app for reviewing and merging duplicate contact records.
let app = new Vue({
    el: "#app",
    mixins:[
        MergeUrls,
    ],
    data: {
        ids:[], // ids of the duplicate records currently selected for fusion
        name: '', // display name of the duplicate group being worked on
        loading: {
            all: true, // true while the duplicate models are being loaded
            modal: true, // controls when to show the loading animation on the modal form
            fusion:false, // true while a fusion request is in flight
            result:false,
        },
        modelFilter:{
            projectId:'',
            countryCode:'',
            organizationId:'',
        },
        modalState:'select', // stores the state of the modal view [select, resolve, fusion, finish]
        modelsNames:[], // duplicate groups, one entry per contact name
        models:[], // full records of the currently opened duplicate group
        modelSelected:null, // id of the record chosen to survive the fusion
        modelCurrent:null, // the group entry the modal was opened from
        modelsResolve:{}, // per-field candidate values the user must pick from
        modelLabels:{}, // field label texts, fetched from the server
        modelEmpty:{}, // empty model template used for saving
        modelMerge:{}, // the merged field values to submit
        list_organizations:{}, // stores the organizations listData ( id => name )
        list_projects:{}, // stores the projects listData ( id => name )
        list_types:{}, // stores the types data_list ( id => name ) values
        list_countries:{}, // stores the countries data_list ( id => name ) values
        list_education:{}, // stores the education types data_list ( id => name ) values
        noShowAttributes:[
            'id',
            'organizationName',
            'created',
            'modified',
            'errors',
        ],
        noShowFields:[
            'id',
            'country',
            'organization_id',
            'education_id',
            'type_id',
            'created',
            'modified',
            'errors',
        ],
        fusionResult:null,
        fusionFlags:{
            result:false
        },
        errorFlags:{
            fusion:false,
            finish:false,
        },
        errorMessage: {
            fusion:null,
            finish:null,
        },
    },
    methods: {
load: function () {
let self = this;
self.loading.all = true;
self.loadBaseUrl(self.$el);
/* var modelFilter is global */
if(typeof modelFilter !== 'undefined')
self.modelFilter = modelFilter;
/* var modelFilter is global */
if(typeof gModels !== 'undefined')
self.modelsNames = gModels;
// ------------------------------------------------------------------------------ Getting label information
$.get(self.getUrlModelLabels(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.modelLabels = data;
})
.fail(() => {
alertify.error("Problema al cargar las etiquetas");
console.log("Error al cargar la información de los etiquetas");
});
// ------------------------------------------------------------------------------ Getting Empty Model
$.get(self.getUrlModelEmpty(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.modelEmpty = data;
})
.fail(() => {
alertify.error("Problema al cargar los datos de guardado");
console.log("Error al cargar la información del modelo vacío");
});
// ------------------------------------------------------------------------------ Getting Organization List
$.get(self.getUrlOrganizations(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.list_organizations = data;
})
.fail(() => {
alertify.error("Problema al cargar los datos de organizaciones");
console.log("Error al cargar la información de las organizaciones");
});
// ------------------------------------------------------------------------------ Getting Countries List
$.get(self.getUrlCountries(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.list_countries = data;
})
.fail(() => {
alertify.error("Problema al cargar los datos de los países");
console.log("Problema al cargar la información de los países");
});
// ------------------------------------------------------------------------------ Getting Projects List
$.get(self.getUrlProjects(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.list_projects = data;
})
.fail(() => {
alertify.error("Problema al cargar los datos de los proyectos");
console.log("Problema al cargar los datos de los proyectos");
});
// ------------------------------------------------------------------------------ Getting Types List
$.get(self.getUrlTypes(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.list_types = data;
})
.fail(() => {
alertify.error("Problema al cargar los datos de los tipos de beneficiarios");
console.log("Problema al cargar los datos de los tipos de beneficiarios");
});
// ------------------------------------------------------------------------------ Getting Types List
$.get(self.getUrlEducation(), (data, textStatus, jqXHR) => {
if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
self.list_education= data;
})
.fail(() => {
alertify.error("Problema al cargar el catalogo de tipo de educacion");
console.log("Problema al cargar el catalogo de tipo de educacion");
});
},
        // Removes every entry of modelsNames whose contact_id is in `ids`
        // (called after a successful fusion so merged duplicates disappear).
        removingFromDuplicateListImported:function(ids){
            let self = this;
            let indexes = [];
            // Collect matching entries; `* 1` coerces contact_id to a number
            // before the lookup since ids may arrive as numbers.
            let models = self.modelsNames.filter(function (model, index) {
                if (ids.indexOf(model.contact_id * 1) !== -1){
                    indexes.push(index);
                    return true;
                }
                return false;
            });
            console.log([models, ids, indexes, self.modelsNames]);
            // Re-query indexOf per element: earlier removals shift positions,
            // so the `indexes` captured above would go stale.
            $.each(models, function (index, model) {
                let i = self.modelsNames.indexOf(model);
                console.log(self.modelsNames.splice(i, 1));
            });
        },
//----------------------------------------------------------------------------------------- MODAL URL FUNCTIONS
fusionCancelar: function (modalName){
let self = this;
self.load.modal = false;
self.load.fusion = false;
switch (self.modalState){
case 'resolve': self.modalState = 'select'; break;
case 'fusion': self.modalState = 'resolve'; break;
case 'finish':
default:
$(modalName).modal('hide');
}
},
fusionExclude: function (model) {
let self = this;
self.models.splice(self.models.indexOf(model), 1);
},
        // Confirms the candidate selection and moves to the resolve stage:
        // fetches merged values plus per-field conflicts for the chosen ids.
        fusionSelect: function () {
            let self = this;
            self.loading.modal = true;
            self.modalState = 'resolve';
            // Collect the ids of the remaining (non-excluded) candidates.
            self.ids = [];
            for (let i=0; i < self.models.length; i++){
                self.ids.push(self.models[i].id);
            }
            // Fetch pre-merged values and the conflicting fields to resolve.
            $.post(self.getUrlNameValues(), { ids: self.ids}, (data, textStatus, jqXHR) => {
                if(textStatus != 'success' ) console.log([textStatus, jqXHR]);
                self.modelMerge = data.values;
                let resolve = data.resolve;
                // Default every conflicting field to its first candidate value.
                for(var attr in resolve){
                    self.modelMerge[attr] = resolve[attr][0];
                }
                self.modelsResolve = resolve;
                self.loading.modal = false;
            })
            .fail(() => {
                alertify.error("Problema al cargar los registros");
            });
        },
fusionResolve: function () {
let self = this;
self.modalState = 'fusion';
},
        // Submits the fusion to the server; on success removes the merged
        // contacts from the pending list and shows the finish stage.
        fusionStart: function () {
            let self = this;
            self.loading.modal = true;
            self.loading.fusion = true;
            // Payload: surviving record id, all merged ids, resolved values.
            let data = {
                id: self.modelSelected,
                ids: self.ids,
                values: self.modelMerge,
            };
            // NOTE: the callback's `data` parameter shadows the payload above.
            $.post(self.getUrlFusion(), data, (data, textStatus, jqXHR) => {
                if(textStatus != 'success' )
                    console.log([textStatus, jqXHR]);
                else{
                    self.removingFromDuplicateListImported(self.ids);
                }
                self.fusionResult = data.result;
                self.fusionFlags.result = false;
                self.loading.modal = false;
                self.loading.fusion = false;
                self.modalState = 'finish';
            })
            .fail(() => {
                alertify.error("Problema al fusionar los registros de contacto.");
                self.loading.modal = false;
                self.loading.fusion = false;
            });
        },
        // Clears the current group after the wizard finishes; list removal
        // itself happens in removingFromDuplicateListImported (older splice
        // approach kept commented out for reference).
        fusionFinish: function (){
            let self = this;
            if (self.modelCurrent)
            {
                // var index = self.modelsNames.indexOf(self.modelCurrent);
                // self.modelsNames.splice(index, 1);
                self.modelCurrent = null;
            }
        },
//---------------------------------------------------------------------------------------------- PREPARING DATA
showAttribute: function(field){
let self = this;
return self.noShowAttributes.indexOf(field) != -1 ? false : true;
},
showField: function(field){
let self = this;
return self.noShowFields.indexOf(field) != -1 ? false : true;
},
//---------------------------------------------------------------------------------------------- PREPARING DATA
        // Opens the fusion modal for one duplicate group: resets all wizard
        // state, then loads every record sharing the group's contact id.
        preparingFusionForm: function (model) {
            let self = this;
            // Reset all fusion state left over from a previous run.
            self.ids = [];
            self.models = [];
            self.modelMerge = {};
            self.modelsResolve = {};
            self.modelSelected = null;
            self.fusionResult = null;
            self.fusionFlags.result = false;
            self.modelCurrent = model;
            self.loading.modal = true;
            self.name = model.contact_name;
            self.modalState = 'select';
            let url = self.getUrlId(model.contact_id);
            if (!url) {
                self.loading.modal = false;
                console.log("No se logró generar la URL para obtener la información del contacto");
            }
            else{
                $.get(url, (data, textStatus, jqXHR)=>{
                    if(textStatus !== 'success' ) console.log([textStatus, jqXHR]);
                    self.models = data.models;
                })
                .fail(()=>{
                    console.log("No se logró generar la URL para obtener la información del contacto");
                })
                .always(() => {
                    self.loading.modal = false;
                });
            }
            // Returning false lets this be used directly in inline handlers.
            return false;
        }
},
    mounted: function () {
        // Kick off globals/catalog loading once the component is attached.
        this.load();
    }
});
|
<reponame>premss79/zignaly-webapp
// Re-export WinRate as this directory's default entry point.
export { default } from "./WinRate";
|
#!/bin/bash
# Spins up a disposable postgres, initializes the Galaxy DB schema via the
# galaxy-init image, dumps the resulting SQL, and reports per-stage timings.
# ($(...) replaces legacy backticks; image/variable expansions are quoted.)
INIT_IMAGE=${GALAXY_INIT_TAG:-"quay.io/bgruening/galaxy-init:dev"}

# Sets the image of postgres to use
POSTGRES=postgres:11.2

# User and password to use.
POSTGRES_USER=galaxy
POSTGRES_PASSWORD=chaopagoosaequuashie
POSTGRES_DB=galaxy

echo "Create postgres in detached mode"
pg_start=$(date +%s)
docker run -d --name "dumpsql_postgres" -e "POSTGRES_PASSWORD=$POSTGRES_PASSWORD" -e "POSTGRES_USER=$POSTGRES_USER" -e "POSTGRES_DB=$POSTGRES_DB" "$POSTGRES"

echo "Output postgres log"
docker logs -f dumpsql_postgres &

echo "Wait until postgres has initialized"
until docker run --rm --link "dumpsql_postgres:pg" "$POSTGRES" pg_isready -U postgres -h pg >/dev/null; do sleep 1; done
pg_end=$(date +%s)

init_start=$(date +%s)
echo "Create the DB with the galaxy-init container, which is linked to the postgres container."
docker run -i --rm --name "dumpsql_galaxy_installdb" \
    -e "GALAXY_CONFIG_FILE=/etc/galaxy/galaxy.yml" \
    -e "GALAXY_CONFIG_DATABASE_CONNECTION=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@db/$POSTGRES_DB?client_encoding=utf8" \
    --link "dumpsql_postgres:db" "$INIT_IMAGE" install_db.sh
init_end=$(date +%s)

dump_start=$(date +%s)
echo "Exec into the postgres container and dump the DB."
docker exec "dumpsql_postgres" pg_dump --no-tablespace --no-acl --no-owner -U galaxy galaxy > galaxy-postgres/init-galaxy-db.sql.in
dump_end=$(date +%s)

docker rm -f dumpsql_postgres

echo "Stats:"
echo "Startup postgres: $((pg_end-pg_start)) sec"
echo "install_db: $((init_end-init_start)) sec"
echo "pg_dumpall: $((dump_end-dump_start)) sec"
echo "Total: $((dump_end-pg_start)) sec"
|
import React from 'react';
import { createMount } from '@material-ui/core/test-utils';
import RemoveArtifact from './removeartifact';
// Snapshot test: mounts the dialog in its open state and pins the HTML output.
describe('RemoveArtifact Component', () => {
  it('renders correctly', () => {
    const tree = createMount()(<RemoveArtifact open={true} />).html();
    expect(tree).toMatchSnapshot();
  });
});
|
#!/bin/sh
# Container entrypoint: seed persistent config from defaults on first run,
# symlink the server's expected paths into the data volume, then exec the
# container command. All path expansions are quoted so paths containing
# spaces cannot word-split or glob.

# create file if not exist
if [ ! -f "$DATA_PATH/permissions.json" ]; then
    cp "$DEFAULT_CONFIG_PATH/permissions.json" "$DATA_PATH/permissions.json"
fi
if [ ! -f "$DATA_PATH/whitelist.json" ]; then
    cp "$DEFAULT_CONFIG_PATH/whitelist.json" "$DATA_PATH/whitelist.json"
fi
if [ ! -f "$DATA_PATH/server.properties" ]; then
    cp "$DEFAULT_CONFIG_PATH/server.properties" "$DATA_PATH/server.properties"
fi
if [ ! -d "$DATA_PATH/worlds" ]; then
    mkdir -p "$DATA_PATH/worlds"
fi

# Link persistent data into the server directory. -b backs up any existing
# target first (GNU coreutils extension).
ln -sb "$DATA_PATH/permissions.json" "$SERVER_PATH/permissions.json"
ln -sb "$DATA_PATH/whitelist.json" "$SERVER_PATH/whitelist.json"
ln -sb "$DATA_PATH/server.properties" "$SERVER_PATH/server.properties"
ln -sb "$DATA_PATH/worlds" "$SERVER_PATH/worlds"

# Hand control to the container command (e.g. the server binary).
exec "$@"
|
import {migrate} from "src/js/test/helpers/migrate"

// Verifies the 202007151457 migration: every tab viewer gains a scrollPos
// {x, y} record initialized to the origin, and the legacy flat scrollX /
// scrollY fields are removed.
test("migrating 202007151457_addScrollPosToSearchRecord", async () => {
  const next = await migrate({state: "v0.13.1", to: "202007151457"})
  const windows = Object.values(next.windows)
  for (const win of windows) {
    // @ts-ignore
    for (const tab of win.state.tabs.data) {
      expect(tab.viewer.scrollPos.x).toBe(0)
      expect(tab.viewer.scrollPos.y).toBe(0)
      expect(tab.viewer.scrollX).toBeUndefined()
      expect(tab.viewer.scrollY).toBeUndefined()
    }
  }
})
|
#!/bin/bash
#
# Copyright (c) 2018-2020 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Installs the mikefarah/yq YAML processor into /usr/local/bin, picking the
# release binary matching this machine's architecture.
set -o errexit
set -o nounset
set -o pipefail
set -o errtrace
yq_path="/usr/local/bin/yq"
yq_pkg="github.com/mikefarah/yq"
goos="linux"
# Map uname's machine name onto the Go arch used in release file names.
case "$(uname -m)" in
	aarch64) goarch="arm64";;
	ppc64le) goarch="ppc64le";;
	x86_64) goarch="amd64";;
	s390x) goarch="s390x";;
	*) echo "unsupported architecture: $(uname -m)"; exit 1;;
esac
# Pinned release version; bump deliberately.
yq_version=3.4.1
yq_url="https://${yq_pkg}/releases/download/${yq_version}/yq_${goos}_${goarch}"
# NOTE(review): no checksum/signature verification of the downloaded binary.
curl -o "${yq_path}" -LSsf "${yq_url}"
chmod +x "${yq_path}"
|
<reponame>liu-a-wei/geekbang-lessons
package org.geektimes.configuration.microprofile.config;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigValue;
import org.eclipse.microprofile.config.spi.ConfigSource;
import org.eclipse.microprofile.config.spi.Converter;
import org.geektimes.configuration.microprofile.config.converter.Converters;
import org.geektimes.configuration.microprofile.config.source.ConfigSources;
import static java.util.stream.StreamSupport.stream;
/**
 * Simple MicroProfile Config implementation backed by an ordered collection
 * of ConfigSources (first source with a value wins) and a Converters registry.
 */
class DefaultConfig implements Config {

    private final ConfigSources configSources;

    private final Converters converters;

    DefaultConfig(ConfigSources configSources, Converters converters) {
        this.configSources = configSources;
        this.converters = converters;
    }

    /**
     * Resolves the property and converts it to the requested type.
     * NOTE(review): returns null when no converter is registered; the
     * MicroProfile Config spec prescribes exceptions for missing properties
     * and missing converters — confirm this relaxation is intentional.
     */
    @Override
    public <T> T getValue(String propertyName, Class<T> propertyType) {
        String propertyValue = getPropertyValue(propertyName);
        // Convert the raw String into the requested target type.
        Converter<T> converter = doGetConverter(propertyType);
        return converter == null ? null : converter.convert(propertyValue);
    }

    // Not implemented yet: always null.
    @Override
    public ConfigValue getConfigValue(String propertyName) {
        return null;
    }

    // First non-null value wins, in the sources' iteration (priority) order.
    protected String getPropertyValue(String propertyName) {
        String propertyValue = null;
        for (ConfigSource configSource : configSources) {
            propertyValue = configSource.getValue(propertyName);
            if (propertyValue != null) {
                break;
            }
        }
        return propertyValue;
    }

    @Override
    public <T> Optional<T> getOptionalValue(String propertyName, Class<T> propertyType) {
        T value = getValue(propertyName, propertyType);
        return Optional.ofNullable(value);
    }

    // Union of property names across all sources, preserving encounter order.
    @Override
    public Iterable<String> getPropertyNames() {
        return stream(configSources.spliterator(), false)
                .map(ConfigSource::getPropertyNames)
                .collect(LinkedHashSet::new, Set::addAll, Set::addAll);
    }

    @Override
    public Iterable<ConfigSource> getConfigSources() {
        return configSources;
    }

    @Override
    public <T> Optional<Converter<T>> getConverter(Class<T> forType) {
        Converter converter = doGetConverter(forType);
        return converter == null ? Optional.empty() : Optional.of(converter);
    }

    // Highest-priority registered converter for the type, or null.
    protected <T> Converter<T> doGetConverter(Class<T> forType) {
        List<Converter> converters = this.converters.getConverters(forType);
        return converters.isEmpty() ? null : converters.get(0);
    }

    // Not supported: always null.
    @Override
    public <T> T unwrap(Class<T> type) {
        return null;
    }
}
|
<gh_stars>1-10
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.math.BigInteger;
import java.util.HashMap;
/**
*
* @author exponential-e
* 백준 10425번: 피보나치 인버스
*
* @see https://www.acmicpc.net/problem/10425/
*
*/
public class Boj10425 {
    // Precompute Fibonacci values up to 10^21000 (covers all problem inputs).
    private static final BigInteger INF = BigInteger.TEN.pow(21000);
    private static final String ONE = "1";
    private static final String NEW_LINE = "\n";

    // Maps a Fibonacci value to its 0-based index; answers are index + 1.
    private static HashMap<BigInteger, Integer> fiboMap = new HashMap<>();
    private static BigInteger[] fibo = new BigInteger[100_500];

    /**
     * Reads T queries, each a Fibonacci value, and prints its 1-based index.
     * The value 1 is ambiguous (F(1) = F(2) = 1); the answer is fixed to 2.
     */
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int T = Integer.parseInt(br.readLine());

        fibonacci();

        StringBuilder sb = new StringBuilder();
        while(T-- > 0){
            String target = br.readLine();
            if(target.equals(ONE)) sb.append(2);
            else sb.append(fiboMap.get(new BigInteger(target)) + 1);
            sb.append(NEW_LINE);
        }
        System.out.println(sb.toString());
    }

    /** Precomputes Fibonacci numbers up to INF and indexes them in fiboMap. */
    private static void fibonacci() {
        fibo[0] = BigInteger.ONE;
        fibo[1] = BigInteger.ONE;
        // One entry suffices for the value 1 (the original's duplicate put
        // of index 0 was immediately overwritten).
        fiboMap.put(BigInteger.ONE, 1);

        for(int i = 2; i < fibo.length; i++){
            fibo[i] = fibo[i - 1].add(fibo[i - 2]);
            fiboMap.put(fibo[i], i); // make map with BigInteger
            // FIX: compareTo only guarantees the sign, not the value 1;
            // test "> 0" instead of "== 1" per the Comparable contract.
            if(fibo[i].compareTo(INF) > 0) break;
        }
    }
}
|
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import MUIDataTable from "mui-datatables";
import PropTypes from "prop-types";
import React from "react";
import {withStyles} from "@material-ui/core/styles";
// JSS styles: fixed minimum table width with horizontal scrolling and
// theme-spaced side margins on the wrapper.
const styles = (theme) => ({
    table: {
        minWidth: 1020
    },
    tableWrapper: {
        overflowX: "auto",
        marginLeft: theme.spacing.unit * 3,
        marginRight: theme.spacing.unit * 3
    }
});
const DataTable = (props) => {
const {classes} = props;
const options = {
download: false,
selectableRows: false,
print: false,
filterType: "dropdown",
responsive: "scroll"
};
return (
<div className={classes.tableWrapper}>
<MUIDataTable data={props.data} columns={props.columns} options={options}/>
</div>
);
};
// Runtime prop contract: `data` is a 2-D array of row/cell values and
// `columns` the MUIDataTable column definitions; `classes` is injected by
// the withStyles wrapper in the export below.
DataTable.propTypes = {
    data: PropTypes.arrayOf(PropTypes.arrayOf(
        PropTypes.any
    )).isRequired,
    columns: PropTypes.arrayOf(PropTypes.any).isRequired,
    classes: PropTypes.object.isRequired
};

// Bind the style rules above and export the styled component.
export default withStyles(styles)(DataTable);
|
// Application entry point: runs immediately on load.
;(async () => {
    console.log('Heya!')
    // Fetched once at startup; every search filters this in-memory list.
    const users = await fetchUsers()
function start() {
const searchForm = document.querySelector('#nameSearch')
const searchInput = searchForm.querySelector('#searchInput')
const searchButton = searchForm.querySelector('#searchButton')
cleanInput()
searchForm.addEventListener('submit', _ => _.preventDefault())
searchButton.addEventListener('click', _ => search(users))
searchInput.addEventListener('keyup', ({ key, target }) => {
const handleSubmit = !!target.value && target.value.trim() !== ''
searchInput.value.length > 0 && handleSubmit
? searchButton.classList.remove('disabled')
: searchButton.classList.add('disabled')
key === 'Enter' && handleSubmit ? search(users) : null
})
}
start()
// Troca de Temas
const themeButton = document.querySelector('#themeButton')
themeButton.addEventListener('click', () => changeTheme())
async function fetchUsers() {
//const response = await fetch('https://randomuser.me/api/?seed=javascript&results=100&nat=BR&noinfo')
// const data = await response.json()
const arr = data.results
const filteredInfo = arr.map(
({
name: { first, last },
dob: { age },
gender,
picture: { large },
}) => {
return {
name: `${first} ${last}`,
gender,
age,
photo: large,
}
}
)
return filteredInfo
}
/**
* Search users based on the desired string on the searchInput
* @param {Array<{name: String, gender: String, age: Number, photo: String}>} users
*/
function search(users) {
const searchForm = document.querySelector('#nameSearch')
const searchInput = searchForm.querySelector('#searchInput')
const searchedString = searchInput.value.trim()
const filtroDiv = document.querySelector('#divFiltro')
filtroDiv.textContent = ''
let results = users.filter(({ name }) => {
let found = name.toLowerCase().match(searchedString.toLowerCase())
return found
})
results.sort((a, b) => a.name.localeCompare(b.name))
results.length === 0
? (results = [{ name: null, gender: null, age: null, photo: null }])
: results
cleanInput()
filtroDiv.textContent = `Filtro Aplicado: ${searchedString}`
render(results)
}
/**
* Render all Elements
* @param {Array<{name: String, gender: String, age: Number, photo: String}>} users
*/
function render(users) {
loading()
setTimeout(() => {
renderResults(users)
renderSummary(users)
start()
}, 3000)
}
/**
* Render the Results in the User Screen
* @param {Array<{name: String, gender: String, age: Number, photo: String}>} users
*/
function renderResults(users) {
const resultsDiv = document.querySelector('#userResults')
const title = resultsDiv.querySelector('.title')
title.textContent = 'Usuários Encontrados:'
const results = resultsDiv.querySelector('#results')
results.innerHTML = ''
users.forEach(({ name, age, photo }) => {
const resultDiv = document.createElement('div')
resultDiv.classList.add('result')
resultDiv.classList.add('col')
resultDiv.classList.add('s8')
name === null ? (name = 'Usuário Não encontrado') : name
photo === null ? (photo = './icons/search-24px.svg') : photo
const profilePic = document.createElement('img')
profilePic.src = `${photo}`
profilePic.classList.add('profile-picture')
profilePic.alt = `Foto de ${name}`
const userInfor = document.createElement('div')
age === null
? (userInfor.textContent = `${name}`)
: (userInfor.textContent = `${name}, ${age} anos`)
userInfor.classList.add('informations')
resultDiv.appendChild(profilePic)
resultDiv.appendChild(userInfor)
results.appendChild(resultDiv)
})
}
/**
* Render the summary in the User Screen
* @param {Array<{name: String, gender: String, age: Number, photo: String}>} users
*/
function renderSummary(users) {
const findLength = users.length
const sumAges = users.reduce((acc, curr) => acc + curr.age, 0)
const averageAges = sumAges / users.length
let maleCount = users.filter(({ gender }) => gender === 'male').length
let femaleCount = users.filter(({ gender }) => gender === 'female')
.length
const summaryDiv = document.querySelector('#summary')
const content = document.querySelector('#summaryContent')
const title = summaryDiv.querySelector('.title')
title.textContent = `Estatísticas: (${findLength} Usuários encontrados)`
const summaryInfo = document.createElement('div')
summaryInfo.id = 'summaryInfo'
summaryInfo.classList.add('col')
summaryInfo.classList.add('s12')
const maleCountHTML = document.createElement('li')
maleCountHTML.textContent = `Sexo Masculino: ${maleCount}`
const femaleCountHTML = document.createElement('li')
femaleCountHTML.textContent = `Sexo Feminino: ${femaleCount}`
const sumAgesHTML = document.createElement('li')
sumAgesHTML.textContent = `Soma das idades: ${sumAges}`
const averageAgesHTML = document.createElement('li')
averageAgesHTML.textContent = `Média das idades: ${averageAges.toFixed(
2
)}`
const summaryUL = document.createElement('ul')
summaryUL.appendChild(maleCountHTML)
summaryUL.appendChild(femaleCountHTML)
summaryUL.appendChild(sumAgesHTML)
summaryUL.appendChild(averageAgesHTML)
const ChildList = Array.from(content.childNodes)
ChildList.length > 0 && ChildList[0].id === 'summaryInfo'
? content.removeChild(content.childNodes.item(0))
: null
summaryInfo.appendChild(summaryUL)
content.appendChild(summaryInfo)
summaryDiv.appendChild(content)
}
function loading() {
const searchForm = document.querySelector('#nameSearch')
const search = searchForm.querySelector('#searchPosition')
const temp = search.innerHTML
const loading = `<div class="col s12 center" id="loading"><div class="preloader-wrapper big active"><div class="spinner-layer spinner-blue-only"><div class="circle-clipper left"><div class="circle"></div></div><div class="gap-patch"><div class="circle"></div></div><div class="circle-clipper right"><div class="circle"></div></div></div></div></div>`
search.innerHTML = loading
setTimeout(() => {
search.innerHTML = temp
}, 2000)
}
function cleanInput() {
const searchForm = document.querySelector('#nameSearch')
const searchInput = searchForm.querySelector('#searchInput')
const searchButton = searchForm.querySelector('#searchButton')
searchInput.value = ''
searchInput.focus()
searchButton.classList.remove('disabled')
searchButton.classList.add('disabled')
}
function changeTheme() {
const themeButton = document.querySelector('#themeButton')
const body = document.querySelector('body')
const actualTheme = body.classList.item(0)
let newTheme = ''
let themeText = ''
console.log(actualTheme)
console.log(newTheme)
actualTheme === 'light-theme'
? (newTheme = 'dark-theme')
: (newTheme = 'light-theme')
newTheme === 'dark-theme'
? (themeText = '🌚 Tema Escuro')
: (themeText = '🌞 Tema Claro')
themeButton.textContent = themeText
body.classList.remove(actualTheme)
body.classList.add(newTheme)
}
})()
|
<filename>lib/leafy/field.rb
# frozen_string_literal: true
require 'securerandom'
module Leafy
  # A single form field: an id, a display name, a type, and free-form metadata.
  class Field
    attr_accessor :id, :name, :type, :metadata

    # attributes - Hash (string or symbol keys) containing :name and :type,
    # plus optional :id and :metadata. Raises ArgumentError for non-Hash input.
    def initialize(attributes = {})
      raise ArgumentError, "attributes is not a Hash" unless attributes.is_a?(Hash)

      attrs = Leafy::Utils.symbolize_keys(attributes)
      self.name = attrs.fetch(:name)
      self.type = attrs.fetch(:type)
      self.id = attrs.fetch(:id) { default_id }
      self.metadata = attrs.fetch(:metadata, {})
    end

    # Plain-Hash representation suitable for serialization.
    def serializable_hash
      {
        name: name,
        type: type,
        id: id,
        metadata: metadata
      }
    end

    private

    # Default id: "<slugged-name>-<uuid>" (spaces become dashes).
    def default_id
      [name.downcase.strip.tr(" ", "-"), SecureRandom.uuid].join("-")
    end
  end
end
|
<reponame>amoylel/NCUI
// Created by amoylel on 30/08/2018.
// Copyright (c) 2018 amoylel All rights reserved.
#ifndef AMO_CONSOLE_SINKS_D2328EDB_0E61_46AA_95AF_A14D6917552A_H__
#define AMO_CONSOLE_SINKS_D2328EDB_0E61_46AA_95AF_A14D6917552A_H__
#include <spdlog/details/null_mutex.h>
#include <spdlog/sinks/base_sink.h>

#include <cstdio>
#include <iostream>
#include <memory>
#include <mutex>
#include <regex>
#include <set>
#include <string>
namespace spdlog {
namespace sinks {
// Regex-based allow/deny filter for formatted log lines.
// A line is dropped if it matches any disallowed rule; otherwise it passes
// when the allowed set is empty or when it matches at least one allowed rule.
// All rule mutation and matching is guarded by a recursive mutex.
class log_filter {
public:
    log_filter() {
    }
    ~log_filter() {
    }

    // Register a pattern (ECMAScript regex) that permits matching log lines.
    void add_allowed_rule(const std::string& str) {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);
        allowed_rules.insert(str);
    }

    // Register a pattern that suppresses matching log lines.
    void add_disallowed_rule(const std::string& str) {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);
        disallowed_rules.insert(str);
    }

    // Remove one allowed pattern; an empty string clears all of them.
    void remove_allowed_rule(const std::string& str) {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);
        if (str.empty()) {
            allowed_rules.clear();
        } else {
            allowed_rules.erase(str);
        }
    }

    // Remove one disallowed pattern; an empty string clears all of them.
    void remove_disallowed_rule(const std::string& str) {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);
        if (str.empty()) {
            disallowed_rules.clear();
        } else {
            disallowed_rules.erase(str);
        }
    }

    // True when any rule is installed, i.e. match() needs to run at all.
    bool need_match() const {
        return !allowed_rules.empty() || !disallowed_rules.empty();
    }

    // Decide whether a formatted log line passes the filter.
    // Takes the line by const reference (the original took std::string by
    // value and copied on every log call). Note the regexes are still
    // compiled on every call; cache them if this ever shows up in profiles.
    bool match(const std::string& str) {
        std::unique_lock<std::recursive_mutex> lock(m_mutex);
        try {
            // Any hit on a disallowed rule rejects the line immediately.
            for (auto& p : disallowed_rules) {
                if (std::regex_search(str, std::regex(p))) {
                    return false;
                }
            }
            // With no allowed rules everything passes; otherwise at least
            // one allowed rule must match.
            bool valid_log = allowed_rules.empty();
            for (auto& p : allowed_rules) {
                if (std::regex_search(str, std::regex(p))) {
                    valid_log = true;
                    break;
                }
            }
            return valid_log;
        } catch (const std::exception& e) {
            // An invalid pattern rejects the line; report and keep going.
            std::cout << e.what() << std::endl;
            return false;
        }
    }

public:
    std::recursive_mutex m_mutex;
    std::set<std::string> allowed_rules;
    std::set<std::string> disallowed_rules;
};
// spdlog sink that writes formatted messages to stdout, optionally filtered
// through an embedded log_filter.
template <class Mutex>
class console_sink : public base_sink<Mutex> {
    using MyType = console_sink<Mutex>;
public:
    console_sink() {}

    // Process-wide shared instance (function-local static init is thread-safe).
    static std::shared_ptr<MyType> instance() {
        static std::shared_ptr<MyType> instance = std::make_shared<MyType>();
        return instance;
    }

    // base_sink hook: filter, then write the formatted message to stdout.
    void _sink_it(const details::log_msg& msg) override {
        // Build the string once with an explicit length. The original passed
        // msg.formatted.data() straight to match(), relying on the buffer
        // being null-terminated; the explicit-size constructor (already used
        // for output below) is the safe form.
        std::string str(msg.formatted.data(), msg.formatted.size());
        if (m_filter.need_match() && !m_filter.match(str)) {
            return;
        }
        std::cout << str;
        flush();
    }

    void flush() override {
        std::cout.flush();
    }

    // Forwarders to the embedded filter; see log_filter for the semantics.
    void add_allowed_rule(const std::string& str) {
        m_filter.add_allowed_rule(str);
    }
    void add_disallowed_rule(const std::string& str) {
        m_filter.add_disallowed_rule(str);
    }
    void remove_allowed_rule(const std::string& str) {
        m_filter.remove_allowed_rule(str);
    }
    void remove_disallowed_rule(const std::string& str) {
        m_filter.remove_disallowed_rule(str);
    }

    log_filter m_filter;
};

// Single-threaded (null mutex) and multi-threaded convenience aliases.
typedef console_sink<details::null_mutex> console_sink_st;
typedef console_sink<std::mutex> console_sink_mt;
}
}
#endif //AMO_CONSOLE_SINKS_D2328EDB_0E61_46AA_95AF_A14D6917552A_H__
|
#!/bin/bash
# Build the OvenMit LV2 plugin for 64-bit Linux: compiles src/lv2/lv2OvenMit.cpp
# into builds/lv2/linux64/OvenMit.so, using pkg-config for the LV2 flags/libs.
g++ -fvisibility=hidden -O3 -ffast-math -fPIC -Wl,-Bstatic -Wl,-Bdynamic -Wl,--as-needed -shared -pthread `pkg-config --cflags lv2` -lm `pkg-config --libs lv2` src/lv2/lv2OvenMit.cpp -o builds/lv2/linux64/OvenMit.so
## Common usage for quick testing, (assuming the output .so is already linked on the system)
#scripts/makeLV2.sh && jalv.gtk3 https://github.com/kenakofer/oven-mit-synth
|
package surprise;
import java.util.Random;
/**
 * An {@link ISurprise} that prints one randomly chosen fortune from a fixed
 * list of Romanian sayings.
 */
public class FortuneCookie implements ISurprise {

    // Bug fix: `random` was referenced in generate() but never declared, so
    // the class did not compile. A single shared instance is sufficient here.
    private static final Random random = new Random();

    // The saying carried by this cookie.
    private String fortune;

    private static String[] zicale = {"Norocul vine când ai cel mai tare nevoie de el.",
        " Ochiul altuia vede mai exigent decât al tău. ",
        " Cum îţi vei aşterne, aşa vei dormi. ",
        " Vântul doboară copacul cu rădacina mai slabă! ",
        " Tot ce se bate de la obisnuit, deranjeaza. neobisnuitul e intotdeauna suspect pana ce lumea se familiarizeaza cu el, banalizandu-l ",
        " Indrazneala omului superior o numim cutezanta, si o pretuim. indrazneala prostului o numim tupeu, si o dispretuim. dar cand aceasta din urma e incununata de succes, orice comentariu e de prisos: ramanem fara replica ",
        " Un an dintr-o viata este o viata rezumata intr-un an. caci an dupa an repetam, o data cu aceleasi anotimpuri, aceleasi idei despre lume si viata. ",
        " Cu cat suntem mai stersi si mai timizi, cu atat ne aratam mai trufasi, cu cat suntem mai vulnerabili, cu atat suntem iritabili, pentru ca nu ne-am putut afirma, pretindem ca am ales anonimatul. ",
        " Indoiala e o moneda cu doua fete: cine o priveste trebuie sa le aiba in vedere pe amandoua. ",
        "Cei ce intreaba primesc raspunsuri.",
        "Tot ce ii doresti vecinului o sa se intoarca impotriva ta.",
        "Varsta aduce expreienta si par carunt.",
        "Pentru orice lucru de care te ocupi cu seriozitate o sa fii rasplatit.",
        "Vrei să ştii ce îţi rezervă anul viitor? Mănâncă şi nu cerceta, oricum nu îl poţi schimba!",
        " O sa fii surprins de ce ascunde in spatele unor masti.",
        "Esti un master in pornirea problemelor.",
        "Fa-ti timp pentru hobby-urile tale.",
        "O discutie calma evita neintelegerile.",
        "Ideile tale sunt nepretuite.",
        "Dragostea incepe cu ochii.",
        "Ti-ai caldit o casa,\n" +
        "Bogata, frumoasa,\n" +
        "Dar vezi ca-n ea sa fie\n" +
        "Si ceva armonie"};

    // Instances are only created through generate().
    private FortuneCookie(String zicala) {
        this.fortune = zicala;
    }

    /** Picks a random saying and wraps it in a new cookie. */
    public static FortuneCookie generate() {
        return new FortuneCookie(zicale[random.nextInt(zicale.length)]);
    }

    /** Prints the congratulation message plus this cookie's fortune. */
    @Override
    public void enjoy() {
        System.out.println("Felicitari, ai primit fortune cookie cu mesajul: " + this.fortune);
    }
}
|
package joist.domain.orm.queries;
import java.util.ArrayList;
import java.util.List;
import joist.domain.DomainObject;
import joist.domain.exceptions.NotFoundException;
import joist.domain.exceptions.TooManyException;
import joist.domain.orm.mappers.DataTransferObjectMapper;
import joist.domain.orm.mappers.DomainObjectMapper;
import joist.domain.orm.mappers.IdsMapper;
import joist.domain.orm.mappers.ValuesMapper;
import joist.domain.orm.queries.columns.AliasColumn;
import joist.domain.orm.queries.columns.IdAliasColumn;
import joist.domain.uow.UoW;
import joist.jdbc.Jdbc;
import joist.jdbc.RowMapper;
import joist.util.Copy;
import joist.util.Join;
import joist.util.StringBuilderr;
import joist.util.Wrap;
/**
 * Fluent builder for SQL SELECT queries that load domain objects (or DTOs,
 * ids, or single column values) for the entity type {@code T}.
 *
 * Created via {@link #from(Alias)}; the builder methods (select/where/having/
 * orderBy/groupBy/limit/offset/join) mutate this instance and return it.
 * Class-table inheritance is handled automatically: base-class tables are
 * INNER JOINed in and subclass tables LEFT OUTER JOINed in, with a synthetic
 * "_clazz" CASE column so the row mapper can pick the concrete subclass.
 */
public class Select<T extends DomainObject> {

  /** Starts a new SELECT rooted at {@code alias}'s table. */
  public static <T extends DomainObject> Select<T> from(Alias<T> alias) {
    return new Select<T>(alias);
  }

  private final Alias<T> from;
  private final List<JoinClause<?, ?>> joins = new ArrayList<JoinClause<?, ?>>();
  private final List<SelectItem> selectItems = new ArrayList<SelectItem>();
  private Where where = null;
  private Where having = null;
  private Order[] orderBy = null;
  private AliasColumn<?, ?, ?>[] groupBy = null;
  private Integer limit;
  private Integer offset;

  // Wires up the inheritance joins and the default select list (all columns).
  private Select(Alias<T> alias) {
    this.from = alias;
    this.addInnerJoinsForBaseClasses();
    for (AliasColumn<T, ?, ?> c : alias.getColumns()) {
      this.selectItems.add(new SelectItem(c));
    }
    this.addOuterJoinsForSubClasses();
  }

  /** Adds an explicit join, plus any joins needed for the joined alias's base classes. */
  public Select<T> join(JoinClause<?, ?> join) {
    return this.join(join, true);
  }

  /** Replaces the default select list with the given items. */
  public Select<T> select(SelectItem... selectItems) {
    this.selectItems.clear();
    this.selectItems.addAll(Copy.list(selectItems));
    return this;
  }

  public Select<T> where(Where where) {
    this.where = where;
    return this;
  }

  public Select<T> having(Where where) {
    this.having = where;
    return this;
  }

  public Select<T> orderBy(Order... columns) {
    this.orderBy = columns;
    return this;
  }

  public Select<T> limit(Integer limit) {
    this.limit = limit;
    return this;
  }

  public Select<T> offset(Integer offset) {
    this.offset = offset;
    return this;
  }

  /** Executes the query and maps each row to a {@code T}. */
  public List<T> list() {
    return this.list(this.from.getDomainClass());
  }

  /** Wraps this query for page-at-a-time iteration. */
  public PagedList<T> paged() {
    return new PagedList<T>(this);
  }

  /** Executes and expects exactly one row; see {@link #unique(Class)}. */
  public T unique() {
    return this.unique(this.from.getDomainClass());
  }

  /** Executes and expects zero or one rows; see {@link #uniqueOrNull(Class)}. */
  public T uniqueOrNull() {
    return this.uniqueOrNull(this.from.getDomainClass());
  }

  /**
   * Executes the query, mapping rows either to domain objects (when
   * {@code rowType} is assignable from the entity type) or to DTOs via
   * the select items.
   */
  public <R> List<R> list(Class<R> rowType) {
    final List<R> results = new ArrayList<R>();
    RowMapper mapper = null;
    if (this.isLoadingDomainObjects(rowType)) {
      mapper = new DomainObjectMapper<T>(this.from, (List<T>) results);
    } else {
      mapper = new DataTransferObjectMapper<T, R>(this.selectItems, rowType, results);
    }
    Jdbc.query(UoW.getConnection(), this.toSql(), this.getParameters(), mapper);
    return results;
  }

  /** Executes a single-column query and returns the raw column values. */
  public <R> List<R> listValues(Class<R> valueType) {
    final List<R> results = new ArrayList<R>();
    if (this.selectItems.size() != 1) {
      throw new IllegalStateException("listValues expects to only query a single column");
    }
    Jdbc.query(//
      UoW.getConnection(),
      this.toSql(),
      this.getParameters(),
      new ValuesMapper<R>(this.selectItems.get(0), results));
    return results;
  }

  /** Like {@link #uniqueOrNull(Class)} but throws NotFoundException for zero rows. */
  public <R> R unique(Class<R> rowType) {
    R result = this.uniqueOrNull(rowType);
    if (result == null) {
      throw new NotFoundException(rowType);
    }
    return result;
  }

  /** Returns the single matching row, null for none, or throws TooManyException. */
  public <R> R uniqueOrNull(Class<R> rowType) {
    List<R> results = this.list(rowType);
    if (results.size() == 0) {
      return null;
    } else if (results.size() > 1) {
      throw new TooManyException(rowType, results);
    }
    return results.get(0);
  }

  /** Single-column variant of {@link #uniqueOrNull(Class)}. */
  public <R> R uniqueValueOrNull(Class<R> rowType) {
    List<R> results = this.listValues(rowType);
    if (results.size() == 0) {
      return null;
    } else if (results.size() > 1) {
      throw new TooManyException(rowType, results);
    }
    return results.get(0);
  }

  /**
   * Returns just the matching ids, ordered by id.
   * Note: this mutates the query's select list and order by.
   */
  public List<Long> listIds() {
    List<Long> ids = new ArrayList<Long>();
    this.select(this.from.getIdColumn().as("id"));
    this.orderBy(this.from.getIdColumn().asc()); // determinism
    Jdbc.query(UoW.getConnection(), this.toSql(), this.getParameters(), new IdsMapper<T>(this.from, ids));
    return ids;
  }

  /** Runs a COUNT(DISTINCT id) version of this query without disturbing it. */
  public long count() {
    // Make a copy countQuery so we can discard any select items and order bys and not mess up this query
    Select<T> countQuery = Select.from(this.from);
    // The fresh copy already has the automatic inheritance joins, so only copy
    // the joins that were added after them.
    countQuery.joins.addAll(this.joins.subList(countQuery.joins.size(), this.joins.size()));
    countQuery.where = this.where;
    countQuery.offset = this.offset;
    countQuery.limit = this.limit;
    countQuery.select(new SelectItem("count(distinct " + this.from.getIdColumn().getQualifiedName() + ")", "count"));
    return countQuery.unique(Count.class).count;
  }

  /** DTO for {@link #count()}'s single-column result. */
  public static class Count {
    public Long count;
  }

  public Where getWhere() {
    return this.where;
  }

  public Order[] getOrderBy() {
    return this.orderBy;
  }

  public void groupBy(AliasColumn<?, ?, ?>... columns) {
    this.groupBy = columns;
  }

  public SelectCountBuilder count = new SelectCountBuilder();

  @Deprecated
  /**
   * Use joist.domain.orm.queries.columns.Aggregate.count() instead
   */
  public class SelectCountBuilder {
    public SelectItem as(String name) {
      return new SelectItem("count(*)", name);
    }

    public Where lessThan(int number) {
      return new Where("count(*) < ?", number);
    }

    public Where greatherThan(int number) {
      return new Where("count(*) > ?", number);
    }
  }

  /** Renders the SQL text for this query. */
  public String toSql() {
    StringBuilderr s = new StringBuilderr();
    // since Select is only ever used to return entities (or ids/DTOS
    // of entities), we don't want a join to return multiple rows of
    // the parent entity for each child entity in the join.
    if (this.joins.size() > 0) {
      s.line("SELECT DISTINCT {}", Join.commaSpace(this.selectItems));
    } else {
      s.line("SELECT {}", Join.commaSpace(this.selectItems));
    }
    s.line(" FROM {} {}", Wrap.quotes(this.from.getTableName()), this.from.getName());
    for (JoinClause<?, ?> join : this.joins) {
      s.line(" {}", join);
    }
    if (this.getWhere() != null) {
      s.line(" WHERE {}", this.getWhere());
    }
    if (this.groupBy != null) {
      s.line(" GROUP BY {}", Join.comma(this.groupBy));
    }
    if (this.having != null) {
      s.line(" HAVING {}", this.having);
    }
    if (this.getOrderBy() != null) {
      s.line(" ORDER BY {}", Join.commaSpace(this.getOrderBy()));
    }
    if (this.limit != null) {
      s.line(" LIMIT {}", this.limit);
    }
    if (this.offset != null) {
      s.line(" OFFSET {}", this.offset);
    }
    return s.stripTrailingNewLine().toString();
  }

  /** Bind parameters, in placeholder order: WHERE parameters then HAVING parameters. */
  public List<Object> getParameters() {
    List<Object> params = new ArrayList<Object>();
    if (this.getWhere() != null) {
      params.addAll(this.getWhere().getParameters());
    }
    if (this.having != null) {
      params.addAll(this.having.getParameters());
    }
    return params;
  }

  // Adds the join and, when requested, walks the joined alias's base-class
  // chain adding the joins those tables require.
  @SuppressWarnings("rawtypes")
  private Select<T> join(JoinClause<?, ?> join, boolean addBaseAliases) {
    this.joins.add(join);
    if (join.getAlias() != null && addBaseAliases) {
      Alias<?> base = join.getAlias().getBaseClassAlias();
      while (base != null) {
        IdAliasColumn<?> id = base.getSubClassIdColumn() == null ? base.getIdColumn() : base.getSubClassIdColumn();
        this.join(new JoinClause(join.getType(), base, join.getAlias().getSubClassIdColumn(), id), false);
        base = base.getBaseClassAlias();
      }
    }
    return this;
  }

  // LEFT OUTER JOIN each subclass table and emit a CASE "_clazz" discriminator
  // column telling the mapper which subclass (if any) each row belongs to.
  @SuppressWarnings("rawtypes")
  private void addOuterJoinsForSubClasses() {
    int i = 0;
    List<String> subClassCases = new ArrayList<String>();
    for (Alias<?> sub : this.from.getSubClassAliases()) {
      this.join(new JoinClause("LEFT OUTER JOIN", sub, this.from.getIdColumn(), sub.getSubClassIdColumn()), false);
      for (AliasColumn<?, ?, ?> c : sub.getColumns()) {
        this.selectItems.add(new SelectItem(c));
      }
      subClassCases.add(0, "WHEN " + sub.getSubClassIdColumn().getQualifiedName() + " IS NOT NULL THEN " + (i++));
    }
    if (i > 0) {
      this.selectItems.add(new SelectItem("CASE " + Join.space(subClassCases) + " ELSE -1 END", "_clazz"));
    }
  }

  // INNER JOIN each base-class table; their columns are prepended so they come
  // first in the select list.
  @SuppressWarnings("rawtypes")
  private void addInnerJoinsForBaseClasses() {
    Alias<?> base = this.from.getBaseClassAlias();
    while (base != null) {
      List<SelectItem> selectItems = new ArrayList<SelectItem>();
      IdAliasColumn<?> id = base.getSubClassIdColumn() == null ? base.getIdColumn() : base.getSubClassIdColumn();
      this.join(new JoinClause("INNER JOIN", base, this.from.getSubClassIdColumn(), id), false);
      for (AliasColumn<?, ?, ?> c : base.getColumns()) {
        selectItems.add(new SelectItem(c));
      }
      base = base.getBaseClassAlias();
      this.selectItems.addAll(0, selectItems);
    }
  }

  private boolean isLoadingDomainObjects(Class<?> type) {
    return this.from.getDomainClass().isAssignableFrom(type);
  }
}
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sdb.test;
import org.apache.jena.sdb.SDBFactory ;
import org.apache.jena.sdb.Store ;
import org.apache.jena.sdb.StoreDesc ;
import org.apache.jena.sdb.layout2.hash.* ;
import org.apache.jena.sdb.layout2.index.* ;
import org.apache.jena.sdb.sql.JDBC ;
import org.apache.jena.sdb.sql.SDBConnection ;
import org.apache.jena.sdb.store.DatabaseType ;
import org.apache.jena.sdb.store.LayoutType ;
/**
* A cheap (but not cheerful) class to give access to empty models,
* sharing stores in the background.
*/
public class StoreCreator {

  // One lazily created, cached store per database/layout combination.
  // Every accessor follows the same pattern: on first use create the store
  // and its schema; on later uses truncate (empty) the cached store.
  private static StoreTriplesNodesHashPGSQL sdbpgh;
  private static StoreTriplesNodesIndexPGSQL sdbpgi;
  private static StoreTriplesNodesHashMySQL sdbmsh;
  private static StoreTriplesNodesIndexMySQL sdbmsi;
  private static StoreTriplesNodesIndexSQLServer sdbssi;
  private static StoreTriplesNodesHashSQLServer sdbssh;
  private static StoreTriplesNodesIndexHSQL sdbhsi;
  private static StoreTriplesNodesHashHSQL sdbhsh;
  private static StoreTriplesNodesHashDerby sdbdh;
  private static StoreTriplesNodesIndexDerby sdbdi;
  private static StoreTriplesNodesHashOracle sdboh;
  private static StoreTriplesNodesIndexOracle sdboi;
  private static StoreTriplesNodesIndexDB2 sdbdb2i;
  private static StoreTriplesNodesHashDB2 sdbdb2h;
  private static StoreTriplesNodesHashSAP sdbsaph;
  private static StoreTriplesNodesIndexSAP sdbsapi;

  public static Store getIndexMySQL() {
    if (sdbmsi == null) {
      JDBC.loadDriverMySQL();
      SDBConnection sdb = SDBFactory.createConnection(
        // "sdb_test", "jena", "swara"
        "jdbc:mysql://localhost/test2-index", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.MySQL) ;
      sdbmsi = new StoreTriplesNodesIndexMySQL(sdb, desc);
      sdbmsi.getTableFormatter().create();
    }
    else
      sdbmsi.getTableFormatter().truncate();
    return sdbmsi;
  }

  public static Store getHashMySQL() {
    if (sdbmsh == null) {
      JDBC.loadDriverMySQL();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:mysql://localhost/test2-hash", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.MySQL) ;
      sdbmsh = new StoreTriplesNodesHashMySQL(sdb, desc);
      sdbmsh.getTableFormatter().create();
    }
    else
      sdbmsh.getTableFormatter().truncate();
    return sdbmsh;
  }

  public static Store getIndexHSQL() {
    if (sdbhsi == null) {
      JDBC.loadDriverHSQL();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:hsqldb:mem:aname", "sa", "");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.HSQLDB) ;
      sdbhsi = new StoreTriplesNodesIndexHSQL(sdb, desc);
      sdbhsi.getTableFormatter().create();
    }
    else
      sdbhsi.getTableFormatter().truncate();
    return sdbhsi;
  }

  public static Store getHashHSQL() {
    if (sdbhsh == null) {
      JDBC.loadDriverHSQL();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:hsqldb:mem:bname", "sa", "");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.HSQLDB) ;
      sdbhsh = new StoreTriplesNodesHashHSQL(sdb, desc);
      sdbhsh.getTableFormatter().create();
    }
    else
      sdbhsh.getTableFormatter().truncate();
    return sdbhsh;
  }

  public static Store getIndexPgSQL() {
    if (sdbpgi == null) {
      JDBC.loadDriverPGSQL();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:postgresql://localhost/test2-index", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.PostgreSQL) ;
      sdbpgi = new StoreTriplesNodesIndexPGSQL(sdb, desc);
      sdbpgi.getTableFormatter().create() ;
    }
    else
      sdbpgi.getTableFormatter().truncate();
    return sdbpgi;
  }

  public static Store getHashPgSQL() {
    if (sdbpgh == null) {
      JDBC.loadDriverPGSQL();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:postgresql://localhost/test2-hash", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.PostgreSQL) ;
      sdbpgh = new StoreTriplesNodesHashPGSQL(sdb, desc);
      sdbpgh.getTableFormatter().create();
    }
    else
      sdbpgh.getTableFormatter().truncate();
    return sdbpgh;
  }

  // MS SQL express : jdbc:sqlserver://localhost\\SQLEXPRESS;databaseName=sdbtest"
  // user / password
  // MS SQL server: jdbc:sqlserver://localhost;databaseName=SWEB
  // "jena" / "@ld1s1774"
  private static final String MSSQL_url = "jdbc:sqlserver://localhost\\SQLEXPRESS;databaseName=" ;
  private static final String MSSQL_user = "user" ;
  private static final String MSSQL_password = "password" ;

  public static Store getIndexSQLServer() {
    if (sdbssi == null) {
      JDBC.loadDriverSQLServer();
      SDBConnection sdb = SDBFactory.createConnection(MSSQL_url+"test2-index", MSSQL_user, MSSQL_password) ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.SQLServer) ;
      sdbssi = new StoreTriplesNodesIndexSQLServer(sdb, desc);
      sdbssi.getTableFormatter().create();
    }
    else
      sdbssi.getTableFormatter().truncate();
    return sdbssi;
  }

  public static Store getHashSQLServer() {
    if (sdbssh == null) {
      JDBC.loadDriverSQLServer();
      SDBConnection sdb = SDBFactory.createConnection(MSSQL_url+"test2-hash", MSSQL_user, MSSQL_password) ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.SQLServer) ;
      sdbssh = new StoreTriplesNodesHashSQLServer(sdb, desc);
      sdbssh.getTableFormatter().create();
    }
    else
      sdbssh.getTableFormatter().truncate();
    return sdbssh;
  }

  public static Store getHashDerby() {
    if (sdbdh == null) {
      JDBC.loadDriverDerby() ;
      String url = JDBC.makeURL("derby", "localhost", "DB/test2-hash") ;
      SDBConnection sdb = new SDBConnection(url, null, null) ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.Derby) ;
      sdbdh = new StoreTriplesNodesHashDerby(sdb, desc);
      sdbdh.getTableFormatter().create();
    }
    else
      sdbdh.getTableFormatter().truncate();
    return sdbdh;
  }

  public static Store getIndexDerby() {
    if (sdbdi == null) {
      JDBC.loadDriverDerby() ;
      String url = JDBC.makeURL("derby", "localhost", "DB/test2-index") ;
      SDBConnection sdb = new SDBConnection(url, null, null) ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.Derby) ;
      sdbdi = new StoreTriplesNodesIndexDerby(sdb, desc);
      sdbdi.getTableFormatter().create();
    }
    else
      sdbdi.getTableFormatter().truncate();
    return sdbdi;
  }

  public static Store getHashOracle() {
    if (sdboh == null) {
      JDBC.loadDriverOracle() ;
      // "jena", "swara"
      String url = JDBC.makeURL("oracle:thin", "localhost:1521", "XE") ;
      SDBConnection sdb = new SDBConnection(url, "test2-hash", "test2-hash") ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.Oracle) ;
      sdboh = new StoreTriplesNodesHashOracle(sdb, desc);
      sdboh.getTableFormatter().create();
    }
    else
      sdboh.getTableFormatter().truncate();
    return sdboh;
  }

  public static Store getIndexOracle() {
    if (sdboi == null) {
      JDBC.loadDriverOracle() ;
      String url = JDBC.makeURL("oracle:thin", "localhost:1521", "XE") ;
      SDBConnection sdb = new SDBConnection(url, "test2-index", "test2-index") ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.Oracle) ;
      sdboi = new StoreTriplesNodesIndexOracle(sdb, desc);
      sdboi.getTableFormatter().create();
    }
    else
      sdboi.getTableFormatter().truncate();
    return sdboi;
  }

  public static Store getHashDB2() {
    if (sdbdb2h == null) {
      JDBC.loadDriverDB2() ;
      String url = JDBC.makeURL("db2", "sweb-sdb-4:50000", "TEST2H") ;
      SDBConnection sdb = new SDBConnection(url, "user", "password") ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.DB2) ;
      sdbdb2h = new StoreTriplesNodesHashDB2(sdb, desc);
      sdbdb2h.getTableFormatter().create();
    }
    else
      sdbdb2h.getTableFormatter().truncate();
    return sdbdb2h;
  }

  public static Store getIndexDB2() {
    if (sdbdb2i == null) {
      JDBC.loadDriverDB2() ;
      String url = JDBC.makeURL("db2", "sweb-sdb-4:50000", "TEST2I") ;
      SDBConnection sdb = new SDBConnection(url, "user", "password") ;
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.DB2) ;
      sdbdb2i = new StoreTriplesNodesIndexDB2(sdb, desc);
      sdbdb2i.getTableFormatter().create();
    }
    else
      // Bug fix: the `else` was missing here, so a freshly created store was
      // immediately truncated as well — unlike every other accessor, which
      // only truncates a previously cached store.
      sdbdb2i.getTableFormatter().truncate();
    return sdbdb2i;
  }

  public static Store getIndexSAP() {
    if (sdbsapi == null) {
      JDBC.loadDriverSAP();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:sap://localhost/test2-index", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesIndex, DatabaseType.SAP) ;
      sdbsapi = new StoreTriplesNodesIndexSAP(sdb, desc);
      sdbsapi.getTableFormatter().create() ;
    }
    else
      sdbsapi.getTableFormatter().truncate();
    return sdbsapi;
  }

  public static Store getHashSAP() {
    if (sdbsaph == null) {
      JDBC.loadDriverSAP();
      SDBConnection sdb = SDBFactory.createConnection(
        "jdbc:sap://localhost/test2-hash", "user", "password");
      StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.SAP) ;
      sdbsaph = new StoreTriplesNodesHashSAP(sdb, desc);
      sdbsaph.getTableFormatter().create();
    }
    else
      sdbsaph.getTableFormatter().truncate();
    return sdbsaph;
  }
}
|
package com.lnquy.kafka.connect.source;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import java.util.Map;
/**
 * Connector configuration wrapper. Currently exposes a single dummy setting,
 * {@code source_config.some_conf}.
 */
public class MySourceConfig extends AbstractConfig {

    private static final String SOME_CONF = "source_config.some_conf";
    private static final String SOME_CONF_DOC = "A dummy config which do nothing";
    private static final String SOME_CONF_DEFAULT = "Something";

    public MySourceConfig(ConfigDef config, Map<String, String> parsedConfig) {
        super(config, parsedConfig);
    }

    public MySourceConfig(Map<String, String> parsedConfig) {
        this(conf(), parsedConfig);
    }

    /** The ConfigDef describing every setting this connector understands. */
    public static ConfigDef conf() {
        ConfigDef definition = new ConfigDef();
        definition.define(
            SOME_CONF,
            ConfigDef.Type.STRING,
            SOME_CONF_DEFAULT,
            ConfigDef.Importance.HIGH,
            SOME_CONF_DOC);
        return definition;
    }

    /** Current value of the dummy setting. */
    public String getSomeConf() {
        return this.getString(SOME_CONF);
    }
}
|
// NOTE(review): this block had been machine-translated into Spanish
// (keywords, identifiers and selectors mangled) and was not valid
// JavaScript.  Reconstructed to the conventional original form —
// confirm the exact method/selector names against the project source.
App.addChild('admin', {

  el: '.admin',

  events: {
    'click .project-admin-menu': 'toggleAdminMenu',
  },

  // Toggles the admin dropdown next to the clicked menu link, closing
  // any other open dropdown first.
  toggleAdminMenu: function (event) {
    var link = $(event.target);
    this.$dropdown = link.parent().next('nav');
    $('.w-open').not(this.$dropdown).removeClass('w-open');
    this.$dropdown.toggleClass('w-open');
    return false;
  },
});
|
def hex_to_dec(hex):
    """Convert a hexadecimal string (e.g. '5F') to its decimal integer value.

    The parameter name is kept as ``hex`` for backward compatibility with
    keyword callers, even though it shadows the builtin inside this function.

    Raises ValueError if the string is not valid hexadecimal.
    """
    # int() with base 16 accepts upper/lower case digits and an optional
    # '0x' prefix.
    return int(hex, 16)


# driver code
if __name__ == '__main__':
    hex_str = '5F'  # renamed so the builtin hex() is not shadowed at module level
    print("Decimal Equivalent of", hex_str, "is", hex_to_dec(hex_str))
    # Output: Decimal Equivalent of 5F is 95
|
def maximumProfit(arr):
    """Return the maximum profit from one buy followed by one later sell.

    ``arr`` holds prices in time order; profit is arr[j] - arr[i] with j > i.
    Returns 0 when no profitable transaction exists or there are fewer
    than two prices.  Runs in a single O(n) pass instead of the previous
    O(n^2) pair scan.
    """
    max_profit = 0
    if not arr:
        return max_profit
    # Track the cheapest price seen so far and the best profit achievable
    # by selling at the current price.
    min_price = arr[0]
    for price in arr[1:]:
        if price - min_price > max_profit:
            max_profit = price - min_price
        if price < min_price:
            min_price = price
    return max_profit


prices = [90, 80, 70, 100, 85]
print(maximumProfit(prices))
|
package nusmv_counterexample_visualizer.formula.arithmetic;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Created by buzhinsky on 11/20/17.
*/
/**
 * Arithmetic expression leaf that refers to a counterexample variable by
 * name; evaluation looks the variable's value up in the supplied table.
 */
public class Variable extends ArithmeticExpression {
    public Variable(String name) {
        super(name);
    }

    @Override
    public Object calculate(Map<String, List<String>> values, int position) {
        if (!values.containsKey(name)) {
            final String message = "Missing variable name: " + name
                    + ". One possible cause may be that this variable is present in an LTL formula"
                    + " but not in the corresponding counterexample. The list of all variables is: "
                    + values.keySet();
            throw new RuntimeException(message);
        }
        // Delegate to Constant so the raw value string is interpreted uniformly.
        return new Constant(values.get(name).get(position)).calculate(values, position);
    }

    @Override
    public String toString() {
        return name;
    }

    @Override
    public Set<String> variableSet() {
        return Collections.singleton(name);
    }
}
|
import sys
import os
class suppress_output:
    """Context manager that silences everything written to sys.stdout.

    While the context is active, standard output is redirected to
    os.devnull; on exit the original stream is restored and the devnull
    handle is closed.  Exceptions raised inside the block propagate.
    """

    def __enter__(self):
        """Redirect standard output to os.devnull and return self."""
        self.original_stdout = sys.stdout
        self.null_output = open(os.devnull, 'w')
        sys.stdout = self.null_output
        # Returning self lets callers write `with suppress_output() as s:`
        # (the original returned None, which silently broke that form).
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Restore the original stdout and release the devnull handle."""
        sys.stdout = self.original_stdout
        self.null_output.close()
        return False  # Propagate any exceptions


# Example usage:
with suppress_output():
    print("This will not be displayed on the terminal")
    # Any standard output inside the context will be suppressed
print("This will be displayed on the terminal")
# Standard output is restored to its original state outside the context
|
#!/bin/bash
# Converts avatar and office images to WebP and generates resized
# PNG/JPG fallbacks.  Extra cwebp options may be passed as arguments
# and replace the defaults below.
#
# Fixes: PARAMS is now a real array (the old form stored all flags as a
# single word and only worked through accidental word splitting),
# "$@" is quoted so arguments with spaces survive, and the no-op
# `cd $(pwd)` has been removed.

# Default cwebp options; each flag is a separate array element.
PARAMS=(-m 6 -q 70 -mt -af -progress)
if [ $# -ne 0 ]; then
    PARAMS=("$@")
fi

shopt -s nullglob nocaseglob extglob

for FILE in static/img/avatars/{andrea,dave,bambam,elke,fabrizio,iris,marco,mattia,max,pascal,paul,serge,urban,warwara}.png; do
    cwebp "${PARAMS[@]}" "$FILE" -o "${FILE%.*}".webp
    convert -resize 144x "$FILE" -quality 70 "${FILE%.*}"-144.png
    convert -resize 255x "$FILE" -quality 70 "${FILE%.*}"-255.png
done

for FILE in static/img/office/{bar.png,gym.png,office_view.jpg,teamcall.png,triarc_lego.jpeg,triarc-flow.png}; do
    cwebp "${PARAMS[@]}" "$FILE" -o "${FILE%.*}".webp
    cwebp "${PARAMS[@]}" -resize 320 0 "$FILE" -o "${FILE%.*}"-320.webp
    cwebp "${PARAMS[@]}" -resize 640 0 "$FILE" -o "${FILE%.*}"-640.webp
    cwebp "${PARAMS[@]}" -resize 1080 0 "$FILE" -o "${FILE%.*}"-1080.webp
    convert -resize 320x "$FILE" -quality 70 "${FILE%.*}"-320.jpg
    convert -resize 640x "$FILE" -quality 70 "${FILE%.*}"-640.jpg
    convert -resize 1080x "$FILE" -quality 70 "${FILE%.*}"-1080.jpg
done
|
def closest(arr, num):
    """Return the element of ``arr`` whose value is closest to ``num``.

    Ties are resolved in favour of the earlier element; returns None for
    an empty sequence.

    Bug fixed: the original compared each new distance against the stored
    *element value* instead of the best distance seen so far, so e.g.
    closest([10, 15, 20], 16) wrongly returned 20 instead of 15.
    """
    closest_val = None
    best_diff = None
    for value in arr:
        diff = abs(value - num)
        if best_diff is None or diff < best_diff:
            best_diff = diff
            closest_val = value
    return closest_val


# Example (the original module-level call referenced undefined names and
# raised NameError on import):
# closest([10, 15, 20], 16)  # returns 15
|
<gh_stars>0
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.ssp.util.importer.job.csv;
import java.io.IOException;
import java.io.Writer;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.file.FlatFileHeaderCallback;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
/**
 * Writes a header line consisting of the configured column names joined
 * by the configured delimiter (comma by default).
 */
public class RawItemFlatFileHeaderCallback implements FlatFileHeaderCallback {

    private static final Logger logger = LoggerFactory.getLogger(RawItemFlatFileHeaderCallback.class);

    // Column names to emit, in order; must be set before writeHeader runs.
    String[] columnNames;

    // Separator placed between column names.
    private String delimiter = DelimitedLineTokenizer.DELIMITER_COMMA;

    public RawItemFlatFileHeaderCallback() {
        super();
    }

    /**
     * Writes the header row to the given writer.
     *
     * @throws IOException if no column names have been configured
     */
    @Override
    public void writeHeader(Writer writer) throws IOException {
        if(columnNames == null){
            logger.error("Column names not found");
            throw new IOException("Unable to write table, column names not found");
        }
        // String.join handles multi-character delimiters correctly, unlike
        // the previous append-then-StringUtils.chop approach which only
        // removed a single trailing character.
        writer.write(String.join(delimiter, columnNames));
    }

    public void setColumnNames(String[] columnNames) {
        this.columnNames = columnNames;
    }

    public void setDelimiter(String delimiter){
        this.delimiter = delimiter;
    }
}
|
/// HTTP request preconfigured for JSON payloads: every instance is
/// created with a "Content-Type: application/json" header already set.
class JSONRequest: HTTPRequest {
    override init() {
        super.init()
        // Callers may still override or remove this header afterwards.
        headers["Content-Type"] = "application/json"
    }
}
|
package com.wpisen.trace.server.dao.entity;
import java.util.Date;
/**
 * Plain data holder mapping a client to a project ("binding"), with the
 * client platform, audit timestamps and a disable flag.  Used by the
 * DAO layer.
 */
public class ClientProjectBinding {
    // Binding id.
    private Integer bindId;

    // Client id.
    private Integer clientId;

    // Project id.
    private Integer proId;

    // Client platform string (stored trimmed; see setPlatform).
    private String platform;

    // Row creation time.
    private Date createTime;

    // Time of the most recent update.
    private Date lastUpdateTime;

    // Disable flag for this binding.
    private Boolean disable;

    public Integer getBindId() {
        return bindId;
    }
    public void setBindId(Integer bindId) {
        this.bindId = bindId;
    }
    public Integer getClientId() {
        return clientId;
    }
    public void setClientId(Integer clientId) {
        this.clientId = clientId;
    }
    public Integer getProId() {
        return proId;
    }
    public void setProId(Integer proId) {
        this.proId = proId;
    }
    public String getPlatform() {
        return platform;
    }
    public void setPlatform(String platform) {
        // Normalises input: null stays null, otherwise surrounding
        // whitespace is trimmed before storing.
        this.platform = platform == null ? null : platform.trim();
    }
    public Date getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }
    public Date getLastUpdateTime() {
        return lastUpdateTime;
    }
    public void setLastUpdateTime(Date lastUpdateTime) {
        this.lastUpdateTime = lastUpdateTime;
    }
    public Boolean getDisable() {
        return disable;
    }
    public void setDisable(Boolean disable) {
        this.disable = disable;
    }
}
|
<gh_stars>0
// Demo page for the "ting" grid components (Row/Col): renders sections
// showing even splits, gutters, 12-column spans and automatic wrapping.
// NOTE(review): this looks like compiled (TypeScript -> AMD) output —
// prefer editing the TypeScript source rather than this file.
// The \uXXXX strings are Chinese section headings kept verbatim.
define(["require", "exports", "react", "ting"], function (require, exports, React, ting_1) {
    "use strict";
    // btnStyle appears unused in this module; left untouched (generated code).
    var btnStyle = { marginRight: "10px" };
    return function () { return React.createElement("article", null,
        React.createElement("h2", null, "\u5747\u5300\u5206\u914D"),
        React.createElement(ting_1.Row, null,
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-info" }, "1")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-warn" }, "2")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-info" }, "3")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-warn" }, "4"))),
        React.createElement("h2", null, "\u5E26\u95F4\u9699\u5747\u5300\u5206\u914D"),
        React.createElement(ting_1.Row, { gutter: 20 },
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-primary" }, "1")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-success" }, "2")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-primary" }, "3")),
            React.createElement(ting_1.Col, { span: 2 },
                React.createElement("div", { className: "bg-success" }, "4"))),
        React.createElement(ting_1.Row, { gutter: 20 },
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-primary" }, "1")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-success" }, "2")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-primary" }, "3")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-success" }, "4")),
            React.createElement(ting_1.Col, null,
                React.createElement("div", { className: "bg-primary" }, "5"))),
        React.createElement("h2", null, "12\u7B49\u5206"),
        React.createElement(ting_1.Row, { cols: 12 },
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary" }, "1")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success" }, "2")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary" }, "3")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success" }, "4"))),
        React.createElement(ting_1.Row, { cols: 12 },
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary" }, "1")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success" }, "2")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary" }, "3"))),
        React.createElement("h2", null, "\u5E26\u95F4\u969912\u7B49\u5206"),
        React.createElement(ting_1.Row, { cols: 12, gutter: 20 },
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary box-mbxs" }, "1")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "2")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary box-mbxs" }, "3")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "4"))),
        React.createElement(ting_1.Row, { cols: 12, gutter: 20 },
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-primary" }, "1")),
            React.createElement(ting_1.Col, { span: 3 },
                React.createElement("div", { className: "bg-success" }, "2")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary" }, "3"))),
        React.createElement("h2", null, "\u8D85\u51FA\u81EA\u52A8\u6362\u884C"),
        React.createElement(ting_1.Row, { cols: 12, gutter: 20 },
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary box-mbxs" },
                    "1",
                    React.createElement("br", null),
                    "1")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "2")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary box-mbxs" }, "3")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "4")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary box-mbxs" }, "5")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "6")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-primary box-mbxs" }, "7")),
            React.createElement(ting_1.Col, { span: 6 },
                React.createElement("div", { className: "bg-success box-mbxs" }, "8")))); };
});
|
<reponame>davidguttman/dynamodown<filename>example/index.js
var DynamoDOWN = require('../')
var levelup = require('levelup')
// Create a levelup database backed by DynamoDB through the DynamoDOWN
// driver.  The first argument is presumably used as the DynamoDB table
// name — confirm against the dynamodown documentation.
var db = levelup('table_name', {
  db: DynamoDOWN,
  // required AWS config
  dynamo: {
    // Capacity can be specified, these are the defaults:
    ProvisionedThroughput: {
      ReadCapacityUnits: 1,
      WriteCapacityUnits: 1
    },
    region: 'us-east-1',
    secretAccessKey: 'abc',
    accessKeyId: '123',
    httpOptions: {
      proxy: 'http://localhost:8000'
    }
  }
})

// Write several keys in a single batch, then stream every key/value
// pair back and print it.  Note: the batch callback ignores `err`.
db.batch([
  {type: 'put', key: 'name', value: '<NAME>'},
  {type: 'put', key: 'dob', value: '16 February 1941'},
  {type: 'put', key: 'spouse', value: '<NAME>'},
  {type: 'put', key: 'occupation', value: 'Clown'}
], function(err) {
  db.createReadStream()
    .on('data', console.log)
    .on('close', function () {
      console.log("Show's over folks!")
    })
})
|
<gh_stars>0
package sunset.gitcore.database;
/**
 * Thrown when a database feature or operation is not supported by the
 * current implementation.
 */
public class NotSupportedException extends Exception {

    private static final long serialVersionUID = -6174099601531724695L;

    public NotSupportedException(String message) {
        super(message);
    }

    /**
     * Backward-compatible addition: wraps the underlying exception that
     * made the operation unsupported, preserving it for diagnostics.
     *
     * @param message description of the unsupported operation
     * @param cause   the originating exception
     */
    public NotSupportedException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
# Build the tf_colmap image from the Dockerfile in the current directory.
sudo docker build -t tf_colmap:latest .
# Run it with GPU support (nvidia-docker), mapping container port 8888 to
# host port 9999, mounting the host filesystem at /host and starting a
# shell in the directory corresponding to the current host directory.
sudo nvidia-docker run -it --rm -p 9999:8888 --volume /:/host --workdir /host$PWD tf_colmap bash
|
import ModuleFileServer from '../../server/modules/File/ModuleFileServer';
import IVendorGeneratorOptions from './IVendorGeneratorOptions';
/**
 * Singleton that generates ./vendor/vendor.js: a file containing one
 * require() call per third-party addin listed in
 * ./vendor/vendor_generator_options.json.
 */
export default class VendorBuilder {

    public static getInstance(): VendorBuilder {
        if (!VendorBuilder.instance) {
            VendorBuilder.instance = new VendorBuilder();
        }
        return VendorBuilder.instance;
    }

    protected static instance: VendorBuilder = null;

    private constructor() { }

    /* istanbul ignore next: really difficult test depending on files */
    /**
     * Writes the generated vendor file, unless compilation is disabled
     * via the IGNORE_VENDOR_COMPILATION environment variable.
     */
    public async generate_vendor() {
        // Strict equality instead of '==' — same result here, safer idiom.
        if (process.env.IGNORE_VENDOR_COMPILATION === 'true') {
            return;
        }

        const vendor_path = './vendor/vendor.js';
        const vendor_generator_options: IVendorGeneratorOptions = require(process.cwd() + '/vendor/vendor_generator_options.json');
        const vendor_content = this.get_vendor_file_content(vendor_generator_options);
        await ModuleFileServer.getInstance().writeFile(vendor_path, vendor_content);
    }

    /**
     * Builds the vendor file body: one require() line per addin,
     * skipping the framework itself ('oswedev').  Returns '' when no
     * options (or no addins) are provided.
     */
    public get_vendor_file_content(vendor_generator_options: IVendorGeneratorOptions): string {
        if (!vendor_generator_options) {
            return '';
        }

        // Guard keeps the original tolerance of a missing addins array
        // (the old for-in over undefined simply iterated zero times).
        const vendor_deps: string[] = vendor_generator_options.addins || [];
        let vendor_content = '';
        for (const dep_name of vendor_deps) {
            if (dep_name.toLowerCase() === 'oswedev') {
                continue;
            }
            vendor_content += "require('" + dep_name + "');\n";
        }
        return vendor_content;
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.socialForrst = void 0;
// Icon descriptor for the "Forrst" social icon: a viewBox plus an array
// of SVG path nodes, in the shape consumed by icon renderers.
// NOTE(review): this looks like generated (transpiled) icon data — the
// path "d" strings must be kept byte-for-byte.
var socialForrst = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "fill-rule": "evenodd",
      "clip-rule": "evenodd",
      "d": "M367.531,375.812L260.578,131.5c-0.891-2.125-2.891-3.5-5.109-3.5l0,0\r\n\tc-2.219,0-4.219,1.375-5.109,3.5L144.469,375.797c-0.75,1.828-0.594,3.922,0.438,5.578c1.031,1.625,2.781,2.625,4.656,2.625H240v-48\r\n\tl-32.016-16v-16L240,320v-48h32v32l32.016-16v16L272,320v16l48.016-32v16L272,352v32h90.438c1.875,0,3.625-0.969,4.656-2.625\r\n\tS368.281,377.625,367.531,375.812z"
    },
    "children": []
  }]
};
exports.socialForrst = socialForrst;
|
def fibonacci(n):
    """Return the first ``n`` Fibonacci numbers as a list.

    fibonacci(10) -> [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]

    Bug fixed: the original built the series but never returned it, so
    every call yielded None.  Edge cases n <= 0 and n == 1 are now
    handled explicitly.
    """
    if n <= 0:
        return []
    if n == 1:
        return [0]
    series = [0, 1]
    a, b = 0, 1
    for _ in range(2, n):
        a, b = b, a + b
        series.append(b)
    return series


print(fibonacci(10))  # Output: [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]
|
<reponame>TheSeven/refinedstorage<filename>src/main/java/refinedstorage/container/ContainerDetector.java
package refinedstorage.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import refinedstorage.container.slot.SlotSpecimenType;
import refinedstorage.tile.TileDetector;
/**
 * Container for the detector tile: a single specimen slot plus the
 * player inventory.
 */
public class ContainerDetector extends ContainerBase {
    public ContainerDetector(TileDetector detector, EntityPlayer player) {
        super(detector, player);

        addSlotToContainer(new SlotSpecimenType(detector, 0, 107, 20));

        addPlayerInventory(8, 55);
    }

    @Override
    public ItemStack transferStackInSlot(EntityPlayer player, int index) {
        Slot sourceSlot = getSlot(index);

        // Only shift-clicks from the player inventory (index > 0) are
        // merged into the specimen slot; everything else is a no-op.
        if (index > 0 && sourceSlot != null && sourceSlot.getHasStack()) {
            return mergeItemStackToSpecimen(sourceSlot.getStack(), 0, 1);
        }

        return null;
    }
}
|
# Use a convolutional neural network to create an AI algorithm to predict whether a given ECG signal is normal or abnormal
# Step 1: Prepare and load the ECG data
# Step 2: Pre-process the data by segmenting and normalizing the signal
# Step 3: Design the Convolutional Neural Network
# Step 4: Train and evaluate the model
# Step 5: Deploy the model for prediction
|
// Code generated by counterfeiter. DO NOT EDIT.
package fakes
import (
"sync"
"github.com/pivotal-cf/om/api"
)
// BoshDiffService is a counterfeiter-generated test double for the
// om/api diff service.  Each fake method records its arguments under a
// mutex and returns canned results configured through the *Returns /
// *ReturnsOnCall helpers, or delegates to a custom *Stub when installed.
type BoshDiffService struct {
	DirectorDiffStub        func() (api.DirectorDiff, error)
	directorDiffMutex       sync.RWMutex
	directorDiffArgsForCall []struct {
	}
	directorDiffReturns struct {
		result1 api.DirectorDiff
		result2 error
	}
	directorDiffReturnsOnCall map[int]struct {
		result1 api.DirectorDiff
		result2 error
	}
	ListStagedProductsStub        func() (api.StagedProductsOutput, error)
	listStagedProductsMutex       sync.RWMutex
	listStagedProductsArgsForCall []struct {
	}
	listStagedProductsReturns struct {
		result1 api.StagedProductsOutput
		result2 error
	}
	listStagedProductsReturnsOnCall map[int]struct {
		result1 api.StagedProductsOutput
		result2 error
	}
	ProductDiffStub        func(string) (api.ProductDiff, error)
	productDiffMutex       sync.RWMutex
	productDiffArgsForCall []struct {
		arg1 string
	}
	productDiffReturns struct {
		result1 api.ProductDiff
		result2 error
	}
	productDiffReturnsOnCall map[int]struct {
		result1 api.ProductDiff
		result2 error
	}
	invocations      map[string][][]interface{}
	invocationsMutex sync.RWMutex
}

// DirectorDiff records the call, then returns (in priority order) the
// installed stub, the per-call value, or the default configured value.
func (fake *BoshDiffService) DirectorDiff() (api.DirectorDiff, error) {
	fake.directorDiffMutex.Lock()
	ret, specificReturn := fake.directorDiffReturnsOnCall[len(fake.directorDiffArgsForCall)]
	fake.directorDiffArgsForCall = append(fake.directorDiffArgsForCall, struct {
	}{})
	fake.recordInvocation("DirectorDiff", []interface{}{})
	fake.directorDiffMutex.Unlock()
	if fake.DirectorDiffStub != nil {
		return fake.DirectorDiffStub()
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	fakeReturns := fake.directorDiffReturns
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *BoshDiffService) DirectorDiffCallCount() int {
	fake.directorDiffMutex.RLock()
	defer fake.directorDiffMutex.RUnlock()
	return len(fake.directorDiffArgsForCall)
}

func (fake *BoshDiffService) DirectorDiffCalls(stub func() (api.DirectorDiff, error)) {
	fake.directorDiffMutex.Lock()
	defer fake.directorDiffMutex.Unlock()
	fake.DirectorDiffStub = stub
}

func (fake *BoshDiffService) DirectorDiffReturns(result1 api.DirectorDiff, result2 error) {
	fake.directorDiffMutex.Lock()
	defer fake.directorDiffMutex.Unlock()
	fake.DirectorDiffStub = nil
	fake.directorDiffReturns = struct {
		result1 api.DirectorDiff
		result2 error
	}{result1, result2}
}

func (fake *BoshDiffService) DirectorDiffReturnsOnCall(i int, result1 api.DirectorDiff, result2 error) {
	fake.directorDiffMutex.Lock()
	defer fake.directorDiffMutex.Unlock()
	fake.DirectorDiffStub = nil
	if fake.directorDiffReturnsOnCall == nil {
		fake.directorDiffReturnsOnCall = make(map[int]struct {
			result1 api.DirectorDiff
			result2 error
		})
	}
	fake.directorDiffReturnsOnCall[i] = struct {
		result1 api.DirectorDiff
		result2 error
	}{result1, result2}
}

// ListStagedProducts records the call, then returns the stub, per-call
// value, or default configured value, in that order.
func (fake *BoshDiffService) ListStagedProducts() (api.StagedProductsOutput, error) {
	fake.listStagedProductsMutex.Lock()
	ret, specificReturn := fake.listStagedProductsReturnsOnCall[len(fake.listStagedProductsArgsForCall)]
	fake.listStagedProductsArgsForCall = append(fake.listStagedProductsArgsForCall, struct {
	}{})
	fake.recordInvocation("ListStagedProducts", []interface{}{})
	fake.listStagedProductsMutex.Unlock()
	if fake.ListStagedProductsStub != nil {
		return fake.ListStagedProductsStub()
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	fakeReturns := fake.listStagedProductsReturns
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *BoshDiffService) ListStagedProductsCallCount() int {
	fake.listStagedProductsMutex.RLock()
	defer fake.listStagedProductsMutex.RUnlock()
	return len(fake.listStagedProductsArgsForCall)
}

func (fake *BoshDiffService) ListStagedProductsCalls(stub func() (api.StagedProductsOutput, error)) {
	fake.listStagedProductsMutex.Lock()
	defer fake.listStagedProductsMutex.Unlock()
	fake.ListStagedProductsStub = stub
}

func (fake *BoshDiffService) ListStagedProductsReturns(result1 api.StagedProductsOutput, result2 error) {
	fake.listStagedProductsMutex.Lock()
	defer fake.listStagedProductsMutex.Unlock()
	fake.ListStagedProductsStub = nil
	fake.listStagedProductsReturns = struct {
		result1 api.StagedProductsOutput
		result2 error
	}{result1, result2}
}

func (fake *BoshDiffService) ListStagedProductsReturnsOnCall(i int, result1 api.StagedProductsOutput, result2 error) {
	fake.listStagedProductsMutex.Lock()
	defer fake.listStagedProductsMutex.Unlock()
	fake.ListStagedProductsStub = nil
	if fake.listStagedProductsReturnsOnCall == nil {
		fake.listStagedProductsReturnsOnCall = make(map[int]struct {
			result1 api.StagedProductsOutput
			result2 error
		})
	}
	fake.listStagedProductsReturnsOnCall[i] = struct {
		result1 api.StagedProductsOutput
		result2 error
	}{result1, result2}
}

// ProductDiff records the product name argument, then returns the stub,
// per-call value, or default configured value, in that order.
func (fake *BoshDiffService) ProductDiff(arg1 string) (api.ProductDiff, error) {
	fake.productDiffMutex.Lock()
	ret, specificReturn := fake.productDiffReturnsOnCall[len(fake.productDiffArgsForCall)]
	fake.productDiffArgsForCall = append(fake.productDiffArgsForCall, struct {
		arg1 string
	}{arg1})
	fake.recordInvocation("ProductDiff", []interface{}{arg1})
	fake.productDiffMutex.Unlock()
	if fake.ProductDiffStub != nil {
		return fake.ProductDiffStub(arg1)
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	fakeReturns := fake.productDiffReturns
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *BoshDiffService) ProductDiffCallCount() int {
	fake.productDiffMutex.RLock()
	defer fake.productDiffMutex.RUnlock()
	return len(fake.productDiffArgsForCall)
}

func (fake *BoshDiffService) ProductDiffCalls(stub func(string) (api.ProductDiff, error)) {
	fake.productDiffMutex.Lock()
	defer fake.productDiffMutex.Unlock()
	fake.ProductDiffStub = stub
}

func (fake *BoshDiffService) ProductDiffArgsForCall(i int) string {
	fake.productDiffMutex.RLock()
	defer fake.productDiffMutex.RUnlock()
	argsForCall := fake.productDiffArgsForCall[i]
	return argsForCall.arg1
}

func (fake *BoshDiffService) ProductDiffReturns(result1 api.ProductDiff, result2 error) {
	fake.productDiffMutex.Lock()
	defer fake.productDiffMutex.Unlock()
	fake.ProductDiffStub = nil
	fake.productDiffReturns = struct {
		result1 api.ProductDiff
		result2 error
	}{result1, result2}
}

func (fake *BoshDiffService) ProductDiffReturnsOnCall(i int, result1 api.ProductDiff, result2 error) {
	fake.productDiffMutex.Lock()
	defer fake.productDiffMutex.Unlock()
	fake.ProductDiffStub = nil
	if fake.productDiffReturnsOnCall == nil {
		fake.productDiffReturnsOnCall = make(map[int]struct {
			result1 api.ProductDiff
			result2 error
		})
	}
	fake.productDiffReturnsOnCall[i] = struct {
		result1 api.ProductDiff
		result2 error
	}{result1, result2}
}

// Invocations returns a shallow copy of the recorded call map, taken
// with every method mutex held for reading.
func (fake *BoshDiffService) Invocations() map[string][][]interface{} {
	fake.invocationsMutex.RLock()
	defer fake.invocationsMutex.RUnlock()
	fake.directorDiffMutex.RLock()
	defer fake.directorDiffMutex.RUnlock()
	fake.listStagedProductsMutex.RLock()
	defer fake.listStagedProductsMutex.RUnlock()
	fake.productDiffMutex.RLock()
	defer fake.productDiffMutex.RUnlock()
	copiedInvocations := map[string][][]interface{}{}
	for key, value := range fake.invocations {
		copiedInvocations[key] = value
	}
	return copiedInvocations
}

// recordInvocation appends a call record under the invocations mutex,
// lazily initialising the maps on first use.
func (fake *BoshDiffService) recordInvocation(key string, args []interface{}) {
	fake.invocationsMutex.Lock()
	defer fake.invocationsMutex.Unlock()
	if fake.invocations == nil {
		fake.invocations = map[string][][]interface{}{}
	}
	if fake.invocations[key] == nil {
		fake.invocations[key] = [][]interface{}{}
	}
	fake.invocations[key] = append(fake.invocations[key], args)
}
|
/*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibatis.sqlmap;
import com.ibatis.common.resources.Resources;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Integration tests verifying that multiple result sets returned by the
 * Derby MRESULTSET stored procedure are handled correctly, both through
 * iBATIS mappings (result maps / result classes) and through raw JDBC.
 */
public class MultiResultSetTest extends BaseSqlMapTest {

  // SETUP & TEARDOWN

  /** Initialises the Derby-backed SqlMap and seeds the account tables and stored procedure. */
  protected void setUp() throws Exception {
    initSqlMap("com/ibatis/sqlmap/maps/DerbySqlMapConfig.xml", Resources.getResourceAsProperties("com/ibatis/sqlmap/maps/DerbySqlMapConfig.properties"));
    initScript("com/scripts/account-init.sql");
    initScript("com/scripts/derby-proc-init.sql");
  }

  /** Two result sets of two accounts each, mapped via result maps. */
  public void testShouldRetrieveTwoSetsOfTwoAccountsFromMultipleResultMaps() throws Exception {
    Map persons = new HashMap();
    persons.put("1", new Integer(1));
    persons.put("2", new Integer(2));
    persons.put("3", new Integer(3));
    persons.put("4", new Integer(4));
    List results = sqlMap.queryForList("getMultiListsRm", persons);
    assertEquals(2, results.size());
    assertEquals(2, ((List) results.get(0)).size());
    assertEquals(2, ((List) results.get(1)).size());
  }

  /** Same as above, but mapped via result classes. */
  public void testShouldRetrieveTwoSetsOfTwoAccountsFromMultipleResultClasses() throws Exception {
    Map persons = new HashMap();
    persons.put("1", new Integer(1));
    persons.put("2", new Integer(2));
    persons.put("3", new Integer(3));
    persons.put("4", new Integer(4));
    List results = sqlMap.queryForList("getMultiListsRc", persons);
    assertEquals(2, results.size());
    assertEquals(2, ((List) results.get(0)).size());
    assertEquals(2, ((List) results.get(1)).size());
  }

  /** Sanity check through raw JDBC that the procedure yields two result sets. */
  public void testCallableStatementShouldReturnTwoResultSets() throws Exception {
    sqlMap.startTransaction();
    Connection conn = sqlMap.getCurrentConnection();
    // NOTE(review): rs/cs are not closed if an assertion fails mid-test;
    // consider a try/finally if this test is ever reworked.
    CallableStatement cs = conn.prepareCall("{call MRESULTSET(?,?,?,?)}");
    cs.setInt(1, 1);
    cs.setInt(2, 2);
    cs.setInt(3, 3);
    cs.setInt(4, 4);
    cs.execute();
    ResultSet rs = cs.getResultSet();
    assertNotNull(rs);
    int found = 1;
    while (cs.getMoreResults()) {
      assertNotNull(cs.getResultSet());
      found++;
    }
    rs.close();
    cs.close();
    assertEquals("Didn't find second result set.", 2, found);
  }
}
|
<reponame>geeekblog/blog
package models
import "time"
// Artical represents a blog article.  (The misspelling of "Article" is
// kept — renaming would break existing references.)
type Artical struct {
	ID         string       // article ID: uuid
	UserID     string       // user ID of the author: uuid
	Title      string       // article title
	Content    string       // article body content
	Tags       []int        // article tags
	Pics       []string     // article pictures
	CreateTime time.Time    // time of first creation
	UpdateTime time.Time    // time of the most recent modification
	Category   BlogCategory // article category
}
|
<filename>src/create.js
var Component = require("./Component")
var hook = require("./hook")
module.exports = component

/**
 * Creates a single Component by name.
 *
 * component(name[, root][, options]) — `root` (an Element) and `options`
 * are both optional; when `root` is omitted, the second argument is
 * treated as `options`.
 */
function component (name, root, options) {
  var args = normalizeArgs(root, options)
  var element = hook.findComponent(name, args.root)
  return Component.create(name, element, args.options)
}

/**
 * Creates a Component for every matching element.  Accepts the same
 * optional `root` / `options` arguments as component().
 */
component.all = function (name, root, options) {
  var args = normalizeArgs(root, options)
  var elements = hook.findAllComponents(name, args.root)
  return [].map.call(elements, function (element) {
    return Component.create(name, element, args.options)
  })
}

/**
 * Supports the optional-root calling convention shared by both entry
 * points (previously duplicated inline): when `root` is not an Element
 * it is actually the options object.
 */
function normalizeArgs (root, options) {
  if (!(root instanceof Element)) {
    options = root
    root = null
  }
  return { root: root, options: options }
}
|
<reponame>IonutMorariu/speakup-api<filename>controllers/chatController.js
const mongoose = require('mongoose');
const User = mongoose.model('User');
const Chat = mongoose.model('Chat');
const Message = mongoose.model('Message');
exports.startChat = async (req, res) => {
//TODO Check existing chats
const user = await User.findOne({ _id: req.body.user._id });
if (!user) {
res.status(404).send('User not found');
return;
}
const matchingUsers = await User.find({ native_language: user.learning_language }).select('-session_token');
const checkedUsers = [];
while (checkedUsers.length < matchingUsers.length) {
const randNumber = Math.floor(Math.random() * matchingUsers.length);
const randUser = matchingUsers[randNumber];
const chat = await Chat.findOne({
$or: [{ user_1: randUser._id, user_2: user._id }, { user_1: user._id, user_2: randUser._id }]
});
if (chat) {
checkedUsers.push(randUser);
} else {
const newChat = await new Chat({ user_1: user._id, user_2: randUser._id }).save();
res.json(newChat);
break;
}
}
if (checkedUsers.length == matchingUsers.length) {
res.status(404).send('No user match found');
}
};
exports.getChats = async (req, res) => {
const userId = req.body.user._id; //<- Comes from checkSession middleware
const chats = await Chat.find({ $or: [{ user_1: userId }, { user_2: userId }] })
.populate('user_1', '-session_token')
.populate('user_2', '-session_token');
if (!chats) {
res.status(404).send('No chats found');
return;
}
res.send(chats);
};
exports.getMessages = async (req, res) => {
const messages = await Message.find({ chat: req.query.chat }).populate('user', '-session_token');
res.json(messages);
};
exports.createMessage = async (req, res) => {
const message = await new Message({
text: req.body.text,
user: req.body.user._id,
chat: req.body.chat,
createdAt: req.body.createdAt
}).save();
res.json(message);
};
exports.removeChat = async (req, res) => {
const chat = await Chat.findOneAndRemove({ _id: req.body.id });
if (!chat) {
res.status(404).send('Chat not found');
return;
}
res.status(200).send('Chat delete successfully');
};
|
package com.java110.things.service.user;
import com.java110.things.entity.accessControl.HeartbeatTaskDto;
import com.java110.things.entity.accessControl.UserFaceDto;
import com.java110.things.entity.machine.MachineDto;
import com.java110.things.entity.response.ResultDto;
/**
* @ClassName IUserService
* @Description TODO
* @Author wuxw
* @Date 2020/5/14 14:48
* @Version 1.0
* add by wuxw 2020/5/14
**/
public interface IUserFaceService {

    /**
     * Saves user face information for the given machine.
     *
     * @param machineDto  target machine
     * @param userFaceDto user face information to store
     * @return result of the operation
     * @throws Exception if the operation fails
     */
    public ResultDto saveUserFace(MachineDto machineDto, UserFaceDto userFaceDto) throws Exception;

    /**
     * Updates previously stored user face information on the given machine.
     *
     * @param machineDto  target machine
     * @param userFaceDto updated user face information
     * @return result of the operation
     * @throws Exception if the operation fails
     */
    public ResultDto updateUserFace(MachineDto machineDto, UserFaceDto userFaceDto) throws Exception;

    /**
     * Deletes the user face referenced by the heartbeat task from the given machine.
     *
     * @param machineDto       target machine
     * @param heartbeatTaskDto task identifying the face to delete
     * @return result of the operation
     * @throws Exception if the operation fails
     */
    public ResultDto deleteUserFace(MachineDto machineDto, HeartbeatTaskDto heartbeatTaskDto) throws Exception;

    /**
     * Clears all user face information from the given machine.
     *
     * @param machineDto target machine
     * @return result of the operation
     * @throws Exception if the operation fails
     */
    public ResultDto clearUserFace(MachineDto machineDto) throws Exception;
}
|
<filename>lib/bosh_release_diff/commands/ui.rb
module BoshReleaseDiff
  module Commands
    # Thin output facade that forwards console writes to the bosh CLI
    # object supplied at construction time.
    class Ui
      def initialize(bosh)
        @bosh = bosh
      end

      # Print output via the underlying bosh object.
      def say(*args)
        @bosh.say(*args)
      end

      # Emit blank line(s) via the underlying bosh object.
      def nl(*args)
        @bosh.nl(*args)
      end
    end
  end
end
|
/*
*
* Copyright (c) 2004
* <NAME>
*
* Use, modification and distribution are subject to the
* Boost Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*
*/
/*
* LOCATION: see http://www.boost.org for most recent version.
* FILE c_regex_traits.hpp
* VERSION see <boost/version.hpp>
* DESCRIPTION: Declares regular expression traits class that wraps the global C locale.
*/
#ifndef BOOST_C_REGEX_TRAITS_HPP_INCLUDED
#define BOOST_C_REGEX_TRAITS_HPP_INCLUDED
#ifndef BOOST_REGEX_CONFIG_HPP
#include <boost/regex/config.hpp>
#endif
#ifndef BOOST_REGEX_WORKAROUND_HPP
#include <boost/regex/v4/regex_workaround.hpp>
#endif
#include <cctype>
#ifdef BOOST_NO_STDC_NAMESPACE
namespace std{
using ::strlen; using ::tolower;
}
#endif
#ifdef BOOST_HAS_ABI_HEADERS
# include BOOST_ABI_PREFIX
#endif
namespace boost{

// NOTE(review): vendored Boost.Regex header — prefer upgrading Boost over
// local edits. Declares the regex traits class that wraps the global C
// locale; member definitions live in the compiled regex library.
template <class charT>
struct c_regex_traits;

// Narrow-character specialisation backed by the global "C" locale.
template<>
struct BOOST_REGEX_DECL c_regex_traits<char>
{
   c_regex_traits(){}
   typedef char char_type;
   typedef std::size_t size_type;
   typedef std::string string_type;
   struct locale_type{};            // stateless: the global C locale carries no per-object state
   typedef boost::uint32_t char_class_type;

   // Length of a null-terminated narrow string.
   static size_type length(const char_type* p)
   {
      return (std::strlen)(p);
   }

   // Case-sensitive translation is the identity mapping.
   char translate(char c) const
   {
      return c;
   }
   // Case-insensitive translation folds to lower case; the unsigned char
   // cast avoids undefined behaviour for negative char values.
   char translate_nocase(char c) const
   {
      return static_cast<char>((std::tolower)(static_cast<unsigned char>(c)));
   }

   static string_type BOOST_REGEX_CALL transform(const char* p1, const char* p2);
   static string_type BOOST_REGEX_CALL transform_primary(const char* p1, const char* p2);

   static char_class_type BOOST_REGEX_CALL lookup_classname(const char* p1, const char* p2);
   static string_type BOOST_REGEX_CALL lookup_collatename(const char* p1, const char* p2);

   static bool BOOST_REGEX_CALL isctype(char, char_class_type);
   static int BOOST_REGEX_CALL value(char, int);

   // Locale handling is a no-op for the C locale.
   locale_type imbue(locale_type l)
   { return l; }
   locale_type getloc()const
   { return locale_type(); }

private:
   // this type is not copyable:
   c_regex_traits(const c_regex_traits&);
   c_regex_traits& operator=(const c_regex_traits&);
};

#ifndef BOOST_NO_WREGEX
// Wide-character specialisation; mirrors the char version using wide
// C-library primitives (wcslen / towlower).
template<>
struct BOOST_REGEX_DECL c_regex_traits<wchar_t>
{
   c_regex_traits(){}
   typedef wchar_t char_type;
   typedef std::size_t size_type;
   typedef std::wstring string_type;
   struct locale_type{};
   typedef boost::uint32_t char_class_type;

   static size_type length(const char_type* p)
   {
      return (std::wcslen)(p);
   }

   wchar_t translate(wchar_t c) const
   {
      return c;
   }
   wchar_t translate_nocase(wchar_t c) const
   {
      return (std::towlower)(c);
   }

   static string_type BOOST_REGEX_CALL transform(const wchar_t* p1, const wchar_t* p2);
   static string_type BOOST_REGEX_CALL transform_primary(const wchar_t* p1, const wchar_t* p2);

   static char_class_type BOOST_REGEX_CALL lookup_classname(const wchar_t* p1, const wchar_t* p2);
   static string_type BOOST_REGEX_CALL lookup_collatename(const wchar_t* p1, const wchar_t* p2);

   static bool BOOST_REGEX_CALL isctype(wchar_t, char_class_type);
   static int BOOST_REGEX_CALL value(wchar_t, int);

   locale_type imbue(locale_type l)
   { return l; }
   locale_type getloc()const
   { return locale_type(); }

private:
   // this type is not copyable:
   c_regex_traits(const c_regex_traits&);
   c_regex_traits& operator=(const c_regex_traits&);
};

#ifdef BOOST_REGEX_HAS_OTHER_WCHAR_T
//
// Provide an unsigned short version as well, so the user can link to this
// no matter whether they build with /Zc:wchar_t or not (MSVC specific).
//
template<>
struct BOOST_REGEX_DECL c_regex_traits<unsigned short>
{
   c_regex_traits(){}
   typedef unsigned short char_type;
   typedef std::size_t size_type;
   typedef std::basic_string<unsigned short> string_type;
   struct locale_type{};
   typedef boost::uint32_t char_class_type;

   // Delegates to wcslen; assumes unsigned short and wchar_t have the same
   // representation in this build configuration (MSVC-specific).
   static size_type length(const char_type* p)
   {
      return (std::wcslen)((const wchar_t*)p);
   }

   unsigned short translate(unsigned short c) const
   {
      return c;
   }
   unsigned short translate_nocase(unsigned short c) const
   {
      return (std::towlower)((wchar_t)c);
   }

   static string_type BOOST_REGEX_CALL transform(const unsigned short* p1, const unsigned short* p2);
   static string_type BOOST_REGEX_CALL transform_primary(const unsigned short* p1, const unsigned short* p2);

   static char_class_type BOOST_REGEX_CALL lookup_classname(const unsigned short* p1, const unsigned short* p2);
   static string_type BOOST_REGEX_CALL lookup_collatename(const unsigned short* p1, const unsigned short* p2);

   static bool BOOST_REGEX_CALL isctype(unsigned short, char_class_type);
   static int BOOST_REGEX_CALL value(unsigned short, int);

   locale_type imbue(locale_type l)
   { return l; }
   locale_type getloc()const
   { return locale_type(); }

private:
   // this type is not copyable:
   c_regex_traits(const c_regex_traits&);
   c_regex_traits& operator=(const c_regex_traits&);
};
#endif

#endif // BOOST_NO_WREGEX

}
#ifdef BOOST_HAS_ABI_HEADERS
# include BOOST_ABI_SUFFIX
#endif
#endif
|
<reponame>PotatoDrug/EAD-Assignment<gh_stars>0
package com.spmovy.beans;
import java.sql.Date;
public class MovieJB implements java.io.Serializable {
private int ID;
private String title;
private Date releasedate;
private String synopsis;
private int duration;
private String imagepath;
private String status;
private float rating;
public MovieJB() {
}
public int getID() {
return ID;
}
public void setID(int ID) {
this.ID = ID;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public Date getReleasedate() {
return releasedate;
}
public void setReleasedate(Date releasedate) {
this.releasedate = releasedate;
}
public String getSynopsis() {
return synopsis;
}
public void setSynopsis(String synopsis) {
this.synopsis = synopsis;
}
public int getDuration() {
return duration;
}
public void setDuration(int duration) {
this.duration = duration;
}
public String getImagepath() {
return imagepath;
}
public void setImagepath(String imagepath) {
this.imagepath = imagepath;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public float getRating() {
return rating;
}
public void setRating(float rating) {
this.rating = rating;
}
}
|
#!/bin/bash
#
# Apache License
# Version 2.0, January 2004
# http://www.apache.org/licenses/
#
# TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
#
# 1. Definitions.
#
# "License" shall mean the terms and conditions for use, reproduction,
# and distribution as defined by Sections 1 through 9 of this document.
#
# "Licensor" shall mean the copyright owner or entity authorized by
# the copyright owner that is granting the License.
#
# "Legal Entity" shall mean the union of the acting entity and all
# other entities that control, are controlled by, or are under common
# control with that entity. For the purposes of this definition,
# "control" means (i) the power, direct or indirect, to cause the
# direction or management of such entity, whether by contract or
# otherwise, or (ii) ownership of fifty percent (50%) or more of the
# outstanding shares, or (iii) beneficial ownership of such entity.
#
# "You" (or "Your") shall mean an individual or Legal Entity
# exercising permissions granted by this License.
#
# "Source" form shall mean the preferred form for making modifications,
# including but not limited to software source code, documentation
# source, and configuration files.
#
# "Object" form shall mean any form resulting from mechanical
# transformation or translation of a Source form, including but
# not limited to compiled object code, generated documentation,
# and conversions to other media types.
#
# "Work" shall mean the work of authorship, whether in Source or
# Object form, made available under the License, as indicated by a
# copyright notice that is included in or attached to the work
# (an example is provided in the Appendix below).
#
# "Derivative Works" shall mean any work, whether in Source or Object
# form, that is based on (or derived from) the Work and for which the
# editorial revisions, annotations, elaborations, or other modifications
# represent, as a whole, an original work of authorship. For the purposes
# of this License, Derivative Works shall not include works that remain
# separable from, or merely link (or bind by name) to the interfaces of,
# the Work and Derivative Works thereof.
#
# "Contribution" shall mean any work of authorship, including
# the original version of the Work and any modifications or additions
# to that Work or Derivative Works thereof, that is intentionally
# submitted to Licensor for inclusion in the Work by the copyright owner
# or by an individual or Legal Entity authorized to submit on behalf of
# the copyright owner. For the purposes of this definition, "submitted"
# means any form of electronic, verbal, or written communication sent
# to the Licensor or its representatives, including but not limited to
# communication on electronic mailing lists, source code control systems,
# and issue tracking systems that are managed by, or on behalf of, the
# Licensor for the purpose of discussing and improving the Work, but
# excluding communication that is conspicuously marked or otherwise
# designated in writing by the copyright owner as "Not a Contribution."
#
# "Contributor" shall mean Licensor and any individual or Legal Entity
# on behalf of whom a Contribution has been received by Licensor and
# subsequently incorporated within the Work.
#
# 2. Grant of Copyright License. Subject to the terms and conditions of
# this License, each Contributor hereby grants to You a perpetual,
# worldwide, non-exclusive, no-charge, royalty-free, irrevocable
# copyright license to reproduce, prepare Derivative Works of,
# publicly display, publicly perform, sublicense, and distribute the
# Work and such Derivative Works in Source or Object form.
#
# 3. Grant of Patent License. Subject to the terms and conditions of
# this License, each Contributor hereby grants to You a perpetual,
# worldwide, non-exclusive, no-charge, royalty-free, irrevocable
# (except as stated in this section) patent license to make, have made,
# use, offer to sell, sell, import, and otherwise transfer the Work,
# where such license applies only to those patent claims licensable
# by such Contributor that are necessarily infringed by their
# Contribution(s) alone or by combination of their Contribution(s)
# with the Work to which such Contribution(s) was submitted. If You
# institute patent litigation against any entity (including a
# cross-claim or counterclaim in a lawsuit) alleging that the Work
# or a Contribution incorporated within the Work constitutes direct
# or contributory patent infringement, then any patent licenses
# granted to You under this License for that Work shall terminate
# as of the date such litigation is filed.
#
# 4. Redistribution. You may reproduce and distribute copies of the
# Work or Derivative Works thereof in any medium, with or without
# modifications, and in Source or Object form, provided that You
# meet the following conditions:
#
# (a) You must give any other recipients of the Work or
# Derivative Works a copy of this License; and
#
# (b) You must cause any modified files to carry prominent notices
# stating that You changed the files; and
#
# (c) You must retain, in the Source form of any Derivative Works
# that You distribute, all copyright, patent, trademark, and
# attribution notices from the Source form of the Work,
# excluding those notices that do not pertain to any part of
# the Derivative Works; and
#
# (d) If the Work includes a "NOTICE" text file as part of its
# distribution, then any Derivative Works that You distribute must
# include a readable copy of the attribution notices contained
# within such NOTICE file, excluding those notices that do not
# pertain to any part of the Derivative Works, in at least one
# of the following places: within a NOTICE text file distributed
# as part of the Derivative Works; within the Source form or
# documentation, if provided along with the Derivative Works; or,
# within a display generated by the Derivative Works, if and
# wherever such third-party notices normally appear. The contents
# of the NOTICE file are for informational purposes only and
# do not modify the License. You may add Your own attribution
# notices within Derivative Works that You distribute, alongside
# or as an addendum to the NOTICE text from the Work, provided
# that such additional attribution notices cannot be construed
# as modifying the License.
#
# You may add Your own copyright statement to Your modifications and
# may provide additional or different license terms and conditions
# for use, reproduction, or distribution of Your modifications, or
# for any such Derivative Works as a whole, provided Your use,
# reproduction, and distribution of the Work otherwise complies with
# the conditions stated in this License.
#
# 5. Submission of Contributions. Unless You explicitly state otherwise,
# any Contribution intentionally submitted for inclusion in the Work
# by You to the Licensor shall be under the terms and conditions of
# this License, without any additional terms or conditions.
# Notwithstanding the above, nothing herein shall supersede or modify
# the terms of any separate license agreement you may have executed
# with Licensor regarding such Contributions.
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor,
# except as required for reasonable and customary use in describing the
# origin of the Work and reproducing the content of the NOTICE file.
#
# 7. Disclaimer of Warranty. Unless required by applicable law or
# agreed to in writing, Licensor provides the Work (and each
# Contributor provides its Contributions) on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied, including, without limitation, any warranties or conditions
# of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
# PARTICULAR PURPOSE. You are solely responsible for determining the
# appropriateness of using or redistributing the Work and assume any
# risks associated with Your exercise of permissions under this License.
#
# 8. Limitation of Liability. In no event and under no legal theory,
# whether in tort (including negligence), contract, or otherwise,
# unless required by applicable law (such as deliberate and grossly
# negligent acts) or agreed to in writing, shall any Contributor be
# liable to You for damages, including any direct, indirect, special,
# incidental, or consequential damages of any character arising as a
# result of this License or out of the use or inability to use the
# Work (including but not limited to damages for loss of goodwill,
# work stoppage, computer failure or malfunction, or any and all
# other commercial damages or losses), even if such Contributor
# has been advised of the possibility of such damages.
#
# 9. Accepting Warranty or Additional Liability. While redistributing
# the Work or Derivative Works thereof, You may choose to offer,
# and charge a fee for, acceptance of support, warranty, indemnity,
# or other liability obligations and/or rights consistent with this
# License. However, in accepting such obligations, You may act only
# on Your own behalf and on Your sole responsibility, not on behalf
# of any other Contributor, and only if You agree to indemnify,
# defend, and hold each Contributor harmless for any liability
# incurred by, or claims asserted against, such Contributor by reason
# of your accepting any such warranty or additional liability.
#
# END OF TERMS AND CONDITIONS
#
# APPENDIX: How to apply the Apache License to your work.
#
# To apply the Apache License to your work, attach the following
# boilerplate notice, with the fields enclosed by brackets "{}"
# replaced with your own identifying information. (Don't include
# the brackets!) The text should be enclosed in the appropriate
# comment syntax for the file format. We also recommend that a
# file or class name and description of purpose be included on the
# same "printed page" as the copyright notice for easier
# identification within third-party archives.
#
# Copyright {yyyy} {name of copyright owner}
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -x

# Sets up an arbitrary process group: prepares the ingest/arbitrary directory
# layout, runs the ingest content downloader, and unpacks the process-group
# setup archive into $DOWNLOAD_TMP.

source /etc/rtwsrc

DOWNLOAD_TMP=/tmp/pg/setup
# The setup archive name is the second dot-separated field of the process group name.
RTWS_ARBITRARY_PG_SETUP_FILE="$(echo "$RTWS_PROCESS_GROUP" | cut -d"." -f2)"

# Create the ingest working directories if they do not exist yet.
for d in logs lib scripts
do
	if [ ! -d "/usr/local/rtws/ingest/$d" ]; then
		mkdir -p "/usr/local/rtws/ingest/$d"
	fi
done

# Create the arbitrary process group log directory.
for d in logs
do
	if [ ! -d "/usr/local/rtws/arbitrary/$d" ]; then
		mkdir -p "/usr/local/rtws/arbitrary/$d"
	fi
done

# Scratch areas shared with other processes, hence world-writable.
mkdir -p /tmp/pg/setup ; chmod -R 777 /tmp/pg/
mkdir -p /tmp/ingest/lib ; chmod -R 777 /tmp/ingest

# JVM system properties forwarded to the ingest content downloader.
RTWS_INGEST_OPTIONS="-DRTWS_ROOT_LOG_LEVEL=$RTWS_ROOT_LOG_LEVEL -DRTWS_APP_LOG_LEVEL=$RTWS_APP_LOG_LEVEL -DRTWS_ACCESS_KEY=$RTWS_ACCESS_KEY -DRTWS_TENANT_ID=$RTWS_TENANT_ID -DRTWS_SECRET_KEY=$RTWS_SECRET_KEY -DINTERNAL_IP=$INTERNAL_IP -DRTWS_FQDN=$RTWS_FQDN -DRTWS_DOMAIN=$RTWS_DOMAIN -DRTWS_ZOOKEEPER_QUORUM_SERVERS=$RTWS_ZOOKEEPER_QUORUM_SERVERS"
RTWS_INGEST_CONTENT_DOWNLOADER_CONFIG="../conf/arbitrary-pg-content-downloader.xml"

# Run the downloader (30 second interval). Abort if the bin dir is missing
# instead of launching java from the wrong working directory.
# $RTWS_INGEST_OPTIONS is intentionally unquoted: it must word-split into
# multiple -D flags.
cd /usr/local/rtws/ingest/bin || exit 1
java -Dlog4j.configuration=file:///usr/local/rtws/ingest/conf/log4j.properties $RTWS_INGEST_OPTIONS -cp "../lib/*" com.deleidos.rtws.tools.repository.IngestContentDownloader "$RTWS_INGEST_CONTENT_DOWNLOADER_CONFIG" 30

# Unpack the setup archive; on failure, leave a marker file for watchers and abort.
cd "$DOWNLOAD_TMP" || exit 1
if ! unzip "$RTWS_ARBITRARY_PG_SETUP_FILE"; then
	echo "Unable to unpack arbitrary process group setup file. Aborting...."
	touch "$DOWNLOAD_TMP/.setup_failed"
	exit 1
fi
|
#!/bin/ash -x
# Builds and packages opkg-utils for the NNL builder environment:
# unpack source, apply local patch, GNU-style build, byte-compile the
# Python helpers, then produce the package and clean up.

#INIT
. ~/.nnl-builder/settings

#PARAMS
NAME=opkg-utils
VER=eae0d8fa44e8594aa90eadf06e5f4fbeef314509
REL=1
BUILD_DIR=$NAME-$VER
INSTALL_DIR=$NAME-root
SOURCE_DIR=$OPKG_WORK_SOURCES/$NAME
EXTERNAL_SRC_0=$NAME-$VER.tar.bz2
EXTERNAL_URL_0=http://git.yoctoproject.org/cgit/cgit.cgi/opkg-utils/snapshot

#PREP - unpack a clean source tree and apply the local patch.
cd "$OPKG_WORK_BUILD" || exit 1
rm -rf "$BUILD_DIR"
# Abort on unpack failure instead of letting patch run in the wrong directory
# (the original `tar xf ... && cd` silently continued past a tar error).
if ! tar xf "$SOURCE_DIR/$EXTERNAL_SRC_0"; then
    echo "ERROR: unpacking $EXTERNAL_SRC_0" >&2
    exit 1
fi
cd "$BUILD_DIR" || exit 1
patch -Np1 -i "$SOURCE_DIR/$NAME-$VER-1.patch"

#BUILD
CONFIG_ADD=""
if ! "$OPKG_HELPER/gnu-build.sh" "$NAME" "$VER" "$BUILD_DIR" "$INSTALL_DIR" "$CONFIG_ADD"; then
    echo "ERROR: building in $NAME-$VER" >&2
    exit 1
fi

#PACK - byte-compile the Python helpers, then build the opkg package.
cd "$OPKG_WORK_BUILD" || exit 1
python -m compileall "$INSTALL_DIR"
if ! "$OPKG_HELPER/packaging.sh" "$NAME" "$VER-$REL" "$SOURCE_DIR" "$INSTALL_DIR"; then
    echo "ERROR: packaging in $NAME-$VER" >&2
    exit 1
fi

#CLEAN
cd "$OPKG_WORK_BUILD" || exit 1
rm -rf "$BUILD_DIR" "$INSTALL_DIR" "$NAME-build"

#FINISH
echo "OK: $NAME-$VER" >&2
exit 0
|
<filename>test/schema.rb<gh_stars>1-10
# Test database configuration and schema for the exporter's test suite.
ActiveRecord::Base.configurations = {
  'active_record_schema_exporter' => {
    :adapter => 'mysql',
    :username => 'root',
    :encoding => 'utf8',
    # NOTE(review): the configuration key says "schema_exporter" but the
    # database name says "sql_exporter" — confirm the mismatch is intentional.
    :database => 'active_record_sql_exporter_test',
  }
}

ActiveRecord::Base.establish_connection 'active_record_schema_exporter'

# Recreate (:force => true) the tables the fixture models below map onto.
ActiveRecord::Schema.define do
  create_table :tasks, :force => true do |t|
    t.string :name
    t.integer :project_id
  end

  # NOTE(review): Department declares `belongs_to :manager`, which expects a
  # manager_id column that this table does not define — TODO confirm.
  create_table :departments, :force => true do |t|
    t.string :name
  end

  create_table :budgets, :force => true do |t|
    t.integer :amount
    t.integer :department_id
  end

  create_table :employees, :force => true do |t|
    t.integer :department_id
    t.string :name
    t.date :started
  end

  # Bare model with no associations.
  create_table :simples, :force => true do |t|
    t.string :name
  end

  # owner_type/owner_id back the polymorphic Project#owner association.
  create_table :projects, :force => true do |t|
    t.string :name
    t.string :owner_type
    t.integer :owner_id
  end
end
# Fixture models exercising the association types the exporter must handle:
# belongs_to, has_many, has_one, polymorphic, custom class_name, :dependent.
class Employee < ActiveRecord::Base
  belongs_to :department
end

class Department < ActiveRecord::Base
  has_many :employees
  has_one :budget
  # NOTE(review): requires a departments.manager_id column, which the schema
  # in this file does not create — TODO confirm.
  belongs_to :manager, :class_name => 'Employee'
end

class Budget < ActiveRecord::Base
  belongs_to :department
end

# Model with no associations at all.
class Simple < ActiveRecord::Base
end

class Project < ActiveRecord::Base
  belongs_to :owner, :polymorphic => true
  has_many :tasks, :dependent => :nullify
end

class Task < ActiveRecord::Base
  belongs_to :project
end
|
# Watch the scss/ directory and recompile into css/ with compressed output.
scss --watch scss:css --style compressed
|
<reponame>KameronJohnson/phone_book
require('rspec')
require('contact')
require('phone')
# Specs for the Contact model of the phone book app. Each example starts from
# a clean slate via Contact.clear in the before hook.
describe(Contact) do
  before() do
    Contact.clear()
  end

  describe("#contact_name") do
    it("returns the contact name") do
      test_contact = Contact.new("<NAME>", "503-555-1111")
      test_contact.save()
      expect(test_contact.contact_name()).to(eq("<NAME>"))
    end
  end

  describe('#id') do
    # Ids appear to be assigned sequentially starting from 1 after a clear —
    # NOTE(review): inferred from this expectation, confirm in Contact.
    it("returns the id of the contact") do
      test_contact = Contact.new("<NAME>", "503-555-1111")
      expect(test_contact.id()).to(eq(1))
    end
  end

  describe("#save") do
    it("pushes a contact into the array of saved contacts") do
      test_contact = Contact.new("<NAME>", "503-555-1111")
      test_contact.save()
      expect(Contact.all()).to(eq([test_contact]))
    end
  end

  describe(".all") do
    it("is empty at first") do
      expect(Contact.all()).to(eq([]))
    end
  end

  describe(".clear") do
    it("empties out all of the saved contacts") do
      Contact.new("<NAME>", "503-555-1111").save()
      Contact.clear()
      expect(Contact.all()).to(eq([]))
    end
  end

  describe(".find_contact") do
    it("returns a contact by its id number") do
      test_contact = Contact.new("<NAME>", "503-555-1111")
      test_contact.save()
      test_contact2 = Contact.new("<NAME>", "503-555-2222")
      test_contact2.save()
      expect(Contact.find_contact(test_contact.id())).to(eq(test_contact))
    end
  end

  describe('#add_contact') do
    it("adds a new phone number to contacts") do
      test_contact = Contact.new("<NAME>", "503-555-9999")
      test_phone_numbers = Phone.new("503-555-1111", "503-555-2222", "503-555-3333")
      test_contact.add_contact(test_phone_numbers)
      expect(test_contact.phone_numbers()).to(eq([test_phone_numbers]))
    end
  end
end #end class
# describe("#phone_number") do
# it("returns the phone number") do
# test_phone_number = Contact.new("<NAME>", "503-555-1111")
# test_phone_number.save()
# expect(test_phone_number.phone_number()).to(eq("503-555-1111"))
# end
# end
|
// Shopping-cart mixin for quick-purchase pages. init(page) stores the page
// and installs any missing cart handlers on it; the other methods keep the
// page's carGoods / quick_list / quick_hot_goods_lists / totals in sync.
// NOTE(review): this is a build artifact (minified variable names t/o/a/...);
// prefer editing the original source. Comments below describe what the code
// visibly does.
module.exports = {
    currentPage: null,
    // Remember the page and attach each cart handler only if the page does
    // not already define it.
    init: function(t) {
        var o = this;
        void 0 === (o.currentPage = t).shoppingCartListModel && (t.shoppingCartListModel = function(t) {
            o.shoppingCartListModel(t);
        }), void 0 === t.hideShoppingCart && (t.hideShoppingCart = function(t) {
            o.hideShoppingCart(t);
        }), void 0 === t.clearShoppingCart && (t.clearShoppingCart = function(t) {
            o.clearShoppingCart(t);
        }), void 0 === t.jia && (t.jia = function(t) {
            o.jia(t);
        }), void 0 === t.jian && (t.jian = function(t) {
            o.jian(t);
        }), void 0 === t.goodNumChange && (t.goodNumChange = function(t) {
            o.goodNumChange(t);
        }), void 0 === t.buynow && (t.buynow = function(t) {
            o.buynow(t);
        });
    },
    // Recompute cart totals (item count and summed goods_price, 2 decimals)
    // from page.data.carGoods; hides the cart panel when it becomes empty.
    carStatistics: function(t) {
        var o = t.data.carGoods, a = 0, i = 0;
        for (var r in o) a += o[r].num, i = parseFloat(i) + parseFloat(o[r].goods_price);
        var s = {
            total_num: a,
            total_price: i.toFixed(2)
        };
        0 === a && this.hideShoppingCart(t), t.setData({
            total: s
        });
    },
    // Hide the cart panel.
    hideShoppingCart: function() {
        this.currentPage.setData({
            shoppingCartModel: !1
        });
    },
    // Toggle the cart panel's visibility.
    shoppingCartListModel: function() {
        var t = this.currentPage, o = (t.data.carGoods, t.data.shoppingCartModel);
        console.log(o), o ? t.setData({
            shoppingCartModel: !1
        }) : t.setData({
            shoppingCartModel: !0
        });
    },
    // Zero all per-item counters, reset cart-related page state, hide the
    // panel and drop the persisted cart from storage.
    clearShoppingCart: function(t) {
        var o = (t = this.currentPage).data.quick_hot_goods_lists, a = t.data.quick_list;
        for (var i in o) for (var r in o[i]) o[i].num = 0;
        for (var s in a) for (var n in a[s].goods) a[s].goods[n].num = 0;
        t.setData({
            goodsModel: !1,
            carGoods: [],
            total: {
                total_num: 0,
                total_price: 0
            },
            check_num: 0,
            quick_hot_goods_lists: o,
            quick_list: a,
            currentGood: [],
            checked_attr: [],
            check_goods_price: 0,
            temporaryGood: {},
            goodNumCount: 0,
            goods_num: 0
        }), t.setData({
            shoppingCartModel: !1
        }), getApp().core.removeStorageSync(getApp().const.ITEM);
    },
    // Persist the current cart state under the app's ITEM storage key.
    saveItemData: function(t) {
        var o = {
            quick_list: t.data.quick_list,
            carGoods: t.data.carGoods,
            total: t.data.total,
            quick_hot_goods_lists: t.data.quick_hot_goods_lists,
            checked_attr: t.data.checked_attr
        };
        getApp().core.setStorageSync(getApp().const.ITEM, o);
    },
    // "jia" (add): increment quantity for the tapped good. Shows a toast
    // ("商品库存不足" = insufficient stock) and bails when the first attr's
    // stock would be exceeded; otherwise updates/creates the carGoods entry.
    // NOTE(review): `d` is declared inside the loop and read after it via var
    // hoisting — if no good matches, the final setData writes undefined.
    jia: function(t) {
        var o = this.currentPage, a = t.currentTarget.dataset, i = o.data.quick_list;
        for (var r in i) for (var s in i[r].goods) {
            var n = i[r].goods[s];
            if (parseInt(n.id) === parseInt(a.id)) {
                var e = n.num ? n.num + 1 : 1;
                if (e > JSON.parse(n.attr)[0].num) return void wx.showToast({
                    title: "商品库存不足",
                    image: "/images/icon-warning.png"
                });
                n.num = e;
                var d = o.data.carGoods, c = 1, u = a.price ? a.price : n.price;
                for (var g in d) {
                    if (parseInt(d[g].goods_id) === parseInt(n.id) && 1 === JSON.parse(n.attr).length) {
                        c = 0, d[g].num = e, d[g].goods_price = (d[g].num * d[g].price).toFixed(2);
                        break;
                    }
                    var p = a.index;
                    if (d[p]) {
                        c = 0, d[p].num = d[p].num + 1, d[p].goods_price = (d[p].num * d[p].price).toFixed(2);
                        break;
                    }
                }
                if (1 === c || 0 === d.length) {
                    var h = JSON.parse(i[r].goods[s].attr);
                    d.push({
                        goods_id: parseInt(i[r].goods[s].id),
                        attr: h[0].attr_list,
                        goods_name: i[r].goods[s].name,
                        goods_price: u,
                        num: 1,
                        price: u
                    });
                }
            }
        }
        o.setData({
            carGoods: d,
            quick_list: i
        }), this.carStatistics(o), this.quickHotStatistics(), this.updateGoodNum();
    },
    // "jian" (subtract): decrement quantity for the tapped good, clamped at 0,
    // and update the matching carGoods entry. Same hoisted-`d` caveat as jia.
    jian: function(t) {
        var o = this.currentPage, a = t.currentTarget.dataset, i = o.data.quick_list;
        for (var r in i) for (var s in i[r].goods) {
            var n = i[r].goods[s];
            if (parseInt(n.id) === parseInt(a.id)) {
                var e = 0 < n.num ? n.num - 1 : n.num;
                n.num = e;
                var d = o.data.carGoods;
                for (var c in d) {
                    a.price ? a.price : n.price;
                    if (parseInt(d[c].goods_id) === parseInt(n.id) && 1 === JSON.parse(n.attr).length) {
                        d[c].num = e, d[c].goods_price = (d[c].num * d[c].price).toFixed(2);
                        break;
                    }
                    var u = a.index;
                    if (d[u] && 0 < d[u].num) {
                        d[u].num = d[u].num - 1, d[u].goods_price = (d[u].num * d[u].price).toFixed(2);
                        break;
                    }
                }
            }
        }
        o.setData({
            carGoods: d,
            quick_list: i
        }), this.carStatistics(o), this.quickHotStatistics(), this.updateGoodNum();
    },
    // Handle a direct quantity input. Clamps the requested amount to the
    // available stock (plain goods use goods_num; attr goods use the page's
    // temporaryGood), then propagates the new count to carGoods, quick_list
    // and quick_hot_goods_lists before recomputing totals.
    goodNumChange: function(t) {
        var o = this.currentPage, a = parseInt(t.detail.value) ? parseInt(t.detail.value) : 0, i = t.target.dataset.id ? parseInt(t.target.dataset.id) : o.data.currentGood.id, r = o.data.carGoods, s = o.data.quick_list, n = o.data.quick_hot_goods_lists, e = a, d = 0, c = "";
        for (var u in s) for (var g in s[u].goods) {
            var p = parseInt(s[u].goods[g].use_attr);
            if ((C = parseInt(s[u].goods[g].id)) === i && 0 === p) {
                var h = parseInt(s[u].goods[g].goods_num);
                h < a && (wx.showToast({
                    title: "商品库存不足",
                    image: "/images/icon-warning.png"
                }), e = h), s[u].goods[g].num = e, d = p;
            }
            if (C === i && 1 === p) {
                var _ = o.data.temporaryGood;
                _.num < a && (wx.showToast({
                    title: "商品库存不足",
                    image: "/images/icon-warning.png"
                }), e = _.num), d = p, c = s[u].goods[g], o.setData({
                    check_goods_price: (e * _.price).toFixed(2)
                });
            }
        }
        var m = 0;
        for (var l in r) {
            if ((C = parseInt(r[l].goods_id)) === i && 0 === d && (r[l].num = e, r[l].goods_price = (e * r[l].price).toFixed(2)),
            C === i && 1 === d) {
                // Attr goods: only update the cart row whose attr id set
                // matches the currently checked attrs.
                var v = o.data.checked_attr, f = r[l].attr, k = [];
                for (var u in f) k.push([ f[u].attr_id, i ]);
                k.sort().join() === v.sort().join() && (r[l].num = e, r[l].goods_price = (e * r[l].price).toFixed(2));
            }
            C === i && (m += r[l].num);
        }
        for (var S in 1 === d && (c.num = m), n) {
            var C;
            (C = parseInt(n[S].id)) === i && 0 === d && (n[S].num = e), C === i && 1 === d && (n[S].num = m);
        }
        o.setData({
            carGoods: r,
            quick_list: s,
            quick_hot_goods_lists: n
        }), this.carStatistics(o);
    },
    // Mirror per-good counters from quick_list into quick_hot_goods_lists.
    quickHotStatistics: function() {
        var t = this.currentPage, o = t.data.quick_hot_goods_lists, a = t.data.quick_list;
        for (var i in o) for (var r in a) for (var s in a[r].goods) parseInt(a[r].goods[s].id) === parseInt(o[i].id) && (o[i].num = a[r].goods[s].num);
        t.setData({
            quick_hot_goods_lists: o
        });
    },
    // Sync the currently displayed good's counter from quick_list onto the
    // page's goods_num / goodNumCount fields.
    updateGoodNum: function() {
        var t = this.currentPage, o = t.data.quick_list, a = t.data.goods;
        if (o && a) for (var i in o) for (var r in o[i].goods) if (parseInt(o[i].goods[r].id) === parseInt(a.id)) {
            var s = o[i].goods[r].num, n = o[i].goods[r].num;
            t.setData({
                goods_num: n,
                goodNumCount: s
            });
            break;
        }
    },
    // Build an order payload from non-zero cart rows, navigate to the order
    // submission page with it, then clear the cart.
    buynow: function(t) {
        var o = this.currentPage, a = o.data.carGoods;
        o.data.goodsModel;
        o.setData({
            goodsModel: !1
        });
        for (var i = a.length, r = [], s = [], n = 0; n < i; n++) 0 != a[n].num && (s = {
            goods_id: a[n].goods_id,
            num: a[n].num,
            attr: a[n].attr
        }, r.push(s));
        var e = [];
        e.push({
            mch_id: 0,
            goods_list: r
        }), getApp().core.navigateTo({
            url: "/pages/new-order-submit/new-order-submit?mch_list=" + JSON.stringify(e)
        }), this.clearShoppingCart();
    }
};
|
<gh_stars>0
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0-devel
// protoc v3.14.0
// source: common/common.proto
package common
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// STATE is the lifecycle-state enum declared in common/common.proto.
// NOTE(review): this file is generated by protoc-gen-go (see header) —
// regenerate from the .proto instead of editing by hand.
type STATE int32

const (
	STATE_CREATED  STATE = 0
	STATE_CHECKED  STATE = 1
	STATE_ACTIVE   STATE = 2
	STATE_INACTIVE STATE = 3
	STATE_DELETED  STATE = 4
)

// Enum value maps for STATE.
var (
	STATE_name = map[int32]string{
		0: "CREATED",
		1: "CHECKED",
		2: "ACTIVE",
		3: "INACTIVE",
		4: "DELETED",
	}
	STATE_value = map[string]int32{
		"CREATED":  0,
		"CHECKED":  1,
		"ACTIVE":   2,
		"INACTIVE": 3,
		"DELETED":  4,
	}
)

// Enum returns a pointer to a copy of x (proto2-style optional helper).
func (x STATE) Enum() *STATE {
	p := new(STATE)
	*p = x
	return p
}

// String returns the proto name of the enum value.
func (x STATE) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

func (STATE) Descriptor() protoreflect.EnumDescriptor {
	return file_common_common_proto_enumTypes[0].Descriptor()
}

func (STATE) Type() protoreflect.EnumType {
	return &file_common_common_proto_enumTypes[0]
}

// Number returns the wire value of the enum.
func (x STATE) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use STATE.Descriptor instead.
func (STATE) EnumDescriptor() ([]byte, []int) {
	return file_common_common_proto_rawDescGZIP(), []int{0}
}
// STATUS enumerates processing outcomes declared in common/common.proto.
// NOTE(review): generated by protoc-gen-go — regenerate from the .proto
// rather than editing this file by hand.
type STATUS int32

const (
	STATUS_UNKNOWN    STATUS = 0
	STATUS_QUEUED     STATUS = 1
	STATUS_IN_PROCESS STATUS = 2
	STATUS_FAILED     STATUS = 3
	STATUS_SUCCESSFUL STATUS = 4
)

// Enum value maps for STATUS.
var (
	STATUS_name = map[int32]string{
		0: "UNKNOWN",
		1: "QUEUED",
		2: "IN_PROCESS",
		3: "FAILED",
		4: "SUCCESSFUL",
	}
	STATUS_value = map[string]int32{
		"UNKNOWN":    0,
		"QUEUED":     1,
		"IN_PROCESS": 2,
		"FAILED":     3,
		"SUCCESSFUL": 4,
	}
)

// Enum returns a pointer to a copy of x (proto2-style optional helper).
func (x STATUS) Enum() *STATUS {
	p := new(STATUS)
	*p = x
	return p
}

// String returns the proto name of the enum value.
func (x STATUS) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

func (STATUS) Descriptor() protoreflect.EnumDescriptor {
	return file_common_common_proto_enumTypes[1].Descriptor()
}

func (STATUS) Type() protoreflect.EnumType {
	return &file_common_common_proto_enumTypes[1]
}

// Number returns the wire value of the enum.
func (x STATUS) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use STATUS.Descriptor instead.
func (STATUS) EnumDescriptor() ([]byte, []int) {
	return file_common_common_proto_rawDescGZIP(), []int{1}
}
var File_common_common_proto protoreflect.FileDescriptor
var file_common_common_proto_rawDesc = []byte{
0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x61, 0x70, 0x69, 0x73, 0x2a, 0x48, 0x0a, 0x05, 0x53,
0x54, 0x41, 0x54, 0x45, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x44, 0x10,
0x00, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x48, 0x45, 0x43, 0x4b, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a,
0x0a, 0x06, 0x41, 0x43, 0x54, 0x49, 0x56, 0x45, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x49, 0x4e,
0x41, 0x43, 0x54, 0x49, 0x56, 0x45, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x4c, 0x45,
0x54, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x4d, 0x0a, 0x06, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x12,
0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06,
0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, 0x5f, 0x50,
0x52, 0x4f, 0x43, 0x45, 0x53, 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c,
0x45, 0x44, 0x10, 0x03, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x55, 0x43, 0x43, 0x45, 0x53, 0x53, 0x46,
0x55, 0x4c, 0x10, 0x04, 0x42, 0x0f, 0x5a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3b, 0x63,
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_common_common_proto_rawDescOnce sync.Once
file_common_common_proto_rawDescData = file_common_common_proto_rawDesc
)
// file_common_common_proto_rawDescGZIP returns the file descriptor bytes,
// GZIP-compressing them on first call; later calls return the cached result.
func file_common_common_proto_rawDescGZIP() []byte {
	file_common_common_proto_rawDescOnce.Do(func() {
		file_common_common_proto_rawDescData = protoimpl.X.CompressGZIP(file_common_common_proto_rawDescData)
	})
	return file_common_common_proto_rawDescData
}
// Runtime type info for the two enums (STATE, STATUS) declared in this file.
var file_common_common_proto_enumTypes = make([]protoimpl.EnumInfo, 2)

// Go types referenced by the descriptor, in declaration order.
var file_common_common_proto_goTypes = []interface{}{
	(STATE)(0),  // 0: apis.STATE
	(STATUS)(0), // 1: apis.STATUS
}

// Dependency index sub-lists — all empty: this file declares no messages,
// extensions or services.
var file_common_common_proto_depIdxs = []int32{
	0, // [0:0] is the sub-list for method output_type
	0, // [0:0] is the sub-list for method input_type
	0, // [0:0] is the sub-list for extension type_name
	0, // [0:0] is the sub-list for extension extendee
	0, // [0:0] is the sub-list for field type_name
}
// init registers the descriptor with the protobuf runtime at package load.
func init() { file_common_common_proto_init() }
// file_common_common_proto_init builds and registers the type information for
// common/common.proto. Idempotent: calls after the first are no-ops.
func file_common_common_proto_init() {
	if File_common_common_proto != nil {
		return // already initialized
	}
	// Throwaway type whose PkgPath identifies this Go package to the builder.
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_common_common_proto_rawDesc,
			NumEnums: 2,
			NumMessages: 0,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_common_common_proto_goTypes,
		DependencyIndexes: file_common_common_proto_depIdxs,
		EnumInfos: file_common_common_proto_enumTypes,
	}.Build()
	File_common_common_proto = out.File
	// Release bookkeeping slices so they can be garbage-collected.
	file_common_common_proto_rawDesc = nil
	file_common_common_proto_goTypes = nil
	file_common_common_proto_depIdxs = nil
}
|
// JavaScript program to calculate the number of characters, words and sentences in a text

/** Number of characters (UTF-16 code units) in the text. */
function countChars(text) {
    return text.length;
}

/**
 * Number of words in the text. Splits on runs of whitespace so multiple
 * spaces don't inflate the count, and an empty/blank string counts as 0
 * (the old `split(" ")` approach returned 1 for "").
 */
function countWords(text) {
    const trimmed = text.trim();
    return trimmed === "" ? 0 : trimmed.split(/\s+/).length;
}

/**
 * Number of sentences, counted as runs of terminal punctuation.
 * Recognizes '!', '?' as well as '.' (the old version only counted '.').
 */
function countSentences(text) {
    return (text.match(/[.!?]+/g) || []).length;
}

let text = "The quick brown fox jumps over the lazy dog";
console.log("Number of Characters: " + countChars(text));
console.log("Number of Words: " + countWords(text));
console.log("Number of Sentences: " + countSentences(text));
|
def updateRupeeContainer(rupeeContainer, qte, playerHUD):
    """Add ``qte`` rupees to the container, clamped at ``maxRupee``, then refresh the HUD.

    :param rupeeContainer: dict with ``'rupee'`` and ``'maxRupee'`` keys; mutated in place
    :param qte: amount to add (negative values subtract; no lower clamp is applied,
        matching the original behavior)
    :param playerHUD: object exposing ``updateRupee()``, called once after the change
    """
    # min() expresses the upper clamp directly instead of an if/else branch.
    rupeeContainer['rupee'] = min(
        rupeeContainer['rupee'] + qte, rupeeContainer['maxRupee']
    )
    playerHUD.updateRupee()
|
<reponame>paulhoughton/preact-pwa<filename>src/Routes.js
import { h } from "preact";
import AsyncRoute from "preact-async-route";
const ROUTES = [
{
path: "/",
title: "Home",
router: () => import("./components/Home.js").then(m => m.default)
},
{
path: "/about",
title: "About",
router: () => import("./components/About.js").then(m => m.default)
}
];
export const routes = ROUTES.map(({ path, router }) => (
<AsyncRoute path={path} component={router} />
));
export const mappings = ROUTES.map(({ title, path }) => ({ title, path }));
|
import * as tslib_1 from "tslib";
import { MidSideEffect } from "../effect/MidSideEffect";
import { Signal } from "../signal/Signal";
import { Multiply } from "../signal/Multiply";
import { Subtract } from "../signal/Subtract";
import { optionsFromArguments } from "../core/util/Defaults";
import { readOnly } from "../core/util/Interface";
import { connect } from "../core/context/ToneAudioNode";
/**
* Applies a width factor to the mid/side seperation.
* 0 is all mid and 1 is all side.
* Algorithm found in [kvraudio forums](http://www.kvraudio.com/forum/viewtopic.php?t=212587).
* ```
* Mid *= 2*(1-width)<br>
* Side *= 2*width
* ```
* @category Effect
*/
var StereoWidener = /** @class */ (function (_super) {
    tslib_1.__extends(StereoWidener, _super);
    function StereoWidener() {
        var _this = _super.call(this, optionsFromArguments(StereoWidener.getDefaults(), arguments, ["width"])) || this;
        _this.name = "StereoWidener";
        var options = optionsFromArguments(StereoWidener.getDefaults(), arguments, ["width"]);
        // The stereo width: 0 = all mid (mono), 1 = all side (see class doc).
        _this.width = new Signal({
            context: _this.context,
            value: options.width,
            units: "normalRange",
        });
        readOnly(_this, ["width"]);
        // Two gain stages realizing: mid *= 2*(1-width), side *= 2*width.
        _this._twoTimesWidthMid = new Multiply({
            context: _this.context,
            value: 2,
        });
        _this._twoTimesWidthSide = new Multiply({
            context: _this.context,
            value: 2,
        });
        // Mid branch: (1 - width) -> *2 -> factor of the mid multiplier.
        _this._midMult = new Multiply({ context: _this.context });
        _this._twoTimesWidthMid.connect(_this._midMult.factor);
        _this.connectEffectMid(_this._midMult);
        // _oneMinusWidth computes 1 - width: constant 1 is the minuend,
        // the width signal feeds the subtrahend.
        _this._oneMinusWidth = new Subtract({ context: _this.context });
        _this._oneMinusWidth.connect(_this._twoTimesWidthMid);
        connect(_this.context.getConstant(1), _this._oneMinusWidth);
        _this.width.connect(_this._oneMinusWidth.subtrahend);
        // Side branch: width -> *2 -> factor of the side multiplier.
        _this._sideMult = new Multiply({ context: _this.context });
        _this.width.connect(_this._twoTimesWidthSide);
        _this._twoTimesWidthSide.connect(_this._sideMult.factor);
        _this.connectEffectSide(_this._sideMult);
        return _this;
    }
    // Defaults: width 0.5 (unchanged stereo image) on top of MidSideEffect's defaults.
    StereoWidener.getDefaults = function () {
        return Object.assign(MidSideEffect.getDefaults(), {
            width: 0.5,
        });
    };
    // Releases the width signal and every internal gain/arithmetic node.
    StereoWidener.prototype.dispose = function () {
        _super.prototype.dispose.call(this);
        this.width.dispose();
        this._midMult.dispose();
        this._sideMult.dispose();
        this._twoTimesWidthMid.dispose();
        this._twoTimesWidthSide.dispose();
        this._oneMinusWidth.dispose();
        return this;
    };
    return StereoWidener;
}(MidSideEffect));
export { StereoWidener };
//# sourceMappingURL=StereoWidener.js.map
|
package cyclops.stream.spliterator.push.filter;
import cyclops.stream.spliterator.push.AbstractOperatorTest;
import cyclops.stream.spliterator.push.ArrayOfValuesOperator;
import cyclops.stream.spliterator.push.FilterOperator;
import cyclops.stream.spliterator.push.Fixtures;
import cyclops.stream.spliterator.push.Operator;
import cyclops.stream.spliterator.push.SingleValueOperator;
/**
* Created by johnmcclean on 17/01/2017.
*/
public class FilterOperatorTest extends AbstractOperatorTest {

    /** Accepts every element, so the filter acts as a pass-through in these fixtures. */
    private static boolean acceptAll(Integer value) {
        return true;
    }

    public Operator<Integer> createEmpty() {
        // Empty upstream source: nothing reaches the predicate.
        return new FilterOperator<>(new ArrayOfValuesOperator<>(), FilterOperatorTest::acceptAll);
    }

    public Operator<Integer> createOne() {
        return new FilterOperator<>(new SingleValueOperator<>(1), FilterOperatorTest::acceptAll);
    }

    public Operator<Integer> createThree() {
        return new FilterOperator<>(new ArrayOfValuesOperator<>(1, 2, 3), FilterOperatorTest::acceptAll);
    }

    public Operator<Integer> createTwoAndError() {
        // Shared fixture: emits two values, then an error.
        return new FilterOperator<>(Fixtures.twoAndErrorSource, FilterOperatorTest::acceptAll);
    }

    public Operator<Integer> createThreeErrors() {
        // Shared fixture: emits three consecutive errors.
        return new FilterOperator<>(Fixtures.threeErrorsSource, FilterOperatorTest::acceptAll);
    }
}
|
package com.io.routesapp;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.widget.TextView;
import com.google.android.material.navigation.NavigationView;
import com.io.routesapp.data.SharedRoutesPlacesRepository;
import com.io.routesapp.data.model.LoggedInUser;
import com.io.routesapp.data.httpClient;
import androidx.navigation.NavController;
import androidx.navigation.Navigation;
import androidx.navigation.ui.AppBarConfiguration;
import androidx.navigation.ui.NavigationUI;
import androidx.drawerlayout.widget.DrawerLayout;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import org.json.JSONException;
import java.util.HashMap;
import java.util.Objects;
public class MainActivity extends AppCompatActivity {

    private AppBarConfiguration mAppBarConfiguration;
    // Session state shared across the app via the static accessor below.
    private static LoggedInUser loggedInUser;
    public static httpClient HTTPClient;
    // Set by the logout flow so the session is not re-persisted on exit.
    public static boolean logoutButtonPressed = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        logoutButtonPressed = false;
        Intent intent = getIntent();
        // Start from an empty user, then fill it from the launching intent.
        loggedInUser = new LoggedInUser(0, "", "", "", new HashMap<>());
        loggedInUser.setCookie("AccessToken2", intent.getStringExtra("AccessToken2"));
        loggedInUser.setCookie("RefreshToken2", intent.getStringExtra("RefreshToken2"));
        HTTPClient = new httpClient(this.getApplicationContext(), "MainActivity");
        if (Objects.requireNonNull(intent.getStringExtra("PreviousActivity")).equals("login")) {
            // Coming from the login screen: user data travels in the intent extras.
            loggedInUser.setUserId(intent.getIntExtra("userId", 0));
            loggedInUser.setUsername(intent.getStringExtra("username"));
            loggedInUser.setDisplayName(intent.getStringExtra("displayName"));
            loggedInUser.setEmail(intent.getStringExtra("email"));
        }
        else if (Objects.requireNonNull(intent.getStringExtra("PreviousActivity")).equals("start")) {
            String username = intent.getStringExtra("username");
            try {
                // NOTE(review): throws InterruptedException, so this looks like a
                // blocking call made during onCreate — confirm it does not perform
                // network I/O on the main thread (NetworkOnMainThreadException risk).
                loggedInUser = HTTPClient.getUserData(username);
            } catch (InterruptedException | JSONException e) {
                e.printStackTrace();
            }
        }
        setContentView(R.layout.activity_main);
        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        DrawerLayout mDrawerLayout = findViewById(R.id.drawer_layout);
        NavigationView navigationView = findViewById(R.id.nav_view);
        //initializing a shared repository
        SharedRoutesPlacesRepository sharedRepo = new SharedRoutesPlacesRepository();
        // Passing each menu ID as a set of Ids because each
        // menu should be considered as top level destinations.
        mAppBarConfiguration = new AppBarConfiguration.Builder(R.id.nav_my_profile,
                R.id.nav_discover_places, R.id.nav_discover_routes,
                R.id.nav_my_fav_places, R.id.nav_my_fav_routes, R.id.nav_help)
                .setDrawerLayout(mDrawerLayout)
                .build();
        NavController navController = Navigation.findNavController(this, R.id.nav_host_fragment);
        NavigationUI.setupActionBarWithNavController(this, navController, mAppBarConfiguration);
        NavigationUI.setupWithNavController(navigationView, navController);
        // Show the signed-in user's identity in the drawer header.
        View headerView = navigationView.getHeaderView(0);
        TextView name = headerView.findViewById(R.id.drawer_name);
        name.setText(loggedInUser.getDisplayName());
        TextView email = headerView.findViewById((R.id.drawer_email));
        email.setText(loggedInUser.getEmail());
        CheckAccessTokenThread thread = new CheckAccessTokenThread();
        thread.start();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onSupportNavigateUp() {
        NavController navController = Navigation.findNavController(this, R.id.nav_host_fragment);
        return NavigationUI.navigateUp(navController, mAppBarConfiguration)
                || super.onSupportNavigateUp();
    }

    /**
     * Saves the session cookies and username to SharedPreferences so the user
     * stays signed in across restarts. Skipped after an explicit logout.
     * Extracted from the previously duplicated onStop()/onPause() bodies.
     */
    private void persistSession() {
        if (logoutButtonPressed) {
            return;
        }
        SharedPreferences mySharedPreferences = getSharedPreferences(getString(R.string.settings), Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = mySharedPreferences.edit();
        editor.putString("AccessToken2", loggedInUser.getCookies().get("AccessToken2"));
        editor.putString("RefreshToken2", loggedInUser.getCookies().get("RefreshToken2"));
        editor.putString("username", loggedInUser.getUsername());
        editor.apply();
    }

    @Override
    protected void onStop() {
        super.onStop();
        persistSession();
    }

    @Override
    protected void onPause() {
        super.onPause();
        persistSession();
    }

    public static LoggedInUser getLoggedInUser() {
        return loggedInUser;
    }
}
|
# -*- coding: utf-8 -*-
import abc
import argparse
import gc
import os
from pathlib import Path
from typing import Callable, Dict, List, Tuple
import numpy as np
import pandas as pd
import torch
import wandb
from rich.progress import (BarColumn, Progress, TaskID, TextColumn,
TimeRemainingColumn)
from torch.utils.data import DataLoader
from .common import collate_fn, node_cls_collate_fn
from .params import MOVIELENS_1M_DIR
class Runner(abc.ABC):
    """Abstract runner class for task training and attribute inference attacks

    :param args: namespace for input parameters
    :type args: argparse.Namespace
    """

    def __init__(self, args: argparse.Namespace):
        """Builds datasets, model, routines and data loaders from ``args``."""
        self.args = args
        self.prefetch_to_gpu = args.prefetch_to_gpu
        self.train_set = self.build_main_dataset(
            args.train_ratings, args.users_train, args.prefetch_to_gpu
        )
        self.test_set = self.build_main_dataset(
            args.test_ratings, args.users_test, args.prefetch_to_gpu
        )
        self.adv_train_set = self.bulid_adversary_dataset(
            args.users_train, args.prefetch_to_gpu
        )
        self.adv_test_set = self.bulid_adversary_dataset(
            args.users_test, args.prefetch_to_gpu
        )
        self.edges = self.build_edges(args.train_ratings)
        self.base_model = self.get_base_model()
        self.train_routine = self.get_train_routine()
        self.test_routine = self.get_test_routine()
        self.adv_train_routine = self.get_adv_train_routine()
        self.adv_test_routine = self.get_adv_test_routine()
        train_loader_params = dict(
            dataset=self.train_set,
            batch_size=args.batch_size,
            shuffle=True,
            drop_last=True,
            num_workers=0,
            collate_fn=collate_fn,
        )
        adv_cls_loader_params = dict(
            dataset=self.adv_train_set,
            batch_size=args.node_cls_batch_size,
            shuffle=True,
            drop_last=False,
            num_workers=0,
            collate_fn=node_cls_collate_fn,
        )
        if args.prefetch_to_gpu:
            adv_cls_loader_params["pin_memory"] = True
            train_loader_params["pin_memory"] = True
        self.train_loader = DataLoader(**train_loader_params)
        self.adv_train_loader = DataLoader(**adv_cls_loader_params)

    @abc.abstractmethod
    def get_test_routine(
        self,
    ) -> Callable[
        [
            torch.utils.data.Dataset,
            argparse.Namespace,
            torch.nn.Module,
            Progress,
            TaskID,
        ],
        Tuple[Dict[str, float], pd.DataFrame],
    ]:
        """Returns function to perform objective task evaluation

        :return: function to perform objective task evaluation
        :rtype: Callable[ [ torch.utils.data.Dataset, argparse.Namespace, torch.nn.Module, Progress, TaskID, ], Tuple[Dict[str, float], pd.DataFrame], ]
        """
        pass

    @abc.abstractmethod
    def get_train_routine(
        self,
    ) -> Callable[
        [
            torch.utils.data.DataLoader,
            torch.utils.data.DataLoader,
            argparse.Namespace,
            torch.nn.Module,
            torch.optim.Optimizer,
            torch.optim.Optimizer,
            Progress,
            TaskID,
        ],
        None,
    ]:
        """Returns function to perform objective task training

        :return: function to perform objective task training
        :rtype: Callable[ [ torch.utils.data.DataLoader, torch.utils.data.DataLoader, argparse.Namespace, torch.nn.Module, torch.optim.Optimizer, torch.optim.Optimizer, Progress, TaskID, ], None, ]
        """
        pass

    @abc.abstractmethod
    def get_adv_test_routine(
        self,
    ) -> Callable[
        [
            torch.utils.data.DataLoader,
            argparse.Namespace,
            torch.nn.Module,
            torch.optim.Optimizer,
            Progress,
            TaskID,
        ],
        None,
    ]:
        """Returns function to perform adversary evaluation

        :return: function to perform adversary evaluation
        :rtype: Callable[ [ torch.utils.data.DataLoader, argparse.Namespace, torch.nn.Module, torch.optim.Optimizer, Progress, TaskID, ], None, ]
        """
        pass

    @abc.abstractmethod
    def get_adv_train_routine(
        self,
    ) -> Callable[
        [
            torch.utils.data.Dataset,
            argparse.Namespace,
            torch.nn.Module,
            str,
            Progress,
            TaskID,
        ],
        Tuple[Dict[str, float], pd.DataFrame],
    ]:
        """Returns function to perform adversary training

        :return: function to perform adversary training
        :rtype: Callable[ [ torch.utils.data.Dataset, argparse.Namespace, torch.nn.Module, str, Progress, TaskID, ], Tuple[Dict[str, float], pd.DataFrame], ]
        """
        pass

    @abc.abstractmethod
    def get_main_dataset(self) -> pd.DataFrame:
        """Returns main dataset for objective task

        :return: main dataset for objective task
        :rtype: pd.DataFrame
        """
        pass

    @abc.abstractmethod
    def get_adv_classification_dataset(self) -> pd.DataFrame:
        """Returns dataset for adversarial task

        :return: dataset for adversarial task
        :rtype: pd.DataFrame
        """
        pass

    def build_main_dataset(
        self, ratings: pd.DataFrame, users: pd.DataFrame, prefetch_to_gpu: bool
    ) -> torch.utils.data.Dataset:
        """Routine to build PyTorch dataset from pandas dataframes for task training

        :param ratings: movie ratings
        :type ratings: pd.DataFrame
        :param users: user data
        :type users: pd.DataFrame
        :param prefetch_to_gpu: whether to pre-load data to gpu
        :type prefetch_to_gpu: bool
        :return: PyTorch dataset for main task
        :rtype: torch.utils.data.Dataset
        """
        main_dataset_builder = self.get_main_dataset()
        return main_dataset_builder(ratings, users, prefetch_to_gpu)

    def bulid_adversary_dataset(
        self, users: pd.DataFrame, prefetch_to_gpu: bool
    ) -> torch.utils.data.Dataset:
        """Routine to build PyTorch dataset from pandas dataframes for adversarial task

        NOTE(review): method name keeps the historical "bulid" typo for
        backward compatibility with existing subclasses/callers.

        :param users: user data
        :type users: pd.DataFrame
        :param prefetch_to_gpu: whether to pre-load data to gpu
        :type prefetch_to_gpu: bool
        :return: PyTorch dataset for adversarial task
        :rtype: torch.utils.data.Dataset
        """
        adv_dataset_builder = self.get_adv_classification_dataset()
        return adv_dataset_builder(users, prefetch_to_gpu)

    def build_edges(self, train_ratings: pd.DataFrame) -> torch.LongTensor:
        """Routine to build edges for graph neural networks

        Edges are added in both directions (user->movie and movie->user),
        producing a 2 x (2 * num_ratings) tensor.

        :param train_ratings: train movie ratings
        :type train_ratings: pd.DataFrame
        :return: edge tensor
        :rtype: torch.LongTensor
        """
        edges = np.hstack(
            (
                np.stack(
                    [
                        train_ratings["user_id"].values,
                        train_ratings["movie_id"].values,
                    ]
                ),
                np.stack(
                    [
                        train_ratings["movie_id"].values,
                        train_ratings["user_id"].values,
                    ]
                ),
            )
        )
        edges = torch.LongTensor(edges)
        return edges

    @abc.abstractmethod
    def get_base_model(self) -> torch.nn.Module:
        """Returns the base model for main task and optionally adversarial defense branch

        :return: base model
        :rtype: torch.nn.Module
        """
        pass

    @abc.abstractmethod
    def get_adversary_models(self, mode: str) -> List[torch.nn.Module]:
        """Returns a list of adversaries attacking different parts of the network

        :param mode: sensitive attribute to attack (one of age, gender or occupation)
        :type mode: str
        :return: list of adversaries
        :rtype: List[torch.nn.Module]
        """
        pass

    @abc.abstractmethod
    def get_oracle_adversary_optimizers(
        self, adversaries: List[torch.nn.Module], mode: str
    ) -> List[torch.optim.Optimizer]:
        """Returns a list of optimizers corresponding to the adversaries

        :param adversaries: list of adversaries attacking different parts of the network
        :type adversaries: List[torch.nn.Module]
        :param mode: sensitive attribute to attack (one of age, gender or occupation)
        :type mode: str
        :return: list of optimizers
        :rtype: List[torch.optim.Optimizer]
        """
        pass

    @abc.abstractmethod
    def get_task_optimizer(self) -> torch.optim.Optimizer:
        """Returns the optimizer corresponding to the task model

        :return: optimizer corresponding to the task model
        :rtype: torch.optim.Optimizer
        """
        pass

    @abc.abstractmethod
    def get_adv_optimizer(self, mode: str) -> torch.optim.Optimizer:
        """Returns the optimizer corresponding to the adversary model

        :param mode: sensitive attribute to attack (one of age, gender or occupation)
        :type mode: str
        :return: optimizer corresponding to the adversary model
        :rtype: torch.optim.Optimizer
        """
        pass

    @abc.abstractmethod
    def num_adversaries(self) -> int:
        """Returns the number of adversaries

        :return: number of adversaries
        :rtype: int
        """
        pass

    @abc.abstractmethod
    def get_ordered_adversary_names(self) -> List[str]:
        """Returns a list of adversary names, in the same order as the model and optimizer method

        :return: list of adversary names
        :rtype: List[str]
        """
        pass

    def train_task_with_adversary(
        self,
        mode: str,
        dirname: str,
        refresh=True,
        progress=None,
        task=None,
        adv_tasks=None,
    ):
        """Train a task, then evaluate adversary performance

        :param mode: sensitive attribute to evaluate (one of age, occupation and gender)
        :type mode: str
        :param dirname: directory to store model checkpoints
        :type dirname: str
        :param refresh: whether to re-initialize model for each sensitive attribute, defaults to True
        :type refresh: bool, optional
        :param progress: progress bar instance, defaults to None
        :type progress: rich.progress.Progress, optional
        :param task: task instance, defaults to None
        :type task: rich.progress.TaskID, optional
        :param adv_tasks: list of adversary progress tasks to update, defaults to None
        :type adv_tasks: list, optional
        """
        # Avoid a mutable default argument; None means "no adversary tasks".
        if adv_tasks is None:
            adv_tasks = []
        if refresh:
            self.base_model = self.get_base_model()
        optimizer_task = self.get_task_optimizer()
        optimizer_adv = self.get_adv_optimizer(mode)
        self.base_model.set_mode(mode)
        for epoch in range(self.args.num_epochs):
            # Validate every valid_freq epochs (always at epoch 0).
            if epoch % (self.args.valid_freq) == 0:
                with torch.no_grad():
                    task_measures, task_cm = self.test_routine(
                        self.test_set, self.args, self.base_model, progress, task
                    )
            self.train_routine(
                self.train_loader,
                self.adv_train_loader,
                self.args,
                self.base_model,
                optimizer_task,
                optimizer_adv,
                False,
            )
            gc.collect()
            # NOTE(review): between validation epochs this re-logs the most
            # recent measures rather than computing fresh ones — confirm intended.
            wandb.log(task_measures)
            task_tb = wandb.Table(
                columns=list(task_cm.columns), data=task_cm.values.tolist()
            )
            wandb.log({"Task Confusion Matrix": task_tb})
            self.args.logger.info(
                [epoch, "task", str(task_measures), str(task_cm.to_dict())]
            )
            progress.update(
                task,
                advance=1,
                measures=task_measures,
                conf_matrix=task_cm,
                refresh=True,
            )
        path = os.path.join(dirname, "model.pth")
        torch.save(self.base_model.state_dict(), path)
        # Fine-tune and evaluate one oracle adversary per attacked location.
        oracle_adversaries = self.get_adversary_models(mode)
        optimizer_oracle_adversaries = self.get_oracle_adversary_optimizers(
            oracle_adversaries, mode
        )
        for name, oracle_adversary, optimizer_oracle_adversary, adv_task in zip(
            self.get_ordered_adversary_names(),
            oracle_adversaries,
            optimizer_oracle_adversaries,
            adv_tasks,
        ):
            oracle_adversary.set_mode(mode)
            for epoch in range(self.args.finetune_epochs):
                self.adv_train_routine(
                    self.adv_train_loader,
                    self.args,
                    oracle_adversary,
                    optimizer_oracle_adversary,
                    progress,
                    task,
                )
                gc.collect()
                with torch.no_grad():
                    adversary_measures, adversary_cm = self.adv_test_routine(
                        self.adv_test_set,
                        self.args,
                        oracle_adversary,
                        mode,
                        progress,
                        task,
                    )
                wandb.log(adversary_measures)
                adversary_tb = wandb.Table(
                    columns=list(adversary_cm.columns),
                    data=adversary_cm.values.tolist(),
                )
                wandb.log({"Adversary Confusion Matrix": adversary_tb})
                self.args.logger.info(
                    [
                        epoch,
                        "adversary",
                        str(adversary_measures),
                        str(adversary_cm.to_dict()),
                    ]
                )
                progress.update(
                    adv_task,
                    advance=1,
                    measures=adversary_measures,
                    conf_matrix=adversary_cm,
                    refresh=True,
                )
            path = os.path.join(dirname, f"adversary_{mode}_{name}.pth")
            torch.save(oracle_adversary.state_dict(), path)

    def _add_progress_tasks(self, progress, label: str):
        """Add one progress row for a task plus one row per adversary.

        Replaces three copy-pasted setup blocks in :meth:`run` (which also
        mislabeled the occupation adversaries as "Age").

        :param progress: progress bar instance
        :type progress: rich.progress.Progress
        :param label: display label, e.g. "Gender"
        :type label: str
        :return: tuple of (task id, list of adversary task ids)
        """
        task = progress.add_task(
            f"[cyan]{label} Task",
            total=self.args.num_epochs,
            measures={},
            conf_matrix=[],
        )
        adv_tasks = [
            progress.add_task(
                f"[cyan]{label} {name} Adversary",
                total=self.args.finetune_epochs,
                measures={},
                conf_matrix=[],
            )
            for name in self.get_ordered_adversary_names()
        ]
        return task, adv_tasks

    def run(self, refresh=True):
        """Running task training, then evaluate adversaries in three sensitive attributes (gender, age, occupation)

        :param refresh: whether to re-initialize model for each sensitive attribute, defaults to True
        :type refresh: bool, optional
        """
        progress = Progress(
            "[progress.description]{task.description}",
            TextColumn("[bold green]{task.fields[measures]}", justify="right"),
            TextColumn(
                "[dark_goldenrod]Truncated CM {task.fields[conf_matrix]}",
                justify="right",
            ),
            BarColumn(),
            "[progress.percentage]{task.percentage:>3.0f}%",
            TimeRemainingColumn(),
            auto_refresh=False,
        )
        logname = self.args.logname
        print("Log stored at: ", logname)
        run = wandb.init(
            project="information-obfuscation",
            entity="peiyuanl",
            name=logname,
            config=vars(self.args),
        )
        dirname = os.path.join(
            "../checkpoints",
            self.args.experiment,
            self.args.task,
            self.args.model,
            logname,
        )
        Path(dirname).mkdir(parents=True, exist_ok=True)
        with progress:
            # Create every progress row up front to ensure layout correctness,
            # in the same order the attributes are trained below.
            mode_tasks = [
                (mode, *self._add_progress_tasks(progress, mode.capitalize()))
                for mode in ("gender", "age", "occupation")
            ]
            for mode, task, adv_tasks in mode_tasks:
                self.train_task_with_adversary(
                    mode,
                    dirname,
                    refresh=refresh,
                    progress=progress,
                    task=task,
                    adv_tasks=adv_tasks,
                )
        trained_model_artifact = wandb.Artifact(
            logname + "_model", type="model", description="Task and adversary models"
        )
        trained_model_artifact.add_dir(dirname)
        run.log_artifact(trained_model_artifact)
        dataset_artifact = wandb.Artifact(
            logname + "_dataset",
            type="dataset",
            description="Dataset used to train the models",
        )
        dataset_artifact.add_dir(MOVIELENS_1M_DIR)
        run.log_artifact(dataset_artifact)
|
# Copyright dunnhumby Germany GmbH 2017.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# Utility library to use in scripts
# This function installs packages with specific versions and also pin them
# (with prio 990) to those versions by appending pinning information to
# /etc/apt/preferences.d/cachalot, so they are not upgraded accidentally.
#
# All packages should be specified with version: pkg=version.
#
# Version can have wildcards, see apt-get/apt_preferences documentation for
# details
# Install pkg=version packages and pin each to its version (see header
# comment). Runs in a subshell so set/option changes don't leak.
apt_pin_install()
{
    ( { set +x -e; } 2>/dev/null # disable verboseness (silently)
    # Pin the packages
    for pkg_ver in "$@"
    do
        # Split "pkg=version" on the first '=' only (version may contain '=').
        pkg="$(echo "$pkg_ver" | cut -d= -f1)"
        ver="$(echo "$pkg_ver" | cut -d= -f2-)"
        # If we still have a version, print the pinning spec to Prio
        # 990 means don't install newer versions, but don't downgrade
        # if installed is newer.
        # https://manpages.debian.org/stretch/apt/apt_preferences.5.en.html#How_APT_Interprets_Priorities
        # NOTE: heredoc body must stay at column 0 (plain <<, not <<-).
        cat <<EOT >> /etc/apt/preferences.d/cachalot
Package: $pkg
Pin: version $ver
Pin-Priority: 990
EOT
    done
    # Install the packages
    apt -y install "$@"
    )
}
# Print "--version=VER PKG" (or just "PKG" when no version is given),
# suitable for splatting into a `gem install` command line.
gem_ver()
{
    ( { set +x -e; } 2>/dev/null # disable verboseness (silently)
    pkg="$1"
    ver="$2"
    # If there is a $ver specified, then get the latest one
    # (resolved via the repo-local ./last-version helper — assumes the
    # script runs from the directory containing it; TODO confirm)
    ver="${ver:+$(./last-version gem "$pkg" "$ver")}"
    # If we still have a version, then pass it to gem explicitly
    # (the trailing space inside the expansion is deliberate)
    echo "${ver:+--version=$ver }$pkg"
    )
}
# Configure apt (no recommended packages), refresh package lists, and
# install the base tooling needed by the rest of the provisioning scripts.
apt_update_and_install_base_packages()
{
    # Do not install recommended packages by default
    echo 'APT::Install-Recommends "0";' > /etc/apt/apt.conf.d/99no-recommends
    # Make sure our packages list is updated
    apt update
    # Get the current distribution name
    apt -y install lsb-release
    dist="$(lsb_release -cs)"
    # Select extra packages depending on the distro version
    case "$dist" in
        bionic)
            extra_packages="gpg-agent dirmngr"
            ;;
        *)
            extra_packages=
            ;;
    esac
    # We install some basic packages first.
    # ($extra_packages is intentionally unquoted: it must word-split
    # into separate package names, and may be empty.)
    apt -y install apt-transport-https software-properties-common curl $extra_packages
}
# Import Bintray's apt signing key from the Ubuntu keyserver.
apt_install_bintray_key()
{
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
}
# Register one apt source per "subject/repo" argument, pointing at that
# Bintray repo's release and prerelease channels for the current distro,
# then install the matching apt_preferences file.
apt_add_bintray_repos()
{
    dist="$(lsb_release -cs)"
    apt_install_bintray_key
    for repo in "$@"
    do
        # Quote the command substitution in the redirection target so an
        # unexpected character in $repo cannot word-split or glob the path.
        echo "deb https://dl.bintray.com/$repo $dist release prerelease" \
            > "/etc/apt/sources.list.d/$(echo "$repo" | tr / -).list"
    done
    # Add apt_preferences file, substituting the ${DIST} placeholder.
    # $dist is quoted so the sed script stays a single word.
    sed 's/\${DIST}/'"$dist"'/g' bintray-apt-preferences > /etc/apt/preferences.d/cachalot-bintray
}
# Remove no-longer-needed packages and the downloaded package lists to
# shrink the resulting image.
cleanup()
{
    apt-get autoremove -y
    rm -fr /var/lib/apt/lists/*
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.