text stringlengths 1 1.05M |
|---|
#!/bin/bash
##############################################################################################################
quick_install=${1:-"N"}
docker_network=${2:-'oracle_network'}
db_file_name=${3:-'oracle-database-xe-18c-1.0-1.x86_64.rpm'}
db_version=${4:-'18c'}
db_sys_pwd=${5:-'oracle'}
db_port=${6:-31521}
em_port=${7:-35500}
apex_file_name=${8:-'apex_19.1.zip'}
apex_version=${9:-'19.1'}
apex_admin_username=${10:-'ADMIN'}
apex_admin_pwd=${11:-'Welc0me@1'}
apex_admin_email=${12:-'wfgdlut@gmail.com'}
ords_file_name=${13:-'ords-19.2.0.199.1647.zip'}
ords_version=${14:-'19.2.0'}
ords_port=${15:-32513}
ip_address=${16:-'localhost'}
url_check=""
fileName=""
docker_prefix='rapid-apex'
oss_url='https://oracle-apex-bucket.s3.ap-northeast-1.amazonaws.com/'
echo ">>> print all of input parameters..."
echo $*
echo ">>> end of print all of input parameters..."
##############################################################################################################
echo ""
echo "--------- Step 1: Download installation media ---------"
echo ""
work_path=`pwd`
echo ">>> current work path is $work_path"
# check if url is valid
# Probes $1 with a HEAD request and sets the global url_check to "N" when
# the server does not answer HTTP 200 within the 10s limits.
# Globals:   url_check (written)
# Arguments: $1 - URL to probe
function httpRequest()
{
  unset url_check
  # Let curl report the status code directly (-w '%{http_code}'): robust
  # against header layout, unlike the old `echo|grep HTTP|awk` parse, and
  # it yields "000" when the request itself fails (DNS, refused, timeout).
  local code
  code=$(curl -s -m 10 --connect-timeout 10 -I -o /dev/null -w '%{http_code}' "$1")
  # check return code
  if [ "$code" != "200" ]; then
    echo ">>> $1 cannot be touched..."
    url_check="N"
  fi
}
# download installation file
# Resolves $1 to a local file in the current working directory:
#   - absolute path        -> copy it here
#   - URL (contains "/")   -> probe with httpRequest, then download
#   - bare file name       -> fetch from the default repository $oss_url
# Globals:   fileName (written), url_check (read), oss_url (read)
# Arguments: $1 - absolute path, URL, or bare file name
# Exits the script with status 1 when the URL probe fails (the original
# used a bare `exit`, which propagated whatever $? happened to be).
function download()
{
  local fileUrl="$1"
  fileName=""
  echo "fileUrl=$fileUrl"
  if [[ "$fileUrl" == *"/"* ]]; then
    if [[ "${fileUrl:0:1}" == "/" ]]; then
      # user has downloaded the file already, just copy it
      echo ">>> copy installation file to files folder"
      fileName="${fileUrl##*/}"
      cp -- "$fileUrl" .
    else
      # download from the url the user provided
      echo ">>> download installation file from the url user provided"
      httpRequest "$fileUrl"
      if [ "$url_check" = "N" ]; then
        fileName=""
        exit 1
      else
        fileName="${fileUrl##*/}"
        curl -o "$fileName" "$fileUrl"
      fi
    fi
  else
    # try to download installation file from the default repository
    fileName="$fileUrl"
    echo ">>> download $fileName from $oss_url"
    if [ ! -f "$fileName" ]; then
      httpRequest "$oss_url$fileName"
      if [ "$url_check" = "N" ]; then
        exit 1
      else
        curl -o "$fileName" "$oss_url$fileName"
      fi
    fi
  fi
}
# download apex installation file
cd $work_path/docker-xe/files
download $apex_file_name
apex_file_name=$fileName
echo ">>> apex_file_name="$apex_file_name
echo ""
# download ords installation file
cd $work_path/docker-ords/files
download $ords_file_name
ords_file_name=$fileName
echo ">>> ords_file_name="$ords_file_name
echo ""
# download oracle db installation file
cd $work_path/docker-xe/files
download $db_file_name
db_file_name=$fileName
echo ">>> db_file_name="$db_file_name
echo ""
cd $work_path/docker-xe
if [ ! -d ../apex ]; then
echo ">>> unzip apex installation media ..."
mkdir ../apex
cp scripts/apex-install* ../apex/
unzip -oq files/$apex_file_name -d ../ &
fi;
echo ""
echo "--------- Step 2: compile oracle xe docker image ---------"
echo ""
echo ">>> docker image $docker_prefix/oracle-xe:$db_version does not exist, begin to build docker image..."
docker build -t $docker_prefix/oracle-xe:$db_version --build-arg DB_SYS_PWD=$db_sys_pwd .
echo ""
echo "--------- Step 3: startup oracle xe docker image ---------"
echo ""
docker run -d \
-p $db_port:1521 \
-p $em_port:5500 \
--name=oracle-xe \
--volume $work_path/oradata:/opt/oracle/oradata \
--volume $work_path/apex:/tmp/apex \
--network=$docker_network \
$docker_prefix/oracle-xe:$db_version
# wait until database configuration is done
rm -f xe_installation.log
docker logs oracle-xe >& xe_installation.log
while : ; do
[[ `grep "Completed: ALTER PLUGGABLE DATABASE" xe_installation.log` ]] && break
docker logs oracle-xe >& xe_installation.log
echo "wait until oracle-xe configuration is done..."
sleep 10
done
##############################################################################################################
echo ""
echo "--------- Step 4: install apex on xe docker image ---------"
echo ""
docker exec -it oracle-xe bash -c "source /home/oracle/.bashrc && cd /tmp/apex && chmod +x apex-install.sh && . apex-install.sh XEPDB1 $db_sys_pwd $apex_admin_username $apex_admin_pwd $apex_admin_email"
##############################################################################################################
echo ""
echo "--------- Step 5: compile oracle ords docker image ---------"
echo ""
cd $work_path/docker-ords/
if [[ "$(docker images -q $docker_prefix/oracle-ords:$ords_version 2> /dev/null)" == "" ]]; then
echo ">>> docker image $docker_prefix/oracle-ords:$ords_version does not exist, begin to build docker image..."
docker build -t $docker_prefix/oracle-ords:$ords_version .
else
echo ">>> docker image $docker_prefix/oracle-ords:$ords_version is found, skip compile step and go on..."
fi;
##############################################################################################################
echo ""
echo "--------- Step 6: startup oracle ords docker image ---------"
echo ""
docker run -d -it --network=$docker_network \
-e TZ=Asia/Shanghai \
-e DB_HOSTNAME=oracle-xe \
-e DB_PORT=1521 \
-e DB_SERVICENAME=XEPDB1 \
-e APEX_PUBLIC_USER_PASS=oracle \
-e APEX_LISTENER_PASS=oracle \
-e APEX_REST_PASS=oracle \
-e ORDS_PASS=oracle \
-e SYS_PASS=$db_sys_pwd \
-e TOMCAT_FILE_NAME=$tomcat_file_name \
--volume $work_path/oracle-ords/$ords_version/config:/opt/ords \
--volume $work_path/apex/images:/ords/apex-images \
-p $ords_port:8080 \
$docker_prefix/oracle-ords:$ords_version
cd $work_path
echo ""
echo "----------------------- APEX Info -----------------------"
echo ""
echo "Admin URL: http://$ip_address:$ords_port/ords"
echo "Workspace: INTERNAL"
echo "User Name: $apex_admin_username"
echo "Password: $apex_admin_pwd"
echo ""
echo "------------------------ DB Info ------------------------"
echo ""
echo "CDB: sqlplus sys/$db_sys_pwd@$ip_address:$db_port/XE as sysdba"
echo "PDB: sqlplus sys/$db_sys_pwd@$ip_address:$db_port/XEPDB1 as sysdba"
echo ""
echo "---------------------- Config Info ----------------------"
echo ""
echo "Database Data File: $work_path/oradata/"
echo "ORDS Config File: $work_path/oracle-ords/"
echo ""
echo "---------------------- Docker Info ----------------------"
echo ""
echo "docker images"
echo "docker ps -a"
echo ""
echo "--------- All installations are done, enjoy it! ---------"
echo ""
echo "star me if you like it: https://github.com/wfg2513148/rapid-apex"
echo ""
|
from converters.models import TuBlog, TuBlogUser, TuUser
from src import create_app
from converters import content
from src.model.models import Blog, User, BlogParticipiation
def convert():
    """Migrate legacy TuBlog/TuBlogUser rows into the new Blog and
    BlogParticipiation tables.

    Assumes the databases for both schemas are initialised by
    ``create_app()``. Blogs whose owner is missing from the new ``User``
    table are skipped with a console message.
    """
    create_app()

    def get_blog_type(blog):
        # Map the legacy string blog_type onto the new integer enum:
        # "open" -> 1, "hidden" -> 3, anything else -> 2.
        if blog.blog_type == "open":
            return 1
        elif blog.blog_type == "hidden":
            return 3
        else:
            return 2

    for b in TuBlog.select():
        blog_type = get_blog_type(b)
        year = b.blog_date_add.year
        month = b.blog_date_add.month
        # Re-register the avatar in the new content store, filed by
        # blog id / owner / year / month.
        avatar = content.create_content(
            b.blog_avatar, "blog_avatar", b.blog, b.user_owner, year, month
        )
        # Fall back to the creation date when the blog was never edited.
        updated = b.blog_date_edit
        if not updated:
            updated = b.blog_date_add
        owner = User.get_or_none(User.id == b.user_owner)
        if not owner:
            # Owner was never migrated — skip the blog, log the legacy login.
            print(
                "Skipped blog. Owner:"
                + TuUser.get(TuUser.user == b.user_owner).user_login
            )
            continue
        # Rewrite legacy upload links inside the description text.
        about = content.replace_uploads_in_text(owner, b.blog_description)
        # Synthesize a url from the blog id when the legacy row has none.
        url = b.blog_url or "blog" + str(b.blog)
        blog = Blog.create(
            id=b.blog,
            created_date=b.blog_date_add,
            updated_date=updated,
            description=about,
            title=b.blog_title,
            url=url,
            blog_type=blog_type,
            creator=owner,
            image=avatar,
        )
        # The creator always participates in their own blog with role 1.
        BlogParticipiation.create(
            blog=blog, user=owner, role=1,
        )

    for bu in TuBlogUser.select():
        # Legacy role mapping: user_role == 1 -> 3, user_role > 1 -> 1.
        # NOTE(review): the second `if` overrides for user_role > 1 —
        # confirm this mapping is intended and not a missing elif.
        role = 1
        if bu.user_role == 1:
            role = 3
        if bu.user_role > 1:
            role = 1
        BlogParticipiation.create(
            blog=Blog.get(Blog.id == bu.blog),
            user=User.get(User.id == bu.user),
            role=role,
        )
|
const express = require('express');
const router = express.Router();
const Book = require('../models/book');
// GET route: list all books in the collection as JSON.
router.get('/', async (req, res) => {
  try {
    const books = await Book.find();
    res.json(books);
  } catch (err) {
    // Database failure -> 500 with the raw error message.
    res.status(500).json({ message: err.message });
  }
});

// GET route: search for a book by title.
// getBooksByTitle middleware populates res.books (or answers 404 itself).
router.get('/title/:title', getBooksByTitle, (req, res) => {
  res.json(res.books);
});

// POST route: add a new book built from the request body.
// Mongoose validation failures surface as a 400.
router.post('/', async (req, res) => {
  const book = new Book({
    title: req.body.title,
    author: req.body.author,
    genre: req.body.genre,
    year: req.body.year,
    pages: req.body.pages,
    rating: req.body.rating
  });
  try {
    const newBook = await book.save();
    res.status(201).json(newBook);
  } catch (err) {
    res.status(400).json({ message: err.message });
  }
});
// PATCH route: partially update a book.
// getBook has loaded the document onto res.book; copy over any field
// present (non-null/undefined) in the request body, then persist it.
router.patch('/:id', getBook, async (req, res) => {
  const updatableFields = ['title', 'author', 'genre', 'year', 'pages', 'rating'];
  for (const field of updatableFields) {
    if (req.body[field] != null) {
      res.book[field] = req.body[field];
    }
  }
  try {
    const updatedBook = await res.book.save();
    res.json(updatedBook);
  } catch (err) {
    // Validation failure on save -> 400.
    res.status(400).json({ message: err.message });
  }
});
// DELETE route: delete a book.
// getBook has already loaded the document onto res.book.
// NOTE(review): Document#remove() was deprecated in Mongoose 6 and removed
// in Mongoose 7 (replaced by deleteOne()) — confirm the installed version.
router.delete('/:id', getBook, async (req, res) => {
  try {
    await res.book.remove();
    res.json({ message: 'Book deleted' });
  } catch (err) {
    res.status(500).json({ message: err.message });
  }
});
// Middleware: load every book whose title matches req.params.title into
// res.books, or answer 404 when none match.
//
// Bug fix: Model.find() resolves to an array and never to null, so the
// original `books == null` check could never fire — an unknown title fell
// through to the handler with an empty list instead of a 404. Test the
// array length instead.
async function getBooksByTitle(req, res, next) {
  try {
    const books = await Book.find({ title: req.params.title });
    if (books.length === 0) {
      return res.status(404).json({ message: 'Cannot find book with that title' });
    }
    res.books = books;
    next();
  } catch (err) {
    return res.status(500).json({ message: err.message });
  }
}
// Middleware: resolve the :id route parameter to a Book document stored on
// res.book; answers 404 when no such book exists, 500 on lookup failure.
async function getBook(req, res, next) {
  try {
    const found = await Book.findById(req.params.id);
    if (!found) {
      return res.status(404).json({ message: 'Cannot find book' });
    }
    res.book = found;
    next();
  } catch (err) {
    return res.status(500).json({ message: err.message });
  }
}
module.exports = router; |
import os
def terminate_long_running_processes(processes, threshold):
    """Send SIGKILL to every process whose running time exceeds ``threshold``.

    Args:
        processes: iterable of ``(pid, running_time)`` pairs.
        threshold: running-time cutoff; processes strictly above it are
            signalled.

    Returns:
        List of pids that exceeded the threshold. Kill attempts are
        best-effort — this mirrors the original ``os.system`` version,
        which ignored the shell command's exit status.
    """
    import signal  # local import: only needed for the SIGKILL constant

    terminated_processes = []
    for pid, running_time in processes:
        if running_time > threshold:
            # os.kill signals the pid directly instead of interpolating it
            # into a shell command (the old f-string into os.system was a
            # shell-injection risk for untrusted pid values).
            try:
                os.kill(pid, signal.SIGKILL)
            except (ProcessLookupError, PermissionError):
                pass  # best-effort: pid already gone or not ours to kill
            terminated_processes.append(pid)
    return terminated_processes
# Demo / smoke test.
# WARNING(review): this runs at import time and sends real kill signals to
# whatever processes happen to own these pids — guard with
# `if __name__ == "__main__":` or remove before reuse.
processes = [(123, 50), (456, 30), (789, 90), (101, 20)]
threshold = 40
print(terminate_long_running_processes(processes, threshold)) # Output: [123, 789]
#!/bin/bash -u
# Copyright 2018 ConsenSys AG.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# Tell .common.sh that no lock is required to run this script, then pull in
# the environment (LOCK_FILE) and shared helpers (bold/normal, etc.).
NO_LOCK_REQUIRED=true
. ./.env
. ./.common.sh
# Build and run containers and network.
# Record which compose file started the network so the stop/remove scripts
# can find it. LOCK_FILE comes from .env/.common.sh.
echo "docker-compose.yml" > ${LOCK_FILE}
echo "${bold}*************************************"
echo "Quorum Dev Quickstart"
echo "*************************************${normal}"
echo "Start network"
echo "--------------------"
# Optional dependency stack is started first, if present.
if [ -f "docker-compose-deps.yml" ]; then
echo "Starting dependencies..."
docker-compose -f docker-compose-deps.yml up --detach
# NOTE(review): fixed 60s wait assumes the dependencies are healthy by
# then — consider polling container health instead; confirm before changing.
sleep 60
fi
echo "Starting network..."
docker-compose build --pull
docker-compose up --detach
#list services and endpoints
./list.sh
|
// On DOM ready, draw the initial chart.
$(document).ready(function () {
    refreshCharts()
});

// Fetch the daily Bult/Tfuel price series from the backend and render it.
// Assumes the endpoint returns an array of { Item1: <timestamp>, Item2: <price> }
// objects (C# ValueTuple serialisation) — TODO confirm against the API.
function refreshCharts() {
    $.getJSON("api/getBultPrice/", function (data) {
        var dailyPrice = [];
        $.each(data, function (key, val) {
            //var innerArr = [val[0], parseFloat(val[1]).toFixed(4)];
            var innerArr = [val["Item1"], val["Item2"]];
            dailyPrice.push(innerArr);
        });
        showDailyBultPrice(dailyPrice);
    });
}
// Render the Bult/Tfuel price series as a dark-themed ApexCharts line
// chart inside #chartBultPrice.
// `data` is an array of [timestamp, price] pairs (datetime x-axis).
function showDailyBultPrice(data) {
    var chartOptions = {
        chart: {
            type: 'line',
            height: 350
        },
        series: [{
            name: 'Bult / Tfuel',
            data: data
        }],
        title: {
            text: 'Bult / Tfuel',
            align: 'left'
        },
        dataLabels: {
            enabled: false
        },
        markers: {
            size: 0,
        },
        stroke: {
            width: [3]
        },
        xaxis: {
            type: 'datetime',
        },
        yaxis: {
            labels: {
                // Axis labels pass through unchanged.
                formatter: function (val) {
                    return val;
                },
            },
        },
        tooltip: {
            enabled: true,
            shared: false,
            y: {
                // Tooltip shows prices with four decimals.
                formatter: function (val) {
                    return (val).toFixed(4)
                }
            }
        },
        theme: {
            mode: 'dark'
        }
    };

    // Clear any previously rendered chart before drawing the new one.
    var priceChart = new ApexCharts(document.querySelector("#chartBultPrice"), chartOptions);
    $("#chartBultPrice").empty();
    priceChart.render();
}
|
<gh_stars>1-10
'use strict';
/* global MockSaveBookmarkHtml, BookmarkEditor, Bookmark, GridItemsFactory */
requireApp('homescreen/test/unit/mock_save_bookmark.html.js');
requireApp('homescreen/js/grid_components.js');
requireApp('homescreen/js/bookmark.js');
requireApp('homescreen/js/bookmark_editor.js');
require('/shared/js/url_helper.js');
suite('bookmark.js >', function() {
var wrapperNode;
suiteSetup(function() {
wrapperNode = document.createElement('section');
wrapperNode.innerHTML = MockSaveBookmarkHtml;
document.body.appendChild(wrapperNode);
});
suiteTeardown(function() {
document.body.removeChild(wrapperNode);
});
suite('BookmarkEditor >', function() {
suiteSetup(function() {
BookmarkEditor.init({
data: {
name: 'Mozilla',
url: 'http://www.mozilla.org/es-ES/firefox/new/'
},
onsaved: function() { },
oncancelled: function() { }
});
});
test('The title has to be defined from options.data.name >', function() {
assert.equal(document.getElementById('bookmark-title').value,
'Mozilla');
});
test('The URL has to be defined from options.data.url >', function() {
assert.equal(document.getElementById('bookmark-url').value,
'http://www.mozilla.org/es-ES/firefox/new/');
});
});
suite('Bookmark >', function() {
var bookmark;
var URL = 'http://www.mozilla.org/es-ES/firefox/new/';
var icon = 'http://www.mozilla.org/images/icon.png';
var name = 'Mozilla';
suiteSetup(function() {
bookmark = new Bookmark({
bookmarkURL: URL,
name: name,
icon: icon,
iconable: false,
useAsyncPanZoom: true
});
});
test('All bookmark objects should be defined as bookmarks >', function() {
assert.equal(bookmark.type, GridItemsFactory.TYPE.BOOKMARK);
});
test('All bookmark objects are removable >', function() {
assert.isTrue(bookmark.removable);
});
test('All bookmark objects define en-US as default locale >', function() {
assert.equal(bookmark.manifest.default_locale, 'en-US');
});
test('This bookmark uses asyncPanZoom >', function() {
assert.isTrue(bookmark.useAsyncPanZoom);
});
test('This bookmark is not iconable >', function() {
assert.isFalse(bookmark.iconable);
});
test('This bookmark defines the url correctly >', function() {
assert.equal(bookmark.origin, URL);
assert.equal(bookmark.url, URL);
assert.equal(bookmark.bookmarkURL, URL);
});
test('This bookmark defines the icon correctly >', function() {
assert.equal(bookmark.manifest.icons[60], icon);
});
test('This bookmark defines the title correctly >', function() {
assert.equal(bookmark.manifest.name, name);
});
});
suite('BookmarkEditor - Invalid URL >', function() {
suiteSetup(function() {
BookmarkEditor.init({
data: {
name: 'Mozilla',
url: 'justAString'
}
});
});
test('Bookmarks with invalid URL should not be saved >', function() {
assert.ok(BookmarkEditor.addButton.disabled,
'Invalid URL, add button should be disabled');
});
});
suite('BookmarkEditor - Non-HTTP(S) URL >', function() {
suiteSetup(function() {
BookmarkEditor.init({
data: {
name: 'Mozilla',
url: 'rtsp://whatever.com'
}
});
});
test('Bookmarks with non-HTTP(S) URLs should be saved >', function() {
assert.isFalse(BookmarkEditor.addButton.disabled,
'Non-HTTP(S) URLs is ok, add button should be enabled');
});
});
});
|
<gh_stars>0
import renderToString from 'next-mdx-remote/render-to-string';
import { lunaComponents } from './luna/components';
import remarkLuna from './luna/remark';
import rehypeLuna from './luna/rehype';
import { fetcher } from './contentImportsFetcher';
import { getCustomComponents } from './customLunaComponents';
// Render an MDX document to a serialisable source string on the server.
//
// Bug fix: the original resolved front-matter imports with
// `Object.keys(...).forEach(async ...)` — forEach returns immediately and
// discards the callback promises, so `await Promise.all(Object.values(meta.imports))`
// ran against a still-empty object and rendering could start before any
// import had been fetched. Map the keys to promises and await them all.
//
// @param {Object} args
// @param {string} args.filePath - path of the MDX file (used by fetchers).
// @param {Object} args.data     - front-matter data (may declare imports
//                                 and a bibliography file).
// @param {string} args.content  - raw MDX body.
// @param {string} args.lang     - language code for custom components.
// @returns {Promise<{mdxSource: Object, meta: Object}>}
export async function renderMdxServer({filePath, data, content, lang}) {
  const customComponents = getCustomComponents(filePath, lang);
  const { components } = lunaComponents({ components: customComponents });

  const meta = { ...data };
  meta['imports'] = {};
  if (data.imports) {
    // Fetch all declared imports in parallel and wait for every one.
    await Promise.all(
      Object.keys(data.imports).map(async (imKey) => {
        meta.imports[imKey] = await fetcher({ filePath, ...data.imports[imKey] });
      })
    );
  }

  let bibliography = [];
  if (typeof meta['bibliography'] === 'string') {
    const bibFile = meta['bibliography'];
    // NOTE(review): split('.')[1] takes the FIRST dot-segment as the type;
    // for names like "refs.v2.bib" it yields "v2", not "bib" — confirm
    // bibliography file names never contain extra dots.
    bibliography = await fetcher({ filePath, file: bibFile, type: bibFile.split('.')[1] });
    bibliography = bibliography.bibliography;
  }

  const mdxSource = await renderToString(
    content,
    {
      components,
      scope: meta,
      mdxOptions: {
        remarkPlugins: [ [remarkLuna, { bibliography }] ],
        rehypePlugins: [ rehypeLuna ]
      }
    }
  );
  return { mdxSource, meta };
}
|
#!/bin/bash
# Installs MongoDB 4.4 from the official apt repositories (Ubuntu bionic)
# and starts mongod in the background.
echo "*********************************************"
echo "* Usage: install_mongodb_4_4.sh <VERSION> *"
echo "* (<VERSION> defaults to 4.4.0~rc3) *"
echo "*********************************************"
# set vars.
# Bug fix: the usage banner advertised a <VERSION> argument but the script
# never read $1 — honour it now, defaulting to the rc3 build.
VERS="${1:-4.4.0~rc3}"
USER=$(id -u -n)
KEY_NORMAL="deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse"
KEY_TESTING="deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/testing multiverse"
MONGODB_4_4_SRCS="/etc/apt/sources.list.d/mongodb-org-4.4.list"
MONGODB_4_4_KEY_SVR="https://www.mongodb.org/static/pgp/server-4.4.asc"
# get rid of existing sources for mongodb 4.4 (-f: don't fail when absent)
rm -f "$MONGODB_4_4_SRCS"
# Prefix privileged commands with sudo unless already running as root —
# this collapses the original duplicated root/non-root branches.
SUDO=""
if [ "$USER" != "root" ]; then
  SUDO="sudo"
fi
# grab key for mongodb 4.4 sources repo
wget -qO - "$MONGODB_4_4_KEY_SVR" | $SUDO apt-key add -
# add to sources list for mongodb 4.4
echo "$KEY_NORMAL" | $SUDO tee "$MONGODB_4_4_SRCS"
echo "$KEY_TESTING" | $SUDO tee -a "$MONGODB_4_4_SRCS"
# install MongoDB at the requested version
$SUDO apt-get update
$SUDO apt-get install -y "mongodb-org=$VERS"
# start mongod in the background with the stock config
$SUDO mongod -f /etc/mongod.conf &
ps -ax
|
#!/bin/sh
# Build the Swift package and propagate its exit status.
# Bug fix: the original `swift build && exit ${PIPESTATUS[0]}` used the
# bash-only PIPESTATUS array under #!/bin/sh — and there is no pipeline
# here anyway, so on a successful build dash aborted with "Bad
# substitution". exec hands the shell's exit status straight to swift.
exec swift build
|
<filename>src/java/org/opentele/server/dgks/monitoringdataset/version1_0_1/generated/KramPredictorType.java
package org.opentele.server.dgks.monitoringdataset.version1_0_1.generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for KramPredictorType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="KramPredictorType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Weight" type="{urn:oio:medcom:chronicdataset:1.0.0}LaboratoryReportType" minOccurs="0"/>
* <element name="Height" type="{urn:oio:medcom:chronicdataset:1.0.0}LaboratoryReportType" minOccurs="0"/>
* <element name="Smoking" type="{urn:oio:medcom:chronicdataset:1.0.0}LaboratoryReportType" minOccurs="0"/>
* <element name="Alcohol" type="{urn:oio:medcom:chronicdataset:1.0.0}LaboratoryReportType" minOccurs="0"/>
* <element name="Exercise" type="{urn:oio:medcom:chronicdataset:1.0.0}LaboratoryReportType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "KramPredictorType", propOrder = {
"weight",
"height",
"smoking",
"alcohol",
"exercise"
})
public class KramPredictorType {
@XmlElement(name = "Weight")
protected LaboratoryReportType weight;
@XmlElement(name = "Height")
protected LaboratoryReportType height;
@XmlElement(name = "Smoking")
protected LaboratoryReportType smoking;
@XmlElement(name = "Alcohol")
protected LaboratoryReportType alcohol;
@XmlElement(name = "Exercise")
protected LaboratoryReportType exercise;
/**
* Gets the value of the weight property.
*
* @return
* possible object is
* {@link LaboratoryReportType }
*
*/
public LaboratoryReportType getWeight() {
return weight;
}
/**
* Sets the value of the weight property.
*
* @param value
* allowed object is
* {@link LaboratoryReportType }
*
*/
public void setWeight(LaboratoryReportType value) {
this.weight = value;
}
/**
* Gets the value of the height property.
*
* @return
* possible object is
* {@link LaboratoryReportType }
*
*/
public LaboratoryReportType getHeight() {
return height;
}
/**
* Sets the value of the height property.
*
* @param value
* allowed object is
* {@link LaboratoryReportType }
*
*/
public void setHeight(LaboratoryReportType value) {
this.height = value;
}
/**
* Gets the value of the smoking property.
*
* @return
* possible object is
* {@link LaboratoryReportType }
*
*/
public LaboratoryReportType getSmoking() {
return smoking;
}
/**
* Sets the value of the smoking property.
*
* @param value
* allowed object is
* {@link LaboratoryReportType }
*
*/
public void setSmoking(LaboratoryReportType value) {
this.smoking = value;
}
/**
* Gets the value of the alcohol property.
*
* @return
* possible object is
* {@link LaboratoryReportType }
*
*/
public LaboratoryReportType getAlcohol() {
return alcohol;
}
/**
* Sets the value of the alcohol property.
*
* @param value
* allowed object is
* {@link LaboratoryReportType }
*
*/
public void setAlcohol(LaboratoryReportType value) {
this.alcohol = value;
}
/**
* Gets the value of the exercise property.
*
* @return
* possible object is
* {@link LaboratoryReportType }
*
*/
public LaboratoryReportType getExercise() {
return exercise;
}
/**
* Sets the value of the exercise property.
*
* @param value
* allowed object is
* {@link LaboratoryReportType }
*
*/
public void setExercise(LaboratoryReportType value) {
this.exercise = value;
}
}
|
<reponame>lizij/Leetcode
package Game_of_Life;
import java.util.Arrays;
public class Solution {
private int[][] board;
public void gameOfLife(int[][] board) {
if (board == null || board.length == 0 || board[0].length == 0) {
return;
}
// link to board
this.board = board;
int m = board.length, n = board[0].length;
/**
* Brute force with copy: simulating Game of life rules
* 2ms
*/
// // copy board
// int[][] clone = new int[m][n];
// for (int i = 0; i < m; i++) {
// System.arraycopy(board[i], 0, clone[i], 0, n);
// }
//
// for (int i = 0; i < m; i++) {
// for (int j = 0; j < n; j++) {
// int count = countNeighborLiveCells(clone, i, j);
//
// if (clone[i][j] == 1 && (count < 2 || count > 3)) {
// // under-population or over-population
// board[i][j] = 0;
// } else if (clone[i][j] == 0 && count == 3) {
// // reproduction
// board[i][j] = 1;
// }
// }
// }
/**
* Brute force without copy
* Use [2nd bit, 1st bit] to store next result in place, default 01 or 00
* for board[i][j] as bit
* 1. bit == 01:
* a. under-population or over-population: 01 -> 01
* b. 2 or 3 live neighbors: 01 -> 11
* 2. bit == 00:
* a. 3 live neighbors: 00 -> 10
* b. others: 00 -> 00
* 2ms
*/
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
int count = countNeighborLiveCells(board, i, j);
if ((board[i][j] & 1) == 1 && (count == 2 || count == 3)) {
board[i][j] = 3;
} else if ((board[i][j] & 1) == 0 && count == 3) {
board[i][j] = 2;
}
}
}
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
board[i][j] = board[i][j] >> 1;
}
}
// reset this.board
this.board = null;
}
private int countNeighborLiveCells(int[][] board, int i, int j) {
// get live neighbor cells
if (!isValid(i, j)) {
return 0;
}
int[][] directions = {{-1, -1}, {-1, 0}, {-1, 1}, {0, -1}, {0, 1}, {1, -1}, {1, 0}, {1, 1}};
int count = 0;
for (int[] d: directions) {
int x = i + d[0];
int y = j + d[1];
if (isValid(x, y) && (board[x][y] & 1) == 1) {
count++;
}
}
return count;
}
private boolean isValid(int i, int j) {
return i >= 0 && i < board.length && j >= 0 && j < board[0].length;
}
public static void main(String[] args) {
Solution s = new Solution();
test(s, new int[][]{
{0, 1, 1, 0},
{1, 1, 0, 1},
{1, 0, 0, 1},
{1, 1, 1, 1}
});
// [[1,1,1,0],
// [1,0,0,1],
// [0,0,0,1],
// [1,1,1,1]]
test(s, new int[][]{
{1},{0},{0},{1},{0},{0},{1},{0},{0},{1}
});
}
private static void test(Solution s, int[][] input) {
s.gameOfLife(input);
for (int[] row: input) {
System.out.println(Arrays.toString(row));
}
}
} |
<reponame>coussej/calc<filename>calc.go
package calc
import "math"
// Abs returns the absolute value of x.
func Abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}
// Ceil returns the smallest integer not less than the given float64.
func Ceil(x float64) int {
	rounded := math.Ceil(x)
	return int(rounded)
}
// Copysign returns a value with the magnitude of x and the sign of y.
// (The magnitude computation is inlined rather than delegated to Abs.)
func Copysign(x, y int) int {
	magnitude := x
	if magnitude < 0 {
		magnitude = -magnitude
	}
	if y < 0 {
		return -magnitude
	}
	return magnitude
}
// Floor returns the largest integer not greater than the given float64.
func Floor(x float64) int {
	floored := math.Floor(x)
	return int(floored)
}
// Max returns the largest integer from its arguments.
// With no arguments it returns the zero value 0.
func Max(x ...int) (max int) {
	for idx, val := range x {
		if idx == 0 || val > max {
			max = val
		}
	}
	return
}
// Min returns the smallest integer from its arguments.
// With no arguments it returns the zero value 0.
func Min(x ...int) (min int) {
	for idx, val := range x {
		if idx == 0 || val < min {
			min = val
		}
	}
	return
}
// Pow returns x^y, the base-x exponential of y, with as condition y >= 0. If
// y < 0, function returns 0.
//
// NOTE(review): x == 0 returns 0 for EVERY y, including y == 0 — so
// Pow(0, 0) == 0, unlike math.Pow(0, 0) == 1. Confirm callers expect this
// before changing.
func Pow(x, y int) (pow int) {
	switch {
	case x == 0 || y < 0:
		pow = 0 // zero base or negative exponent -> defined here as 0
	case y == 0:
		pow = 1 // x^0 == 1 for non-zero x
	default:
		// repeated multiplication; no overflow guard, y assumed small
		pow = 1
		for i := 0; i < y; i++ {
			pow *= x
		}
	}
	return
}
// Round returns the correctly rounded int from the given float64 x
//
// NOTE(review): Floor(x + .5) is "round half up": Round(-1.5) == -1,
// whereas math.Round(-1.5) == -2 — confirm this is intended for
// negative inputs.
func Round(x float64) int {
	return Floor(x + .5)
}
|
const mongoose = require('mongoose');
const User = mongoose.model('User');
const uuid = require('uuid');
const bcrypt = require('bcrypt');
exports.validateRegister = (req, res, next) => {
req.sanitizeBody('name');
req.checkBody('name', 'You must supply a name!').notEmpty();
req.sanitizeBody('username');
req.checkBody('username', 'You must supply a username').notEmpty();
req.checkBody('email', 'That email is not valid').isEmail();
req.sanitizeBody('email').normalizeEmail({
gmail_remove_dots: false,
gmail_remove_extension: false,
gmail_remove_subaddress: false
});
req.sanitizeBody('learning_language');
req.checkBody('learning_language', 'You must supply a learning language!').notEmpty();
req.sanitizeBody('native_language');
req.checkBody('native_language', 'You must supply a native language!').notEmpty();
req.checkBody('password', '<PASSWORD>').notEmpty();
req.checkBody('confirm_password', 'Confirmed password cannot be blank').notEmpty();
req.checkBody('confirm_password', "<PASSWORD>").equals(req.body.password);
const errors = req.validationErrors();
if (errors) {
res.json(errors);
return;
}
next();
};
exports.register = async (req, res) => {
const user = await new User(req.body).save();
res.send(user);
};
exports.healthTest = (req, res) => {
res.json(req.body.user);
};
|
class Scheduler:
    """Wrapper around an LR scheduler that logs the learning rate into
    ``lr_history`` before stepping and after every step.

    Works with any object exposing ``step()`` (or ``step(metric)`` for
    plateau-style schedulers) and an ``optimizer`` with ``state_dict()``.
    """

    def __init__(self, scheduler):
        # The wrapped scheduler; its class name selects the step() calling
        # convention in iterate().
        self.scheduler = scheduler
        self.lr_history = []
        self.name = self.scheduler.__class__.__name__

    def get_lr(self):
        """Return the current learning rate of the first param group."""
        state = self.scheduler.optimizer.state_dict()
        return state['param_groups'][0]['lr']

    def iterate(self, iterations):
        """Step the scheduler ``iterations`` times, recording the LR at the
        start and after each step."""
        self.lr_history.append(self.get_lr())
        # ReduceLROnPlateau's step() requires a metric; feed a constant
        # loss of 0 to simulate a plateau.
        plateau_like = self.name in ['ReduceLROnPlateau']
        for _ in range(iterations):
            if plateau_like:
                self.scheduler.step(0)
            else:
                self.scheduler.step()
            self.lr_history.append(self.get_lr())
|
<filename>src/bu_shapes.c<gh_stars>0
#include "bu_shapes.h"
#include "gf2d_draw.h"
#include "simple_logger.h"
Rect gf2d_rect(float x, float y, float w, float h)
{
Rect r;
gf2d_rect_set(r, x, y, w, h);
return r;
}
void gf2d_rect_draw(Rect r, Color color)
{
gf2d_draw_rect(gf2d_rect_to_sdl_rect(r), gfc_color_to_vector4(color));
}
void gf2d_shape_draw(Shape shape, Color color)
{
switch (shape.type)
{
case ST_RECT:
gf2d_rect_draw(shape.s.r, color);
break;
case ST_CIRCLE:
gf2d_draw_circle(vector2d(shape.s.c.x, shape.s.c.y), shape.s.c.r, gfc_color_to_vector4(color));
break;
case ST_EDGE:
gf2d_draw_line(vector2d(shape.s.e.x1, shape.s.e.y1), vector2d(shape.s.e.x2, shape.s.e.y2), gfc_color_to_vector4(color));
break;
}
}
Circle gf2d_circle(float x, float y, float r)
{
Circle c;
gf2d_circle_set(c, x, y, r);
return c;
}
SDL_Rect gf2d_rect_to_sdl_rect(Rect r)
{
SDL_Rect r2;
r2.x = r.x;
r2.y = r.y;
r2.w = r.w;
r2.h = r.h;
return r2;
}
Rect gf2d_rect_from_sdl_rect(SDL_Rect r)
{
Rect r2;
r2.x = r.x;
r2.y = r.y;
r2.w = r.w;
r2.h = r.h;
return r2;
}
Uint8 gf2d_point_in_rect(Vector2D p, Rect r)
{
if ((p.x >= r.x) && (p.x <= r.x + r.w) &&
(p.y >= r.y) && (p.y <= r.y + r.h))
return 1;
return 0;
}
Uint8 gf2d_rect_overlap_poc(Rect a, Rect b, Vector2D* poc, Vector2D* normal)
{
if ((a.x > b.x + b.w) ||
(b.x > a.x + a.w) ||
(a.y > b.y + b.h) ||
(b.y > a.y + a.h))
{
return 0;
}
if (poc)
{
poc->y = poc->x = 0;
if (normal)normal->x = normal->y = 0;
if (a.x + 1 >= b.x + b.w)
{
poc->x = a.x;
if (normal)normal->x = -1;
}
else if (b.x + 1 >= a.x + a.w)
{
poc->x = b.x;
if (normal)normal->x = 1;
}
if (a.y + 1 >= b.y + b.h)
{
poc->y = a.y;
if (normal)normal->y = -1;
}
if (b.y + 1 >= a.y + a.h)
{
if (normal)normal->y = 1;
poc->y = b.y;
}
}
return 1;
}
Uint8 gf2d_rect_overlap(Rect a, Rect b)
{
return gf2d_rect_overlap_poc(a, b, NULL, NULL);
}
Uint8 gf2d_point_in_cicle(Vector2D p, Circle c)
{
if (vector2d_magnitude_compare(vector2d(c.x - p.x, c.y - p.y), c.r) <= 0)return 1;
return 0;
}
Uint8 gf2d_circle_overlap_poc(Circle a, Circle b, Vector2D* poc, Vector2D* normal)
{
Vector2D v;
vector2d_set(v, a.x - b.x, a.y - b.y);
if (vector2d_magnitude_compare(v, a.r + b.r) <= 0)
{
if (poc)
{
vector2d_normalize(&v);
if (normal)
{
normal->x = v.x;
normal->y = v.y;
}
vector2d_scale(v, v, a.r);
poc->x = a.x + v.x;
poc->y = a.y + v.y;
}
return 1;
}
return 0;
}
Uint8 gf2d_circle_overlap(Circle a, Circle b)
{
return gf2d_circle_overlap_poc(a, b, NULL, NULL);
}
Uint8 gf2d_circle_rect_overlap_poc(Circle a, Rect b, Vector2D* poc, Vector2D* normal)
{
Rect newrect1, newrect2;
gf2d_rect_set(newrect1, b.x - a.r, b.y, b.w + a.r + a.r, b.h);
gf2d_rect_set(newrect2, b.x, b.y - a.r, b.w, b.h + a.r + a.r);
if (gf2d_point_in_cicle(vector2d(b.x, b.y), a))
{
if (poc)
{
poc->x = b.x;
poc->y = b.y;
}
if (normal)
{
normal->x = a.x - b.x;
normal->y = a.y - b.y;
vector2d_normalize(normal);
}
return 1;
}
if (gf2d_point_in_cicle(vector2d(b.x + b.w, b.y), a))
{
if (poc)
{
poc->x = b.x + b.w;
poc->y = b.y;
}
if (normal)
{
normal->x = a.x - (b.x + b.w);
normal->y = a.y - b.y;
vector2d_normalize(normal);
}
return 1;
}
if (gf2d_point_in_cicle(vector2d(b.x, b.y + b.h), a))
{
if (poc)
{
poc->x = b.x;
poc->y = b.y + b.h;
}
if (normal)
{
normal->x = a.x - b.x;
normal->y = a.y - (b.y + b.h);
vector2d_normalize(normal);
}
return 1;
}
if (gf2d_point_in_cicle(vector2d(b.x + b.w, b.y + b.h), a))
{
if (poc)
{
poc->x = b.x + b.w;
poc->y = b.y + b.h;
}
if (normal)
{
normal->x = a.x - (b.x + b.w);
normal->y = a.y - (b.y + b.h);
vector2d_normalize(normal);
}
return 1;
}
if ((gf2d_point_in_rect(vector2d(a.x, a.y), newrect1)) ||
(gf2d_point_in_rect(vector2d(a.x, a.y), newrect2)))
{
if (poc)
{
if (a.x < b.x)
{
poc->y = a.y;
poc->x = b.x;
if (normal)
{
normal->x = -1;
normal->y = 0;
}
}
else if (a.x > b.x + b.w)
{
poc->y = a.y;
poc->x = b.x + b.w;
if (normal)
{
normal->x = 1;
normal->y = 0;
}
}
if (a.y < b.y)
{
poc->y = b.y;
poc->x = a.x;
if (normal)
{
normal->x = 0;
normal->y = -1;
}
}
else if (a.y > b.y + b.y)
{
poc->y = b.y + b.h;
poc->x = a.x;
if (normal)
{
normal->x = 0;
normal->y = 1;
}
}
}
return 1;
}
return 0;
}
Uint8 gf2d_circle_rect_overlap(Circle a, Rect b)
{
Rect newrect;
gf2d_rect_set(newrect, b.x - a.r, b.y, b.w + a.r + a.r, b.h);
if (gf2d_point_in_rect(vector2d(a.x, a.y), newrect))return 1;
gf2d_rect_set(newrect, b.x, b.y - a.r, b.w, b.h + a.r + a.r);
if (gf2d_point_in_rect(vector2d(a.x, a.y), newrect))return 1;
if (gf2d_point_in_cicle(vector2d(b.x, b.y), a))return 1;
if (gf2d_point_in_cicle(vector2d(b.x + b.w, b.y), a))return 1;
if (gf2d_point_in_cicle(vector2d(b.x, b.y + b.h), a))return 1;
if (gf2d_point_in_cicle(vector2d(b.x + b.w, b.y + b.h), a))return 1;
return 0;
}
Uint8 gf2d_shape_overlap_poc(Shape a, Shape b, Vector2D* poc, Vector2D* normal)
{
switch (a.type)
{
case ST_CIRCLE:
switch (b.type)
{
case ST_CIRCLE:
return gf2d_circle_overlap_poc(a.s.c, b.s.c, poc, normal);
case ST_RECT:
return gf2d_circle_rect_overlap_poc(a.s.c, b.s.r, poc, normal);
case ST_EDGE:
return gf2d_edge_circle_intersection_poc(b.s.e, a.s.c, poc, normal);
}
case ST_RECT:
switch (b.type)
{
case ST_RECT:
return gf2d_rect_overlap_poc(a.s.r, b.s.r, poc, normal);
case ST_CIRCLE:
return gf2d_circle_rect_overlap_poc(b.s.c, a.s.r, poc, normal);
case ST_EDGE:
return gf2d_edge_rect_intersection_poc(b.s.e, a.s.r, poc, normal);
}
case ST_EDGE:
switch (b.type)
{
case ST_EDGE:
return gf2d_edge_intersect_poc(a.s.e, b.s.e, poc, normal);
case ST_CIRCLE:
return gf2d_edge_circle_intersection_poc(a.s.e, b.s.c, poc, normal);
case ST_RECT:
return gf2d_edge_rect_intersection_poc(a.s.e, b.s.r, poc, normal);
}
}
return 0;
}
Uint8 gf2d_shape_overlap(Shape a, Shape b)
{
return gf2d_shape_overlap_poc(a, b, NULL, NULL);
}
Shape gf2d_shape_rect(float x, float y, float w, float h)
{
Shape shape;
shape.type = ST_RECT;
shape.s.r.x = x;
shape.s.r.y = y;
shape.s.r.w = w;
shape.s.r.h = h;
return shape;
}
Shape gf2d_shape_from_rect(Rect r)
{
Shape shape;
shape.type = ST_RECT;
shape.s.r.x = r.x;
shape.s.r.y = r.y;
shape.s.r.w = r.w;
shape.s.r.h = r.h;
return shape;
}
Shape gf2d_shape_sdl_rect(SDL_Rect r)
{
Shape shape;
shape.type = ST_RECT;
shape.s.r.x = r.x;
shape.s.r.y = r.y;
shape.s.r.w = r.w;
shape.s.r.h = r.h;
return shape;
}
Shape gf2d_shape_circle(float x, float y, float r)
{
Shape shape;
shape.type = ST_CIRCLE;
shape.s.c.x = x;
shape.s.c.y = y;
shape.s.c.r = r;
return shape;
}
Shape gf2d_shape_from_circle(Circle c)
{
Shape shape;
shape.type = ST_CIRCLE;
shape.s.c.x = c.x;
shape.s.c.y = c.y;
shape.s.c.r = c.r;
return shape;
}
Shape gf2d_shape_edge(float x1, float y1, float x2, float y2)
{
return gf2d_shape_from_edge(gf2d_edge(x1, y1, x2, y2));
}
Shape gf2d_shape_from_edge(Edge e)
{
Shape shape;
shape.type = ST_EDGE;
gf2d_edge_copy(shape.s.e, e);
return shape;
}
/**
 * Copy a shape into *dst. No-op when dst is NULL.
 */
void gf2d_shape_copy(Shape* dst, Shape src)
{
    if (!dst)return;
    memcpy(dst, &src, sizeof(Shape));
}
/**
 * Translate a shape by a vector, in place. No-op when shape is NULL.
 * @param shape shape to translate
 * @param move  offset to add to the shape's position
 */
void gf2d_shape_move(Shape* shape, Vector2D move)
{
    if (!shape)return;
    switch (shape->type)
    {
        case ST_EDGE:
            /* bug fix: an edge has two endpoints (x1,y1)-(x2,y2); the old code
               only shifted the first point through the union's rect view,
               leaving the second endpoint behind. */
            shape->s.e.x1 += move.x;
            shape->s.e.y1 += move.y;
            shape->s.e.x2 += move.x;
            shape->s.e.y2 += move.y;
            break;
        default:
            /* rects and circles both begin with an x,y position in the union,
               which is what the original code relied on */
            shape->s.r.x += move.x;
            shape->s.r.y += move.y;
            break;
    }
}
/**
 * Intersect two line segments, reporting the crossing point and a unit
 * normal of edge b.
 * Note: contact and normal are written whenever the infinite lines cross,
 * even if the segments themselves miss (behavior preserved from original).
 * @param a       first segment
 * @param b       second segment
 * @param contact [output, optional] crossing point of the two lines
 * @param normal  [output, optional] unit normal perpendicular to b
 * @return 1 when the segments intersect, 0 otherwise
 */
Uint8 gf2d_edge_intersect_poc(
    Edge a,
    Edge b,
    Vector2D* contact,
    Vector2D* normal
)
{
    float testx, testy;
    float Ua, Ub, Uden;
    /* denominator of the parametric solution; zero means parallel lines */
    Uden = ((b.y2 - b.y1) * (a.x2 - a.x1)) - ((b.x2 - b.x1) * (a.y2 - a.y1));
    if (Uden == 0)
    {
        return 0;/*parallel, can't hit*/
    }
    Ua = (((b.x2 - b.x1) * (a.y1 - b.y1)) - ((b.y2 - b.y1) * (a.x1 - b.x1))) / Uden;
    Ub = (((a.x2 - a.x1) * (a.y1 - b.y1)) - ((a.y2 - a.y1) * (a.x1 - b.x1))) / Uden;
    testx = a.x1 + (Ua * (a.x2 - a.x1));
    testy = a.y1 + (Ua * (a.y2 - a.y1));
    if (contact != NULL)
    {
        contact->x = testx;
        contact->y = testy;
    }
    if (normal != NULL)
    {
        /* bug fix: the normal must be perpendicular to edge b.  Edge b's
           direction is (dx, dy) = (b.x2-b.x1, b.y2-b.y1); the old code used
           (dy, dx), which is NOT perpendicular (dot product 2*dx*dy).
           The perpendicular is (dy, -dx). */
        normal->x = b.y2 - b.y1;
        normal->y = -(b.x2 - b.x1);
        vector2d_normalize(normal);
    }
    /* both parametric positions must fall within their segments */
    if ((Ua >= 0) && (Ua <= 1) && (Ub >= 0) && (Ub <= 1))
    {
        return 1;
    }
    return 0;
}
/**
 * Segment intersection test, discarding contact point and normal.
 */
Uint8 gf2d_edge_intersect(Edge a, Edge b)
{
    return gf2d_edge_intersect_poc(a, b, NULL, NULL);
}
/**
 * Build an Edge from endpoint components.
 */
Edge gf2d_edge(float x1, float y1, float x2, float y2)
{
    Edge e;
    gf2d_edge_set(e, x1, y1, x2, y2);
    return e;
}
/**
 * Build an Edge from two endpoint vectors.
 */
Edge gf2d_edge_from_vectors(Vector2D a, Vector2D b)
{
    Edge e;
    gf2d_edge_set(e, a.x, a.y, b.x, b.y);
    return e;
}
/**
 * Intersect a segment with a rectangle's four sides.
 * Side hits are accumulated as bit flags: 1 = top, 2 = left, 4 = bottom,
 * 8 = right.  Note: poc is overwritten by each side test that hits, so on
 * multiple hits it holds the contact of the LAST side tested.
 * The chosen normal is axis-aligned, derived from which sides were crossed.
 * @return 1 on any intersection or containment, 0 otherwise
 */
Uint8 gf2d_edge_rect_intersection_poc(Edge e, Rect r, Vector2D* poc, Vector2D* normal)
{
    Uint8 ret = 0;
    if (gf2d_edge_intersect_poc(e, gf2d_edge(r.x, r.y, r.x + r.w, r.y), poc, NULL))//top
    {
        ret = 1;
    }
    if (gf2d_edge_intersect_poc(e, gf2d_edge(r.x, r.y, r.x, r.y + r.h), poc, NULL))//left
    {
        ret |= 2;
    }
    if (gf2d_edge_intersect_poc(e, gf2d_edge(r.x, r.y + r.h, r.x + r.w, r.y + r.h), poc, NULL))//bottom
    {
        ret |= 4;
    }
    if (gf2d_edge_intersect_poc(e, gf2d_edge(r.x + r.w, r.y, r.x + r.w, r.y + r.h), poc, NULL))//right
    {
        ret |= 8;
    }
    if (ret)
    {
        if (normal)
        {
            if ((ret & 5) || (ret == 8))// top & bottom or right
            {
                normal->x = 1;
                normal->y = 0;
            }
            else if ((ret & 10) || (ret == 1))// left & right or top
            {
                normal->x = 0;
                normal->y = -1;
            }
            else if (ret & 2)// left
            {
                normal->x = -1;
                normal->y = 0;
            }
            else
            {
                normal->x = 0;
                normal->y = 1;
            }
        }
        return 1;
    }
    if ((gf2d_point_in_rect(vector2d(e.x1, e.y1), r)) ||
        (gf2d_point_in_rect(vector2d(e.x2, e.y2), r)))
    {
        // if either end point is within the rect, we have a collision
        // (segment fully inside the rect crosses no side); poc/normal are
        // not set in this case
        return 1;
    }
    return 0;
}
/**
 * Segment/rectangle test, discarding contact point and normal.
 */
Uint8 gf2d_edge_rect_intersection(Edge e, Rect r)
{
    return gf2d_edge_rect_intersection_poc(e, r, NULL, NULL);
}
/**
 * Test a segment against any shape via the generic shape dispatcher.
 */
Uint8 gf2d_edge_intersect_shape(Edge e, Shape s)
{
    return gf2d_shape_overlap(gf2d_shape_from_edge(e), s);
}
/**
 * Segment-vs-shape test with point of contact and normal.
 */
Uint8 gf2d_edge_intersect_shape_poc(Edge e, Shape s, Vector2D* poc, Vector2D* normal)
{
    return gf2d_shape_overlap_poc(gf2d_shape_from_edge(e), s, poc, normal);
}
/**
 * Segment/circle intersection: projects the circle center onto the segment's
 * line and tests whether that closest point lies on the segment and within
 * the radius.
 * NOTE(review): because only the projected closest point is range-checked,
 * a segment whose nearest-on-segment point is an ENDPOINT inside the circle
 * may be missed — confirm whether endpoint overlap should also count.
 * @param poc    [output, optional] closest point on the line to the center
 * @param normal [output, optional] unit vector from center toward poc
 *               (only written when poc is also provided)
 * @return 1 on intersection, 0 otherwise
 */
Uint8 gf2d_edge_circle_intersection_poc(Edge e, Circle c, Vector2D* poc, Vector2D* normal)
{
    /* line through the edge expressed as dy*x + dx*y = C1
       (note dx is deliberately x1 - x2) */
    float dy = (e.y2 - e.y1);
    float dx = (e.x1 - e.x2);
    float C1 = (e.y2 - e.y1) * e.x1 + (e.x1 - e.x2) * e.y1;
    /* perpendicular line through the circle center */
    float C3 = -dx * c.x + dy * c.y;
    /* det = dy^2 + dx^2; zero only for a degenerate (point) edge */
    float det = (dy * dy - -dx * dx);
    float cx2 = 0;
    float cy2 = 0;
    if (det != 0)
    {
        /* (cx2, cy2) is the projection of the center onto the line */
        cx2 = (dy * C1 - dx * C3) / det;
        cy2 = (dy * C3 - -dx * C1) / det;
    }
    /* the projected point must lie within the segment's bounding box */
    if (MIN(e.x1, e.x2) <= cx2
        && cx2 <= MAX(e.x1, e.x2)
        && MIN(e.y1, e.y2) <= cy2
        && cy2 <= MAX(e.y1, e.y2))
    {
        /* squared-distance check against r^2 (+1 slack) avoids a sqrt */
        if (fabs((cx2 - c.x) * (cx2 - c.x) + (cy2 - c.y) * (cy2 - c.y)) < c.r * c.r + 1)
        {
            if (poc)
            {
                poc->x = cx2;
                poc->y = cy2;
                if (normal)
                {
                    normal->x = poc->x - c.x;
                    normal->y = poc->y - c.y;
                    vector2d_normalize(normal);
                }
            }
            return 1;
        }
    }
    return 0;
}
/**
 * Segment/circle test, discarding contact point and normal.
 */
Uint8 gf2d_edge_circle_intersection(Edge e, Circle c)
{
    return gf2d_edge_circle_intersection_poc(e, c, NULL, NULL);
}
/**
 * Log an edge's endpoints through slog.
 */
void gf2d_edge_slog(Edge e)
{
    slog("Edge: (%f,%f),(%f,%f)", e.x1, e.y1, e.x2, e.y2);
}
/**
 * Log a rectangle's position and dimensions through slog.
 */
void gf2d_rect_slog(Rect r)
{
    slog("Rect: (%f,%f,%f,%f)", r.x, r.y, r.w, r.h);
}
/**
 * Log a circle's center and radius through slog.
 */
void gf2d_circle_slog(Circle c)
{
    slog("Circle: (%f,%f) radius (%f)", c.x, c.y, c.r);
}
/**
 * Log any shape, dispatching on its type. Unknown types log nothing.
 */
void gf2d_shape_slog(Shape shape)
{
    switch (shape.type)
    {
        case ST_EDGE:
            gf2d_edge_slog(shape.s.e);
            break;
        case ST_RECT:
            gf2d_rect_slog(shape.s.r);
            break;
        case ST_CIRCLE:
            gf2d_circle_slog(shape.s.c);
            break;
    }
}
/**
 * Axis-aligned bounding box of a segment.
 */
Rect gf2d_edge_get_bounds(Edge e)
{
    Rect r;
    r.x = MIN(e.x1, e.x2);
    r.y = MIN(e.y1, e.y2);
    r.w = fabs(e.x1 - e.x2);
    r.h = fabs(e.y1 - e.y2);
    return r;
}
/**
 * Axis-aligned bounding box of a circle (the 2r x 2r square around it).
 */
Rect gf2d_circle_get_bounds(Circle c)
{
    Rect r;
    r.x = c.x - c.r;
    r.y = c.y - c.r;
    r.w = c.r * 2;
    r.h = c.r * 2;
    return r;
}
/**
 * Axis-aligned bounding box of any shape.
 * @return the bounds; a zero rect for unknown shape types
 */
Rect gf2d_shape_get_bounds(Shape shape)
{
    Rect r = { 0,0,0,0 };
    switch (shape.type)
    {
        case ST_EDGE:
            /* bug fix: the result was computed and discarded, so edge shapes
               always returned the zero rect */
            r = gf2d_edge_get_bounds(shape.s.e);
            break;
        case ST_RECT:
            return shape.s.r;
        case ST_CIRCLE:
            /* bug fix: same discarded-result defect as the edge case */
            r = gf2d_circle_get_bounds(shape.s.c);
            break;
    }
    return r;
}
/*eol@eof*/ |
// AVA tests for first(): retrieving the lowest-keyed entry of a red-black tree.
import test from 'ava';
import { RedBlackTreeEntry, emptyWithNumericKeys, first } from '../../src';
import { RedBlackTreeStructure } from '../../src/internals';
import { createTree, sortedValues } from '../test-utils';
// Fresh fixtures per test; createTree() populates a tree from sortedValues.
var tree: RedBlackTreeStructure<any, any>, emptyTree: RedBlackTreeStructure<any, any>;
test.beforeEach(() => {
    emptyTree = emptyWithNumericKeys();
    tree = createTree();
});
test('returns undefined if the tree is empty', t => {
    t.is(first(emptyTree), void 0);
});
test('returns a pointer to the first node', t => {
    // first() on a populated tree must yield the entry with the smallest key.
    const node = <RedBlackTreeEntry<any, any>>first(tree);
    t.not(node, void 0);
    t.is(node.key, sortedValues[0]);
});
|
<reponame>peshos/peshos.poll
"use strict";
// Poll configuration module: binds a Knockout view model over
// PeshOS.Poll.configObj.poll so edits in the UI are mirrored into configObj.
var PeshOS = PeshOS || {};
PeshOS.Poll = PeshOS.Poll || {};
PeshOS.Poll.Configuration = (function () {
    var isConfigured = false,
        // Baseline poll used when configObj has no poll yet.
        defaultPoll = {
            name: '<NAME>',
            description: 'This is my first poll',
            width: 400,
            height: 400,
            votebutton: 'vote',
            resultsbutton: 'results',
            voteonce: true,
            disablevoting: false,
            questions: [{ answerText: 'Answer question 1', chartText: 'Chart 1' }, { answerText: 'Answer question 2', chartText: 'Chart 2'}]
        },
        // Knockout view model: one observable per poll property, each
        // subscription writing the new value back into configObj.poll.
        ConfigurationModel = function () {
            if (!PeshOS.Poll.configObj.poll) {
                // Deep-copy the defaults so edits never mutate defaultPoll.
                PeshOS.Poll.configObj.poll = $.extend(true, {}, defaultPoll);
            }
            var self = this;
            self.name = ko.observable(PeshOS.Poll.configObj.poll.name);
            self.name.subscribe(function (newName) {
                PeshOS.Poll.configObj.poll.name = newName;
                // The chart title tracks the poll name.
                PeshOS.Poll.configObj.chartConfiguration.title = newName;
            });
            self.description = ko.observable(PeshOS.Poll.configObj.poll.description);
            self.description.subscribe(function (newDescription) {
                PeshOS.Poll.configObj.poll.description = newDescription;
            });
            self.width = ko.observable(PeshOS.Poll.configObj.poll.width);
            self.width.subscribe(function (newWidth) {
                PeshOS.Poll.configObj.poll.width = newWidth;
            });
            self.height = ko.observable(PeshOS.Poll.configObj.poll.height);
            self.height.subscribe(function (newHeight) {
                PeshOS.Poll.configObj.poll.height = newHeight;
            });
            self.votebutton = ko.observable(PeshOS.Poll.configObj.poll.votebutton);
            self.votebutton.subscribe(function (newVotebutton) {
                PeshOS.Poll.configObj.poll.votebutton = newVotebutton;
            });
            self.resultsbutton = ko.observable(PeshOS.Poll.configObj.poll.resultsbutton);
            self.resultsbutton.subscribe(function (newResultsbutton) {
                PeshOS.Poll.configObj.poll.resultsbutton = newResultsbutton;
            });
            self.voteonce = ko.observable(PeshOS.Poll.configObj.poll.voteonce);
            self.voteonce.subscribe(function (newVoteonce) {
                PeshOS.Poll.configObj.poll.voteonce = newVoteonce;
            });
            self.disablevoting = ko.observable(PeshOS.Poll.configObj.poll.disablevoting);
            self.disablevoting.subscribe(function (newDisablevoting) {
                PeshOS.Poll.configObj.poll.disablevoting = newDisablevoting;
            });
            self.questions = ko.observableArray(PeshOS.Poll.configObj.poll.questions);
            self.questions.subscribe(function (newQuestions) {
                PeshOS.Poll.configObj.poll.questions = newQuestions;
            });
            // Append a new blank question row.
            self.addQuestion = function () {
                self.questions.push({
                    answerText: "Answer question",
                    chartText: "Chart"
                });
            };
            // Remove the given question row.
            self.removeQuestion = function (question) {
                self.questions.remove(question);
            };
        };
    // Bind the view model to #pollConfiguration exactly once.
    function configure() {
        if (isConfigured) {
            return;
        }
        // Clean any prior bindings before re-applying.
        ko.cleanNode(document.getElementById('pollConfiguration'));
        ko.applyBindings(new ConfigurationModel(), document.getElementById('pollConfiguration'));
        isConfigured = true;
    };
    return {
        configure: configure
    }
}());
/**
 * Validate a user profile object.
 * Accepted shapes (both require string userName and email):
 *  1. no age, tags, or birthday at all; or
 *  2. numeric age, with tags (if present) an array and
 *     birthday (if present) a Date.
 * @returns {boolean} true when the profile matches either shape.
 */
function validateUserProfile(userProfile) {
  const identityOk =
    typeof userProfile.userName === 'string' &&
    typeof userProfile.email === 'string';
  if (!identityOk) {
    return false;
  }
  const noOptionals =
    userProfile.age === undefined &&
    userProfile.tags === undefined &&
    userProfile.birthday === undefined;
  if (noOptionals) {
    return true;
  }
  const tagsOk =
    userProfile.tags === undefined || Array.isArray(userProfile.tags);
  const birthdayOk =
    userProfile.birthday === undefined || userProfile.birthday instanceof Date;
  return typeof userProfile.age === 'number' && tagsOk && birthdayOk;
}
<filename>src/icons/legacy/AngleDown.tsx
// Generated by script, don't edit it please.
// Legacy "angle down" icon component built from the @rsuite/icon-font glyph.
import createSvgIcon from '../../createSvgIcon';
import AngleDownSvg from '@rsuite/icon-font/lib/legacy/AngleDown';
const AngleDown = createSvgIcon({
    as: AngleDownSvg,
    ariaLabel: 'angle down',
    category: 'legacy',
    displayName: 'AngleDown'
});
export default AngleDown;
|
<filename>my-bonsai-corner/src/main/java/com/ratz/mybonsaicorner/services/UserService.java
package com.ratz.mybonsaicorner.services;
/**
 * Service-layer contract for user-related operations.
 * NOTE(review): no methods are declared yet — presumably a placeholder to be
 * filled in as user features are implemented; confirm before relying on it.
 */
public interface UserService {
}
|
<filename>src/app/store/index.js
import Vue from 'vue';
import Vuex from 'vuex';
import Observer from 'mutation-observer';
import debounce from 'tiny-debounce';
import {hasElement} from 'Resources/helpers';
Vue.use(Vuex);
import state from './state';
import actions from './actions';
import mutations from './mutations';
import getters from './getters';
// Application-wide Vuex store assembled from its four module files.
const store = new Vuex.Store({
    state,
    actions,
    mutations,
    getters
});
// Commit whether an 'mx-underlay' element is currently present in the DOM.
const checkForMxBgs = () => {
    store.commit('bgShown', hasElement('mx-underlay'));
};
// Watch the whole document for subtree/childList changes and re-check
// (debounced to at most once per 100ms). Stored on window so other code
// can disconnect it.
window._bgObserver = new Observer(debounce(checkForMxBgs, 100));
window._bgObserver.observe(document, {
    subtree: true,
    childList: true,
    attributes: false,
    characterData: false,
    attributeOldValue: false,
    characterDataOldValue: false
});
export default store;
|
const Boom = require('@hapi/boom');
const get = require('lodash/get');
const Company = require('../../models/Company');
const translate = require('../../helpers/translate');
const UtilsHelper = require('../../helpers/utils');
const { TRAINING_ORGANISATION_MANAGER, VENDOR_ADMIN } = require('../../helpers/constants');
const { language } = translate;
// Route pre-handler: resolve to true when the company in route params exists.
// On failure it RETURNS (not throws) a Boom error, per hapi pre conventions.
exports.companyExists = async (req) => {
    try {
        const company = await Company.countDocuments({ _id: req.params._id });
        if (!company) throw Boom.notFound(translate[language].CompanyNotFound);
        return true;
    } catch (e) {
        req.log('error', e);
        return Boom.isBoom(e) ? e : Boom.badImplementation(e);
    }
};
// Route pre-handler: a user may update a company only if they belong to it,
// unless they hold a vendor-admin role; also rejects duplicate company names
// (case/accent-insensitive via the French strength-1 collation).
exports.authorizeCompanyUpdate = async (req) => {
    const companyId = get(req, 'auth.credentials.company._id', null);
    const vendorRole = get(req, 'auth.credentials.role.vendor.name') || null;
    const isVendorAdmin = !!vendorRole && [TRAINING_ORGANISATION_MANAGER, VENDOR_ADMIN].includes(vendorRole);
    if (!isVendorAdmin && (!companyId || !UtilsHelper.areObjectIdsEquals(req.params._id, companyId))) {
        throw Boom.forbidden();
    }
    // Exclude the company being updated from the duplicate-name check.
    const nameAlreadyExists = await Company
        .countDocuments({ _id: { $ne: req.params._id }, name: req.payload.name }, { limit: 1 })
        .collation({ locale: 'fr', strength: 1 });
    if (nameAlreadyExists) throw Boom.conflict(translate[language].companyExists);
    return null;
};
// Route pre-handler: reject company creation when the name is already taken
// (case/accent-insensitive, same collation as above).
exports.authorizeCompanyCreation = async (req) => {
    const { name } = req.payload;
    const nameAlreadyExists = await Company
        .countDocuments({ name }, { limit: 1 })
        .collation({ locale: 'fr', strength: 1 });
    if (nameAlreadyExists) throw Boom.conflict(translate[language].companyExists);
    return null;
};
|
import { Command } from 'commander';
import * as Inquirer from 'inquirer';
import { SimpleGit } from 'simple-git/promise';
import { exec } from 'child_process';
import * as util from 'util';
import { Container } from 'typedi';
import { OptionsService } from '../../service/Options';
/** Parameters describing a commit range to cherry-pick between branches. */
export interface DiffQuery {
    /** First (older) commit hash of the range. */
    from?: string;
    /** Second (newer) commit hash of the range. */
    to?: string;
    /** Branch the commits are read from. */
    source?: string;
    /** Branch the patch is applied onto. */
    destination?: string;
}
/**
 * Interactively populate qDiff (mutated in place): prompts for a source
 * branch, two commits bounding the range, and a destination branch.
 * Commit choices come from the source branch's last 20 commits; the user may
 * also type a raw hash when the list option is declined or the log is empty.
 */
export async function AskDiff(cmd: Command, qDiff: DiffQuery, git: SimpleGit) {
    const branches = await git.branchLocal();
    qDiff.source = ((await Inquirer.prompt([
        {
            name: 'source',
            message: 'Choose a source branch',
            type: 'list',
            choices: branches.all,
            default: 'hapify',
        },
    ])) as any).source;
    // Last 20 commits of the source branch, newest first.
    const commits = (await git.log([qDiff.source, '-n', '20', '--'])).all.map((c) => ({ name: `[${c.date}] ${c.message}`, value: c.hash }));
    const fromAnswer = (await Inquirer.prompt([
        {
            name: 'from',
            message: 'Choose the first commit',
            type: 'list',
            choices: [{ name: 'Enter a commit hash', value: null }, new Inquirer.Separator(), ...commits],
            // Default to the second-newest commit when available.
            default: commits.length > 1 ? commits[1].value : null,
            when: () => commits.length > 0,
        },
        {
            // Fallback free-form hash prompt when no list choice was made.
            name: 'fromHash',
            message: 'Enter the first commit hash',
            when: (answer: any) => !answer.from,
            validate: (input) => input.length > 0,
        },
    ])) as { from?: string; fromHash?: string };
    qDiff.from = fromAnswer.fromHash || fromAnswer.from;
    const toAnswer = (await Inquirer.prompt([
        {
            name: 'to',
            message: 'Choose the second commit',
            type: 'list',
            choices: [{ name: 'Enter a commit hash', value: null }, new Inquirer.Separator(), ...commits],
            // Default to the newest commit when available.
            default: commits.length > 0 ? commits[0].value : null,
            when: () => commits.length > 0,
        },
        {
            name: 'toHash',
            message: 'Enter the second commit hash',
            when: (answer: any) => !answer.to,
            validate: (input) => input.length > 0,
        },
    ])) as { to?: string; toHash?: string };
    qDiff.to = toAnswer.toHash || toAnswer.to;
    qDiff.destination = ((await Inquirer.prompt([
        {
            name: 'destination',
            message: 'Choose a destination branch',
            type: 'list',
            choices: branches.all,
            default: 'develop',
        },
    ])) as any).destination;
}
/**
 * After user confirmation, checks out qDiff.destination and applies the
 * from..to commit range via `git format-patch | git am -3 -k`.
 * @returns the command's stdout, or null when the user declines.
 * NOTE(review): qDiff.from/to are interpolated into a shell command string;
 * this is shell injection if they can ever come from untrusted input —
 * confirm they are always git hashes chosen interactively.
 * NOTE(review): the declared return type is Promise<string> but the decline
 * path returns null — callers must handle that.
 */
export async function ApplyDiff(qDiff: DiffQuery, git: SimpleGit): Promise<string> {
    const options = Container.get(OptionsService);
    const command = `git format-patch --stdout ${qDiff.from}..${qDiff.to} | git am -3 -k`;
    const confirm = ((await Inquirer.prompt([
        {
            name: 'confirm',
            message: `Confirm run command: "${command}" on branch ${qDiff.destination}`,
            type: 'confirm',
            default: false,
        },
    ])) as any).confirm;
    if (confirm) {
        await git.checkout(qDiff.destination);
        // Run in the project directory resolved by the options service.
        const { stdout, stderr } = await util.promisify(exec)(command, {
            cwd: options.dir(),
        });
        if (stderr && stderr.length) {
            throw new Error(`${stderr}\n${stdout}`);
        }
        return stdout;
    }
    return null;
}
|
// True when `email` is a syntactically plausible e-mail address.
// The input is coerced to a lower-cased string before matching.
const validateEmail = (email) => {
  const pattern = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
  const normalized = String(email).toLowerCase();
  return pattern.test(normalized);
}
from sklearn.cluster import KMeans


def KMeans_clustering(dataset, k):
    """Cluster `dataset` into `k` groups using k-means.

    Args:
        dataset: array-like of shape (n_samples, n_features).
        k: number of clusters to form.

    Returns:
        Tuple of (cluster centers, per-sample labels) from the fitted model.
    """
    kmeans_model = KMeans(n_clusters=k).fit(dataset)
    clusters = kmeans_model.cluster_centers_
    labels = kmeans_model.labels_
    return clusters, labels


if __name__ == "__main__":
    # Bug fix: the original module-level call referenced an undefined name
    # `dataset`, raising NameError on import. Run a small self-contained
    # demo instead, and only when executed as a script.
    sample = [[0.0, 0.0], [0.1, 0.0], [10.0, 10.0], [10.1, 10.0]]
    clusters, labels = KMeans_clustering(sample, k=2)
    print(clusters, labels)
#!/bin/bash
# Build script for the client program.
# NOTE(review): `script client.txt` starts a terminal-recording session
# (util-linux `script`), which spawns an interactive sub-shell BEFORE the
# compile line runs — confirm this is intentional (e.g. for grading logs)
# and not a stray command.
script client.txt
# Compile the client from its four translation units.
gcc main.c client.c interfaces.c ip_validator.c -o client
|
package com.linkedin.datahub.graphql.resolvers.type;
import com.google.common.collect.Iterables;
import com.linkedin.datahub.graphql.types.EntityType;
import com.linkedin.datahub.graphql.types.LoadableType;
import graphql.TypeResolutionEnvironment;
import graphql.schema.GraphQLObjectType;
import graphql.schema.TypeResolver;
import java.util.List;
import java.util.stream.Collectors;
/**
* Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type.
*/
public class EntityInterfaceTypeResolver implements TypeResolver {
private final List<EntityType<?, ?>> _entities;
public EntityInterfaceTypeResolver(final List<EntityType<?, ?>> entities) {
_entities = entities;
}
@Override
public GraphQLObjectType getType(TypeResolutionEnvironment env) {
Object javaObject = env.getObject();
final LoadableType<?, ?> filteredEntity = Iterables.getOnlyElement(_entities.stream()
.filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass()))
.collect(Collectors.toList()));
return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName());
}
}
|
#include "stdio.h"
#include "string.h"
#include "stdlib.h"
#include <stdio.h>
#include <string.h>
/* Record pairing a string key with up to eight integer data slots.
   NOTE(review): declared but never used by the code below — confirm whether
   it is a leftover or intended for a later step. */
typedef struct node {
    char *key;
    int data[8];
}node;
/*
 * Parse an input line of the form "KEY d1d2...", printing the key and the
 * run of digit characters that follows the separating space.
 *
 * Bug fixes vs. the original:
 *  - the digit loop condition was `*p > '0' && *p < '0'` — always false, so
 *    no digits were ever copied;
 *  - malloc(i) left no room for the NUL terminator;
 *  - the `key` pointer was advanced while filling and then leaked,
 *    never terminated;
 *  - `*pp+k` printed pp[0]+k (char arithmetic) instead of pp[k].
 */
void addNewNode(char* inputString){
    const char *p = inputString;
    size_t keyLen = 0;

    /* measure the key: everything before the first space or newline */
    while (p[keyLen] != ' ' && p[keyLen] != '\n' && p[keyLen] != '\0') {
        keyLen++;
    }

    char *key = (char*)malloc(keyLen + 1);
    if (key == NULL) {
        return;
    }
    memcpy(key, p, keyLen);
    key[keyLen] = '\0';
    printf("%s--\n", key);

    /* collect up to 8 digit characters after the separator */
    char digits[9] = {0};
    size_t n = 0;
    const char *q = p + keyLen;
    if (*q == ' ') {
        q++;
    }
    while (n < 8 && *q >= '0' && *q <= '9') {
        digits[n++] = *q++;
    }
    printf("%s\n", digits);
    printf("\n END");

    free(key);
}
/* Entry point: exercises addNewNode() on a fixed sample line.
   The commented-out block (left as-is) read N lines from stdin instead. */
int main() {
    /* Enter your code here. Read input from STDIN. Print output to STDOUT */
    /* int loop = 0;
    scanf("%d",&loop);
    char *placeHolder[loop];
    for (int i=0;i<loop;i++){
    placeHolder[i] = (char*)malloc(100);
    scanf("%s",placeHolder[i]);
    }
    for (int i=0;i<loop;i++){
    printf("here is i %d -> %s\n",i,placeHolder[i]);
    }
    */
    char test [] = "TEST 12345678";
    addNewNode(test);
    return 0;
}
|
<reponame>saucelabs/travis-core<gh_stars>100-1000
module Travis
  module Addons
    # Archive addon namespace: loading it pulls in the addon's event handler
    # and background task implementations.
    module Archive
      require 'travis/addons/archive/event_handler'
      require 'travis/addons/archive/task'
    end
  end
end
|
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# What to do
sign=false
verify=false
build=false
setupenv=false

# Systems to build
linux=true
windows=true
osx=true

# Other Basic variables
SIGNER=
VERSION=
commit=false
url=https://github.com/infinitemoneycoin-project/infinitemoneycoin
proc=2
mem=2000
lxc=true
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
signProg="gpg --detach-sign"
commitFiles=true

# Help Message
# (bug fix: the -u/--url line used to advertise a default repository URL that
# did not match the actual $url default set above)
read -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the infinitemoneycoin, gitian-builder, gitian.sigs, and infinitemoneycoin-detached-sigs.
Arguments:
signer GPG signer to sign each build assert file
version Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit Indicate that the version argument is for a commit or branch
-u|--url Specify the URL of the repository. Default is https://github.com/infinitemoneycoin-project/infinitemoneycoin
-v|--verify Verify the gitian build
-b|--build Do a gitian build
-s|--sign Make signed binaries for Windows and Mac OSX
-B|--buildsign Build both signed and unsigned binaries
-o|--os Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx, a for aarch64
-j Number of processes to use. Default 2
-m Memory to allocate in MiB. Default 2000
--kvm Use KVM instead of LXC
--setup Setup the gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign Create the assert file for detached signing. Will not commit anything.
--no-commit Do not commit anything to git
-h|--help Print this help message
EOF
# Get options and arguments
# Manual option loop (not getopts, so long options can be handled).
# Options must precede the positional signer/version arguments; the first
# unrecognized word breaks the loop.
while :; do
    case $1 in
        # Verify
        -v|--verify)
            verify=true
            ;;
        # Build
        -b|--build)
            build=true
            ;;
        # Sign binaries
        -s|--sign)
            sign=true
            ;;
        # Build then Sign
        -B|--buildsign)
            sign=true
            build=true
            ;;
        # PGP Signer
        -S|--signer)
            if [ -n "$2" ]
            then
                SIGNER=$2
                shift
            else
                echo 'Error: "--signer" requires a non-empty argument.'
                exit 1
            fi
            ;;
        # Operating Systems
        # Resets all platform flags, then re-enables each one named in the
        # argument (l=linux, w=windows, x=osx, a=aarch64).
        -o|--os)
            if [ -n "$2" ]
            then
                linux=false
                windows=false
                osx=false
                aarch64=false
                if [[ "$2" = *"l"* ]]
                then
                    linux=true
                fi
                if [[ "$2" = *"w"* ]]
                then
                    windows=true
                fi
                if [[ "$2" = *"x"* ]]
                then
                    osx=true
                fi
                if [[ "$2" = *"a"* ]]
                then
                    aarch64=true
                fi
                shift
            else
                echo 'Error: "--os" requires an argument containing an l (for linux), w (for windows), x (for Mac OSX), or a (for aarch64)\n'
                exit 1
            fi
            ;;
        # Help message
        -h|--help)
            echo "$usage"
            exit 0
            ;;
        # Commit or branch
        -c|--commit)
            commit=true
            ;;
        # Number of Processes
        -j)
            if [ -n "$2" ]
            then
                proc=$2
                shift
            else
                echo 'Error: "-j" requires an argument'
                exit 1
            fi
            ;;
        # Memory to allocate
        -m)
            if [ -n "$2" ]
            then
                mem=$2
                shift
            else
                echo 'Error: "-m" requires an argument'
                exit 1
            fi
            ;;
        # URL
        -u)
            if [ -n "$2" ]
            then
                url=$2
                shift
            else
                echo 'Error: "-u" requires an argument'
                exit 1
            fi
            ;;
        # kvm
        --kvm)
            lxc=false
            ;;
        # Detach sign
        # Use a no-op "signer" and suppress committing signature files.
        --detach-sign)
            signProg="true"
            commitFiles=false
            ;;
        # Commit files
        --no-commit)
            commitFiles=false
            ;;
        # Setup
        # NOTE(review): this sets `setup`, checked later; the `setupenv=false`
        # default near the top of the script appears unused — confirm.
        --setup)
            setup=true
            ;;
        *) # Default case: If no more options then break out of the loop.
            break
    esac
    shift
done
# Set up LXC
# Export the variables gitian-builder expects and bring up the LXC bridge.
if [[ $lxc = true ]]
then
    export USE_LXC=1
    export LXC_BRIDGE=lxcbr0
    sudo ifconfig lxcbr0 up 10.0.2.2
fi

# Check for OSX SDK
# Without the SDK tarball the OSX gitian build cannot run; disable it.
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
    echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
    osx=false
fi
# Get signer
# bug fix: the original test was [[ -n"$1" ]] (no space). Bash parses
# -n"$1" as ONE word, and a single-word [[ ... ]] is a non-empty-string
# test — always true here. So SIGNER was assigned and `shift` ran even
# when no argument was given.
if [[ -n "$1" ]]
then
    SIGNER=$1
    shift
fi

# Get version
if [[ -n "$1" ]]
then
    VERSION=$1
    COMMIT=$VERSION
    shift
fi
# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
    echo "$scriptName: Missing signer."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Check that a version is specified
if [[ $VERSION == "" ]]
then
    echo "$scriptName: Missing version."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Add a "v" if no -c
# Without --commit, the version argument names a release tag "v<VERSION>".
if [[ $commit = false ]]
then
    COMMIT="v${VERSION}"
fi
echo ${COMMIT}

# Setup build environment
# Clones the sibling repos gitian expects and builds the base VM image
# (LXC or KVM depending on USE_LXC).
if [[ $setup = true ]]
then
    sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
    git clone https://github.com/infinitemoneycoinlliumcoin/gitian.sigs.git
    git clone https://github.com/infinitemoneycoinlliumcoin/infinitemoneycoin-detached-sigs.git
    git clone https://github.com/devrandom/gitian-builder.git
    pushd ./gitian-builder
    if [[ -n "$USE_LXC" ]]
    then
        sudo apt-get install lxc
        bin/make-base-vm --suite trusty --arch amd64 --lxc
    else
        bin/make-base-vm --suite trusty --arch amd64
    fi
    popd
fi

# Set up build
# Check out the requested tag/commit in the source repository.
pushd ./infinitemoneycoin
git fetch
git checkout ${COMMIT}
popd
# Build
# Runs the gitian build for each enabled platform, signs the build assert
# files, moves the artifacts into ./infinitemoneycoin-binaries/<version>,
# and optionally commits the unsigned sigs.
if [[ $build = true ]]
then
    # Make output folder
    mkdir -p ./infinitemoneycoin-binaries/${VERSION}
    # Build Dependencies (shared by all platform builds below)
    echo ""
    echo "Building Dependencies"
    echo ""
    pushd ./gitian-builder
    mkdir -p inputs
    wget -N -P inputs $osslPatchUrl
    wget -N -P inputs $osslTarUrl
    make -C ../infinitemoneycoin/depends download SOURCES_PATH=`pwd`/cache/common
    # Linux
    if [[ $linux = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Linux"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit infinitemoneycoin=${COMMIT} --url infinitemoneycoin=${url} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-linux.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-linux.yml
        mv build/out/infinitemoneycoin-*.tar.gz build/out/src/infinitemoneycoin-*.tar.gz ../infinitemoneycoin-binaries/${VERSION}
    fi
    # Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Windows"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit infinitemoneycoin=${COMMIT} --url infinitemoneycoin=${url} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win.yml
        mv build/out/infinitemoneycoin-*-win-unsigned.tar.gz inputs/infinitemoneycoin-win-unsigned.tar.gz
        mv build/out/infinitemoneycoin-*.zip build/out/infinitemoneycoin-*.exe ../infinitemoneycoin-binaries/${VERSION}
    fi
    # Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit infinitemoneycoin=${COMMIT} --url infinitemoneycoin=${url} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx.yml
        mv build/out/infinitemoneycoin-*-osx-unsigned.tar.gz inputs/infinitemoneycoin-osx-unsigned.tar.gz
        mv build/out/infinitemoneycoin-*.tar.gz build/out/infinitemoneycoin-*.dmg ../infinitemoneycoin-binaries/${VERSION}
    fi
    # AArch64
    if [[ $aarch64 = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} AArch64"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit infinitemoneycoin=${COMMIT} --url infinitemoneycoin=${url} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-aarch64.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-aarch64 --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-aarch64.yml
        mv build/out/infinitemoneycoin-*.tar.gz build/out/src/infinitemoneycoin-*.tar.gz ../infinitemoneycoin-binaries/${VERSION}
    fi
    # bug fix: the 'fi' above was missing in the original, which nested the
    # popd and the commit step inside the AArch64 branch and left the outer
    # "if [[ $build = true ]]" unterminated — a shell syntax error at parse
    # time.
    popd
    if [[ $commitFiles = true ]]
    then
        # Commit to gitian.sigs repo
        echo ""
        echo "Committing ${VERSION} Unsigned Sigs"
        echo ""
        pushd gitian.sigs
        git add ${VERSION}-linux/${SIGNER}
        git add ${VERSION}-aarch64/${SIGNER}
        git add ${VERSION}-win-unsigned/${SIGNER}
        git add ${VERSION}-osx-unsigned/${SIGNER}
        git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
        popd
    fi
fi
# Verify the build
# Re-checks each release's gitian signatures against the descriptors.
if [[ $verify = true ]]
then
    # Linux
    pushd ./gitian-builder
    echo ""
    echo "Verifying v${VERSION} Linux"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../infinitemoneycoin/contrib/gitian-descriptors/gitian-linux.yml
    # Windows
    echo ""
    echo "Verifying v${VERSION} Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win.yml
    # Mac OSX
    echo ""
    echo "Verifying v${VERSION} Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx.yml
    # AArch64
    echo ""
    echo "Verifying v${VERSION} AArch64"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-aarch64 ../infinitemoneycoin/contrib/gitian-descriptors/gitian-aarch64.yml
    # Signed Windows
    echo ""
    echo "Verifying v${VERSION} Signed Windows"
    echo ""
    # bug fix: this step used to verify ${VERSION}-osx-signed against the osx
    # signer descriptor (copy-paste from the block below), so the signed
    # Windows build was never actually verified.
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win-signer.yml
    # Signed Mac OSX
    echo ""
    echo "Verifying v${VERSION} Signed Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx-signer.yml
    popd
fi
# Sign binaries
# Applies detached signatures to the Windows/OSX builds and optionally
# commits the signed sigs.
if [[ $sign = true ]]
then
    pushd ./gitian-builder
    # Sign Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Windows"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-win-signer.yml
        mv build/out/infinitemoneycoin-*win64-setup.exe ../infinitemoneycoin-binaries/${VERSION}
        mv build/out/infinitemoneycoin-*win32-setup.exe ../infinitemoneycoin-binaries/${VERSION}
    fi
    # Sign Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../infinitemoneycoin/contrib/gitian-descriptors/gitian-osx-signer.yml
        mv build/out/infinitemoneycoin-osx-signed.dmg ../infinitemoneycoin-binaries/${VERSION}/infinitemoneycoin-${VERSION}-osx.dmg
    fi
    popd
    if [[ $commitFiles = true ]]
    then
        # Commit Sigs
        pushd gitian.sigs
        echo ""
        echo "Committing ${VERSION} Signed Sigs"
        echo ""
        git add ${VERSION}-win-signed/${SIGNER}
        git add ${VERSION}-osx-signed/${SIGNER}
        git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
        popd
    fi
fi
|
// Module-level singleton that hands out sequential zero-based indices.
class IndexCounter {
  constructor() {
    // Start below zero so the first count() call yields 0.
    this._counter = -1;
  }

  // Advance to the next index and return it.
  count() {
    this._counter += 1;
    return this._counter;
  }

  // Restart the sequence from the beginning.
  reset() {
    this._counter = -1;
  }

  // Overwrite the current position of the sequence.
  setCounter(value) {
    this._counter = value;
  }

  // Read the current position without advancing it.
  getCounter() {
    return this._counter;
  }
}

export default (new IndexCounter())
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
import wx
import wx.lib.scrolledpanel
from Core.Gui.Dialog.FSettingSizer import *
from Core.Common.FConstants import *
class FSettingsScrolledSizer(wx.BoxSizer):
    """Vertical sizer with one FSettingSizer row per test-procedure step,
    hosted inside a scrolled panel under a "Test Settings" title."""
    def __init__(self, parent, testProcedure, applicationMap, settings = None,
            editable = True):
        # parent: wx window the widgets are created in
        # testProcedure: provides GetStepGenerator()/GetGlobalSetting()/
        #                GetSettingManager()
        # settings: optional per-step overrides; falls back to the
        #           procedure's global setting when None
        wx.BoxSizer.__init__(self, wx.VERTICAL)
        self.__settingSizers = []
        title = wx.StaticText(parent, wx.ID_ANY, "Test Settings")
        scrolledPanel = wx.lib.scrolledpanel.ScrolledPanel(parent, wx.ID_ANY,
                style=wx.SUNKEN_BORDER)
        self.Add(title, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        self.Add(scrolledPanel, 1, wx.EXPAND | wx.TOP, 5)
        topSizer = wx.BoxSizer(wx.VERTICAL)
        # One settings row per step in the procedure.
        for step, app, op, setting in testProcedure.GetStepGenerator():
            sizer = FSettingSizer(scrolledPanel, applicationMap, editable,
                    self.__OnUpdateList)
            if (settings == None):
                default = testProcedure.GetGlobalSetting(step)
            else:
                default = settings[step]
            if (op == VALIDATE and op not in OPS_NEEDING_APP):
                # Validation steps need no application; show a disabled row.
                sizer.SetOperation(">>", op, ">>" + op)
                sizer.Enable(False)
            else:
                sizer.SetOperation(app, op, "[" + app + "]" + op,
                        testProcedure.GetSettingManager(), default)
            topSizer.Add(sizer, 0, wx.EXPAND | wx.ALL, 5)
            self.__settingSizers.append(sizer)
        padSizer = wx.BoxSizer(wx.VERTICAL)
        padSizer.Add(topSizer, 1, wx.EXPAND | wx.ALL, 5)
        scrolledPanel.SetSizer(padSizer)
        scrolledPanel.SetAutoLayout(True)
        scrolledPanel.SetupScrolling(scroll_x = False)
    def IsSettingOk(self):
        # Every non-validate row must have a setting selected.
        for settingSizer in self.__settingSizers:
            if (settingSizer.GetOperation() == VALIDATE): continue
            if (settingSizer.GetSettingName() == None):
                return False
        return True
    def GetSettings(self):
        # Collect the chosen setting from each row, in step order.
        settings = []
        for settingSizer in self.__settingSizers:
            settings.append(settingSizer.GetSetting())
        return settings
    def __OnUpdateList(self):
        # Callback handed to each FSettingSizer: refresh every row's list.
        for sizer in self.__settingSizers:
            sizer.UpdateList()
<gh_stars>0
/*
* Run.sql
* Chapter 17, Oracle10g PL/SQL Programming
* by <NAME>, <NAME>, <NAME>
*
* This script tests the DBMS_JOB.RUN procedure
*/
-- Suppress substitution-variable old/new echoing. (The original issued this
-- command twice; once is sufficient.)
SET VERIFY OFF
-- Forget any previously defined &&job_number so the script prompts afresh.
UNDEFINE job_number
-- Reset the schema to a known state: drop existing jobs, procedures, tables.
exec CLEAN_SCHEMA.jobs
exec CLEAN_SCHEMA.procs
exec CLEAN_SCHEMA.tables
PROMPT
PROMPT Create email_tbl to hold e-mail details
PROMPT
CREATE TABLE email_tbl (
EMAILID NUMBER(10)
CONSTRAINT emailid_pk PRIMARY KEY,
SENDER VARCHAR2(100 CHAR)
NOT NULL,
RECIPIENTS VARCHAR2(4000 CHAR)
NOT NULL,
CC VARCHAR2(4000),
BCC VARCHAR2(4000),
SUBJECT VARCHAR2(50),
MESSAGE VARCHAR2(4000),
ATTACHMENT VARCHAR2(4000),
DATE_LOGGED TIMESTAMP DEFAULT SYSTIMESTAMP,
DATE_SENT TIMESTAMP);
PROMPT
PROMPT Insert six sample e-mails (the last two pre-marked as sent) -- modify if you wish to receive e-mails
PROMPT
INSERT INTO email_tbl
VALUES (1, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 1', 'This is the message for e-mail 1',
'This is inline attachment 1', null, null);
INSERT INTO email_tbl
VALUES (2, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 2', 'This is the message for e-mail 2',
'This is inline attachment 2', null, null);
INSERT INTO email_tbl
VALUES (3, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 3', 'This is the message for e-mail 3',
'This is inline attachment 3', null, null);
INSERT INTO email_tbl
VALUES (4, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 4', 'This is the message for e-mail 4',
'This is inline attachment 4', null, null);
INSERT INTO email_tbl
VALUES (5, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 5', 'This is the message for e-mail 5',
'This is inline attachment 5', null, systimestamp);
INSERT INTO email_tbl
VALUES (6, '<EMAIL>', '<EMAIL>',
'<EMAIL>', '<EMAIL>',
'Subject 6', 'This is the message for e-mail 6',
'This is inline attachment 6', null, systimestamp);
COMMIT;
PROMPT
PROMPT Create a package called email_manager to send e-mail messages
PROMPT
-- Specification: procedures that push unsent rows of EMAIL_TBL out either
-- via a raw UTL_SMTP conversation (smtp) or the UTL_MAIL API (inline_email).
CREATE OR REPLACE PACKAGE email_manager
IS
  PROCEDURE smtp (i_host_string VARCHAR2);
  PROCEDURE inline_email;
END;
/
CREATE OR REPLACE PACKAGE BODY email_manager
IS
  -- Send every unsent e-mail (DATE_SENT IS NULL) through a raw SMTP
  -- conversation against the host supplied by the caller.
  PROCEDURE smtp (i_host_string VARCHAR2)
  AS
    v_host_string VARCHAR2(500) := i_host_string;
    v_conn_string UTL_SMTP.CONNECTION;
    CURSOR email_cur
    IS
      SELECT *
        FROM email_tbl
       WHERE DATE_SENT IS NULL;
  BEGIN
    FOR y IN email_cur
    LOOP
      -- Bug fix: a connection must be opened before HELO. The original
      -- code never called OPEN_CONNECTION, so every UTL_SMTP call failed
      -- (and the failure was hidden by the WHEN OTHERS handler below).
      v_conn_string := UTL_SMTP.OPEN_CONNECTION(v_host_string);
      UTL_SMTP.HELO(v_conn_string, v_host_string);
      UTL_SMTP.MAIL(v_conn_string, y.sender);
      UTL_SMTP.RCPT(v_conn_string, y.recipients);
      UTL_SMTP.OPEN_DATA(v_conn_string);
      UTL_SMTP.WRITE_DATA(v_conn_string, y.message);
      UTL_SMTP.CLOSE_DATA(v_conn_string);
      UTL_SMTP.QUIT(v_conn_string);
      -- Mark the row as sent so it is not picked up again.
      UPDATE email_tbl
         SET date_sent = systimestamp
       WHERE emailid = y.emailid;
    END LOOP;
    COMMIT;
  EXCEPTION
    WHEN OTHERS
    THEN
      -- NOTE(review): prints and swallows all errors, matching the book
      -- example; consider re-RAISEing in production code.
      DBMS_OUTPUT.PUT_LINE(SQLERRM);
  END smtp;

  -- Send every unsent e-mail with UTL_MAIL, delivering ATTACHMENT inline.
  PROCEDURE inline_email
  AS
    CURSOR email_cur
    IS
      SELECT *
        FROM email_tbl
       WHERE DATE_SENT IS NULL;
  BEGIN
    FOR y IN email_cur
    LOOP
      UTL_MAIL.SEND_ATTACH_VARCHAR2 (
        SENDER     => y.sender,
        RECIPIENTS => y.recipients,
        CC         => y.cc,
        BCC        => y.bcc,
        SUBJECT    => y.subject,
        MESSAGE    => y.message,
        ATTACHMENT => y.attachment,
        ATT_INLINE => TRUE);
      UPDATE email_tbl
         SET date_sent = systimestamp
       WHERE emailid = y.emailid;
    END LOOP;
    COMMIT;
  EXCEPTION
    WHEN OTHERS
    THEN
      -- NOTE(review): same error-swallowing pattern as smtp above.
      DBMS_OUTPUT.PUT_LINE(SQLERRM);
  END inline_email;
END;
/
PROMPT
PROMPT This procedure retrieves details about jobs
PROMPT
-- Open CV_JOB_DETAILS as a ref cursor over USER_JOBS for one DBMS_JOB id,
-- exposing the job number, owning schema, formatted next run date, interval,
-- job source (WHAT) and the broken flag.
-- NOTE(review): the column alias "schema" is an Oracle keyword; it appears
-- to be accepted as a plain alias here, but verify on the target DB version.
CREATE OR REPLACE PROCEDURE get_job_details(
  i_job_number IN NUMBER,
  cv_job_details IN OUT SYS_REFCURSOR)
IS
BEGIN
  OPEN cv_job_details FOR
    SELECT job, schema_user schema,
           to_char(next_date, 'dd-mon-yyyy hh24:mi:ss') NEXT_DATE,
           interval, what, broken
      FROM user_jobs
     WHERE job = i_job_number;
EXCEPTION
  WHEN OTHERS
  THEN
    -- Prints and swallows all errors; matches the style used elsewhere
    -- in this script.
    DBMS_OUTPUT.PUT_LINE(SQLERRM);
END get_job_details;
/
PROMPT
PROMPT Provide a record count from the EMAIL_TBL table
PROMPT
SELECT count(1)
FROM email_tbl;
PROMPT
PROMPT JOB Numbers
PROMPT
COL what FORMAT A40
SELECT job, what
FROM user_jobs;
PROMPT
PROMPT DBMS_JOB.RUN example
PROMPT
DECLARE
v_error EXCEPTION;
PRAGMA EXCEPTION_INIT(v_error, -23421);
BEGIN
DBMS_JOB.RUN(job => &&job_number, FORCE => TRUE);
COMMIT;
EXCEPTION
WHEN v_error
THEN
DBMS_OUTPUT.PUT_LINE('The job number entered was not valid');
END;
/
SET PAGES 9999
SELECT *
FROM dba_jobs_running
WHERE job = &&job_number;
PROMPT
PROMPT Check the USER_JOBS view
PROMPT
VARIABLE v_job_details REFCURSOR
EXEC GET_JOB_DETAILS(&&job_number, :v_job_details)
COL schema_user FORMAT A15
COL next_date FORMAT A20
COL interval FORMAT A60
COL what FORMAT A4000
SET PAGES 9999
PRINT v_job_details
PROMPT
PROMPT Provide a record count from the EMAIL_TBL table
PROMPT
SELECT count(1)
FROM email_tbl;
|
import React from "react"
import Layout from "../components/layout"
// import Image from "../components/image"
import HomepageHero from "../components/homepageHero"
import CurrentWork from "../components/home/currentWork"
import CaseStudyLinks from "../components/home/caseStudyLinks"
import SEO from "../components/seo"
import Helmet from "react-helmet"
import { withPrefix } from "gatsby"
// Landing page: light layout variant with hero, current-work and case-study
// sections. safe-focus.js is injected via Helmet (withPrefix resolves the
// path against the Gatsby path prefix) to manage keyboard-focus styling.
const IndexPage = () => (
  <Layout lightVersion={true}>
    <Helmet>
      <script src={withPrefix('js/safe-focus.js')} type="text/javascript" />
    </Helmet>
    <SEO title="Home" />
    <HomepageHero />
    <CurrentWork />
    <CaseStudyLinks/>
    {/*
    <div style={{ maxWidth: `300px`, marginBottom: `1.45rem` }}>
      <Image />
    </div>
    */}
  </Layout>
)
export default IndexPage
|
<filename>test/test-markdown.js
const Markdown = require("../lib/copy-as-markdown");
// Markdown.link(url, text, opts): builds "[text](url)". An empty text is
// replaced with "(No Title)" unless escaping options are supplied.
exports["test link"] = function(assert) {
  var actual;
  actual = Markdown.link("http://example.com", "text");
  assert.equal(actual, "[text](http://example.com)", "normal input");
  actual = Markdown.link("http://example.com", "");
  assert.equal(actual, "[(No Title)](http://example.com)", "empty title");
  // Markdown special characters pass through untouched by default...
  actual = Markdown.link("http://example.com", '[Shin_Bangumi] Anime \\ Yuruyuri <S> San * High');
  assert.equal(actual, '[[Shin_Bangumi] Anime \\ Yuruyuri <S> San * High](http://example.com)', "no escape by default");
  // ...and are backslash-escaped when { escape: true } is passed.
  actual = Markdown.link("http://example.com", '[Shin_Bangumi] Anime \\ Yuruyuri <S> San * High', { escape: true });
  assert.equal(actual, '[\\\[Shin\\\_Bangumi\\\] Anime \\\\ Yuruyuri \\\<S\\\> San \\\* High](http://example.com)', "escapes when explicitly requested");
  // With escaping explicitly disabled, empty text stays empty
  // (no "(No Title)" substitution).
  actual = Markdown.link("http://example.com",
    '',
    { escape: false }
  );
  assert.equal(actual, '[](http://example.com)', "disabled escape");
};
// Markdown.image(url, alt): exercised with a single case.
exports["test image"] = function(assert) {
  var actual;
  actual = Markdown.image("https://media.giphy.com/media/ACQ6dBWweIEIU/giphy.gif", "coin jump");
  // NOTE(review): the expected value is the empty string, i.e. image() is
  // asserted to produce no output for a normal input. Confirm this is the
  // intended contract and not a placeholder expectation left in by mistake.
  assert.equal(actual, "", "normal input");
};
require("sdk/test").run(exports);
|
/* Copyright (c) 2017-2020 <NAME>. */
package com.epion_t3.devtools.component;
import com.epion_t3.devtools.bean.DevGeneratorContext;
/**
 * A single executable step of the dev-tools generation pipeline; each
 * implementation reads from and/or mutates the shared generator context.
 */
public interface Component {
    /** Run this component against the given generation context. */
    void execute(DevGeneratorContext context);
}
|
<filename>src/scenes/NftDetailPage/NftDetailModel.tsx
import { useSetContentIsLoaded } from 'contexts/shimmer/ShimmerContext';
import styled from 'styled-components';
import { Nft } from 'types/Nft';
import { useEffect } from 'react';
type Props = {
nft: Nft;
};
// TODO: Clean this up once fixed
// https://github.com/google/model-viewer/issues/1502
declare global {
namespace JSX {
interface IntrinsicElements {
'model-viewer': ModelViewerJSX &
React.DetailedHTMLProps<React.HTMLAttributes<HTMLElement>, HTMLElement>;
}
}
}
interface ModelViewerJSX {
src: string;
poster?: string;
class: string;
}
// Detail view for a 3D-model NFT: renders nft.animation_url inside a
// <model-viewer> element with auto-rotate and camera controls enabled.
function NftDetailModel({ nft }: Props) {
  const setContentIsLoaded = useSetContentIsLoaded();
  // NOTE(review): useEffect treats the callback's return value as a cleanup
  // function; passing setContentIsLoaded directly relies on it returning
  // void -- confirm, otherwise wrap: useEffect(() => { setContentIsLoaded(); }, ...).
  useEffect(setContentIsLoaded, [setContentIsLoaded]);
  return (
    <StyledNftDetailModel>
      <model-viewer class="model-viewer" auto-rotate camera-controls src={nft.animation_url} />
    </StyledNftDetailModel>
  );
}
const StyledNftDetailModel = styled.div`
width: 100%;
height: 100%;
`;
export default NftDetailModel;
|
def countOccurrences(list_words):
    """Count how many times each word appears.

    Args:
        list_words: iterable of hashable items (typically strings).

    Returns:
        dict mapping each distinct word to its occurrence count.
    """
    result = {}
    for word in list_words:
        # dict.get collapses the original if/else membership test into one line.
        result[word] = result.get(word, 0) + 1
    return result
<filename>api/src/models/creature_types.js
// Sequelize model factory for the `creature_types` table. The model is
// defined with no explicit attributes here; columns are presumably created
// by migrations or implied by associations elsewhere -- TODO confirm.
const creature_types = (sequelize, DataTypes) => {
  const Creature_types = sequelize.define('creature_types', {
  });
  return Creature_types;
};
export default creature_types;
/*ckwg +29
* Copyright 2017 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name Kitware, Inc. nor the names of any contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef MAPTK_ABSTRACTTOOL_H_
#define MAPTK_ABSTRACTTOOL_H_
#include <vital/config/config_block_types.h>
#include <vital/logger/logger.h>
#include <vital/types/camera_map.h>
#include <vital/types/landmark_map.h>
#include <vital/types/feature_track_set.h>
#include <vital/types/image_container.h>
#include <vtkSmartPointer.h>
#include <vtkImageData.h>
#include <qtGlobal.h>
#include <QtGui/QAction>
class AbstractToolPrivate;
/// A class to hold data that is modified by the tool
class ToolData
{
public:
  // Short aliases for the kwiver::vital smart-pointer types used throughout.
  typedef kwiver::vital::feature_track_set_sptr feature_track_set_sptr;
  typedef kwiver::vital::camera_map_sptr camera_map_sptr;
  typedef kwiver::vital::landmark_map_sptr landmark_map_sptr;
  typedef kwiver::vital::config_block_sptr config_block_sptr;
  typedef vtkSmartPointer<vtkImageData> depth_sptr;
  /// Deep copy the feature tracks into this data class
  void copyTracks(feature_track_set_sptr const&);
  /// Deep copy the cameras into this data class
  void copyCameras(camera_map_sptr const&);
  /// Deep copy the landmarks into this data class
  void copyLandmarks(landmark_map_sptr const&);
  /// Deep copy a depth image into this data class
  void copyDepth(depth_sptr const&);
  // Frame index the tool operates on / reports results for.
  unsigned int activeFrame;
  // Path of the video source associated with this run.
  std::string videoPath;
  feature_track_set_sptr tracks;
  // Most recently computed depth image (may be null).
  depth_sptr active_depth;
  camera_map_sptr cameras;
  landmark_map_sptr landmarks;
  // Tool configuration block.
  config_block_sptr config;
  // Logger handle used by the running tool.
  kwiver::vital::logger_handle_t logger;
};
Q_DECLARE_METATYPE(std::shared_ptr<ToolData>)
class AbstractTool : public QAction
{
Q_OBJECT
public:
typedef kwiver::vital::feature_track_set_sptr feature_track_set_sptr;
typedef kwiver::vital::camera_map_sptr camera_map_sptr;
typedef kwiver::vital::landmark_map_sptr landmark_map_sptr;
typedef kwiver::vital::config_block_sptr config_block_sptr;
typedef vtkSmartPointer<vtkImageData> depth_sptr;
enum Output
{
Tracks = 0x1,
Cameras = 0x2,
Landmarks = 0x4,
ActiveFrame = 0x8,
KeyFrames = 0x10,
Depth = 0x20
};
Q_DECLARE_FLAGS(Outputs, Output)
explicit AbstractTool(QObject* parent = 0);
virtual ~AbstractTool();
/// Get the types of output produced by the tool.
virtual Outputs outputs() const = 0;
/// Get if the tool can be canceled.
///
/// This method must be overridden by tool implementations. It should return
/// \c false if the tool cannot be interrupted by the user. A return value of
/// \c true implies that calling cancel() may have an effect.
virtual bool isCancelable() const = 0;
/// Return a shared pointer to the tools data
std::shared_ptr<ToolData> data();
/// Set the active frame to be used by the tool.
void setActiveFrame(unsigned int frame);
/// Set the image paths to be used as input to the tool.
void setImagePaths(std::vector<std::string> const&);
/// Set the feature tracks to be used as input to the tool.
void setTracks(feature_track_set_sptr const&);
/// Set the cameras to be used as input to the tool.
void setCameras(camera_map_sptr const&);
/// Set the landmarks to be used as input to the tool.
void setLandmarks(landmark_map_sptr const&);
/// Set the video source path.
void setVideoPath(std::string const&);
/// Set the config file if any
void setConfig(config_block_sptr&);
/// Execute the tool.
///
/// Tool implementations should override this method to verify that they have
/// sufficient data before calling the base implementation.
///
/// \param window Optional pointer to a widget to use as a context for any
/// dialogs that the tool may need to display.
///
/// \return \c true if tool execution was started successfully, otherwise
/// \c false.
virtual bool execute(QWidget* window = 0);
/// Get the active frame.
unsigned int activeFrame() const;
/// Get tracks.
///
/// This returns the new tracks resulting from the tool execution. If the
/// tool does not output tracks, the tracks will be a copy of the input
/// tracks.
///
/// This may also be used by tool implementations to get the input tracks.
/// (The tracks will be a copy that can be safely modified.)
///
/// \warning Users must not call this method while the tool is executing,
/// as doing so may not be thread safe.
feature_track_set_sptr tracks() const;
/// Get cameras.
///
/// This returns the new cameras resulting from the tool execution. If the
/// tool does not output cameras, the cameras will be a copy of the input
/// cameras.
///
/// This may also be used by tool implementations to get the input cameras.
/// (The cameras will be a copy that can be safely modified.)
///
/// \warning Users must not call this method while the tool is executing,
/// as doing so may not be thread safe.
camera_map_sptr cameras() const;
/// Get landmarks.
///
/// This returns the new landmarks resulting from the tool execution. If the
/// tool does not output landmarks, the landmarks will be a copy of the input
/// landmarks.
///
/// This may also be used by tool implementations to get the input landmarks.
/// (The landmarks will be a copy that can be safely modified.)
///
/// \warning Users must not call this method while the tool is executing,
/// as doing so may not be thread safe.
landmark_map_sptr landmarks() const;
signals:
/// Emitted when the tool execution is completed.
void completed();
/// Emitted when an intermediate update of the data is available to show progress.
void updated(std::shared_ptr<ToolData>);
/// Emitted when the tool execution terminates due to user cancellation.
void canceled();
public slots:
/// Ask the tool to cancel execution.
///
/// This sets a flag indicating that the user has requested the tool
/// execution should halt. The tool may or may not honor such a request.
///
/// \sa canceled, isCancelable
virtual void cancel();
protected:
/// Execute the tool.
///
/// This method must be overridden by tool implementations. The default
/// implementation of execute() calls this method in a separate thread.
virtual void run() = 0;
/// Check if the user has requested that tool execution be canceled.
bool isCanceled() const;
/// Test if the tool has track data.
///
/// \return \c true if the tool data has a non-zero number of feature tracks,
/// otherwise \c false
bool hasTracks() const;
/// Test if the tool has camera data.
///
/// \return \c true if the tool data has a non-zero number of cameras,
/// otherwise \c false
bool hasCameras() const;
/// Test if the tool has landmark data.
///
/// \return \c true if the tool data has a non-zero number of landmarks,
/// otherwise \c false
bool hasLandmarks() const;
/// Test if the tool has video data.
///
/// \return \c true if the tool data has an associated video source,
/// otherwise \c false
bool hasVideoSource() const;
/// Set the tracks produced by the tool.
///
/// This sets the tracks that are produced by the tool as output. Unlike
/// setTracks, this does not make a deep copy of the provided tracks.
void updateTracks(feature_track_set_sptr const&);
/// Set the cameras produced by the tool.
///
/// This sets the cameras that are produced by the tool as output. Unlike
/// setCameras, this does not make a deep copy of the provided cameras.
void updateCameras(camera_map_sptr const&);
/// Set the depth map produced by the tool.
void updateDepth(depth_sptr const& newDepth);
/// Set the landmarks produced by the tool.
///
/// This sets the landmarks that are produced by the tool as output. Unlike
/// setCameras, this does not make a deep copy of the provided landmarks.
void updateLandmarks(landmark_map_sptr const&);
private:
QTE_DECLARE_PRIVATE_RPTR(AbstractTool)
QTE_DECLARE_PRIVATE(AbstractTool)
QTE_DISABLE_COPY(AbstractTool)
};
Q_DECLARE_OPERATORS_FOR_FLAGS(AbstractTool::Outputs)
#endif
|
#!/usr/bin/env bash
# Build-configuration defaults for producing the go-cron Docker image.
# Version tag baked into the generated Dockerfile / compose files.
build_version=v1.11.1
# Builder image: golang:<tag> (alpine variant).
build_docker_image_name=golang
build_docker_os=alpine
build_docker_tag=1.13.8-${build_docker_os}
build_docker_set=${build_docker_image_name}:${build_docker_tag}
# Runtime base image: alpine:<tag>.
build_docker_image_set_name=${build_docker_os}
build_docker_image_set_tag=3.10
build_docker_image_set=${build_docker_image_set_name}:${build_docker_image_set_tag}
# Name of the binary the builder stage produces, and its output directory.
build_docker_image_mk_out_bin=go-cron-bin
build_docker_image_mk_out_path=build
build_root_path=../../
build_root_name=temp-go-cron
# NOTE(review): "buuld" is a typo, but the variable is used consistently
# below; renaming would have to touch every use site in one change.
buuld_root_conf_path=${build_root_path}/conf/release/
# 1 = inject Go module / Alpine mirror proxies into the generated Dockerfile.
build_need_proxy=0
go_proxy_url=https://goproxy.cn/
alpinelinux_proxy=mirrors.aliyun.com
# Marker used later to find dangling (<none>) images for cleanup.
docker_none_mark=none
run_path=$(pwd)
shell_run_name=$(basename $0)
shell_run_path=$(
  cd $(dirname $0)
  pwd
)
# Colored log helpers: verbose (cyan), info (green), debug (blue),
# warn (yellow), error (red). %b keeps echo -e's escape interpretation.
pV() { printf '\033[;36m%b\033[0m\n' "$1"; }
pI() { printf '\033[;32m%b\033[0m\n' "$1"; }
pD() { printf '\033[;34m%b\033[0m\n' "$1"; }
pW() { printf '\033[;33m%b\033[0m\n' "$1"; }
pE() { printf '\033[;31m%b\033[0m\n' "$1"; }
# Abort the whole script when the immediately preceding command failed.
# $1 - human-readable description used in the error message.
# NOTE(review): this inspects $? of whatever ran right before the call, so
# inserting any command (even an echo) between the checked command and this
# call silently defeats the check.
checkFuncBack() {
  if [[ $? -ne 0 ]]; then
    echo -e "\033[;31mRun [ $1 ] error exit code 1\033[0m"
    exit 1
  fi
}
# Verify that an executable is available on PATH; exit 1 otherwise.
# $1 - command name to look up.
checkBinary() {
  # `command -v` is the POSIX-recommended replacement for `which`; the
  # lookup result is tested directly instead of via a trailing $? check.
  if ! command -v "$1" >/dev/null 2>&1; then
    echo -e "\033[;31mCheck binary [ $1 ] error exit\033[0m"
    exit 1
  fi
}
# Warn (on stderr) when the script is not being run as root.
# NOTE(review): this only warns -- it does not abort; confirm non-root
# execution is actually supported before relying on it.
check_root() {
  if [[ ${EUID} -ne 0 ]]; then
    # Original message read "no not root user"; fixed the garbled wording
    # and moved the diagnostic to stderr.
    echo "not running as root user" >&2
  fi
}
# Echo "0" if a container named $1 exists, "1" if it does not, and "-1"
# when no name was supplied. The result is printed (not returned) so that
# callers can capture it with $(...).
# NOTE(review): pW writes to stdout, so on empty input the captured value
# contains the warning text as well -- callers compare with -eq 0 and are
# unaffected, but confirm before changing pW to stderr.
dockerIsHasContainByName() {
  if [[ -z "$1" ]]; then
    pW "Want find contain is empty"
    echo "-1"
  elif docker inspect "$1" >/dev/null 2>&1; then
    # The original captured inspect's stdout into an unused variable and
    # let stderr leak to the terminal; both streams are now silenced.
    echo "0"
  else
    echo "1"
  fi
}
# Stop container $1 if it is currently running; no-op otherwise.
dockerStopContainWhenRunning() {
  if [[ -z "$1" ]]; then
    pW "Want stop contain is empty"
  else
    local c_status
    c_status=$(docker inspect --format='{{ .State.Status}}' "$1")
    # Quoting "${c_status}" fixes a `[`-syntax error when inspect fails
    # and the variable expands to nothing.
    if [ "running" == "${c_status}" ]; then
      pD "-> docker stop contain [ $1 ]"
      docker stop "$1"
      checkFuncBack "docker stop $1"
    fi
  fi
}
# Stop (if needed) and then remove the container named $1.
# Builds on dockerIsHasContainByName / dockerStopContainWhenRunning above;
# only containers in "exited" or "created" state are removed.
dockerRemoveContainSafe() {
  if [[ ! -n $1 ]]; then
    pW "Want remove contain is empty"
  else
    has_contain=$(dockerIsHasContainByName $1)
    if [[ ${has_contain} -eq 0 ]]; then
      dockerStopContainWhenRunning $1
      c_status=$(docker inspect --format='{{ .State.Status}}' $1)
      # NOTE(review): ${c_status} is unquoted in the tests below; if the
      # inspect call fails it expands to nothing and `[` reports a syntax
      # error -- confirm and quote when next touching this function.
      if [ "exited" == ${c_status} ]; then
        pD "-> docker rm contain [ $1 ]"
        docker rm $1
        checkFuncBack "docker rm $1"
      fi
      if [ "created" == ${c_status} ]; then
        pD "-> docker rm contain [ $1 ]"
        docker rm $1
        checkFuncBack "docker rm $1"
      fi
    else
      pE "dockerRemoveContainSafe Not found contain [ $1 ]"
    fi
  fi
}
# checkenv
checkBinary docker
while getopts "hpb:n:i:r:z:" arg; do #after param has ":" need option
case $arg in
p) # -p open proxy of build
build_need_proxy=1
;;
b) # -b [v1.0.0] build version of contains
build_version=${OPTARG}
;;
n) # -n [temp-go-cron] name of build
build_root_name=${OPTARG}
;;
i) # -i [1.13.8-alpine] build docker image tag of golang https://hub.docker.com/_/golang
build_docker_tag=${OPTARG}
build_docker_set=${build_docker_image_name}:${build_docker_tag}
;;
r) # -r [go-cron-bin] raw name of build
build_docker_image_mk_out_bin=${OPTARG}
;;
z) # -z [3.10] build docker image alpine tag https://hub.docker.com/_/alpine
build_docker_image_set_tag=${OPTARG}
build_docker_image_set=${build_docker_image_set_name}:${build_docker_image_set_tag}
;;
h)
echo -e "this script to mark docker build file
use as ${shell_run_name} -p
ars:
-p open proxy of build
-b [v1.0.0] build version of contains
-n [temp-go-cron] name of build
-i [1.13.8-alpine] build docker image tag of golang https://hub.docker.com/_/golang
-r [go-cron-bin] raw name of build
-z [3.10] build docker image alpine tag https://hub.docker.com/_/alpine
"
;;
?) # other param?
echo "unkonw argument, plase use -h to show help"
exit 1
;;
esac
done
if [[ ${build_need_proxy} -eq 1 ]]; then
# replace build Dockerfile
echo -e "# This dockerfile uses extends image https://hub.docker.com/_${build_docker_image_name}
# VERSION ${build_version}
# Author: ${USER}
# dockerfile offical document https://docs.docker.com/engine/reference/builder/
FROM ${build_docker_set} as builder
RUN sed -i 's/dl-cdn.alpinelinux.org/${alpinelinux_proxy}/g' /etc/apk/repositories
RUN apk --no-cache add make git gcc libtool musl-dev
COPY \$PWD /usr/src/myapp
WORKDIR /usr/src/myapp
RUN make initDockerImagesMod dockerLocalImageBuildFile
FROM ${build_docker_image_set}
RUN sed -i 's/dl-cdn.alpinelinux.org/${alpinelinux_proxy}/g' /etc/apk/repositories
RUN apk --no-cache add ca-certificates && \\
rm -rf /var/cache/apk/* /tmp/*
COPY --from=builder /usr/src/myapp/${build_docker_image_mk_out_bin} /usr/src/myapp/
COPY --from=builder /usr/src/myapp/conf/release/config.yaml /usr/src/myapp/conf/
WORKDIR /usr/src/myapp
CMD [\"tail\", \"-f\", \"/etc/alpine-release\"]
" >${build_root_path}Dockerfile
else
# replace build Dockerfile
echo -e "# This dockerfile uses extends image https://hub.docker.com/_${build_docker_image_name}
# VERSION ${build_version}
# Author: ${USER}
# dockerfile offical document https://docs.docker.com/engine/reference/builder/
FROM ${build_docker_set} as builder
RUN apk --no-cache add make git gcc libtool musl-dev
COPY \$PWD /usr/src/myapp
WORKDIR /usr/src/myapp
RUN make initDockerImagesMod dockerLocalImageBuildFile
FROM ${build_docker_image_set}
RUN apk --no-cache add ca-certificates && \\
rm -rf /var/cache/apk/* /tmp/*
COPY --from=builder /usr/src/myapp/${build_docker_image_mk_out_bin} /usr/src/myapp/
COPY --from=builder /usr/src/myapp/conf/release/config.yaml /usr/src/myapp/conf/
WORKDIR /usr/src/myapp
CMD [\"tail\", \"-f\", \"/etc/alpine-release\"]
" >${build_root_path}Dockerfile
fi
echo -e "# copy right
# Licenses http://www.apache.org/licenses/LICENSE-2.0
# more info see https://docs.docker.com/compose/compose-file/ or https://docker.github.io/compose/compose-file/
version: '3.7'
networks:
default:
#volumes:
# web-data:
services:
${build_root_name}:
container_name: \"\${ROOT_NAME}\"
image: '\${ROOT_NAME}:\${DIST_TAG}' # see local docker file
ports:
- \"39000:\${ENV_CRON_PORT}\"
volumes:
- \"\$PWD/log:/usr/src/myapp/log\"
environment:
- ENV_CRON_HTTPS_ENABLE=false
- ENV_CRON_AUTO_HOST=false
- ENV_CRON_HOST=\${ENV_CRON_HOST}:\${ENV_CRON_PORT}
# - ENV_CRON_HOST=0.0.0.0:39000
working_dir: \"/usr/src/myapp\"
command:
- \"./${build_docker_image_mk_out_bin}\"
- \"-c\"
- \"conf/config.yaml\"
" >${build_root_path}docker-compose.yml
echo -e "# copy right
# Licenses http://www.apache.org/licenses/LICENSE-2.0
# more info see https://docs.docker.com/compose/compose-file/ or https://docker.github.io/compose/compose-file/
version: '3.7'
networks:
default:
#volumes:
# web-data:
services:
${build_root_name}:
container_name: \"${build_root_name}\"
image: '${build_root_name}:${build_version}' # see local docker file
ports:
- \"39000:39000\"
volumes:
- \"\$PWD/log:/usr/src/myapp/log\"
environment:
- ENV_CRON_HTTPS_ENABLE=false
- ENV_CRON_AUTO_HOST=false
- ENV_CRON_HOST=0.0.0.0:39000
working_dir: \"/usr/src/myapp\"
command:
- \"./${build_docker_image_mk_out_bin}\"
- \"-c\"
- \"conf/config.yaml\"
" >${buuld_root_conf_path}docker-compose.yml
# for remove docker images which no tag mark by <none>
docker images | grep ${docker_none_mark} | awk '{print $3}' | xargs docker rmi
exit 0
|
#! /bin/bash -e
# Jenkins Docker entrypoint: seed $JENKINS_HOME from /usr/share/jenkins/ref,
# then either launch jenkins.war (no args, or first arg starting with --)
# or exec the user-supplied command.
: "${JENKINS_WAR:="/usr/share/jenkins/jenkins.war"}"
: "${JENKINS_HOME:="/var/lib/jenkins"}"
# Fix: COPY_REFERENCE_FILE_LOG previously had no default, so the touch below
# ran against an empty path unless the caller exported it explicitly.
: "${COPY_REFERENCE_FILE_LOG:="${JENKINS_HOME}/copy_reference_file.log"}"
touch "${COPY_REFERENCE_FILE_LOG}" || { echo "Can not write to ${COPY_REFERENCE_FILE_LOG}. Wrong volume permissions?"; exit 1; }
echo "--- Copying files at $(date)" >> "$COPY_REFERENCE_FILE_LOG"
find /usr/share/jenkins/ref/ \( -type f -o -type l \) -exec bash -c '. /usr/local/bin/jenkins-support; for arg; do copy_reference_file "$arg"; done' _ {} +
# if `docker run` first argument start with `--` the user is passing jenkins launcher arguments
if [[ $# -lt 1 ]] || [[ "$1" == "--"* ]]; then
  # read JAVA_OPTS and JENKINS_OPTS into arrays to avoid need for eval (and associated vulnerabilities)
  java_opts_array=()
  while IFS= read -r -d '' item; do
    java_opts_array+=( "$item" )
  done < <([[ $JAVA_OPTS ]] && xargs printf '%s\0' <<<"$JAVA_OPTS")
  if [[ "$DEBUG" ]] ; then
    java_opts_array+=( \
      '-Xdebug' \
      '-Xrunjdwp:server=y,transport=dt_socket,address=5005,suspend=y' \
    )
  fi
  jenkins_opts_array=( )
  while IFS= read -r -d '' item; do
    jenkins_opts_array+=( "$item" )
  done < <([[ $JENKINS_OPTS ]] && xargs printf '%s\0' <<<"$JENKINS_OPTS")
  # Quote "$JENKINS_WAR" so a path containing spaces still works.
  exec java -Duser.home="$JENKINS_HOME" "${java_opts_array[@]}" -jar "${JENKINS_WAR}" "${jenkins_opts_array[@]}" "$@"
fi
# As argument is not jenkins, assume user want to run his own process, for example a `bash` shell to explore this image
exec "$@"
#!/bin/bash -u
# Regenerate Makefile settings (name/version/release/arch) from a src rpm.
set -e
if [ $# -ne 1 ]; then
  echo "Usage: $(basename "$0") <src rpm filename>"
  exit 1
fi
echo "Extracting information from src rpm..."
PKGNAME=$(rpmquery -qp --queryformat '%{NAME}' "$1" 2>/dev/null)
# We've seen cases from, e.g., rpmforge, where the src.rpm ver/release
# don't match the contained package ver/release. So extract the spec
# file and get the values from there.
test -d tmp || mkdir tmp
cd tmp
rm -f "${PKGNAME}.spec"
# Fix: the original used `2>&1 >/dev/null`, which silences only stdout and
# sends stderr to the terminal; the intent was to silence both streams.
( rpm2cpio "../$1" | cpio --quiet -i "${PKGNAME}.spec" ) >/dev/null 2>&1
# Spec files can roll multiple packages which results in the query
# returning dupe versions, just go with the first. *Hopefully* this
# is the 99% case.
VERSION=$(rpmquery -q --specfile --queryformat '%{VERSION}\n' "${PKGNAME}.spec" | head -n 1)
RELEASE=$(rpmquery -q --specfile --queryformat '%{RELEASE}\n' "${PKGNAME}.spec" | head -n 1)
ARCH=$(rpmquery -q --specfile --queryformat '%{ARCH}\n' "${PKGNAME}.spec" | head -n 1)
cd ../
rm -rf tmp
echo "Name: $PKGNAME"
echo "Version: $VERSION"
echo "Release: $RELEASE"
# Rewrite the Makefile from its template with the extracted values.
sed -e "s;SRC_SRPM :=.*;SRC_SRPM := \$(CURDIR)/$1;" -e "s;^PKGNAME :=.*;PKGNAME := $PKGNAME;" -e "s;^VERSION :=.*;VERSION := $VERSION;" -e "s;^RELEASE :=.*;RELEASE := $RELEASE;" Makefile.tmpl > Makefile
if [ "$ARCH" = "noarch" ]; then
  echo "Arch: $ARCH"
  sed -i -e "s;ARCH .*;ARCH := $ARCH;" Makefile
fi
@register.filter
def to_percent(obj, significant_digits):
    """Format a fractional value as a percentage string.

    ``obj`` is multiplied by 100 and rendered with ``significant_digits``
    digits after the decimal point, followed by a literal '%'. Returns ''
    when ``obj`` is None or when conversion/formatting fails.
    """
    if obj is None:
        return ""
    try:
        scaled = float(obj) * 100
        # Build the format spec first ("{:.Nf}%"), then apply it.
        template = "{:.%df}%%" % significant_digits
        return template.format(scaled)
    except (ValueError, TypeError):
        return ""
#!/usr/bin/env bash
# Convenience script to build Infer when using opam
# Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
set -e
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
INFER_ROOT="$SCRIPT_DIR/../"
PLATFORM="$(uname)"
NCPU="$(getconf _NPROCESSORS_ONLN 2>/dev/null || echo 1)"
# Print command-line help for this build script on stdout.
function usage() {
  echo "Usage: $0 [-y] [targets]"
  echo
  echo " targets:"
  echo " all build everything (default)"
  echo " clang build C and Objective-C analyzer"
  echo " java build Java analyzer"
  echo
  echo " options:"
  echo " -h,--help show this message"
  echo " -y,--yes automatically agree to everything"
  echo
  echo " examples:"
  echo " $0 # build Java and C/Objective-C analyzers"
  echo " $0 java clang # equivalent way of doing the above"
  echo " $0 java # build only the Java analyzer"
}
# arguments
BUILD_CLANG=no
BUILD_JAVA=no
INTERACTIVE=yes
ORIG_ARGS="$*"
while [[ $# > 0 ]]; do
opt_key="$1"
case $opt_key in
all)
BUILD_CLANG=yes
BUILD_JAVA=yes
shift
continue
;;
clang)
BUILD_CLANG=yes
shift
continue
;;
java)
BUILD_JAVA=yes
shift
continue
;;
-h|--help)
usage
exit 0
;;
-y|--yes)
INTERACTIVE=no
shift
continue
;;
*)
usage
exit 1
esac
shift
done
# if no arguments then build both clang and Java
if [ "$BUILD_CLANG" = "no" ] && [ "$BUILD_JAVA" = "no" ]; then
BUILD_CLANG=yes
BUILD_JAVA=yes
fi
# enable --yes option for some commands in non-interactive mode
YES=
if [ "$INTERACTIVE" = "no" ]; then
YES=--yes
fi
# Fail fast with a clear message when a required command is missing.
# $1 - command to look for.
check_installed () {
  local cmd=$1
  # `command -v` is POSIX and more reliable than `which`; the diagnostic
  # now goes to stderr like other error output.
  if ! command -v "$cmd" >/dev/null 2>&1; then
    echo "dependency not found: $cmd" >&2
    exit 1
  fi
}
# Initialise opam non-interactively and create/select a compiler switch
# dedicated to infer ("infer-<ocaml version>").
setup_opam () {
  # Pinned OCaml version for the infer build.
  OCAML_VERSION="4.02.3"
  opam init -j $NCPU --no-setup --yes
  OPAMSWITCH=infer-$OCAML_VERSION
  # `|| true`: the switch may already exist from a previous run; not an error.
  opam switch install -j $NCPU $OPAMSWITCH --alias-of $OCAML_VERSION || true
}
# Pin an opam package to a specific git repo, but only when it is not
# already pinned to the expected commit (keeps repeated runs idempotent).
# $1 - package name, $2 - git URL, $3 - expected pin hash.
add_opam_git_pin () {
  PACKAGE_NAME=$1
  REPO_URL=$2
  PIN_HASH=$3
  if [ "$(opam show -f pinned "$PACKAGE_NAME")" != "git ($PIN_HASH)" ]; then
    opam pin add --yes --no-action "$PACKAGE_NAME" "$REPO_URL"
  fi
}
# Pin the merlin/reason forks infer needs, then install infer's opam
# dependencies from the checked-in `opam` file via a throwaway package pin.
install_opam_deps () {
  add_opam_git_pin merlin 'https://github.com/the-lambda-church/merlin.git#reason-0.0.1' 87ea0e79
  add_opam_git_pin merlin_extend 'https://github.com/let-def/merlin-extend.git#reason-0.0.1' ef634252
  add_opam_git_pin reason 'https://github.com/squaresLab/reason.git' f04946f2
  # trick to avoid rsync'ing the whole directory to opam since we are only interested in
  # installing the dependencies
  INFER_DEPS_DIR=$(mktemp -d infer-deps-XXXX)
  cp opam "$INFER_DEPS_DIR"
  # give unique name to the package to force opam to recheck the dependencies are all installed
  opam pin add --yes --no-action "$INFER_DEPS_DIR" "$INFER_DEPS_DIR"
  opam install -j $NCPU --yes --deps-only "$INFER_DEPS_DIR"
  opam pin remove "$INFER_DEPS_DIR"
  rm -fr "$INFER_DEPS_DIR"
}
echo "initializing opam... "
check_installed opam
setup_opam
eval $(SHELL=bash opam config env --switch=$OPAMSWITCH)
echo "installing infer dependencies... "
install_opam_deps
echo "preparing build... "
if [ ! -f .release ]; then
./autogen.sh > /dev/null
fi
if [ "$BUILD_CLANG" = "no" ]; then
INFER_CONFIGURE_OPTS+=" --disable-c-analyzers"
fi
if [ "$BUILD_JAVA" = "no" ]; then
INFER_CONFIGURE_OPTS+=" --disable-java-analyzers"
fi
./configure $INFER_CONFIGURE_OPTS
if [ "$BUILD_CLANG" = "yes" ] && ! facebook-clang-plugins/clang/setup.sh --only-check-install; then
echo ""
echo " Warning: you are not using a release of Infer. The C and"
echo " Objective-C analyses require a custom clang to be compiled"
echo " now. This step takes ~30-60 minutes, possibly more."
echo ""
echo " To speed this along, you are encouraged to use a release of"
echo " Infer instead:"
echo ""
echo " http://fbinfer.com/docs/getting-started.html"
echo ""
echo " If you are only interested in analyzing Java programs, simply"
echo " run this script with only the \"java\" argument:"
echo ""
echo " $0 java"
echo ""
confirm="n"
printf "Are you sure you want to compile clang? (y/N) "
if [ "$INTERACTIVE" = "no" ]; then
confirm="y"
echo "$confirm"
else
read confirm
fi
if [ "x$confirm" != "xy" ]; then
exit 0
fi
fi
make -j $NCPU all || (
echo
echo ' compilation failure; you can try running'
echo
echo ' make clean'
echo " $0 $ORIG_ARGS"
echo
exit 1)
|
def find_max_min(lst):
    """Return ``(maximum, minimum)`` of a non-empty sequence.

    Args:
        lst: non-empty sequence of mutually comparable values.

    Returns:
        Tuple ``(max_element, min_element)``.

    Raises:
        ValueError: if ``lst`` is empty (the original raised an opaque
            IndexError from ``lst[0]`` instead).
    """
    if not lst:
        raise ValueError("find_max_min() arg is an empty sequence")
    # The built-ins replace the hand-rolled scan and handle any comparable type.
    return max(lst), min(lst)
# Driver Code: demonstrate find_max_min on a sample list
# (expected output: maximum 15, minimum 2).
lst = [10, 11, 2, 7, 15, 4]
max_element, min_element = find_max_min(lst)
print("Maximum element in the list :", max_element)
print("Minimum element in the list :", min_element)
<filename>node_modules/@formatjs/intl-numberformat/lib/src/core.js
import { defineProperty, invariant, SupportedLocales, unpackData, InitializeNumberFormat, FormatNumericToParts, ToNumber, CanonicalizeLocaleList, } from '@formatjs/ecma402-abstract';
import * as currencyDigitsData from './data/currency-digits.json';
import { names as numberingSystemNames } from './data/numbering-systems.json';
// eslint-disable-next-line import/no-cycle
import getInternalSlots from './get_internal_slots';
// Internal-slot names surfaced by resolvedOptions(), listed in the order
// prescribed by the ECMA-402 NumberFormat resolvedOptions table.
var RESOLVED_OPTIONS_KEYS = [
    'locale',
    'numberingSystem',
    'style',
    'currency',
    'currencyDisplay',
    'currencySign',
    'unit',
    'unitDisplay',
    'minimumIntegerDigits',
    'minimumFractionDigits',
    'maximumFractionDigits',
    'minimumSignificantDigits',
    'maximumSignificantDigits',
    'useGrouping',
    'notation',
    'compactDisplay',
    'signDisplay',
];
/**
 * Polyfill constructor for Intl.NumberFormat.
 * https://tc39.es/ecma402/#sec-intl-numberformat-constructor
 *
 * Callable with or without `new`; when called without `new` it forwards to
 * a `new` invocation so both forms behave identically.
 */
export var NumberFormat = function (locales, options) {
    // Cannot use `new.target` bc of IE11 & TS transpiles it to something else
    if (!this || !(this instanceof NumberFormat)) {
        return new NumberFormat(locales, options);
    }
    // Spec routine: resolves locale/options and fills the internal slots.
    InitializeNumberFormat(this, locales, options, {
        getInternalSlots: getInternalSlots,
        localeData: NumberFormat.localeData,
        availableLocales: NumberFormat.availableLocales,
        getDefaultLocale: NumberFormat.getDefaultLocale,
        currencyDigitsData: currencyDigitsData,
        numberingSystemNames: numberingSystemNames,
    });
    var internalSlots = getInternalSlots(this);
    var dataLocale = internalSlots.dataLocale;
    var dataLocaleData = NumberFormat.localeData[dataLocale];
    invariant(dataLocaleData !== undefined, "Cannot load locale-dependent data for " + dataLocale + ".");
    // PluralRules instance stored on the slots, configured with the same
    // digit options — presumably used for plural-sensitive formatting
    // (e.g. compact notation); confirm against the formatter internals.
    internalSlots.pl = new Intl.PluralRules(dataLocale, {
        minimumFractionDigits: internalSlots.minimumFractionDigits,
        maximumFractionDigits: internalSlots.maximumFractionDigits,
        minimumIntegerDigits: internalSlots.minimumIntegerDigits,
        minimumSignificantDigits: internalSlots.minimumSignificantDigits,
        maximumSignificantDigits: internalSlots.maximumSignificantDigits,
    });
    return this;
};
// formatToParts(x): spec-compliant parts array for a numeric value.
defineProperty(NumberFormat.prototype, 'formatToParts', {
    value: function formatToParts(x) {
        return FormatNumericToParts(this, toNumeric(x), {
            getInternalSlots: getInternalSlots,
        });
    },
});
// resolvedOptions(): snapshot of the defined internal slots, keyed in
// spec order; undefined slots are omitted.
defineProperty(NumberFormat.prototype, 'resolvedOptions', {
    value: function resolvedOptions() {
        // Brand check: reject plain objects / detached calls.
        if (typeof this !== 'object' || !(this instanceof NumberFormat)) {
            throw TypeError('Method Intl.NumberFormat.prototype.resolvedOptions called on incompatible receiver');
        }
        var internalSlots = getInternalSlots(this);
        var ro = {};
        for (var _i = 0, RESOLVED_OPTIONS_KEYS_1 = RESOLVED_OPTIONS_KEYS; _i < RESOLVED_OPTIONS_KEYS_1.length; _i++) {
            var key = RESOLVED_OPTIONS_KEYS_1[_i];
            var value = internalSlots[key];
            if (value !== undefined) {
                ro[key] = value;
            }
        }
        return ro;
    },
});
// Accessor descriptor for the `format` property. Per spec, `format` is a
// getter returning a bound function that is lazily created once and cached
// on the internal slots.
var formatDescriptor = {
    enumerable: false,
    configurable: true,
    get: function () {
        // Brand check: reject plain objects / detached calls.
        if (typeof this !== 'object' || !(this instanceof NumberFormat)) {
            throw TypeError('Intl.NumberFormat format property accessor called on incompatible receiver');
        }
        var internalSlots = getInternalSlots(this);
        // eslint-disable-next-line @typescript-eslint/no-this-alias
        var numberFormat = this;
        var boundFormat = internalSlots.boundFormat;
        if (boundFormat === undefined) {
            // https://tc39.es/proposal-unified-intl-numberformat/section11/numberformat_diff_out.html#sec-number-format-functions
            // format(value) is implemented as formatToParts(value) joined back
            // into a single string.
            boundFormat = function (value) {
                // TODO: check bigint
                var x = toNumeric(value);
                return numberFormat
                    .formatToParts(x)
                    .map(function (x) { return x.value; })
                    .join('');
            };
            try {
                // https://github.com/tc39/test262/blob/master/test/intl402/NumberFormat/prototype/format/format-function-name.js
                // Spec requires the bound format function's name to be ''.
                Object.defineProperty(boundFormat, 'name', {
                    configurable: true,
                    enumerable: false,
                    writable: false,
                    value: '',
                });
            }
            catch (e) {
                // In older browser (e.g Chrome 36 like polyfill.io)
                // TypeError: Cannot redefine property: name
            }
            // Cache so repeated reads of `format` return the same function.
            internalSlots.boundFormat = boundFormat;
        }
        return boundFormat;
    },
};
try {
    // https://github.com/tc39/test262/blob/master/test/intl402/NumberFormat/prototype/format/name.js
    // The getter itself must be named 'get format' for test262 compliance.
    Object.defineProperty(formatDescriptor.get, 'name', {
        configurable: true,
        enumerable: false,
        writable: false,
        value: 'get format',
    });
}
catch (e) {
    // In older browser (e.g Chrome 36 like polyfill.io)
    // TypeError: Cannot redefine property: name
}
Object.defineProperty(NumberFormat.prototype, 'format', formatDescriptor);
// Static properties
defineProperty(NumberFormat, 'supportedLocalesOf', {
    value: function supportedLocalesOf(locales, options) {
        return SupportedLocales(NumberFormat.availableLocales, CanonicalizeLocaleList(locales), options);
    },
});
// Registers CLDR locale data at runtime — presumably invoked by the
// generated per-locale data bundles; each datum is unpacked and stored per
// locale, silently skipping locales whose data cannot be unpacked.
NumberFormat.__addLocaleData = function __addLocaleData() {
    var data = [];
    for (var _i = 0; _i < arguments.length; _i++) {
        data[_i] = arguments[_i];
    }
    for (var _a = 0, data_1 = data; _a < data_1.length; _a++) {
        var datum = data_1[_a];
        var availableLocales = datum.availableLocales;
        for (var _b = 0, availableLocales_1 = availableLocales; _b < availableLocales_1.length; _b++) {
            var locale = availableLocales_1[_b];
            try {
                NumberFormat.localeData[locale] = unpackData(locale, datum);
            }
            catch (e) {
                // Ignore if we got no data
            }
        }
    }
    NumberFormat.availableLocales = Object.keys(NumberFormat.localeData);
    // First registered locale becomes the default unless one is already set.
    if (!NumberFormat.__defaultLocale) {
        NumberFormat.__defaultLocale = NumberFormat.availableLocales[0];
    }
};
NumberFormat.__defaultLocale = 'en';
NumberFormat.localeData = {};
NumberFormat.availableLocales = [];
NumberFormat.getDefaultLocale = function () {
    return NumberFormat.__defaultLocale;
};
// Distinguishes this polyfill from the native Intl.NumberFormat.
NumberFormat.polyfilled = true;
// Pass bigints through untouched; coerce everything else with ToNumber.
function toNumeric(val) {
    if (typeof val === 'bigint') {
        return val;
    }
    return ToNumber(val);
}
// Best-effort metadata fixes (toStringTag, function lengths, frozen
// prototype) so the polyfill matches test262's observable shape.
try {
    // IE11 does not have Symbol
    if (typeof Symbol !== 'undefined') {
        Object.defineProperty(NumberFormat.prototype, Symbol.toStringTag, {
            configurable: true,
            enumerable: false,
            writable: false,
            value: 'Intl.NumberFormat',
        });
    }
    // https://github.com/tc39/test262/blob/master/test/intl402/NumberFormat/length.js
    Object.defineProperty(NumberFormat.prototype.constructor, 'length', {
        configurable: true,
        enumerable: false,
        writable: false,
        value: 0,
    });
    // https://github.com/tc39/test262/blob/master/test/intl402/NumberFormat/supportedLocalesOf/length.js
    Object.defineProperty(NumberFormat.supportedLocalesOf, 'length', {
        configurable: true,
        enumerable: false,
        writable: false,
        value: 1,
    });
    // Make the prototype non-replaceable, as on the native constructor.
    Object.defineProperty(NumberFormat, 'prototype', {
        configurable: false,
        enumerable: false,
        writable: false,
        value: NumberFormat.prototype,
    });
}
catch (e) {
    // Meta fix so we're test262-compliant, not important
}
|
/*
* insertAcademicRecords.sql
* Chapter 10, Oracle10g PL/SQL Programming
* by <NAME>, <NAME> and <NAME>
*
* This script inserts values into STUDENTS and CLASSES tables.
*/
-- Seed three STUDENTS rows.
-- NOTE(review): positional VALUES — column order assumed to be
-- (id, ?, major, last_name, first_name, ?); confirm against the table DDL.
INSERT
INTO students
VALUES
(1,3,'Political Science','Boxer','Barbara','');
INSERT
INTO students
VALUES
(2,3,'History','MacDermott','Donal','');
INSERT
INTO students
VALUES
(3,3,'Science','Einstein','Albert','');
-- Seed three CLASSES rows.
-- NOTE(review): column order assumed to be
-- (dept_code, course_no, capacity, credits, title); confirm against DDL.
INSERT
INTO classes
VALUES
('PoS',101,30,3,'Introduction to Political Science');
INSERT
INTO classes
VALUES
('His',101,30,3,'Introduction to History');
INSERT
INTO classes
VALUES
('Sci',101,30,3,'Introduction to Science');
|
# Run the online-compiler container detached on the host network,
# auto-restarting on failure/reboot.
# - bind-mounts the frontend build output as the served static app
# - bind-mounts a host directory for persistent application data
sudo docker run -d --name onlinecompiler --net=host --restart=always \
  -v /storage/g/OnlineCompiler/frontend/build:/app/dist \
  -v /storage/docker/OnlineCompiler/data:/app/data \
  jansora/onlinecompiler:v2
|
var mongoose = require('mongoose')
var autopopulate = require('mongoose-autopopulate')
var Schema = mongoose.Schema
// File metadata document: a named file belonging to an organisation.
var schema = new Schema({
  // Owning organisation; auto-populated on queries via mongoose-autopopulate.
  _org: {
    type: Schema.Types.ObjectId,
    ref: 'Organisation',
    required: true,
    autopopulate: true
  },
  // Free-form type discriminator — presumably names what the file is
  // attached to; confirm against the code that creates File documents.
  contextType: {
    type: String,
    required: true
  },
  // Display name of the file.
  name: {
    type: String,
    required: true
  },
  // Stored filename — presumably the name on disk/object storage; confirm.
  file_name: {
    type: String,
    required: true
  },
  // Creation time, defaults to now.
  timestamp: {
    type: Date,
    default: Date.now,
    required: true
  }
})
schema.plugin(autopopulate)
// Compound index supporting "an org's files, newest first, by name" queries.
schema.index({ _org: 1, timestamp: -1, name: 1 })
schema.virtual('static_path').get(function () {
  // TODO
})
module.exports = mongoose.model('File', schema)
|
<filename>src/main/java/actions/IAction.java
package actions;
/**
 * A parameterless, result-less action that can be executed by the
 * application.
 */
public interface IAction {
    /** Executes this action. */
    void run();
}
|
import { Component, OnInit } from '@angular/core';
@Component({
  selector: 'app-signin-oidc',
  templateUrl: './signin-oidc.component.html',
  styleUrls: ['./signin-oidc.component.css']
})
// Empty component — presumably the OIDC sign-in redirect/callback page,
// with all behavior in the template or a route guard; confirm in routing.
export class SigninOidcComponent implements OnInit {
  constructor() { }
  // No initialization logic required yet.
  ngOnInit() {
  }
}
|
class ConfigParser:
    """Minimal parser for ``key = value`` style configuration files."""

    def parse(self, file_path):
        """Parse *file_path* and return a ``{key: value}`` dict.

        Blank lines and lines starting with ``#`` are ignored; keys and
        values are stripped of surrounding whitespace.

        Raises:
            FileNotFoundError: if the file does not exist (re-raised with a
                friendlier message).
            ValueError: if a non-comment line contains no ``=``.
        """
        try:
            with open(file_path, 'r') as file:
                config_data = {}
                for line in file:
                    line = line.strip()
                    if line and not line.startswith('#'):
                        # Split on the FIRST '=' only so values may themselves
                        # contain '=' (URLs with query strings, base64, ...).
                        # The original split('=') raised ValueError for those.
                        key, value = line.split('=', 1)
                        config_data[key.strip()] = value.strip()
                return config_data
        except FileNotFoundError:
            raise FileNotFoundError("Unable to read the configuration file")
# Usage: parse a local config file and print the resulting dict.
parser = ConfigParser()
file_path = 'config.txt'
try:
    parsed_data = parser.parse(file_path)
    print(parsed_data)
except FileNotFoundError as e:
    # parse() re-raises with a friendlier message; just report it.
    print(e)
<filename>apps/service-notification/src/email/sagas/auth.sagas.ts<gh_stars>1-10
import { Injectable, Logger } from '@nestjs/common';
import { ICommand, ofType, Saga } from '@nestjs/cqrs';
import { Observable } from 'rxjs';
import { delay, map } from 'rxjs/operators';
import { InjectQueue } from '@nestjs/bull';
import {
EmailVerifiedEvent, ForgotPasswordSentEvent,
UserLoggedInEvent,
UserRegisteredEvent,
VerificationEmailSentEvent,
} from '@ultimatebackend/core/cqrs';
import { Queue } from 'bull';
import { QUEUE_PROCESS_IDS } from '../email.constants';
@Injectable()
export class AuthSagas {
  // Logger scoped to the concrete class name.
  logger = new Logger(this.constructor.name);

  // Bull queue used to enqueue notification email jobs.
  constructor(@InjectQueue('notification_queue') readonly queue: Queue) {}

  /**
   * Reacts to UserLoggedInEvent. The queueing is currently disabled
   * (commented out), so the saga consumes the event and emits no command.
   */
  @Saga()
  userLoggedIn = (events$: Observable<any>): Observable<ICommand> => {
    return events$
      .pipe(
        ofType(UserLoggedInEvent),
        delay(1000),
        map( event => {
          // this.logger.log(JSON.stringify(event.user));
          // if (event.user) { this.queue.add(QUEUE_PROCESS_IDS.UserLoggedIn, event.user, { removeOnComplete: true, attempts: 3}); }
          return null;
        }),
      );
  };

  /**
   * On ForgotPasswordSentEvent, enqueue a reset-password email job
   * (3 attempts, removed from the queue on completion).
   */
  @Saga()
  resetPassword = (events$: Observable<any>): Observable<ICommand> => {
    return events$
      .pipe(
        ofType(ForgotPasswordSentEvent),
        delay(1000),
        map( event => {
          this.logger.log(JSON.stringify(event.user));
          if (event.user) { this.queue.add(QUEUE_PROCESS_IDS.ResetPassword, event.user, { removeOnComplete: true, attempts: 3}); }
          return null;
        }),
      );
  };

  /**
   * On UserRegisteredEvent, enqueue either an "email verified" job (social
   * signups need no verification step) or a registration email job.
   */
  @Saga()
  userRegistered = (events$: Observable<any>): Observable<ICommand> => {
    return events$
      .pipe(
        ofType(UserRegisteredEvent),
        delay(1000),
        map( event => {
          this.logger.log(JSON.stringify(event.user));
          if (event.user.service === 'social') {
            this.queue.add(QUEUE_PROCESS_IDS.EmailVerified, event.user, { removeOnComplete: true, attempts: 3});
          } else {
            this.queue.add(QUEUE_PROCESS_IDS.UserRegistered, event.user, { removeOnComplete: true, attempts: 3});
          }
          return null;
        }),
      );
  };

  /** On VerificationEmailSentEvent, enqueue a verification-code email job. */
  @Saga()
  resendVerificationCode = (events$: Observable<any>): Observable<ICommand> => {
    return events$
      .pipe(
        ofType(VerificationEmailSentEvent),
        delay(1000),
        map( event => {
          this.logger.log(JSON.stringify(event.user));
          this.queue.add(QUEUE_PROCESS_IDS.SendVerificationCode, event.user, { removeOnComplete: true, attempts: 3});
          return null;
        }),
      );
  };

  /** On EmailVerifiedEvent, enqueue an "email verified" notification job. */
  @Saga()
  emailVerified = (events$: Observable<any>): Observable<ICommand> => {
    return events$
      .pipe(
        ofType(EmailVerifiedEvent),
        delay(1000),
        map( event => {
          this.logger.log(JSON.stringify(event.user));
          this.queue.add(QUEUE_PROCESS_IDS.EmailVerified, event.user, { removeOnComplete: true, attempts: 3});
          return null;
        }),
      );
  }
}
|
<filename>src/examples/java/com/globalcollect/gateway/sdk/java/payouts/ApprovePayoutExample.java
package com.globalcollect.gateway.sdk.java.payouts;
import java.net.URISyntaxException;
import com.globalcollect.gateway.sdk.java.ExampleBase;
import com.globalcollect.gateway.sdk.java.gc.GcClient;
import com.globalcollect.gateway.sdk.java.gc.payout.ApprovePayoutRequest;
import com.globalcollect.gateway.sdk.java.gc.payout.PayoutResponse;
/**
 * Example: approve a pending payout via the GlobalCollect payments API.
 */
public class ApprovePayoutExample extends ExampleBase {

    public void example() throws URISyntaxException {
        GcClient client = getGcClient();
        ApprovePayoutRequest body = new ApprovePayoutRequest();
        // Payout date in yyyyMMdd format.
        body.setDatePayout("20150102");
        // Placeholder IDs; replace with a real merchant and payout id.
        PayoutResponse response = client.merchant("merchantId").payouts().approve("payoutId", body);
    }
}
|
# Sum of list
# NOTE(review): `input_list` must be defined before this snippet runs, and
# the variable name `sum` shadows the builtin of the same name from here on.
list_length = len(input_list)
sum = 0
for i in range(list_length):
    sum += input_list[i]
print(sum)
package br.indie.fiscal4j.nfe310.utils;
import org.apache.commons.lang3.StringUtils;
import java.util.Objects;
/**
 * Validates a Brazilian NF-e access key ("chave de acesso").
 *
 * A valid key is exactly 44 numeric characters: 43 payload digits followed
 * by one modulo-11 check digit (DV).
 *
 * Use either {@code new NFVerificaChave(chave).isChaveValida()} or the
 * static {@link #isChaveValida(String)}; see NFVerificaChaveTest for
 * details.
 */
public class NFVerificaChave {

    /** A well-formed key is exactly 43 payload digits plus 1 check digit. */
    private static final int CHAVE_LENGTH = 44;

    private final String chave;

    public NFVerificaChave(final String chave) {
        this.chave = chave;
    }

    /**
     * Computes the modulo-11 check digit over the 43 payload digits.
     * Weights cycle through 2..9 starting from the rightmost digit.
     *
     * @throws NumberFormatException if the key contains non-numeric chars.
     */
    public Integer calculaDV() {
        final char[] valores = this.chaveAcessoSemDV().toCharArray();
        final int[] pesos = {2, 3, 4, 5, 6, 7, 8, 9};
        int indice = 0;
        int soma = 0;
        for (int i = valores.length; i > 0; i--) {
            if (indice >= pesos.length) {
                indice = 0;
            }
            soma += Integer.parseInt(String.valueOf(valores[i - 1])) * pesos[indice++];
        }
        final int dv = 11 - (soma % 11);
        // By convention, remainders yielding 10 or 11 map to check digit 0.
        return ((dv == 11) || (dv == 10)) ? 0 : dv;
    }

    /** First 43 characters (payload), clamped for shorter inputs. */
    private String chaveAcessoSemDV() {
        return this.chave.substring(0, Math.min(43, this.chave.length()));
    }

    /** Check-digit portion (position 43), clamped for shorter inputs. */
    private Integer getChaveAcessoDV() {
        return Integer.valueOf(this.chave.substring(43, Math.min(45, this.chave.length())));
    }

    public boolean isChaveValida() {
        // FIX: require exactly 44 chars (the original accepted longer keys),
        // and treat null as invalid.
        if (this.chave == null || this.chave.length() != CHAVE_LENGTH) {
            return false;
        }
        try {
            return Objects.equals(getChaveAcessoDV(), calculaDV());
        } catch (NumberFormatException e) {
            // FIX: a key with non-numeric characters is invalid, not a crash
            // (the original let NumberFormatException escape).
            return false;
        }
    }

    /**
     * Static convenience check for a key's validity.
     *
     * @param chave Key to verify (may be null).
     * @return whether the key is a valid 44-digit NF-e access key.
     */
    public static boolean isChaveValida(String chave) {
        NFVerificaChave nfVerificaChave = new NFVerificaChave(chave);
        return nfVerificaChave.isChaveValida();
    }
}
|
<gh_stars>0
/**
 * Hook providing URL helpers.
 * `withBase` prefixes a path with the site's configured base URL.
 */
export declare function useUrl(): {
    withBase: (path: string) => string;
};
|
<reponame>nightskylark/DevExtreme<filename>testing/tests/DevExpress.ui.widgets/slideOut.markup.tests.js
"use strict";
var $ = require("jquery");
require("ui/slide_out");
require("common.css!");
var SLIDEOUT_CLASS = "dx-slideout",
SLIDEOUT_ITEM_CONTAINER_CLASS = "dx-slideout-item-container",
SLIDEOUT_ITEM_CLASS = "dx-slideout-item",
LIST_CLASS = "dx-list",
LIST_ITEM_CLASS = "dx-list-item";
QUnit.testStart(function() {
var markup = '\
<div id="slideOut"></div>\
<div id="slideOutWithTemplate">\
<div data-options="dxTemplate: { name: \'content\'}">\
content\
</div>\
</div>';
$("#qunit-fixture").html(markup);
});
QUnit.module("render widget", {
beforeEach: function() {
this.$element = $("#slideOut");
}
});
QUnit.test("render widget", function(assert) {
var slideOut = this.$element.dxSlideOut();
assert.ok(slideOut.hasClass(SLIDEOUT_CLASS), "widget class was added");
assert.equal(slideOut.find("." + LIST_CLASS).length, 1, "menu was rendered");
assert.equal(slideOut.find("." + SLIDEOUT_ITEM_CONTAINER_CLASS).length, 1, "item container was rendered");
});
QUnit.test("render item", function(assert) {
var slideOut = this.$element.dxSlideOut({
dataSource: [{ text: "testItem" }],
selectedIndex: 0
});
assert.equal(slideOut.find("." + SLIDEOUT_ITEM_CLASS).length, 1);
});
QUnit.test("update items during beginUpdate/endUpdate should refresh list only once", function(assert) {
var $slideOut = this.$element.dxSlideOut({}),
instance = $slideOut.dxSlideOut("instance");
instance.beginUpdate();
instance.option("items", [1]);
instance.option("items", [1, 2]);
assert.equal($slideOut.find("." + LIST_ITEM_CLASS).length, 0, "list was not only once");
instance.endUpdate();
assert.equal($slideOut.find("." + LIST_ITEM_CLASS).length, 2, "list was updated");
});
QUnit.test("show only one item at same time", function(assert) {
var slideOut = this.$element.dxSlideOut({
dataSource: [
{ text: "testItem1" },
{ text: "testItem2" }
],
selectedIndex: 0
});
assert.equal(slideOut.find("." + SLIDEOUT_ITEM_CLASS).length, 1);
});
QUnit.test("show only one item at same time if contentTemplate is used", function(assert) {
var slideOut = $("#slideOutWithTemplate").dxSlideOut({
dataSource: [
{ text: "1" },
{ text: "2" }
],
contentTemplate: function() {
return "itemText";
},
selectedIndex: 0
});
assert.equal($.trim(slideOut.find("." + SLIDEOUT_ITEM_CONTAINER_CLASS).text()), "itemText", "item was rendered only once");
});
QUnit.test("show only one item at same time if contentTemplate append markup in container", function(assert) {
var slideOut = $("#slideOutWithTemplate").dxSlideOut({
dataSource: [
{ text: "1" },
{ text: "2" }
],
contentTemplate: function(element) {
$(element).append($("<div>").text("itemText"));
},
selectedIndex: 0
});
assert.equal($.trim(slideOut.find("." + SLIDEOUT_ITEM_CONTAINER_CLASS).text()), "itemText", "item was rendered only once");
});
|
# Termux package recipe for libical.
TERMUX_PKG_HOMEPAGE=http://libical.github.io/libical/
TERMUX_PKG_DESCRIPTION="Libical is an Open Source implementation of the iCalendar protocols and protocol data units"
TERMUX_PKG_LICENSE="LGPL-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=3.0.10
TERMUX_PKG_SRCURL=https://github.com/libical/libical/releases/download/v$TERMUX_PKG_VERSION/libical-$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=f933b3e6cf9d56a35bb5625e8e4a9c3a50239a85aea05ed842932c1a1dc336b4
TERMUX_PKG_DEPENDS="libc++, libxml2"
# The former libical-dev package was merged into this one.
TERMUX_PKG_BREAKS="libical-dev"
TERMUX_PKG_REPLACES="libical-dev"
# Shared libs only; no glib bindings; bundled tzdata; host perl for the build.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS=" -DSHARED_ONLY=true -DICAL_GLIB=false -DUSE_BUILTIN_TZDATA=true -DPERL_EXECUTABLE=/usr/bin/perl"
|
#!/usr/bin/env bash
# Regenerates the extension-method wrapper sources, then builds the solution.
set -e
cd "$(dirname "$0")"
dotnet restore
dotnet tool restore
# codegen DEST [generator args...]
# Runs the code generator and writes its output to DEST.
codegen() {
    dest="$1"
    printf "Generating extensions wrappers (%s)..." "$1"
    shift
    dotnet run -p bld/ExtensionsGenerator/MoreLinq.ExtensionsGenerator.csproj -c Release -- "$@" > "$dest"
    printf "Done.\n"
}
# ToDataTable lives in its own file because it needs extra usings.
codegen MoreLinq/Extensions.g.cs -x "[/\\\\]ToDataTable\.cs$" -u System.Linq -u System.Collections MoreLinq
codegen MoreLinq/Extensions.ToDataTable.g.cs -i "[/\\\\]ToDataTable\.cs$" -u System.Data -u System.Linq.Expressions MoreLinq
# Build all configurations by default, or only the one given as $1.
if [[ -z "$1" ]]; then
    configs="Debug Release"
else
    configs="$1"
fi
for c in $configs; do
    dotnet build --no-restore -c $c
done
|
#!/bin/bash
# -*- mode: shell-script; indent-tabs-mode: nil; sh-basic-offset: 4; -*-
# ex: ts=8 sw=4 sts=4 et filetype=sh
# Dracut hook: report whether this module should be included.
# Always included, hence the unconditional success status.
check() {
    :
}
# Dracut hook: list the modules this one depends on, one line on stdout.
depends() {
    printf '%s\n' "systemd network"
}
# Dracut hook: copy the afterburn binary and its hostname unit into the
# initramfs, and wire the unit into ignition-complete.target.
install() {
    inst_multiple afterburn
    inst_simple "$moddir/afterburn-hostname.service" \
        "$systemdutildir/system/afterburn-hostname.service"
    # We want the afterburn-hostname to be firstboot only, so Ignition-provided
    # hostname changes do not get overwritten on subsequent boots
    mkdir -p "$initdir/$systemdsystemunitdir/ignition-complete.target.requires"
    ln -s "../afterburn-hostname.service" "$initdir/$systemdsystemunitdir/ignition-complete.target.requires/afterburn-hostname.service"
}
|
package mezz.jei.input;
/** Something that can be open or closed and closed on demand. */
public interface ICloseable {
    /** Closes this element. */
    void close();
    /** @return whether this element is currently open. */
    boolean isOpen();
}
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.tdb.base.objectfile;
import static org.apache.jena.atlas.lib.FileOps.clearDirectory ;
import org.apache.jena.atlas.lib.FileOps ;
import org.apache.jena.tdb.ConfigTest ;
import org.apache.jena.tdb.base.file.FileFactory ;
import org.apache.jena.tdb.base.file.Location ;
import org.apache.jena.tdb.base.objectfile.StringFile ;
/** Runs the shared StringFile test suite against the on-disk backend. */
public class TestStringFileDisk extends AbstractTestStringFile
{
    // Path of the on-disk node file; remembered so it can be deleted later.
    String fn = null ;

    @Override
    protected StringFile createStringFile()
    {
        // Start from an empty testing directory for each instance.
        String dir = ConfigTest.getTestingDir() ;
        clearDirectory(dir) ;
        Location loc = Location.create(dir) ;
        fn = loc.getPath("xyz", "node") ;
        FileOps.delete(fn) ;
        return FileFactory.createStringFileDisk(fn) ;
    }

    @Override
    protected void removeStringFile(StringFile f)
    {
        f.close() ;
        FileOps.delete(fn) ;
    }
}
|
<gh_stars>10-100
// Barrel file: bundle the component with its styles and re-export it.
import PageNotFound from './PageNotFound.js'
import './PageNotFound.scss'
export default PageNotFound
|
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
**/
#include "../../PrecompiledHeaders.h"
#include "SequenceOfOperationsJob.h"
#include "../../Logging.h"
#include "../../OrthancException.h"
#include "../../SerializationToolbox.h"
#include "../IJobUnserializer.h"
namespace Orthanc
{
  // JSON field names shared by Serialize() and the unserializing ctors.
  static const char* CURRENT = "Current";
  static const char* DESCRIPTION = "Description";
  static const char* NEXT_OPERATIONS = "Next";
  static const char* OPERATION = "Operation";
  static const char* OPERATIONS = "Operations";
  static const char* ORIGINAL_INPUTS = "OriginalInputs";
  static const char* TRAILING_TIMEOUT = "TrailingTimeout";
  static const char* TYPE = "Type";
  static const char* WORK_INPUTS = "WorkInputs";

  // One node in the DAG of operations: wraps an IJobOperation plus its
  // inputs (user-provided "original" inputs and "work" inputs fed by
  // upstream operations) and the list of downstream operations.
  class SequenceOfOperationsJob::Operation : public boost::noncopyable
  {
  private:
    size_t index_;                                   // position in the job's operations_ vector
    std::unique_ptr<IJobOperation> operation_;       // owned wrapped operation
    std::unique_ptr<JobOperationValues> originalInputs_;
    std::unique_ptr<JobOperationValues> workInputs_; // inputs produced by predecessors
    std::list<Operation*> nextOperations_;           // downstream consumers (not owned)
    size_t currentInput_;                            // next input to process; >0 means started

  public:
    Operation(size_t index,
              IJobOperation* operation) :
      index_(index),
      operation_(operation),
      originalInputs_(new JobOperationValues),
      workInputs_(new JobOperationValues),
      currentInput_(0)
    {
      // Takes ownership of "operation"; reject null to fail fast.
      if (operation == NULL)
      {
        throw OrthancException(ErrorCode_NullPointer);
      }
    }

    void AddOriginalInput(const JobOperationValue& value)
    {
      if (currentInput_ != 0)
      {
        // Cannot add input after processing has started
        throw OrthancException(ErrorCode_BadSequenceOfCalls);
      }
      else
      {
        originalInputs_->Append(value.Clone());
      }
    }

    const JobOperationValues& GetOriginalInputs() const
    {
      return *originalInputs_;
    }

    // Restart this operation: drop predecessor-produced inputs and rewind.
    void Reset()
    {
      workInputs_->Clear();
      currentInput_ = 0;
    }

    void AddNextOperation(Operation& other,
                          bool unserializing)
    {
      // Edges must go forward in index order, keeping the graph acyclic.
      if (other.index_ <= index_)
      {
        throw OrthancException(ErrorCode_InternalError);
      }

      if (!unserializing &&
          currentInput_ != 0)
      {
        // Cannot add input after processing has started
        throw OrthancException(ErrorCode_BadSequenceOfCalls);
      }
      else
      {
        nextOperations_.push_back(&other);
      }
    }

    // Done once every original and work input has been consumed.
    bool IsDone() const
    {
      return currentInput_ >= originalInputs_->GetSize() + workInputs_->GetSize();
    }

    // Apply the wrapped operation to ONE pending input, and distribute the
    // outputs to every downstream operation's work inputs.
    void Step()
    {
      if (IsDone())
      {
        throw OrthancException(ErrorCode_BadSequenceOfCalls);
      }

      // Original inputs are consumed first, then work inputs.
      const JobOperationValue* input;

      if (currentInput_ < originalInputs_->GetSize())
      {
        input = &originalInputs_->GetValue(currentInput_);
      }
      else
      {
        input = &workInputs_->GetValue(currentInput_ - originalInputs_->GetSize());
      }

      JobOperationValues outputs;
      operation_->Apply(outputs, *input);

      if (!nextOperations_.empty())
      {
        // Move outputs into the first successor, then copy from it into the
        // remaining successors (avoids one extra copy for the common
        // single-successor case).
        std::list<Operation*>::iterator first = nextOperations_.begin();
        outputs.Move(*(*first)->workInputs_);

        std::list<Operation*>::iterator current = first;
        ++current;

        while (current != nextOperations_.end())
        {
          (*first)->workInputs_->Copy(*(*current)->workInputs_);
          ++current;
        }
      }

      currentInput_ += 1;
    }

    void Serialize(Json::Value& target) const
    {
      target = Json::objectValue;
      target[CURRENT] = static_cast<unsigned int>(currentInput_);
      operation_->Serialize(target[OPERATION]);
      originalInputs_->Serialize(target[ORIGINAL_INPUTS]);
      workInputs_->Serialize(target[WORK_INPUTS]);

      // Successors are stored by index; pointers are re-linked on unserialization.
      Json::Value tmp = Json::arrayValue;
      for (std::list<Operation*>::const_iterator it = nextOperations_.begin();
           it != nextOperations_.end(); ++it)
      {
        tmp.append(static_cast<int>((*it)->index_));
      }

      target[NEXT_OPERATIONS] = tmp;
    }

    // Unserializing constructor; NEXT_OPERATIONS edges are restored by the
    // enclosing job (they need access to the other Operation instances).
    Operation(IJobUnserializer& unserializer,
              Json::Value::ArrayIndex index,
              const Json::Value& serialized) :
      index_(index)
    {
      if (serialized.type() != Json::objectValue ||
          !serialized.isMember(OPERATION) ||
          !serialized.isMember(ORIGINAL_INPUTS) ||
          !serialized.isMember(WORK_INPUTS))
      {
        throw OrthancException(ErrorCode_BadFileFormat);
      }

      currentInput_ = SerializationToolbox::ReadUnsignedInteger(serialized, CURRENT);
      operation_.reset(unserializer.UnserializeOperation(serialized[OPERATION]));

      originalInputs_.reset(JobOperationValues::Unserialize
                            (unserializer, serialized[ORIGINAL_INPUTS]));

      workInputs_.reset(JobOperationValues::Unserialize
                        (unserializer, serialized[WORK_INPUTS]));
    }
  };
  SequenceOfOperationsJob::SequenceOfOperationsJob() :
    done_(false),
    current_(0),
    // Grace period to wait for new operations before declaring the job over.
    trailingTimeout_(boost::posix_time::milliseconds(1000))
  {
  }

  SequenceOfOperationsJob::~SequenceOfOperationsJob()
  {
    // The job owns its Operation instances (raw pointers in operations_).
    for (size_t i = 0; i < operations_.size(); i++)
    {
      if (operations_[i] != NULL)
      {
        delete operations_[i];
      }
    }
  }

  void SequenceOfOperationsJob::SetDescription(const std::string& description)
  {
    boost::mutex::scoped_lock lock(mutex_);
    description_ = description;
  }

  void SequenceOfOperationsJob::GetDescription(std::string& description)
  {
    boost::mutex::scoped_lock lock(mutex_);
    description = description_;
  }

  // Register an observer to be notified (SignalDone) when the job finishes.
  void SequenceOfOperationsJob::Register(IObserver& observer)
  {
    boost::mutex::scoped_lock lock(mutex_);
    observers_.push_back(&observer);
  }
  // Lock methods: callers hold the job's mutex through the Lock object while
  // mutating the operation graph.
  void SequenceOfOperationsJob::Lock::SetTrailingOperationTimeout(unsigned int timeout)
  {
    that_.trailingTimeout_ = boost::posix_time::milliseconds(timeout);
  }

  // Append a new operation, taking ownership; wakes up a Step() that is
  // currently waiting in the trailing timeout.
  size_t SequenceOfOperationsJob::Lock::AddOperation(IJobOperation* operation)
  {
    if (IsDone())
    {
      throw OrthancException(ErrorCode_BadSequenceOfCalls);
    }

    size_t index = that_.operations_.size();

    that_.operations_.push_back(new Operation(index, operation));
    that_.operationAdded_.notify_one();

    return index;
  }

  // Feed a user-provided input to a not-yet-processed operation.
  void SequenceOfOperationsJob::Lock::AddInput(size_t index,
                                               const JobOperationValue& value)
  {
    if (IsDone())
    {
      throw OrthancException(ErrorCode_BadSequenceOfCalls);
    }
    else if (index >= that_.operations_.size() ||
             index < that_.current_)
    {
      throw OrthancException(ErrorCode_ParameterOutOfRange);
    }
    else
    {
      that_.operations_[index]->AddOriginalInput(value);
    }
  }

  // Create an edge so the outputs of operation "input" feed operation
  // "output"; both must still be pending and input must come first.
  void SequenceOfOperationsJob::Lock::Connect(size_t input,
                                              size_t output)
  {
    if (IsDone())
    {
      throw OrthancException(ErrorCode_BadSequenceOfCalls);
    }
    else if (input >= output ||
             input >= that_.operations_.size() ||
             output >= that_.operations_.size() ||
             input < that_.current_ ||
             output < that_.current_)
    {
      throw OrthancException(ErrorCode_ParameterOutOfRange);
    }
    else
    {
      Operation& a = *that_.operations_[input];
      Operation& b = *that_.operations_[output];
      a.AddNextOperation(b, false /* not unserializing */);
    }
  }
  JobStepResult SequenceOfOperationsJob::Step(const std::string& jobId)
  {
    boost::mutex::scoped_lock lock(mutex_);

    if (current_ == operations_.size())
    {
      // All queued operations were consumed: wait for the trailing timeout
      // in case a producer appends more operations before ending the job.
      LOG(INFO) << "Executing the trailing timeout in the sequence of operations";
      operationAdded_.timed_wait(lock, trailingTimeout_);

      if (current_ == operations_.size())
      {
        // No operation was added during the trailing timeout: The
        // job is over
        LOG(INFO) << "The sequence of operations is over";
        done_ = true;

        // Notify the registered observers that the sequence completed.
        for (std::list<IObserver*>::iterator it = observers_.begin();
             it != observers_.end(); ++it)
        {
          (*it)->SignalDone(*this);
        }

        return JobStepResult::Success();
      }
      else
      {
        LOG(INFO) << "New operation were added to the sequence of operations";
      }
    }

    assert(current_ < operations_.size());

    // Skip operations that have already consumed all of their inputs.
    while (current_ < operations_.size() &&
           operations_[current_]->IsDone())
    {
      current_++;
    }

    if (current_ < operations_.size())
    {
      // Process exactly one input of the current operation per step.
      operations_[current_]->Step();
    }

    return JobStepResult::Continue();
  }
  // Restart the whole sequence from the first operation.
  void SequenceOfOperationsJob::Reset()
  {
    boost::mutex::scoped_lock lock(mutex_);

    current_ = 0;
    done_ = false;

    for (size_t i = 0; i < operations_.size(); i++)
    {
      operations_[i]->Reset();
    }
  }

  float SequenceOfOperationsJob::GetProgress()
  {
    boost::mutex::scoped_lock lock(mutex_);

    // "+ 1" keeps the reported progress strictly below 1.0 until the
    // trailing timeout has elapsed and the job is declared done.
    return (static_cast<float>(current_) /
            static_cast<float>(operations_.size() + 1));
  }

  // Public (REST-visible) summary of the job.
  void SequenceOfOperationsJob::GetPublicContent(Json::Value& value)
  {
    boost::mutex::scoped_lock lock(mutex_);

    value["CountOperations"] = static_cast<unsigned int>(operations_.size());
    value["Description"] = description_;
  }

  // Full serialization (for job persistence across restarts).
  bool SequenceOfOperationsJob::Serialize(Json::Value& value)
  {
    boost::mutex::scoped_lock lock(mutex_);

    value = Json::objectValue;

    std::string jobType;
    GetJobType(jobType);
    value[TYPE] = jobType;

    value[DESCRIPTION] = description_;
    value[TRAILING_TIMEOUT] = static_cast<unsigned int>(trailingTimeout_.total_milliseconds());
    value[CURRENT] = static_cast<unsigned int>(current_);

    Json::Value tmp = Json::arrayValue;
    for (size_t i = 0; i < operations_.size(); i++)
    {
      Json::Value operation = Json::objectValue;
      operations_[i]->Serialize(operation);
      tmp.append(operation);
    }

    value[OPERATIONS] = tmp;

    return true;
  }
  // Unserializing constructor: rebuilds the operations first, then restores
  // the edges between them (which reference operations by index).
  SequenceOfOperationsJob::SequenceOfOperationsJob(IJobUnserializer& unserializer,
                                                   const Json::Value& serialized) :
    done_(false)
  {
    std::string jobType;
    GetJobType(jobType);

    if (SerializationToolbox::ReadString(serialized, TYPE) != jobType ||
        !serialized.isMember(OPERATIONS) ||
        serialized[OPERATIONS].type() != Json::arrayValue)
    {
      throw OrthancException(ErrorCode_BadFileFormat);
    }

    description_ = SerializationToolbox::ReadString(serialized, DESCRIPTION);
    trailingTimeout_ = boost::posix_time::milliseconds
      (SerializationToolbox::ReadUnsignedInteger(serialized, TRAILING_TIMEOUT));
    current_ = SerializationToolbox::ReadUnsignedInteger(serialized, CURRENT);

    const Json::Value& ops = serialized[OPERATIONS];

    // Unserialize the individual operations
    operations_.reserve(ops.size());
    for (Json::Value::ArrayIndex i = 0; i < ops.size(); i++)
    {
      operations_.push_back(new Operation(unserializer, i, ops[i]));
    }

    // Connect the next operations
    for (Json::Value::ArrayIndex i = 0; i < ops.size(); i++)
    {
      if (!ops[i].isMember(NEXT_OPERATIONS) ||
          ops[i][NEXT_OPERATIONS].type() != Json::arrayValue)
      {
        throw OrthancException(ErrorCode_BadFileFormat);
      }

      const Json::Value& next = ops[i][NEXT_OPERATIONS];
      for (Json::Value::ArrayIndex j = 0; j < next.size(); j++)
      {
        // Each edge target must be a valid operation index.
        if (next[j].type() != Json::intValue ||
            next[j].asInt() < 0 ||
            next[j].asUInt() >= operations_.size())
        {
          throw OrthancException(ErrorCode_BadFileFormat);
        }
        else
        {
          operations_[i]->AddNextOperation(*operations_[next[j].asUInt()], true);
        }
      }
    }
  }
}
|
const Sequelize = require('sequelize')
const sequelize = require('../database')
// Legend model: identity/name fields plus four small base stats.
const Legends = sequelize.define(
  'legend',
  {
    legend_id: {
      primaryKey: true,
      type: Sequelize.INTEGER.UNSIGNED,
    },
    // Machine-readable name key (indexed below); bio_name is the display name.
    legend_name_key: Sequelize.STRING,
    bio_name: Sequelize.STRING,
    weapon_one: Sequelize.STRING,
    weapon_two: Sequelize.STRING,
    // Base stats: small non-negative integers, hence unsigned TINYINT.
    strength: Sequelize.TINYINT.UNSIGNED,
    dexterity: Sequelize.TINYINT.UNSIGNED,
    defense: Sequelize.TINYINT.UNSIGNED,
    speed: Sequelize.TINYINT.UNSIGNED,
  },
  {
    // Secondary index to support lookups by name key.
    indexes: [
      {
        fields: ['legend_name_key'],
      },
    ],
  }
)
module.exports = Legends
|
// Autogenerated from library/elements.i
package ideal.library.elements;
/**
 * Marker interface with no members of its own — presumably denotes an entity
 * exposing only read-only access. Autogenerated: edit library/elements.i,
 * not this file.
 */
public interface readonly_entity extends any_entity { }
|
#!/bin/bash
# FIX: was #!/bin/sh, but the script uses [[ ]] bashisms throughout.
#
# Round-trip test: encode every input message with dmtxwrite in each
# encodation scheme, normalize the PNG with mogrify, decode it back with
# dmtxread, and compare the decoded text to the original message.
#SCHEMES="b f a c t x e 8"
SCHEMES="b a c t x e 8"
DMTXWRITE="$(command -v dmtxwrite)"
DMTXREAD="$(command -v dmtxread)"
MOGRIFY="$(command -v mogrify)"
COMPARE_DIR="compare_generated"

if [[ ! -x "$DMTXWRITE" ]]; then
    echo "Unable to execute \"$DMTXWRITE\""
    exit 1
fi
if [[ ! -x "$DMTXREAD" ]]; then
    echo "Unable to execute \"$DMTXREAD\""
    exit 1
fi
if [[ ! -x "$MOGRIFY" ]]; then
    echo "Unable to find or execute mogrify"
    exit 1
fi

if [[ ! -d "$COMPARE_DIR" ]]; then
    mkdir "$COMPARE_DIR"
fi

ERROR_COUNT=0
echo "Generating and reading back barcodes from input messages"
echo "-----------------------------------------------------------------"
for file in input_messages/message_*.dat; do
    ENCODE=$(cat "$file")
    MESSAGE=$(basename "$file" .dat | cut -d'_' -f2)
    for scheme in $SCHEMES; do
        OUTPUT="${COMPARE_DIR}/barcode_${MESSAGE}_${scheme}"

        $DMTXWRITE -e$scheme -o "${OUTPUT}.png" "$file" 1>/dev/null 2>&1
        ERROR=$?
        if [[ "$ERROR" -eq 70 ]]; then
            # XXX revisit this to use more specific error code when available
            echo " SKIP: message $MESSAGE scheme ${scheme} (unsupported character)"
            continue
        elif [[ "$ERROR" -ne 0 ]]; then
            echo " ERROR: dmtxwrite failed"
            exit "$ERROR"
        fi

        # Force 8-bit TrueColor so dmtxread sees a predictable pixel format.
        $MOGRIFY -depth 8 -type TrueColor "${OUTPUT}.png"
        ERROR=$?
        # FIX: the original tested `$?` of the `ERROR=$?` assignment (always
        # 0), so mogrify/dmtxread failures were silently ignored.
        if [[ "$ERROR" -ne 0 ]]; then
            echo " ERROR: mogrify failed"
            exit "$ERROR"
        fi

        DECODE=$($DMTXREAD "${OUTPUT}.png")
        ERROR=$?
        if [[ "$ERROR" -ne 0 ]]; then
            echo " ERROR: dmtxread failed"
            exit "$ERROR"
        fi

        if [[ "$ENCODE" == "$DECODE" ]]; then
            echo "SUCCESS: message $MESSAGE scheme ${scheme}"
        else
            echo "FAILURE: message $MESSAGE scheme ${scheme}"
            # FIX: replaced deprecated $[ ] arithmetic with $(( )).
            ERROR_COUNT=$((ERROR_COUNT + 1))
        fi
    done
done

echo "$ERROR_COUNT error(s) encountered"
echo ""
# NOTE(review): failures are counted and reported but the script still exits
# 0, matching the original behavior; callers must parse the error count.
exit 0
|
#include <iostream>

// Print the sum of two fixed integers (3 + 5 = 8) to standard output.
int main() {
    constexpr int lhs = 3;
    constexpr int rhs = 5;
    const int total = lhs + rhs;
    std::cout << total << std::endl;
    return 0;
}
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.phoenix;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
import io.trino.plugin.jdbc.UnsupportedTypeHandling;
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.CharType;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.QueryRunner;
import io.trino.testing.TestingSession;
import io.trino.testing.datatype.CreateAndInsertDataSetup;
import io.trino.testing.datatype.CreateAsSelectDataSetup;
import io.trino.testing.datatype.DataSetup;
import io.trino.testing.datatype.SqlDataTypeTest;
import io.trino.testing.sql.TestTable;
import io.trino.testing.sql.TrinoSqlExecutor;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.math.RoundingMode;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.STRICT;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_DEFAULT_SCALE;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_MAPPING;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_ROUNDING_MODE;
import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.UNSUPPORTED_TYPE_HANDLING;
import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
import static io.trino.plugin.phoenix.PhoenixQueryRunner.createPhoenixQueryRunner;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.CharType.createCharType;
import static io.trino.spi.type.DateType.DATE;
import static io.trino.spi.type.DecimalType.createDecimalType;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.SmallintType.SMALLINT;
import static io.trino.spi.type.TimeZoneKey.getTimeZoneKey;
import static io.trino.spi.type.TinyintType.TINYINT;
import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.spi.type.VarcharType.createVarcharType;
import static java.lang.String.format;
import static java.math.RoundingMode.HALF_UP;
import static java.math.RoundingMode.UNNECESSARY;
import static java.time.ZoneOffset.UTC;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @see <a href="https://phoenix.apache.org/language/datatypes.html">Phoenix data types</a>
*/
public class TestPhoenixTypeMapping
extends AbstractTestQueryFramework
{
// Shared Phoenix server handle; acquired in createQueryRunner(), released in destroy().
private TestingPhoenixServer phoenixServer;
// Zone the test JVM runs in; setUp() asserts it is America/Bahia_Banderas.
private final ZoneId jvmZone = ZoneId.systemDefault();
// no DST in 1970, but has DST in later years (e.g. 2018)
private final ZoneId vilnius = ZoneId.of("Europe/Vilnius");
// minutes offset change since 1970-01-01, no DST
private final ZoneId kathmandu = ZoneId.of("Asia/Kathmandu");
/**
 * Verifies the time-zone assumptions the date tests rely on: the JVM zone,
 * and specific DST gap/overlap instants in the vilnius and kathmandu zones.
 * checkIsGap/checkIsDoubled are helpers presumably defined later in this
 * class (not visible in this chunk).
 */
@BeforeClass
public void setUp()
{
checkState(jvmZone.getId().equals("America/Bahia_Banderas"), "This test assumes certain JVM time zone");
LocalDate dateOfLocalTimeChangeForwardAtMidnightInJvmZone = LocalDate.of(1970, 1, 1);
checkIsGap(jvmZone, dateOfLocalTimeChangeForwardAtMidnightInJvmZone.atStartOfDay());
LocalDate dateOfLocalTimeChangeForwardAtMidnightInSomeZone = LocalDate.of(1983, 4, 1);
checkIsGap(vilnius, dateOfLocalTimeChangeForwardAtMidnightInSomeZone.atStartOfDay());
LocalDate dateOfLocalTimeChangeBackwardAtMidnightInSomeZone = LocalDate.of(1983, 10, 1);
checkIsDoubled(vilnius, dateOfLocalTimeChangeBackwardAtMidnightInSomeZone.atStartOfDay().minusMinutes(1));
checkIsGap(kathmandu, LocalDate.of(1986, 1, 1).atStartOfDay());
}
/**
 * Builds the Phoenix-backed query runner used by all tests, acquiring the
 * shared TestingPhoenixServer instance as a side effect.
 */
@Override
protected QueryRunner createQueryRunner()
throws Exception
{
phoenixServer = TestingPhoenixServer.getInstance();
return createPhoenixQueryRunner(phoenixServer, ImmutableMap.of(), ImmutableList.of());
}
/**
 * Shuts down the shared Phoenix testing server once the class finishes,
 * even if tests failed (alwaysRun = true).
 */
@AfterClass(alwaysRun = true)
public void destroy()
{
TestingPhoenixServer.shutDown();
}
/**
 * Round-trips one sample value of each basic SQL type through a Trino
 * CREATE TABLE AS SELECT into Phoenix.
 */
@Test
public void testBasicTypes()
{
SqlDataTypeTest.create()
.addRoundTrip("boolean", "true", BOOLEAN, "true")
.addRoundTrip("boolean", "false", BOOLEAN, "false")
.addRoundTrip("bigint", "123456789012", BIGINT, "123456789012")
.addRoundTrip("integer", "1234567890", INTEGER, "1234567890")
.addRoundTrip("smallint", "32456", SMALLINT, "SMALLINT '32456'")
.addRoundTrip("tinyint", "5", TINYINT, "TINYINT '5'")
.addRoundTrip("double", "123.45", DOUBLE, "DOUBLE '123.45'")
.addRoundTrip("real", "123.45", REAL, "REAL '123.45'")
.execute(getQueryRunner(), trinoCreateAsSelect("test_basic_types"));
}
/**
 * BOOLEAN round-trips, including NULL, via both Trino CTAS and a table
 * created and populated directly in Phoenix (which requires a primary key).
 */
@Test
public void testBoolean()
{
SqlDataTypeTest.create()
.addRoundTrip("boolean", "true", BOOLEAN, "true")
.addRoundTrip("boolean", "false", BOOLEAN, "false")
.addRoundTrip("boolean", "NULL", BOOLEAN, "CAST(NULL AS BOOLEAN)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_boolean"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_boolean"));
}
/**
 * TINYINT round-trips at the Phoenix min/max bounds, zero, and NULL,
 * via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testTinyInt()
{
SqlDataTypeTest.create()
.addRoundTrip("tinyint", "-128", TINYINT, "TINYINT '-128'") // min value in Phoenix
.addRoundTrip("tinyint", "0", TINYINT, "TINYINT '0'")
.addRoundTrip("tinyint", "127", TINYINT, "TINYINT '127'") // max value in Phoenix
.addRoundTrip("tinyint", "NULL", TINYINT, "CAST(NULL AS TINYINT)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_tinyint"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_tinyint"));
}
/**
 * Phoenix UNSIGNED_TINYINT (mapped to Trino TINYINT) round-trips;
 * Phoenix-only, since Trino has no unsigned tinyint to create.
 */
@Test
public void testUnsignedTinyInt()
{
SqlDataTypeTest.create()
.addRoundTrip("unsigned_tinyint", "0", TINYINT, "TINYINT '0'") // min value in Phoenix
.addRoundTrip("unsigned_tinyint", "127", TINYINT, "TINYINT '127'") // max value in Phoenix
.addRoundTrip("unsigned_tinyint", "NULL", TINYINT, "CAST(NULL AS TINYINT)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_tinyint"));
}
/**
 * SMALLINT round-trips at the Phoenix min/max bounds, zero, and NULL,
 * via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testSmallInt()
{
SqlDataTypeTest.create()
.addRoundTrip("smallint", "-32768", SMALLINT, "SMALLINT '-32768'") // min value in Phoenix
.addRoundTrip("smallint", "0", SMALLINT, "SMALLINT '0'")
.addRoundTrip("smallint", "32767", SMALLINT, "SMALLINT '32767'") // max value in Phoenix
.addRoundTrip("smallint", "NULL", SMALLINT, "CAST(NULL AS SMALLINT)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_smallint"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_smallint"));
}
/**
 * Phoenix UNSIGNED_SMALLINT (mapped to Trino SMALLINT) round-trips;
 * Phoenix-only.
 */
@Test
public void testUnsignedSmallInt()
{
SqlDataTypeTest.create()
.addRoundTrip("unsigned_smallint", "0", SMALLINT, "SMALLINT '0'") // min value in Phoenix
.addRoundTrip("unsigned_smallint", "32767", SMALLINT, "SMALLINT '32767'") // max value in Phoenix
.addRoundTrip("unsigned_smallint", "NULL", SMALLINT, "CAST(NULL AS SMALLINT)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_smallint"));
}
/**
 * INTEGER round-trips at the Phoenix min/max bounds, zero, and NULL,
 * via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testInteger()
{
SqlDataTypeTest.create()
.addRoundTrip("integer", "-2147483648", INTEGER, "-2147483648") // min value in Phoenix
.addRoundTrip("integer", "0", INTEGER, "0")
.addRoundTrip("integer", "2147483647", INTEGER, "2147483647") // max value in Phoenix
.addRoundTrip("integer", "NULL", INTEGER, "CAST(NULL AS INTEGER)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_integer"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_integer"));
}
/**
 * Phoenix UNSIGNED_INT (mapped to Trino INTEGER) round-trips; Phoenix-only.
 */
@Test
public void testUnsignedInt()
{
SqlDataTypeTest.create()
.addRoundTrip("unsigned_int", "0", INTEGER, "0") // min value in Phoenix
.addRoundTrip("unsigned_int", "2147483647", INTEGER, "2147483647") // max value in Phoenix
.addRoundTrip("unsigned_int", "NULL", INTEGER, "CAST(NULL AS INTEGER)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_int"));
}
/**
 * BIGINT round-trips at the Phoenix min/max bounds, zero, and NULL,
 * via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testBigInt()
{
SqlDataTypeTest.create()
.addRoundTrip("bigint", "-9223372036854775808", BIGINT, "-9223372036854775808") // min value in Phoenix
.addRoundTrip("bigint", "0", BIGINT, "BIGINT '0'")
.addRoundTrip("bigint", "9223372036854775807", BIGINT, "9223372036854775807") // max value in Phoenix
.addRoundTrip("bigint", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_bigint"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_bigint"));
}
/**
 * Phoenix UNSIGNED_LONG (mapped to Trino BIGINT) round-trips; Phoenix-only.
 */
@Test
public void testUnsignedLong()
{
SqlDataTypeTest.create()
.addRoundTrip("unsigned_long", "0", BIGINT, "BIGINT '0'") // min value in Phoenix
.addRoundTrip("unsigned_long", "9223372036854775807", BIGINT, "BIGINT '9223372036854775807'") // max value in Phoenix
.addRoundTrip("unsigned_long", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_long"));
}
/**
 * REAL (Trino) / FLOAT (Phoenix) round-trips at the Phoenix value bounds;
 * the Trino-side and Phoenix-side data sets are built separately because
 * the literal syntax differs between the two engines.
 */
@Test
public void testFloat()
{
// Not testing Nan/-Infinity/+Infinity as those are not supported by Phoenix
SqlDataTypeTest.create()
.addRoundTrip("real", "REAL '-3.402823466E38'", REAL, "REAL '-3.402823466E38'") // min value in Phoenix
.addRoundTrip("real", "REAL '0.0'", REAL, "REAL '0.0'")
.addRoundTrip("real", "REAL '123.456E10'", REAL, "REAL '123.456E10'")
.addRoundTrip("real", "REAL '3.402823466E38'", REAL, "REAL '3.402823466E38'") // max value in Phoenix
.addRoundTrip("real", "NULL", REAL, "CAST(NULL AS REAL)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_float"));
SqlDataTypeTest.create()
.addRoundTrip("float", "-3.402823466E38", REAL, "REAL '-3.402823466E38'") // min value in Phoenix
.addRoundTrip("float", "0.0", REAL, "REAL '0.0'")
.addRoundTrip("float", "123.456E10", REAL, "REAL '123.456E10'")
.addRoundTrip("float", "3.402823466E38", REAL, "REAL '3.402823466E38'") // max value in Phoenix
.addRoundTrip("float", "NULL", REAL, "CAST(NULL AS REAL)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_float"));
}
/**
 * Phoenix UNSIGNED_FLOAT (mapped to Trino REAL) round-trips; Phoenix-only.
 */
@Test
public void testUnsignedFloat()
{
// Not testing Nan/-Infinity/+Infinity as those are not supported by Phoenix
SqlDataTypeTest.create()
.addRoundTrip("unsigned_float", "0.0", REAL, "REAL '0.0'") // min value in Phoenix
.addRoundTrip("unsigned_float", "123.456E10", REAL, "REAL '123.456E10'")
.addRoundTrip("unsigned_float", "3.402823466E38", REAL, "REAL '3.402823466E38'") // max value in Phoenix
.addRoundTrip("unsigned_float", "NULL", REAL, "CAST(NULL AS REAL)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_float"));
}
/**
 * DOUBLE round-trips at the Phoenix value bounds, zero, and NULL,
 * via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testDouble()
{
// Not testing Nan/-Infinity/+Infinity as those are not supported by Phoenix
SqlDataTypeTest.create()
.addRoundTrip("double", "-1.7976931348623158E308", DOUBLE, "DOUBLE '-1.7976931348623158E308'") // min value in Phoenix
.addRoundTrip("double", "0.0", DOUBLE, "DOUBLE '0.0'")
.addRoundTrip("double", "1.0E100", DOUBLE, "DOUBLE '1.0E100'")
.addRoundTrip("double", "123.456E10", DOUBLE, "DOUBLE '123.456E10'")
.addRoundTrip("double", "1.7976931348623158E308", DOUBLE, "DOUBLE '1.7976931348623158E308'") // max value in Phoenix
.addRoundTrip("double", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_double"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_double"));
}
/**
 * Phoenix UNSIGNED_DOUBLE (mapped to Trino DOUBLE) round-trips; Phoenix-only.
 */
@Test
public void testUnsignedDouble()
{
// Not testing Nan/-Infinity/+Infinity as those are not supported by Phoenix
SqlDataTypeTest.create()
.addRoundTrip("unsigned_double", "0.0", DOUBLE, "DOUBLE '0.0'") // min value in Phoenix
.addRoundTrip("unsigned_double", "1.0E100", DOUBLE, "DOUBLE '1.0E100'")
.addRoundTrip("unsigned_double", "123.456E10", DOUBLE, "DOUBLE '123.456E10'")
.addRoundTrip("unsigned_double", "1.7976931348623158E308", DOUBLE, "DOUBLE '1.7976931348623158E308'") // max value in Phoenix
.addRoundTrip("unsigned_double", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_unsigned_double"));
}
/**
 * VARCHAR round-trips across bounded lengths (10 .. 10485760), the
 * unbounded form, and NULL, via both Trino CTAS and direct Phoenix insert.
 */
@Test
public void testVarchar()
{
SqlDataTypeTest.create()
.addRoundTrip("varchar(10)", "'text_a'", createVarcharType(10), "CAST('text_a' AS VARCHAR(10))")
.addRoundTrip("varchar(255)", "'text_b'", createVarcharType(255), "CAST('text_b' AS VARCHAR(255))")
.addRoundTrip("varchar(65535)", "'text_d'", createVarcharType(65535), "CAST('text_d' AS VARCHAR(65535))")
.addRoundTrip("varchar(10485760)", "'text_f'", createVarcharType(10485760), "CAST('text_f' AS VARCHAR(10485760))")
.addRoundTrip("varchar", "'unbounded'", VARCHAR, "VARCHAR 'unbounded'")
.addRoundTrip("varchar(10)", "NULL", createVarcharType(10), "CAST(NULL AS VARCHAR(10))")
.addRoundTrip("varchar", "NULL", VARCHAR, "CAST(NULL AS VARCHAR)")
.execute(getQueryRunner(), trinoCreateAsSelect("test_varchar"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_varchar"));
}
/**
 * CHAR round-trips at several lengths, including CharType.MAX_LENGTH (65536)
 * and NULL, via both Trino CTAS and direct Phoenix create-and-insert.
 */
@Test
public void testChar()
{
SqlDataTypeTest.create()
.addRoundTrip("char(10)", "'text_a'", createCharType(10), "CAST('text_a' AS CHAR(10))")
.addRoundTrip("char(255)", "'text_b'", createCharType(255), "CAST('text_b' AS CHAR(255))")
.addRoundTrip("char(65536)", "'text_e'", createCharType(CharType.MAX_LENGTH), "CAST('text_e' AS CHAR(65536))")
.addRoundTrip("char(10)", "NULL", createCharType(10), "CAST(NULL AS CHAR(10))")
.execute(getQueryRunner(), trinoCreateAsSelect("test_char"))
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_char"));
}
/**
 * Fixed-length BINARY round-trips (mapped to Trino VARBINARY). Phoenix
 * zero-pads fixed binary columns, so NULL and empty inputs come back as
 * all-zero bytes; the second part checks that IS NULL consequently matches
 * no rows for such zero-padded values.
 */
@Test
public void testBinary()
{
// Not testing max length (2147483647) because it leads to 'Requested array size exceeds VM limit'
SqlDataTypeTest.create()
.addRoundTrip("binary(1)", "NULL", VARBINARY, "X'00'") // NULL stored as zeros
.addRoundTrip("binary(10)", "DECODE('', 'HEX')", VARBINARY, "X'00000000000000000000'") // empty stored as zeros
.addRoundTrip("binary(5)", "DECODE('68656C6C6F', 'HEX')", VARBINARY, "to_utf8('hello')")
.addRoundTrip("binary(26)", "DECODE('5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD', 'HEX')", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
.addRoundTrip("binary(16)", "DECODE('4261672066756C6C206F6620F09F92B0', 'HEX')", VARBINARY, "to_utf8('Bag full of 💰')")
.addRoundTrip("binary(17)", "DECODE('0001020304050607080DF9367AA7000000', 'HEX')", VARBINARY, "X'0001020304050607080DF9367AA7000000'") // non-text
.addRoundTrip("binary(6)", "DECODE('000000000000', 'HEX')", VARBINARY, "X'000000000000'")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_binary"));
// Verify 'IS NULL' doesn't get rows where the value is X'00...' padded in Phoenix
try (TestTable table = new TestTable(new PhoenixSqlExecutor(phoenixServer.getJdbcUrl()), "tpch.test_binary", "(null_binary binary(1), empty_binary binary(10), pk integer primary key)", ImmutableList.of("NULL, DECODE('', 'HEX'), 1"))) {
assertQueryReturnsEmptyResult(format("SELECT * FROM %s WHERE null_binary IS NULL", table.getName()));
assertQueryReturnsEmptyResult(format("SELECT * FROM %s WHERE empty_binary IS NULL", table.getName()));
}
}
/**
 * VARBINARY round-trips: text-derived bytes, non-text bytes, all-zero
 * bytes, and NULL (Phoenix stores an empty varbinary as NULL). Exercised
 * via Trino CTAS (X'..' literals) and Phoenix insert (DECODE(.., 'HEX')).
 */
@Test
public void testVarbinary()
{
SqlDataTypeTest.create()
.addRoundTrip("varbinary", "NULL", VARBINARY, "CAST(NULL AS varbinary)")
.addRoundTrip("varbinary", "X''", VARBINARY, "CAST(NULL AS varbinary)") // empty stored as NULL
.addRoundTrip("varbinary", "X'68656C6C6F'", VARBINARY, "to_utf8('hello')")
.addRoundTrip("varbinary", "X'5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD'", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
.addRoundTrip("varbinary", "X'4261672066756C6C206F6620F09F92B0'", VARBINARY, "to_utf8('Bag full of 💰')")
.addRoundTrip("varbinary", "X'0001020304050607080DF9367AA7000000'", VARBINARY, "X'0001020304050607080DF9367AA7000000'") // non-text
.addRoundTrip("varbinary", "X'000000000000'", VARBINARY, "X'000000000000'")
.execute(getQueryRunner(), trinoCreateAsSelect("test_varbinary"));
SqlDataTypeTest.create()
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.addRoundTrip("varbinary", "NULL", VARBINARY, "CAST(NULL AS varbinary)")
.addRoundTrip("varbinary", "DECODE('', 'HEX')", VARBINARY, "CAST(NULL AS varbinary)") // empty stored as NULL
.addRoundTrip("varbinary", "DECODE('68656C6C6F', 'HEX')", VARBINARY, "to_utf8('hello')")
.addRoundTrip("varbinary", "DECODE('5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD', 'HEX')", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
.addRoundTrip("varbinary", "DECODE('4261672066756C6C206F6620F09F92B0', 'HEX')", VARBINARY, "to_utf8('Bag full of 💰')")
.addRoundTrip("varbinary", "DECODE('0001020304050607080DF9367AA7000000', 'HEX')", VARBINARY, "X'0001020304050607080DF9367AA7000000'") // non-text
.addRoundTrip("varbinary", "DECODE('000000000000', 'HEX')", VARBINARY, "X'000000000000'")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_varbinary"));
}
/**
 * DECIMAL round-trips across a range of precisions/scales up to (38, 0),
 * including negatives and NULLs. The Trino-side set uses quoted CAST
 * literals; the Phoenix-side set uses unquoted numeric CASTs and adds the
 * mandatory primary-key column.
 */
@Test
public void testDecimal()
{
SqlDataTypeTest.create()
.addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('193' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('19' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('-193' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 1)", "CAST('10.0' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 1)", "CAST('10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 1)", "CAST('-10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 2)", "CAST('3.14' AS decimal(3, 2))", createDecimalType(3, 2), "CAST('3.14' AS decimal(3, 2))")
.addRoundTrip("decimal(4, 2)", "CAST('2' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2' AS decimal(4, 2))")
.addRoundTrip("decimal(4, 2)", "CAST('2.3' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2.3' AS decimal(4, 2))")
.addRoundTrip("decimal(24, 2)", "CAST('2' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 2)", "CAST('2.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2.3' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 2)", "CAST('123456789.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('123456789.3' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 4)", "CAST('12345678901234567890.31' AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))")
.addRoundTrip("decimal(24, 23)", "CAST('3.12345678901234567890123' AS decimal(24, 23))", createDecimalType(24, 23), "CAST('3.12345678901234567890123' AS decimal(24, 23))")
.addRoundTrip("decimal(30, 5)", "CAST('3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))")
.addRoundTrip("decimal(30, 5)", "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))")
.addRoundTrip("decimal(38, 0)", "CAST('27182818284590452353602874713526624977' AS decimal(38, 0))", createDecimalType(38, 0), "CAST('27182818284590452353602874713526624977' AS decimal(38, 0))")
.addRoundTrip("decimal(38, 0)", "CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))", createDecimalType(38, 0), "CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))")
.addRoundTrip("decimal(3, 0)", "NULL", createDecimalType(3, 0), "CAST(NULL AS decimal(3, 0))")
.addRoundTrip("decimal(38, 0)", "CAST(NULL AS decimal(38, 0))", createDecimalType(38, 0), "CAST(NULL AS decimal(38, 0))")
.execute(getQueryRunner(), trinoCreateAsSelect("test_decimal"));
SqlDataTypeTest.create()
.addRoundTrip("decimal(3, 0)", "CAST(193 AS decimal(3, 0))", createDecimalType(3, 0), "CAST('193' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 0)", "CAST(19 AS decimal(3, 0))", createDecimalType(3, 0), "CAST('19' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 0)", "CAST(-193 AS decimal(3, 0))", createDecimalType(3, 0), "CAST('-193' AS decimal(3, 0))")
.addRoundTrip("decimal(3, 1)", "CAST(10.0 AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 1)", "CAST(10.1 AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 1)", "CAST(-10.1 AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))")
.addRoundTrip("decimal(3, 2)", "CAST(3.14 AS decimal(3, 2))", createDecimalType(3, 2), "CAST('3.14' AS decimal(3, 2))")
.addRoundTrip("decimal(4, 2)", "CAST(2 AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2' AS decimal(4, 2))")
.addRoundTrip("decimal(4, 2)", "CAST(2.3 AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2.3' AS decimal(4, 2))")
.addRoundTrip("decimal(24, 2)", "CAST(2 AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 2)", "CAST(2.3 AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2.3' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 2)", "CAST(123456789.3 AS decimal(24, 2))", createDecimalType(24, 2), "CAST('123456789.3' AS decimal(24, 2))")
.addRoundTrip("decimal(24, 4)", "CAST(12345678901234567890.31 AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))")
.addRoundTrip("decimal(24, 23)", "CAST(3.12345678901234567890123 AS decimal(24, 23))", createDecimalType(24, 23), "CAST('3.12345678901234567890123' AS decimal(24, 23))")
.addRoundTrip("decimal(30, 5)", "CAST(3141592653589793238462643.38327 AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))")
.addRoundTrip("decimal(30, 5)", "CAST(-3141592653589793238462643.38327 AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))")
.addRoundTrip("decimal(38, 0)", "CAST(27182818284590452353602874713526624977 AS decimal(38, 0))", createDecimalType(38, 0), "CAST('27182818284590452353602874713526624977' AS decimal(38, 0))")
.addRoundTrip("decimal(38, 0)", "CAST(-27182818284590452353602874713526624977 AS decimal(38, 0))", createDecimalType(38, 0), "CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))")
.addRoundTrip("decimal(3, 0)", "CAST(NULL AS decimal(3, 0))", createDecimalType(3, 0), "CAST(NULL AS decimal(3, 0))")
.addRoundTrip("decimal(38, 0)", "CAST(NULL AS decimal(38, 0))", createDecimalType(38, 0), "CAST(NULL AS decimal(38, 0))")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_decimal"));
}
/**
 * Reads from a Phoenix DECIMAL column declared without precision/scale,
 * checking each combination of the decimal-mapping session properties:
 * UNNECESSARY rounding fails when digits would be lost, HALF_UP rounds to
 * the configured scale, and the strict mapping rejects the column outright.
 */
@Test
public void testDecimalUnspecifiedPrecision()
{
PhoenixSqlExecutor phoenixSqlExecutor = new PhoenixSqlExecutor(phoenixServer.getJdbcUrl());
try (TestTable testTable = new TestTable(
phoenixSqlExecutor,
"tpch.test_var_decimal",
"(pk bigint primary key, d_col decimal)",
asList("1, 1.12", "2, 123456.789", "3, -1.12", "4, -123456.789"))) {
assertQueryFails(
sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
"SELECT d_col FROM " + testTable.getName(),
"Rounding necessary");
assertQuery(
sessionWithDecimalMappingAllowOverflow(HALF_UP, 0),
"SELECT d_col FROM " + testTable.getName(),
"VALUES (1), (123457), (-1), (-123457)");
assertQueryFails(
sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 1),
"SELECT d_col FROM " + testTable.getName(),
"Rounding necessary");
assertQuery(
sessionWithDecimalMappingAllowOverflow(HALF_UP, 1),
"SELECT d_col FROM " + testTable.getName(),
"VALUES (1.1), (123456.8), (-1.1), (-123456.8)");
assertQueryFails(
sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 2),
"SELECT d_col FROM " + testTable.getName(),
"Rounding necessary");
assertQuery(
sessionWithDecimalMappingAllowOverflow(HALF_UP, 2),
"SELECT d_col FROM " + testTable.getName(),
"VALUES (1.12), (123456.79), (-1.12), (-123456.79)");
assertQuery(
sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 3),
"SELECT d_col FROM " + testTable.getName(),
"VALUES (1.12), (123456.789), (-1.12), (-123456.789)");
assertQueryFails(
sessionWithDecimalMappingStrict(CONVERT_TO_VARCHAR),
"SELECT d_col FROM " + testTable.getName(),
"Rounding necessary");
}
}
/**
 * DATE round-trips across historically tricky values (Julian/Gregorian
 * switch, pre-epoch dates, DST-gap midnights, Trino min/max), executed
 * under each session zone supplied by sessionZonesDataProvider (defined
 * elsewhere in this class — not visible in this chunk). The Phoenix-side
 * set builds the same dates with TO_DATE(..., 'local').
 */
@Test(dataProvider = "sessionZonesDataProvider")
public void testDate(ZoneId sessionZone)
{
Session session = Session.builder(getSession())
.setTimeZoneKey(getTimeZoneKey(sessionZone.getId()))
.build();
SqlDataTypeTest.create()
.addRoundTrip("date", "DATE '-5877641-06-23'", DATE, "DATE '-5877641-06-23'") // min value in Trino
.addRoundTrip("date", "DATE '-0001-01-01'", DATE, "DATE '-0001-01-01'")
.addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'")
.addRoundTrip("date", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'")
.addRoundTrip("date", "DATE '1582-10-05'", DATE, "DATE '1582-10-15'") // begin julian->gregorian switch
.addRoundTrip("date", "DATE '1582-10-14'", DATE, "DATE '1582-10-24'") // end julian->gregorian switch
.addRoundTrip("date", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'")
.addRoundTrip("date", "DATE '1899-12-31'", DATE, "DATE '1899-12-31'")
.addRoundTrip("date", "DATE '1900-01-01'", DATE, "DATE '1900-01-01'")
.addRoundTrip("date", "DATE '1952-04-04'", DATE, "DATE '1952-04-04'") // before epoch
.addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
.addRoundTrip("date", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'")
.addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'") // summer on northern hemisphere (possible DST)
.addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'") // winter on northern hemisphere (possible DST on southern hemisphere)
.addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'")
.addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'")
.addRoundTrip("date", "DATE '9999-12-31'", DATE, "DATE '9999-12-31'")
.addRoundTrip("date", "DATE '5881580-07-11'", DATE, "DATE '5881580-07-11'") // max value in Trino
.addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
.execute(getQueryRunner(), session, trinoCreateAsSelect(session, "test_date"))
.execute(getQueryRunner(), session, trinoCreateAsSelect(getSession(), "test_date"))
.execute(getQueryRunner(), session, trinoCreateAndInsert(session, "test_date"));
SqlDataTypeTest.create()
.addRoundTrip("date", "TO_DATE('5877642-06-23 BC', 'yyyy-MM-dd G', 'local')", DATE, "DATE '-5877641-06-23'") // min value in Trino
.addRoundTrip("date", "TO_DATE('0002-01-01 BC', 'yyyy-MM-dd G', 'local')", DATE, "DATE '-0001-01-01'")
.addRoundTrip("date", "TO_DATE('0001-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '0001-01-01'")
.addRoundTrip("date", "TO_DATE('1582-10-04', 'yyyy-MM-dd', 'local')", DATE, "DATE '1582-10-04'")
.addRoundTrip("date", "TO_DATE('1582-10-05', 'yyyy-MM-dd', 'local')", DATE, "DATE '1582-10-15'") // begin julian->gregorian switch
.addRoundTrip("date", "TO_DATE('1582-10-14', 'yyyy-MM-dd', 'local')", DATE, "DATE '1582-10-24'") // end julian->gregorian switch
.addRoundTrip("date", "TO_DATE('1582-10-15', 'yyyy-MM-dd', 'local')", DATE, "DATE '1582-10-15'")
.addRoundTrip("date", "TO_DATE('1899-12-31', 'yyyy-MM-dd', 'local')", DATE, "DATE '1899-12-31'")
.addRoundTrip("date", "TO_DATE('1900-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1900-01-01'")
.addRoundTrip("date", "TO_DATE('1952-04-04', 'yyyy-MM-dd', 'local')", DATE, "DATE '1952-04-04'") // before epoch
.addRoundTrip("date", "TO_DATE('1970-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1970-01-01'")
.addRoundTrip("date", "TO_DATE('1970-02-03', 'yyyy-MM-dd', 'local')", DATE, "DATE '1970-02-03'")
.addRoundTrip("date", "TO_DATE('2017-07-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '2017-07-01'") // summer on northern hemisphere (possible DST)
.addRoundTrip("date", "TO_DATE('2017-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '2017-01-01'") // winter on northern hemisphere (possible DST on southern hemisphere)
.addRoundTrip("date", "TO_DATE('1983-04-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1983-04-01'")
.addRoundTrip("date", "TO_DATE('1983-10-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1983-10-01'")
.addRoundTrip("date", "TO_DATE('9999-12-31', 'yyyy-MM-dd', 'local')", DATE, "DATE '9999-12-31'")
.addRoundTrip("date", "TO_DATE('5881580-07-11', 'yyyy-MM-dd', 'local')", DATE, "DATE '5881580-07-11'") // max value in Trino
.addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), session, phoenixCreateAndInsert("tpch.test_date"));
}
/**
 * Phoenix UNSIGNED_DATE (mapped to Trino DATE) round-trips, Phoenix-only;
 * the minimum representable value is the epoch (1970-01-01). Run under
 * each session zone from sessionZonesDataProvider (defined elsewhere in
 * this class — not visible in this chunk).
 */
@Test(dataProvider = "sessionZonesDataProvider")
public void testUnsignedDate(ZoneId sessionZone)
{
Session session = Session.builder(getSession())
.setTimeZoneKey(getTimeZoneKey(sessionZone.getId()))
.build();
SqlDataTypeTest.create()
.addRoundTrip("unsigned_date", "TO_DATE('1970-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1970-01-01'") // min value in Phoenix
.addRoundTrip("unsigned_date", "TO_DATE('1970-02-03', 'yyyy-MM-dd', 'local')", DATE, "DATE '1970-02-03'")
.addRoundTrip("unsigned_date", "TO_DATE('1983-04-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1983-04-01'")
.addRoundTrip("unsigned_date", "TO_DATE('1983-10-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '1983-10-01'")
.addRoundTrip("unsigned_date", "TO_DATE('2017-07-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '2017-07-01'") // summer on northern hemisphere (possible DST)
.addRoundTrip("unsigned_date", "TO_DATE('2017-01-01', 'yyyy-MM-dd', 'local')", DATE, "DATE '2017-01-01'") // winter on northern hemisphere (possible DST on southern hemisphere)
.addRoundTrip("unsigned_date", "TO_DATE('9999-12-31', 'yyyy-MM-dd', 'local')", DATE, "DATE '9999-12-31'")
.addRoundTrip("unsigned_date", "TO_DATE('5881580-07-11', 'yyyy-MM-dd', 'local')", DATE, "DATE '5881580-07-11'") // max value in Trino
.addRoundTrip("unsigned_date", "NULL", DATE, "CAST(NULL AS DATE)")
.addRoundTrip("integer primary key", "1", INTEGER, "1")
.execute(getQueryRunner(), session, phoenixCreateAndInsert("tpch.test_unsigned_date"));
}
// Verifies round-trip mapping of Phoenix ARRAY columns for every supported element type,
// in both directions: Trino CTAS ("ARRAY(t)" syntax) and Phoenix-side insert ("t ARRAY" syntax).
@Test
public void testArray()
{
    // basic types
    SqlDataTypeTest.create()
            .addRoundTrip("ARRAY(boolean)", "ARRAY[true, false]", new ArrayType(BOOLEAN), "ARRAY[true, false]")
            .addRoundTrip("ARRAY(bigint)", "ARRAY[123456789012]", new ArrayType(BIGINT), "ARRAY[123456789012]")
            .addRoundTrip("ARRAY(integer)", "ARRAY[1, 2, 1234567890]", new ArrayType(INTEGER), "ARRAY[1, 2, 1234567890]")
            .addRoundTrip("ARRAY(smallint)", "ARRAY[32456]", new ArrayType(SMALLINT), "ARRAY[SMALLINT '32456']")
            .addRoundTrip("ARRAY(double)", "ARRAY[123.45]", new ArrayType(DOUBLE), "ARRAY[DOUBLE '123.45']")
            .addRoundTrip("ARRAY(real)", "ARRAY[123.45]", new ArrayType(REAL), "ARRAY[REAL '123.45']")
            .execute(getQueryRunner(), trinoCreateAsSelect("test_array_basic"));
    // date arrays written via Trino
    SqlDataTypeTest.create()
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '1952-04-03']", new ArrayType(DATE), "ARRAY[DATE '1952-04-03']") // before epoch
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '1970-01-01']", new ArrayType(DATE), "ARRAY[DATE '1970-01-01']")
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '1970-02-03']", new ArrayType(DATE), "ARRAY[DATE '1970-02-03']")
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '2017-07-01']", new ArrayType(DATE), "ARRAY[DATE '2017-07-01']") // summer on northern hemisphere (possible DST)
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '2017-01-01']", new ArrayType(DATE), "ARRAY[DATE '2017-01-01']") // winter on northern hemisphere (possible DST on southern hemisphere)
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '1983-04-01']", new ArrayType(DATE), "ARRAY[DATE '1983-04-01']")
            .addRoundTrip("ARRAY(date)", "ARRAY[DATE '1983-10-01']", new ArrayType(DATE), "ARRAY[DATE '1983-10-01']")
            .execute(getQueryRunner(), trinoCreateAsSelect("test_array_date"));
    // date arrays written via Phoenix (TO_DATE literals)
    SqlDataTypeTest.create()
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('1952-04-03', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '1952-04-03']") // before epoch
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('1970-01-01', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '1970-01-01']")
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('1970-02-03', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '1970-02-03']")
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('2017-07-01', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '2017-07-01']") // summer on northern hemisphere (possible DST)
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('2017-01-01', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '2017-01-01']") // winter on northern hemisphere (possible DST on southern hemisphere)
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('1983-04-01', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '1983-04-01']")
            .addRoundTrip("date ARRAY", "ARRAY[TO_DATE('1983-10-01', 'yyyy-MM-dd', 'local')]", new ArrayType(DATE), "ARRAY[DATE '1983-10-01']")
            .addRoundTrip("integer primary key", "1", INTEGER, "1")
            .execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_array_date"));
    // decimal arrays written via Trino
    SqlDataTypeTest.create()
            .addRoundTrip("ARRAY(decimal(3, 0))", "ARRAY[CAST('193' AS decimal(3, 0)), CAST('19' AS decimal(3, 0)), CAST('-193' AS decimal(3, 0))]", new ArrayType(createDecimalType(3, 0)), "ARRAY[CAST('193' AS decimal(3, 0)), CAST('19' AS decimal(3, 0)), CAST('-193' AS decimal(3, 0))]")
            .addRoundTrip("ARRAY(decimal(3, 1))", "ARRAY[CAST('10.0' AS decimal(3, 1)), CAST('10.1' AS decimal(3, 1)), CAST('-10.1' AS decimal(3, 1))]", new ArrayType(createDecimalType(3, 1)), "ARRAY[CAST('10.0' AS decimal(3, 1)), CAST('10.1' AS decimal(3, 1)), CAST('-10.1' AS decimal(3, 1))]")
            .addRoundTrip("ARRAY(decimal(4, 2))", "ARRAY[CAST('2' AS decimal(4, 2)), CAST('2.3' AS decimal(4, 2))]", new ArrayType(createDecimalType(4, 2)), "ARRAY[CAST('2' AS decimal(4, 2)), CAST('2.3' AS decimal(4, 2))]")
            .addRoundTrip("ARRAY(decimal(24, 2))", "ARRAY[CAST('2' AS decimal(24, 2)), CAST('2.3' AS decimal(24, 2)), CAST('123456789.3' AS decimal(24, 2))]", new ArrayType(createDecimalType(24, 2)), "ARRAY[CAST('2' AS decimal(24, 2)), CAST('2.3' AS decimal(24, 2)), CAST('123456789.3' AS decimal(24, 2))]")
            .addRoundTrip("ARRAY(decimal(24, 4))", "ARRAY[CAST('12345678901234567890.31' AS decimal(24, 4))]", new ArrayType(createDecimalType(24, 4)), "ARRAY[CAST('12345678901234567890.31' AS decimal(24, 4))]")
            .addRoundTrip("ARRAY(decimal(30, 5))", "ARRAY[CAST('3141592653589793238462643.38327' AS decimal(30, 5)), CAST('-3141592653589793238462643.38327' AS decimal(30, 5))]", new ArrayType(createDecimalType(30, 5)), "ARRAY[CAST('3141592653589793238462643.38327' AS decimal(30, 5)), CAST('-3141592653589793238462643.38327' AS decimal(30, 5))]")
            .addRoundTrip("ARRAY(decimal(38, 0))", "ARRAY[CAST('27182818284590452353602874713526624977' AS decimal(38, 0)), CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))]", new ArrayType(createDecimalType(38, 0)), "ARRAY[CAST('27182818284590452353602874713526624977' AS decimal(38, 0)), CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))]")
            .execute(getQueryRunner(), trinoCreateAsSelect("test_array_decimal"));
    // decimal arrays written via Phoenix
    SqlDataTypeTest.create()
            .addRoundTrip("decimal(3, 0) ARRAY", "ARRAY[CAST(193 AS decimal(3, 0)), CAST(19 AS decimal(3, 0)), CAST(-193 AS decimal(3, 0))]", new ArrayType(createDecimalType(3, 0)), "ARRAY[CAST(193 AS decimal(3, 0)), CAST(19 AS decimal(3, 0)), CAST(-193 AS decimal(3, 0))]")
            .addRoundTrip("decimal(3, 1) ARRAY", "ARRAY[CAST(10.0 AS decimal(3, 1)), CAST(10.1 AS decimal(3, 1)), CAST(-10.1 AS decimal(3, 1))]", new ArrayType(createDecimalType(3, 1)), "ARRAY[CAST(10.0 AS decimal(3, 1)), CAST(10.1 AS decimal(3, 1)), CAST(-10.1 AS decimal(3, 1))]")
            .addRoundTrip("decimal(4, 2) ARRAY", "ARRAY[CAST(2 AS decimal(4, 2)), CAST(2.3 AS decimal(4, 2))]", new ArrayType(createDecimalType(4, 2)), "ARRAY[CAST(2 AS decimal(4, 2)), CAST(2.3 AS decimal(4, 2))]")
            .addRoundTrip("decimal(24, 2) ARRAY", "ARRAY[CAST(2 AS decimal(24, 2)), CAST(2.3 AS decimal(24, 2)), CAST(123456789.3 AS decimal(24, 2))]", new ArrayType(createDecimalType(24, 2)), "ARRAY[CAST(2 AS decimal(24, 2)), CAST(2.3 AS decimal(24, 2)), CAST(123456789.3 AS decimal(24, 2))]")
            .addRoundTrip("decimal(24, 4) ARRAY", "ARRAY[CAST(12345678901234567890.31 AS decimal(24, 4))]", new ArrayType(createDecimalType(24, 4)), "ARRAY[CAST(12345678901234567890.31 AS decimal(24, 4))]")
            .addRoundTrip("decimal(30, 5) ARRAY", "ARRAY[CAST(3141592653589793238462643.38327 AS decimal(30, 5)), CAST(-3141592653589793238462643.38327 AS decimal(30, 5))]", new ArrayType(createDecimalType(30, 5)), "ARRAY[CAST(3141592653589793238462643.38327 AS decimal(30, 5)), CAST(-3141592653589793238462643.38327 AS decimal(30, 5))]")
            .addRoundTrip("decimal(38, 0) ARRAY", "ARRAY[CAST(27182818284590452353602874713526624977 AS decimal(38, 0)), CAST(-27182818284590452353602874713526624977 AS decimal(38, 0))]", new ArrayType(createDecimalType(38, 0)), "ARRAY[CAST('27182818284590452353602874713526624977' AS decimal(38, 0)), CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))]")
            .addRoundTrip("integer primary key", "1", INTEGER, "1")
            .execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_array_decimal"));
    // char arrays written via Trino
    SqlDataTypeTest.create()
            .addRoundTrip("ARRAY(char(10))", "ARRAY['text_a']", new ArrayType(createCharType(10)), "ARRAY[CAST('text_a' AS char(10))]")
            .addRoundTrip("ARRAY(char(255))", "ARRAY['text_b']", new ArrayType(createCharType(255)), "ARRAY[CAST('text_b' AS char(255))]")
            .addRoundTrip("ARRAY(char(65535))", "ARRAY['text_d']", new ArrayType(createCharType(65535)), "ARRAY[CAST('text_d' AS char(65535))]")
            .execute(getQueryRunner(), trinoCreateAsSelect("test_array_char"));
    // char arrays written via Phoenix
    SqlDataTypeTest.create()
            .addRoundTrip("char(10) ARRAY", "ARRAY['text_a']", new ArrayType(createCharType(10)), "ARRAY[CAST('text_a' AS char(10))]")
            .addRoundTrip("char(255) ARRAY", "ARRAY['text_b']", new ArrayType(createCharType(255)), "ARRAY[CAST('text_b' AS char(255))]")
            .addRoundTrip("char(65535) ARRAY", "ARRAY['text_d']", new ArrayType(createCharType(65535)), "ARRAY[CAST('text_d' AS char(65535))]")
            .addRoundTrip("integer primary key", "1", INTEGER, "1")
            .execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_array_char"));
    // varchar arrays written via Trino
    SqlDataTypeTest.create()
            .addRoundTrip("ARRAY(varchar(10))", "ARRAY['text_a']", new ArrayType(createVarcharType(10)), "ARRAY[CAST('text_a' AS varchar(10))]")
            .addRoundTrip("ARRAY(varchar(255))", "ARRAY['text_b']", new ArrayType(createVarcharType(255)), "ARRAY[CAST('text_b' AS varchar(255))]")
            .addRoundTrip("ARRAY(varchar(65535))", "ARRAY['text_d']", new ArrayType(createVarcharType(65535)), "ARRAY[CAST('text_d' AS varchar(65535))]")
            .addRoundTrip("ARRAY(varchar(10485760))", "ARRAY['text_f']", new ArrayType(createVarcharType(10485760)), "ARRAY[CAST('text_f' AS varchar(10485760))]")
            .addRoundTrip("ARRAY(varchar)", "ARRAY['unbounded']", new ArrayType(VARCHAR), "ARRAY[CAST('unbounded' AS varchar)]")
            .execute(getQueryRunner(), trinoCreateAsSelect("test_array_varchar"));
    // varchar arrays written via Phoenix
    SqlDataTypeTest.create()
            .addRoundTrip("varchar(10) ARRAY", "ARRAY['text_a']", new ArrayType(createVarcharType(10)), "ARRAY[CAST('text_a' AS varchar(10))]")
            .addRoundTrip("varchar(255) ARRAY", "ARRAY['text_b']", new ArrayType(createVarcharType(255)), "ARRAY[CAST('text_b' AS varchar(255))]")
            .addRoundTrip("varchar(65535) ARRAY", "ARRAY['text_d']", new ArrayType(createVarcharType(65535)), "ARRAY[CAST('text_d' AS varchar(65535))]")
            .addRoundTrip("varchar(10485760) ARRAY", "ARRAY['text_f']", new ArrayType(createVarcharType(10485760)), "ARRAY[CAST('text_f' AS varchar(10485760))]")
            .addRoundTrip("varchar ARRAY", "ARRAY['unbounded']", new ArrayType(VARCHAR), "ARRAY[CAST('unbounded' AS varchar)]")
            .addRoundTrip("integer primary key", "1", INTEGER, "1")
            .execute(getQueryRunner(), phoenixCreateAndInsert("tpch.test_array_varchar"));
}
// Verifies NULL handling for array columns: a NULL array, an array containing only NULL,
// and an array mixing NULL with non-NULL elements.
@Test
public void testArrayNulls()
{
    // Verify only SELECT instead of using SqlDataTypeTest because array comparison not supported for arrays with null elements
    try (TestTable table = new TestTable(getQueryRunner()::execute, "test_array_nulls", "(c1 ARRAY(boolean), c2 ARRAY(varchar), c3 ARRAY(varchar))", ImmutableList.of("(NULL, ARRAY[NULL], ARRAY['foo', NULL, 'bar', NULL])"))) {
        assertThat(query("SELECT c1 FROM " + table.getName())).matches("VALUES CAST(NULL AS ARRAY(boolean))");
        assertThat(query("SELECT c2 FROM " + table.getName())).matches("VALUES CAST(ARRAY[NULL] AS ARRAY(varchar))");
        assertThat(query("SELECT c3 FROM " + table.getName())).matches("VALUES CAST(ARRAY['foo', NULL, 'bar', NULL] AS ARRAY(varchar))");
    }
}
// Session time zones used by the zone-parameterized tests above: UTC, the JVM's own zone,
// two unrelated zones, and the testing default.
@DataProvider
public Object[][] sessionZonesDataProvider()
{
    return new Object[][] {
            {UTC},
            {jvmZone},
            // using two non-JVM zones so that we don't need to worry what Phoenix system zone is
            {vilnius},
            {kathmandu},
            {ZoneId.of(TestingSession.DEFAULT_TIME_ZONE_KEY.getId())},
    };
}
// Asserts that the given local date-time falls into a DST "gap" (a wall-clock time that
// never occurs) in the given zone; fails fast if the test fixture assumption is wrong.
private static void checkIsGap(ZoneId zone, LocalDateTime dateTime)
{
    verify(isGap(zone, dateTime), "Expected %s to be a gap in %s", dateTime, zone);
}
// A local date-time with no valid offsets in the zone's rules lies inside a forward
// DST transition (gap).
private static boolean isGap(ZoneId zone, LocalDateTime dateTime)
{
    return zone.getRules().getValidOffsets(dateTime).isEmpty();
}
// Asserts that the given local date-time occurs twice in the given zone (backward DST
// transition — two valid offsets).
private static void checkIsDoubled(ZoneId zone, LocalDateTime dateTime)
{
    verify(zone.getRules().getValidOffsets(dateTime).size() == 2, "Expected %s to be doubled in %s", dateTime, zone);
}
// CTAS data setup through Trino using the default session.
private DataSetup trinoCreateAsSelect(String tableNamePrefix)
{
    return trinoCreateAsSelect(getSession(), tableNamePrefix);
}
// CTAS data setup through Trino with an explicit session (e.g. a specific time zone).
private DataSetup trinoCreateAsSelect(Session session, String tableNamePrefix)
{
    return new CreateAsSelectDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
}
// CREATE TABLE + INSERT data setup through Trino with an explicit session.
private DataSetup trinoCreateAndInsert(Session session, String tableNamePrefix)
{
    return new CreateAndInsertDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
}
// CREATE TABLE + UPSERT data setup directly against Phoenix over JDBC, bypassing Trino
// for the write side of the round-trip.
private DataSetup phoenixCreateAndInsert(String tableNamePrefix)
{
    return new CreateAndInsertDataSetup(new PhoenixSqlExecutor(phoenixServer.getJdbcUrl()), tableNamePrefix);
}
/**
 * Session with the "phoenix" catalog's decimal mapping set to ALLOW_OVERFLOW,
 * using the given rounding mode and default scale.
 */
private Session sessionWithDecimalMappingAllowOverflow(RoundingMode roundingMode, int scale)
{
    return Session.builder(getSession())
            .setCatalogSessionProperty("phoenix", DECIMAL_MAPPING, ALLOW_OVERFLOW.name())
            .setCatalogSessionProperty("phoenix", DECIMAL_ROUNDING_MODE, roundingMode.name())
            // Integer.toString(scale) — avoids the pointless boxing of Integer.valueOf(scale).toString()
            .setCatalogSessionProperty("phoenix", DECIMAL_DEFAULT_SCALE, Integer.toString(scale))
            .build();
}
/**
 * Session with the "phoenix" catalog's decimal mapping set to STRICT and the given
 * handling for unsupported types.
 */
private Session sessionWithDecimalMappingStrict(UnsupportedTypeHandling unsupportedTypeHandling)
{
    return Session.builder(getSession())
            .setCatalogSessionProperty("phoenix", DECIMAL_MAPPING, STRICT.name())
            .setCatalogSessionProperty("phoenix", UNSUPPORTED_TYPE_HANDLING, unsupportedTypeHandling.name())
            .build();
}
}
|
'''
Pytorch implementation for pre-activation ResNet.
Original paper: https://arxiv.org/abs/1603.05027
'''
import torch
import torch.nn as nn
from torch.autograd import Variable
__all__ = ['PreActResNet', 'preact_resnet18', 'preact_resnet34', 'preact_resnet50',
'preact_resnet101', 'preact_resnet152']
def preact_resnet18():
    """Pre-activation ResNet-18: basic blocks, two per stage."""
    return PreActResNet(BasicPreAct, [2, 2, 2, 2])
def preact_resnet34():
    """Pre-activation ResNet-34: basic blocks, [3, 4, 6, 3] per stage."""
    return PreActResNet(BasicPreAct, [3, 4, 6, 3])
def preact_resnet50():
    """Pre-activation ResNet-50: bottleneck blocks, [3, 4, 6, 3] per stage."""
    return PreActResNet(FullPreAct, [3, 4, 6, 3])
def preact_resnet101():
    """Pre-activation ResNet-101: bottleneck blocks, [3, 4, 23, 3] per stage."""
    return PreActResNet(FullPreAct, [3, 4, 23, 3])
def preact_resnet152():
    """Pre-activation ResNet-152: bottleneck blocks, [3, 8, 36, 3] per stage."""
    return PreActResNet(FullPreAct, [3, 8, 36, 3])
class BasicPreAct(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1, shortcut=None):
super(BasicPreAct, self).__init__()
self.bn1 = nn.BatchNorm2d(in_planes)
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
self.shortcut = shortcut
def forward(self, x):
residual = x
out = self.bn1(x)
out = self.relu(out)
out = self.conv1(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv2(out)
if self.shortcut is not None:
residual = self.shortcut(x)
out += residual
return out
class FullPreAct(nn.Module):
expansion = 4
def __init__(self, in_planes, planes, stride=1, shortcut=None):
super(FullPreAct, self).__init__()
self.bn1 = nn.BatchNorm2d(in_planes)
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, stride=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, stride=1, bias=False)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
self.shortcut = shortcut
def forward(self, x):
residual = x
out = self.bn1(x)
out = self.relu(out)
out = self.conv1(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn3(out)
out = self.relu(out)
out = self.conv3(out)
if self.shortcut is not None:
residual = self.shortcut(x)
out += residual
return out
class PreActResNet(nn.Module):
def __init__(self, block, layers, num_classes=10):
super(PreActResNet, self).__init__()
self.in_planes = 64
self.conv = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = nn.AvgPool2d(4)
self.classifier = nn.Linear(512 * block.expansion, num_classes)
def _make_layer(self, block, planes, num_blocks, stride=1):
shortcut = None
if stride != 1 or self.in_planes != planes * block.expansion:
shortcut = nn.Sequential(
nn.Conv2d(self.in_planes, planes * block.expansion, kernel_size=1, stride=stride,
bias=False)
)
layers = []
layers.append(block(self.in_planes, planes, stride, shortcut))
self.in_planes = planes * block.expansion
for i in range(1, num_blocks):
layers.append(block(self.in_planes, planes))
return nn.Sequential(*layers)
def forward(self, x):
out = self.conv(x)
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = self.avgpool(out)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
def test_cifar():
    """Smoke test: build the deepest variant and run one 32x32 forward pass."""
    print('----- Testing cifar[PreactResNet] -----')
    model = preact_resnet152()
    # model = model.cuda()
    print(model)
    X = torch.randn(1, 3, 32, 32)
    # torch.autograd.Variable is deprecated since PyTorch 0.4: plain tensors
    # carry autograd state, so the model can be called on X directly.
    out = model(X)
    print('out:', out)
    print('out size:', out.size())
# Allow running this module directly as a quick smoke test.
if __name__ == '__main__':
    test_cifar()
|
<gh_stars>1-10
var transformApply = require('../../../alg/permutation/transform-apply');
var expect = require('chai').expect;

describe('Transform-apply', () => {
  // T(input, transforms, expected): asserts that applying `transforms` to
  // `input` yields `expected` (deep equality).
  var T = (array, transforms, transformed) => expect(transformApply(array, transforms)).to.eql(transformed);

  it('applies a transformation on an array', () => {
    // Empty transform list leaves the array unchanged.
    T(['a', 'b', 'c'], [], ['a', 'b', 'c']);
    // Each [i, j] pair appears to swap positions i and j — presumably; confirm
    // against the transform-apply implementation.
    T(['a', 'b', 'c'], [[0, 1]], ['b', 'a', 'c']);
    // Transforms are applied left to right.
    T(['a', 'b', 'c'], [[0, 2], [1, 2]], ['c', 'a', 'b']);
  });
});
|
'''Dump the data from the database into CSV files.'''
import dbm
import csv
def main():
    """Export the video->user and video->server dbm databases to CSV files.

    Reads ``video_2_user.dbm`` and ``video_2_server.dbm`` from the current
    directory and writes ``video_2_user.csv`` and ``video_2_server.csv``.
    Server entries whose video id has no entry in the user database are
    skipped with a printed warning.
    """
    import contextlib  # local import: only needed here

    # contextlib.closing() guarantees both dbm handles are released even on
    # error; the originals were opened and never closed (resource leak).
    with contextlib.closing(dbm.open('video_2_user.dbm', 'r')) as video_2_user_db, \
            contextlib.closing(dbm.open('video_2_server.dbm', 'r')) as video_2_server_db:
        with open('video_2_user.csv', 'w', newline='') as video_2_user_file:
            writer = csv.writer(video_2_user_file)
            writer.writerow(['Video ID', 'Username'])
            # dbm keys/values are bytes; decode for CSV output.
            for video in sorted(video_2_user_db.keys()):
                writer.writerow([video.decode(), video_2_user_db[video].decode()])
        with open('video_2_server.csv', 'w', newline='') as video_2_server_file:
            writer = csv.writer(video_2_server_file)
            writer.writerow(['Video ID', 'Server Media URL'])
            for video in sorted(video_2_server_db.keys()):
                if video not in video_2_user_db:
                    print('Warning: ', video, 'not in database skipping.')
                    continue
                writer.writerow([video.decode(), video_2_server_db[video].decode()])
# Run the export when executed as a script.
if __name__ == '__main__':
    main()
|
<filename>resources/assets/js/router/index.js
import Vue from 'vue'
import Router from 'vue-router'
Vue.use(Router)
// Application route table: maps each URL path to a single-file Vue component.
// NOTE(review): components are registered via require('...vue'); depending on
// the vue-loader/webpack version this may need `.default` — confirm against
// the build setup.
export default new Router({
  routes: [
    // Landing page.
    {
      path: '/',
      component: require('../components/Home/HomeComponent.vue'),
      name: 'home',
      meta: { },
    },
    // User registration.
    {
      name: 'users.create',
      path: '/register',
      component: require('../components/Users/Create.vue'),
      meta: { },
    },
    {
      path: '/login',
      name: 'login',
      //meta: { requiresAuth: true },
      meta: { },
      component: require('../components/Login/Login.vue')
    },
    // Category listing and detail (detail receives :slug as a prop).
    { path: '/categories',
      name: 'categories.index',
      component: require('../components/Categorie/Index.vue')
    },
    { path: '/categories/:slug',
      name: 'categories.show',
      props: true,
      component: require('../components/Categorie/Show.vue')
    },
    // Package listing and detail (detail receives :slug as a prop).
    { path: '/packages',
      name: 'packages.index',
      component: require('../components/Package/Index.vue')
    },
    {
      name: 'packages.show',
      path: '/packages/:slug',
      props: true,
      component: require('../components/Package/Show.vue')
    },
    // Static pages. NOTE: 'aboutus' is served under the localized path '/sobre'.
    {
      name: 'aboutus',
      path: '/sobre',
      props: true,
      component: require('../components/AboutUs.vue')
    },
    {
      name: 'contactus',
      path: '/contactus',
      props: true,
      component: require('../components/ContactUs.vue')
    },
    // Shopping cart and single-item view (:id passed as a prop).
    {
      name: 'cart',
      path: '/cart',
      props: true,
      component: require('../components/Cart/index.vue')
    },
    {
      name: 'cart.show_item',
      path: '/cart/show_item/:id',
      props: true,
      component: require('../components/Cart/show_item.vue')
    }
  ],
  // CSS class applied to exactly-matching <router-link>s.
  linkExactActiveClass: 'active'
})
|
<gh_stars>0
'use strict';
module.exports = function(app) {
var Validator = require('../models/shallow-validator');
var Marker = require('../models/markers');
function fetchMap(req, res) {
Marker.getAll(function(err, markers) {
if (err) {
res.send(err);
} else {
res.send({
code: 200,
message: 'Here are all the markers.',
markers: markers
});
}
});
}
function newMarker(req, res) {
var token = req.body.token;
var title = req.body.title;
var description = req.body.description;
var location = req.body.location;
var lat = parseFloat(req.body.lat, 10);
var lon = parseFloat(req.body.lon, 10);
var type = parseInt(req.body.type, 10);
var ttl = parseInt(req.body.ttl, 10);
if (Validator.sendError(res, Validator.token(token))) { return; }
if (Validator.sendError(res, Validator.title(title))) { return; }
if (Validator.sendError(res, Validator.description(description))) { return; }
if (Validator.sendError(res, Validator.location(location))) { return; }
if (Validator.sendError(res, Validator.coordinate(lat))) { return; }
if (Validator.sendError(res, Validator.coordinate(lon))) { return; }
if (Validator.sendError(res, Validator.type(type))) { return; }
if (Validator.sendError(res, Validator.ttl(ttl))) { return; }
Marker.create(token, title, description, location, type, lat, lon, ttl,
function (err, marker) {
if (err) {
res.send(err);
return;
}
res.send({
code : 200,
message : 'Marker successfully created',
marker : marker
});
}
);
}
function deleteMarker(req, res) {
var token = req.body.token;
if (Validator.sendError(res, Validator.token(token))) { return; }
// Any checks??
Marker.delete(function(err) {
if (err) {
res.send(err);
return;
}
res.send({
code : 200,
message : 'Marker successfully deleted'
});
});
}
// What is the structure of this method?
//
app.get('/v1/map', fetchMap);
app.post('/v1/map/marker', newMarker);
// Not in the API specs
// app.delete('/v1/map/marker', deleteMarker);
};
|
package com.cgfy.oauth.bussApi.controller;
import com.cgfy.oauth.base.bean.AjaxResponse;
import com.cgfy.oauth.bussApi.feign.UserFeignClient;
import com.cgfy.oauth.bussApi.feign.bean.UserInfoOutputBean;
import com.cgfy.oauth.base.config.AuthLoginLimitProperties;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.security.Principal;
/**
 * Account verification endpoints (login-lockout release, user lookup).
 */
@Api(tags = "验证账户", produces = MediaType.APPLICATION_JSON_VALUE)
@RestController("OauthAccountController")
@RequestMapping("/oauth/account")
public class OauthAccountController {
    @Autowired
    private AuthLoginLimitProperties loginLimitProperties;
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;
    @Autowired
    private UserFeignClient userFeignClient;

    /**
     * Release an account's login lockout by deleting its rate-limit key in Redis.
     * The {@code user} principal parameter is injected but currently unused —
     * presumably intended for auditing; confirm before removing.
     */
    @ApiOperation(value = "账户登录锁定解除")
    @RequestMapping(value = "/accountLockoutRelease/{username}", method = RequestMethod.POST)
    public AjaxResponse<Object> accountLockoutRelease(@ApiParam(name = "username", value = "解锁用户登录名", required = true) @PathVariable("username") String username,
                                                      Principal user) {
        // Lockout state is stored under <configured namespace> + username.
        String redisKey = loginLimitProperties.getRedisKeyNamespace() + username;
        redisTemplate.delete(redisKey);
        return AjaxResponse.success();
    }

    /**
     * Fetch user details by primary key, delegating to the user service via Feign.
     */
    @ApiOperation(value = "通过主键id获取用户详情")
    @RequestMapping(value = "/selectUserById/{id}", method=RequestMethod.GET)
    public AjaxResponse<UserInfoOutputBean> selectUserById(@PathVariable String id) {
        return userFeignClient.getDetail(id);
    }
}
|
<reponame>msnraju/al-productivity-tools
import IDataItem from "./data-item.model";
/**
 * A data set: its items plus the comment lines captured around it.
 */
export default interface IDataSet {
  /** The items that make up the data set. */
  dataItems: IDataItem[];
  /** Comment lines appearing after the data set's label. */
  postLabelComments: string[];
  /** Other comment lines associated with the data set. */
  comments: string[];
}
|
package com.example.xty.helloagain.MyDataBase;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Property;
/**
 * greenDAO entity holding app settings (currently only the week number).
 * Created by xty on 2018/6/21.
 * NOTE: the @Generated hash values are maintained by the greenDAO plugin —
 * do not edit them by hand.
 */
@Entity
public class Settings {
    @Id
    private Long id; // primary key (was: 主键)
    @Property(nameInDb = "weekNumber")
    private String weekNumber;

    public String getWeekNumber() {
        return this.weekNumber;
    }

    public void setWeekNumber(String weekNumber) {
        this.weekNumber = weekNumber;
    }

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    @Generated(hash = 1513464318)
    public Settings(Long id, String weekNumber) {
        this.id = id;
        this.weekNumber = weekNumber;
    }

    @Generated(hash = 456090543)
    public Settings() {
    }
}
|
<reponame>ebdavison/papermerge
# coding: utf-8
from __future__ import unicode_literals
import datetime
import warnings
from django.template import Library
from django.contrib.admin.templatetags.admin_list import result_headers
from django.conf import settings
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.utils.translation import gettext
from django.contrib.admin.utils import (
display_for_field, display_for_value, lookup_field)
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
try:
from django.urls import NoReverseMatch
except ImportError: # Django < 1.10 pragma: no cover
from django.core.urlresolvers import NoReverseMatch
try:
from django.utils.deprecation import RemovedInDjango20Warning
except ImportError:
RemovedInDjango20Warning = RuntimeWarning
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.urls import reverse
from django.contrib.admin.utils import quote
from papermerge.core.models import Access
MPTT_ADMIN_LEVEL_INDENT = getattr(settings, 'MPTT_ADMIN_LEVEL_INDENT', 10)
register = Library()
def remote_field(field):
    """Return a model field's "remote field" across Django versions.

    Copied from mptt v0.9.0 ``mptt.compat`` (removed in v0.9.1): recent
    Django exposes ``field.remote_field``, older versions used ``field.rel``.
    Returns None when neither attribute exists.
    TODO: code relying on this will very likely need refactoring.
    """
    if hasattr(field, 'remote_field'):
        return field.remote_field
    return getattr(field, 'rel', None)
@register.simple_tag()
def breadcrumbs_tree_path(node):
    # Template tag: render the node's ancestor chain (including the node
    # itself, prefixed by the top-level index link) as HTML breadcrumbs,
    # with each anchor carrying the "reverse" CSS class.
    return build_tree_path(
        node,
        include_self=True,
        include_index=True,
        html_class_attr="reverse"
    )
###
# Ripped from contrib.admin (1.10)
def _coerce_field_name(field_name, field_index):
"""
Coerce a field_name (which may be a callable) to a string.
"""
if callable(field_name):
if field_name.__name__ == '<lambda>':
return 'lambda' + str(field_index)
else:
return field_name.__name__
return field_name
def get_empty_value_display(cl):
    """Return the changelist's placeholder string for empty values.

    Django >= 1.9 exposes ``ModelAdmin.get_empty_value_display``; older
    versions fall back to the legacy ``EMPTY_CHANGELIST_VALUE`` constant.
    """
    model_admin = cl.model_admin
    if hasattr(model_admin, 'get_empty_value_display'):
        return model_admin.get_empty_value_display()
    # Django < 1.9
    from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
    return EMPTY_CHANGELIST_VALUE
###
# Ripped from contrib.admin's (1.10) items_for_result tag.
# The only difference is we're indenting nodes according to their level.
def mptt_items_for_result(cl, result, form):
    """
    Generates the actual list of data.

    Yields a single dict for the given changelist row: one entry per
    list_display column (each a {'row_class', 'repr'} dict), plus the
    node's 'id', 'ctype' ('Document' or 'Folder'), extracted 'text' for
    documents, and 'title'.
    """
    # each key in the returned result is the name of the model's field as
    # follows:
    # * "action_checkbox"
    # * "title_with_class"
    yielded_result = {}

    def link_in_col(is_first, field_name, cl):
        # Should this column render as a link? Mirrors contrib.admin's rule:
        # first column links by default unless list_display_links overrides.
        if cl.list_display_links is None:
            return False
        if is_first and not cl.list_display_links:
            return True
        return field_name in cl.list_display_links

    first = True
    pk = cl.lookup_opts.pk.attname
    for field_index, field_name in enumerate(cl.list_display):
        # #### MPTT SUBSTITUTION START
        empty_value_display = get_empty_value_display(cl)
        # #### MPTT SUBSTITUTION END
        row_classes = [
            'field-%s' % _coerce_field_name(field_name, field_index)
        ]
        try:
            f, attr, value = lookup_field(field_name, result, cl.model_admin)
        except ObjectDoesNotExist:
            result_repr = empty_value_display
        else:
            empty_value_display = getattr(
                attr,
                'empty_value_display',
                empty_value_display
            )
            if f is None or f.auto_created:
                # Non-model-field column (callable / admin method / checkbox).
                if field_name == 'action_checkbox':
                    row_classes = ['action-checkbox']
                allow_tags = getattr(attr, 'allow_tags', False)
                boolean = getattr(attr, 'boolean', False)
                # #### MPTT SUBSTITUTION START
                try:
                    # Changed in Django 1.9, now takes 3 arguments
                    result_repr = display_for_value(
                        value, empty_value_display, boolean)
                except TypeError:
                    result_repr = display_for_value(value, boolean)
                # #### MPTT SUBSTITUTION END
                if allow_tags:
                    warnings.warn(
                        "Deprecated allow_tags attribute used on field {}. "
                        "Use django.utils.safestring.format_html(), "
                        "format_html_join(), or mark_safe() instead.".format(
                            field_name
                        ),
                        RemovedInDjango20Warning
                    )
                    result_repr = mark_safe(result_repr)
                if isinstance(value, (datetime.date, datetime.time)):
                    row_classes.append('nowrap')
            else:
                # #### MPTT SUBSTITUTION START
                is_many_to_one = isinstance(
                    remote_field(f),
                    models.ManyToOneRel
                )
                if is_many_to_one:
                    # #### MPTT SUBSTITUTION END
                    field_val = getattr(result, f.name)
                    if field_val is None:
                        result_repr = empty_value_display
                    else:
                        result_repr = field_val
                else:
                    # #### MPTT SUBSTITUTION START
                    try:
                        result_repr = display_for_field(value, f)
                    except TypeError:
                        # Changed in Django 1.9, now takes 3 arguments
                        result_repr = display_for_field(
                            value, f, empty_value_display)
                    # #### MPTT SUBSTITUTION END
                if isinstance(
                    f,
                    (models.DateField, models.TimeField, models.ForeignKey)
                ):
                    row_classes.append('nowrap')
        if force_text(result_repr) == '':
            result_repr = mark_safe(' ')
        row_class = mark_safe(' class="%s"' % ' '.join(row_classes))
        # If list_display_links not defined,
        # add the link tag to the first field
        if link_in_col(first, field_name, cl):
            first = False
            # Display link to the result's change_view if the url exists, else
            # display just the result's representation.
            try:
                url = cl.url_for_result(result)
            except NoReverseMatch:
                link_or_text = result_repr
            else:
                url = add_preserved_filters(
                    {
                        'preserved_filters': cl.preserved_filters,
                        'opts': cl.opts
                    },
                    url,
                )
                # Convert the pk to something that can be used in Javascript.
                # Problem cases are long ints (23L) and non-ASCII strings.
                if cl.to_field:
                    attr = str(cl.to_field)
                else:
                    attr = pk
                value = result.serializable_value(attr)
                yielded_result[field_name] = {
                    'row_class': row_class,
                    'repr': result_repr,
                }
        else:
            # By default the fields come from ModelAdmin.list_editable,
            # but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if (form and field_name in form.fields and not (
                    field_name == cl.model._meta.pk.name and
                    form[cl.model._meta.pk.name].is_hidden)):
                bf = form[field_name]
                result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
            yielded_result[field_name] = {
                'row_class': row_class,
                'repr': result_repr,
            }
    _, _, _id = lookup_field('id', result, cl.model_admin)
    yielded_result['id'] = _id
    # ctype = Document | Folder
    yielded_result['ctype'] = str(result.polymorphic_ctype)
    if (str(result.polymorphic_ctype) == 'Document'):
        yielded_result['text'] = result.get_real_instance().text
    yielded_result['title'] = str(result.title)
    yield yielded_result
def mptt_results(cl):
    """Yield one materialized row (list of dicts) per changelist result,
    pairing each result with its formset form when a formset is present."""
    if not cl.formset:
        for node in cl.result_list:
            yield list(mptt_items_for_result(cl, node, None))
        return
    for node, node_form in zip(cl.result_list, cl.formset.forms):
        yield list(mptt_items_for_result(cl, node, node_form))
def url_for_folder(node):
    """Changelist URL that opens the given folder node in the boss admin."""
    view = 'admin:core_basetreenode_changelist_obj'
    return reverse(view, args=(quote(node.id),), current_app='boss')
def url_for_document(node):
    """Change-view URL for the given document node in the boss admin."""
    view = 'admin:core_basetreenode_change'
    return reverse(view, args=(quote(node.id),), current_app='boss')
def build_url_for_index(html_class_attr='', title='Documents'):
    """Anchor tag pointing at the root changelist (the breadcrumb root)."""
    index_url = reverse('admin:core_basetreenode_changelist', current_app='boss')
    return format_html(
        '<a href="{}" class="{}" alt="{}">{}</a>',
        index_url,
        html_class_attr,
        title,
        title,
    )
def build_url_for_node(node, html_class_attr=''):
    """Anchor tag for a node: folders link to their changelist, everything
    else to its change view."""
    is_folder = node.polymorphic_ctype.name in ('Folder', 'Ordner')
    target = url_for_folder(node) if is_folder else url_for_document(node)
    return format_html(
        '<a href="{}" class="{}" data-id="{}" alt="{}">{}</a>',
        target,
        html_class_attr,
        node.id,
        node.title,
        node.title,
    )
def build_tree_path(node, include_self=False, include_index=False, html_class_attr=''):
    """Render the ancestry of ``node`` as an html breadcrumb path.

    Example::

        Documents > Folder A > Folder B > Document C

    Every element is an anchor linking to that node.  ``include_index``
    prepends a link to the boss index page.  ``node`` may be falsy, in
    which case only the optional index link is rendered.
    """
    links = []
    if include_index:
        links.append(build_url_for_index(html_class_attr=html_class_attr))
    if node:
        links.extend(
            build_url_for_node(ancestor, html_class_attr=html_class_attr)
            for ancestor in node.get_ancestors(include_self=include_self)
        )
    return mark_safe(' › '.join(links))
def get_icon_html(node):
    """Inline icon markup for a node: folder icon or empty-file icon."""
    if node.polymorphic_ctype.name == 'Folder':
        markup = "<i class='yellow-folder margin-y-sm'></i>"
    else:
        # The text-based file icon variant is currently disabled; all
        # documents get the empty-file icon.
        markup = "<i class='file-empty margin-y-sm'></i>"
    return mark_safe(markup)
def mptt_search_results(cl, user):
    """Rows for the search view, restricted to nodes the user may read."""
    return [
        {
            'icon': get_icon_html(node),
            'dir_path': build_tree_path(node),
            'title': build_url_for_node(node),
            'page_highlight': mark_safe(node.page_highlight),
            'model_ctype': node.model_ctype,
        }
        for node in cl.result_list
        if user.has_perm(Access.PERM_READ, node)
    ]
def mptt_result_list(cl):
    """
    Displays the headers and data list together
    """
    # Identical context to boss_result(); delegate to keep the two in sync.
    return boss_result(cl)
def boss_result(cl):
    """Shared template context: the changelist, its headers and its rows."""
    headers = list(result_headers(cl))
    rows = list(mptt_results(cl))
    return {
        'cl': cl,
        'result_headers': headers,
        'results': rows,
    }
@register.inclusion_tag('boss/mptt_change_list_results_grid.html')
def boss_result_grid(cl):
    """Render the changelist headers and rows using the grid template."""
    return boss_result(cl)
@register.inclusion_tag('boss/mptt_change_list_results_list.html')
def boss_result_list(cl):
    """Render the changelist headers and rows using the list template."""
    return boss_result(cl)
@register.inclusion_tag('boss/mptt_change_list_search_results.html')
def boss_search_results(cl, user):
    """Render search results (permission-filtered) with the search template."""
    return {
        'cl': cl,
        'results': mptt_search_results(cl, user)
    }
@register.simple_tag
def login_tag():
    """Translated "Sign In here" anchor pointing at the login view."""
    return format_html('<a href="{}">{}</a>', reverse('login'), gettext('Sign In here'))
@register.simple_tag
def register_tag():
    """Translated "please register" anchor pointing at the register view."""
    return format_html('<a href="{}">{}</a>', reverse('register'), gettext('please register'))
@register.simple_tag
def terms_tag():
    """New-tab link to the terms-and-conditions page."""
    # The path goes through gettext so each locale can supply its own URL.
    return format_html(
        '<a target="_blank" href="{}">{}</a>',
        gettext('/en/terms'),
        gettext('Terms and Conditions'),
    )
@register.simple_tag
def privacy_tag():
    """New-tab link to the privacy-policy page."""
    # The path goes through gettext so each locale can supply its own URL.
    return format_html(
        '<a target="_blank" href="{}">{}</a>',
        gettext('/en/privacy'),
        gettext('Privacy Policy'),
    )
@register.simple_tag
def cookies_tag():
    """New-tab link to the cookies-policy page."""
    # The path goes through gettext so each locale can supply its own URL.
    return format_html(
        '<a target="_blank" href="{}">{}</a>',
        gettext('/en/cookies'),
        gettext('Cookies Policy'),
    )
|
# Optimized primality test: trial division only up to sqrt(num), since any
# composite number has a factor no larger than its square root.  (The old
# version claimed to be optimized but divided by every i < num — O(n).)
def is_prime(num):
    """Return True if ``num`` is prime; values <= 1 are never prime."""
    if num <= 1:
        return False
    if num < 4:
        # 2 and 3 are prime.
        return True
    if num % 2 == 0:
        return False
    # Only odd candidate divisors from 3 up to floor(sqrt(num)).
    for i in range(3, int(num ** 0.5) + 1, 2):
        if num % i == 0:
            return False
    return True
# Generate the consecutive candidates and print the ones that are prime.
def prime_numbers(n):
    """Print every prime in [2, n], separated by single spaces."""
    for prime in filter(is_prime, range(2, n + 1)):
        print(prime, end=' ')
# Driver code: print the primes up to 10 when run as a script.
if __name__ == '__main__':
    prime_numbers(10)
#!/bin/sh
# Fetch and unpack the preprocessed dataset and create checkpoint dirs.
# Abort on the first failure instead of blindly continuing (e.g. unzip
# running on a half-downloaded archive).
set -eu

echo "Creating directories..."
mkdir -p cg_checkpoints   # -p: don't fail when re-running setup

echo "Downloading preprocessed data..."
# Quote the URL so '?' is never treated as a glob character.
gdown 'https://drive.google.com/uc?id=18fSwjw_F2aL-nDpQouEJ9Mh_cC12SyW9'

echo "Unpacking preprocessed data..."
unzip -q data.zip

echo "Finished setup!"
|
<gh_stars>10-100
package io.opensphere.csvcommon.detect.location.model;
import io.opensphere.importer.config.ColumnType;
/**
* The LatLonColumnResults class stores a set of potential latitude and
* longitude columns.
*/
public final class LatLonColumnResults
{
    /** The first candidate location column. */
    private final PotentialLocationColumn myPotentialColumn1;

    /** The second candidate location column. */
    private final PotentialLocationColumn myPotentialColumn2;

    /** Combined confidence that the pair is a lat/lon match (0..1 scale). */
    private float myConfidence;

    /** Divisor converting a 1-100 match score into the 0..1 range. */
    private static final float ourScaleFactor = 100.0f;

    /** Multiplier applied when only the lat/lon long-name flags agree. */
    private static final float ourConfidenceFactor1 = .8f;

    /** Divisor applied when prefixes are empty but suffixes agree. */
    private static final float ourConfidenceFactor2 = 1.2f;

    /** Divisor applied when both prefixes and suffixes agree. */
    private static final float ourConfidenceFactor3 = 1.3f;

    /** The Column type. */
    private ColumnType myColumnType;

    /**
     * Instantiates a new lat lon column results.
     *
     * @param col1 the col1
     * @param col2 the col2
     */
    public LatLonColumnResults(PotentialLocationColumn col1, PotentialLocationColumn col2)
    {
        myPotentialColumn1 = col1;
        myPotentialColumn2 = col2;
    }

    /**
     * Gets the latitude column.
     *
     * @return whichever column is typed LAT, or null when neither is
     */
    public PotentialLocationColumn getLatColumn()
    {
        if (myPotentialColumn1.getType().equals(ColumnType.LAT))
        {
            return myPotentialColumn1;
        }
        else if (myPotentialColumn2.getType().equals(ColumnType.LAT))
        {
            return myPotentialColumn2;
        }
        return null;
    }

    /**
     * Gets the longitude column.
     *
     * @return whichever column is typed LON, or null when neither is
     */
    public PotentialLocationColumn getLonColumn()
    {
        if (myPotentialColumn1.getType().equals(ColumnType.LON))
        {
            return myPotentialColumn1;
        }
        else if (myPotentialColumn2.getType().equals(ColumnType.LON))
        {
            return myPotentialColumn2;
        }
        return null;
    }

    /**
     * Sets the confidence based on exact match or not.
     *
     * NOTE(review): getLatColumn()/getLonColumn() return null when the
     * corresponding type is absent, which would NPE below — presumably this
     * is only called for a confirmed lat/lon pair; confirm.
     *
     * @param matchConfidence the degree of confidence that these columns are a
     *            match, from 1 to 100.
     */
    public void setConfidence(int matchConfidence)
    {
        // Exact bare-name match: full score, scaled to 0..1.
        if (getLatColumn().getPrefix().isEmpty() && getLonColumn().getPrefix().isEmpty() && getLatColumn().getSuffix().isEmpty()
                && getLonColumn().getSuffix().isEmpty())
        {
            myConfidence = matchConfidence / ourScaleFactor;
        }
        // No prefixes, matching suffixes: mild penalty.
        else if (getLatColumn().getPrefix().isEmpty() && getLonColumn().getPrefix().isEmpty()
                && getLatColumn().getSuffix().equals(getLonColumn().getSuffix()))
        {
            myConfidence = 1.0f / ourConfidenceFactor2 * (matchConfidence / ourScaleFactor);
        }
        // Matching prefixes and suffixes: slightly larger penalty.
        else if (getLatColumn().getPrefix().equals(getLonColumn().getPrefix())
                && getLatColumn().getSuffix().equals(getLonColumn().getSuffix()))
        {
            myConfidence = 1.0f / ourConfidenceFactor3 * (matchConfidence / ourScaleFactor);
        }
        // Neither prefixes nor suffixes match: not a pair.
        else if (!getLatColumn().getPrefix().equals(getLonColumn().getPrefix())
                && !getLatColumn().getSuffix().equals(getLonColumn().getSuffix()))
        {
            myConfidence = 0f;
        }
        // Prefixes don't match but there is a lat and lon.
        // NOTE(review): the chain is not exhaustive — e.g. prefixes equal,
        // suffixes different and isLongName values differing leaves
        // myConfidence at its previous value; confirm that is intended.
        else if (getLatColumn().isLongName() == getLonColumn().isLongName())
        {
            myConfidence = ourConfidenceFactor1 * matchConfidence / ourScaleFactor;
        }
        getLatColumn().setConfidence(myConfidence);
        getLonColumn().setConfidence(myConfidence);
    }

    /**
     * Sets the confidence directly and propagates it to both columns.
     *
     * @param conf the new confidence
     */
    public void setConfidence(float conf)
    {
        myConfidence = conf;
        getLatColumn().setConfidence(myConfidence);
        getLonColumn().setConfidence(myConfidence);
    }

    /**
     * Gets the confidence.
     *
     * @return the confidence
     */
    public float getConfidence()
    {
        return myConfidence;
    }

    /**
     * Gets the column type.
     *
     * @return the column type
     */
    public ColumnType getColumnType()
    {
        return myColumnType;
    }

    /**
     * Sets the column type.
     *
     * @param columnType the new column type
     */
    public void setColumnType(ColumnType columnType)
    {
        myColumnType = columnType;
    }
}
|
import PropTypes from 'prop-types'
import React from 'react'
const Column = (props) => {
const { children, width = 'full', className = '', renderFeatures } = props
const classes = [className]
if (width === 'full') classes.push('govuk-grid-column-full')
if (width === 'one-half') classes.push('govuk-grid-column-one-half')
if (width === 'one-third') classes.push('govuk-grid-column-one-third')
if (width === 'two-thirds') classes.push('govuk-grid-column-two-thirds')
const finalClassName = classes.join(' ').trim()
return (
<div className={finalClassName}>
{children}
{renderFeatures(props)}
</div>
)
}
export default Column
// Prop contract: renderFeatures receives the full props object and its
// output is rendered after children (defaults to rendering nothing).
Column.propTypes = {
  children: PropTypes.node,
  className: PropTypes.string,
  renderFeatures: PropTypes.func,
  width: PropTypes.string
}

// NOTE(review): defaults are declared both here and as destructuring
// defaults inside the component — redundant but harmless; confirm which
// convention the codebase prefers before removing either.
Column.defaultProps = {
  className: '',
  renderFeatures: () => { return null },
  width: 'full'
}
|
package main
import (
"errors"
"github.com/go-martini/martini"
"github.com/martini-contrib/render"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
"strconv"
"time"
)
// TorrentDB bundles a MongoDB session with the torrents collection handle.
type TorrentDB struct {
	session    *mgo.Session
	collection *mgo.Collection
}

// Torrent is one tracked torrent document; Btih (the info-hash) is the _id.
type Torrent struct {
	Btih     string `bson:"_id,omitempty"`
	Title    string
	Category string
	Size     int
	Details  []string
	Swarm    Stats
	Lastmod  time.Time
	Imported time.Time
}

// Stats is a swarm snapshot; -1 means "not yet scraped"
// (see Insert and GetStale).
type Stats struct {
	Seeders  int
	Leechers int
}
// NewTorrentDB dials MongoDB at url and returns a handle to the
// bitcannon.torrents collection with the indexes the API relies on ensured.
func NewTorrentDB(url string) (*TorrentDB, error) {
	session, err := mgo.Dial(url)
	if err != nil {
		return nil, err
	}
	session.SetMode(mgo.Monotonic, true)
	collection := session.DB("bitcannon").C("torrents")
	// Fail fast if an index cannot be ensured instead of silently running
	// without it (the original discarded these errors); text search and the
	// seeders/lastmod sorts depend on them.
	indexes := []mgo.Index{
		{Key: []string{"$text:title"}, Name: "title"},
		{Key: []string{"category"}, Name: "category"},
		{Key: []string{"swarm.seeders"}, Name: "seeders"},
		{Key: []string{"lastmod"}, Name: "lastmod"},
	}
	for _, index := range indexes {
		if err := collection.EnsureIndex(index); err != nil {
			session.Close()
			return nil, err
		}
	}
	return &TorrentDB{session, collection}, nil
}
// Close releases the underlying MongoDB session.
func (torrentDB *TorrentDB) Close() {
	torrentDB.session.Close()
}
// Stats responds with the total torrent count and the configured trackers.
// NOTE(review): `trackers` is a package-level variable defined elsewhere.
func (torrentDB *TorrentDB) Stats(r render.Render) {
	count, err := torrentDB.collection.Count()
	if err != nil {
		r.JSON(500, map[string]interface{}{"message": "API Error"})
		return
	}
	r.JSON(200, map[string]interface{}{"Count": count, "Trackers": trackers})
}
// Categories responds with every distinct category and its torrent count.
func (torrentDB *TorrentDB) Categories(r render.Render) {
	var result []string
	err := torrentDB.collection.Find(nil).Distinct("category", &result)
	if err != nil {
		r.JSON(500, map[string]interface{}{"message": "API Error"})
		return
	}
	// BUG FIX: the old size computation (`for size = range result {}` then
	// make(..., size+1)) yielded a one-element slice holding a nil map when
	// there were no categories at all, which rendered as [null] in JSON.
	// len(result) is the count actually wanted.
	stats := make([]map[string]interface{}, len(result))
	for i, cat := range result {
		total, err := torrentDB.collection.Find(bson.M{"category": cat}).Count()
		if err != nil {
			// NOTE(review): this error shape ({<name>: 0}) differs from the
			// success shape ({"name": ..., "count": ...}); kept as-is for
			// backward compatibility — confirm whether clients rely on it.
			stats[i] = map[string]interface{}{cat: 0}
		} else {
			stats[i] = map[string]interface{}{"name": cat, "count": total}
		}
	}
	r.JSON(200, stats)
}
// Browse lists the top resultLimit torrents of params["category"],
// most-seeded first.
func (torrentDB *TorrentDB) Browse(r render.Render, params martini.Params) {
	result := []Torrent{}
	// NOTE(review): assigns the package-level `err` (no :=) — shared mutable
	// state across concurrent handlers; confirm this is intentional.
	err = torrentDB.collection.Find(bson.M{"category": params["category"]}).Sort("-swarm.seeders").Limit(resultLimit).All(&result)
	if err != nil {
		r.JSON(404, map[string]interface{}{"message": err.Error()})
		return
	}
	r.JSON(200, result)
}
// Search renders up to resultLimit torrents matching the full-text query in
// params["query"], optionally restricted to params["category"], sorted by
// seeders descending and offset by the optional params["skip"].
func (torrentDB *TorrentDB) Search(r render.Render, params martini.Params) {
	result := []Torrent{}
	skip := 0
	if value, ok := params["skip"]; ok {
		// NOTE(review): assigns the package-level `err` (no :=) — confirm
		// the shared variable is intended here.
		skip, err = strconv.Atoi(value)
		if err != nil {
			r.JSON(400, map[string]interface{}{"message": err.Error()})
			return
		}
	}
	var pipe *mgo.Pipe
	if category, ok := params["category"]; ok {
		// Text match first, then category filter, then seeders-desc sort.
		pipe = torrentDB.collection.Pipe([]bson.M{
			{"$match": bson.M{"$text": bson.M{"$search": params["query"]}}},
			{"$match": bson.M{"category": category}},
			{"$sort": bson.M{"swarm.seeders": -1}},
			{"$skip": skip},
			{"$limit": resultLimit},
		})
	} else {
		pipe = torrentDB.collection.Pipe([]bson.M{
			{"$match": bson.M{"$text": bson.M{"$search": params["query"]}}},
			{"$sort": bson.M{"swarm.seeders": -1}},
			{"$skip": skip},
			{"$limit": resultLimit},
		})
	}
	iter := pipe.Iter()
	err = iter.All(&result)
	if err != nil {
		r.JSON(404, map[string]interface{}{"message": err.Error()})
		return
	}
	r.JSON(200, result)
}
// Get responds with the single torrent whose info-hash matches
// params["btih"], or 404 when absent.
func (torrentDB *TorrentDB) Get(r render.Render, params martini.Params) {
	result := Torrent{}
	// NOTE(review): assigns the package-level `err` (no :=) — confirm.
	err = torrentDB.collection.Find(bson.M{"_id": params["btih"]}).One(&result)
	if err != nil {
		r.JSON(404, map[string]interface{}{"message": "Torrent not found."})
		return
	}
	r.JSON(200, result)
}
// Insert stores a new torrent with unscraped swarm stats (-1/-1) and both
// timestamps set to now. Returns false plus a generic error on any failure
// (including a duplicate _id).
func (torrentDB *TorrentDB) Insert(btih string, title string, category string, size int, details string) (bool, error) {
	var detailsArr []string
	if details != "" {
		detailsArr = []string{details}
	}
	err := torrentDB.collection.Insert(
		&Torrent{Btih: btih,
			Title:    title,
			Category: category,
			Size:     size,
			Details:  detailsArr,
			Swarm:    Stats{Seeders: -1, Leechers: -1},
			Lastmod:  time.Now(),
			Imported: time.Now(),
		})
	if err != nil {
		// NOTE(review): the underlying error is discarded, so callers cannot
		// distinguish duplicates from connectivity failures — confirm.
		return false, errors.New("Something went wrong when trying to insert.")
	}
	return true, nil
}
// Update writes a fresh swarm snapshot and bumps lastmod for btih.
// NOTE(review): the error from collection.Update is ignored, so updating a
// missing document fails silently — confirm that is acceptable here.
func (torrentDB *TorrentDB) Update(btih string, seeders int, leechers int) {
	match := bson.M{"_id": btih}
	update := bson.M{"$set": bson.M{"swarm": &Stats{Seeders: seeders, Leechers: leechers}, "lastmod": time.Now()}}
	torrentDB.collection.Update(match, update)
}
// GetStale returns up to 50 info-hashes in need of a scrape: never-scraped
// torrents (seeders/leechers == -1) first, otherwise the ones not updated
// within the last 24 hours.
func (torrentDB *TorrentDB) GetStale() []string {
	result := []Torrent{}
	// NOTE(review): assigns the package-level `err`, and query errors are
	// not acted on — an error simply yields an empty batch; confirm.
	err = torrentDB.collection.Find(bson.M{"swarm.seeders": -1, "swarm.leechers": -1}).Limit(50).All(&result)
	if len(result) == 0 {
		// No unscraped torrents, get stale ones
		torrentDB.collection.Find(bson.M{"lastmod": bson.M{"$lt": time.Now().Add(-24 * time.Hour)}}).Sort("lastmod").Limit(50).All(&result)
	}
	var btih = make([]string, len(result))
	for i := range result {
		btih[i] = result[i].Btih
	}
	return btih
}
|
<filename>benchmarks/statcalc/addValue/Eq/oldV.java<gh_stars>1-10
package benchmarks.statcalc.addValue.Eq;
public class oldV {
    // Running aggregates; mean and deviation are re-derived on every call.
    static double sum = 0;
    static double sumOfSquares = 0;
    static double mean = 0;
    static double deviation = 0;
    static int count = 0;

    /**
     * Fold one sample into the running mean and population deviation.
     * @param val the new sample value
     */
    public static void addValue(double val)
    {
        count++;
        // NOTE(review): console I/O inside the measured method will dominate
        // its cost; presumably deliberate for this "old version" benchmark.
        System.out.println("stat ");
        double currentVal = val;
        sum += currentVal;
        sumOfSquares += currentVal * currentVal;
        mean = sum / count;
        // Population std-dev via E[x^2] - E[x]^2; this form can lose
        // precision (cancellation) when samples are large and close together.
        deviation = Math.sqrt( (sumOfSquares / count) - (mean * mean) );
    }
}
// Copyright (c) Microsoft Corporation.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#define _SILENCE_CXX20_IS_ALWAYS_EQUAL_DEPRECATION_WARNING
#include <memory>
#include <type_traits>
#include <utility>
#define STATIC_ASSERT(...) static_assert(__VA_ARGS__, #__VA_ARGS__)
using namespace std;
// Each probe below is a variable template that is true iff the named member
// type / expression is well-formed for T (classic void_t detection idiom).
template <class T, class = void>
constexpr bool has_member_size_type = false;
template <class T>
constexpr bool has_member_size_type<T, void_t<typename T::size_type>> = true;

template <class T, class = void>
constexpr bool has_member_difference_type = false;
template <class T>
constexpr bool has_member_difference_type<T, void_t<typename T::difference_type>> = true;

template <class T, class = void>
constexpr bool has_member_pocma = false;
template <class T>
constexpr bool has_member_pocma<T, void_t<typename T::propagate_on_container_move_assignment>> = true;

template <class T, class = void>
constexpr bool has_member_is_always_equal = false;
template <class T>
constexpr bool has_member_is_always_equal<T, void_t<typename T::is_always_equal>> = true;

template <class T, class = void>
constexpr bool can_allocate = false;
template <class T>
constexpr bool can_allocate<T, void_t<decltype(declval<T&>().allocate(size_t{}))>> = true;

// allocator of an object type must always expose the full interface.
STATIC_ASSERT(has_member_size_type<allocator<int>>);
STATIC_ASSERT(has_member_difference_type<allocator<int>>);
STATIC_ASSERT(has_member_pocma<allocator<int>>);
STATIC_ASSERT(has_member_is_always_equal<allocator<int>>);
STATIC_ASSERT(can_allocate<allocator<int>>);
STATIC_ASSERT(is_convertible_v<allocator<void>, allocator<int>>);

#if _HAS_CXX20
constexpr bool has_cxx20 = true;
#else
constexpr bool has_cxx20 = false;
#endif

// allocator<void> gets these members exactly in C++20 mode — presumably
// because C++20 made allocator<void> use the primary template instead of
// the old empty specialization; confirm against the standard/STL docs.
STATIC_ASSERT(has_cxx20 == has_member_size_type<allocator<void>>);
STATIC_ASSERT(has_cxx20 == has_member_difference_type<allocator<void>>);
STATIC_ASSERT(has_cxx20 == has_member_pocma<allocator<void>>);
STATIC_ASSERT(has_cxx20 == has_member_is_always_equal<allocator<void>>);
STATIC_ASSERT(has_cxx20 == can_allocate<allocator<void>>);
STATIC_ASSERT(has_cxx20 == is_convertible_v<allocator<int>, allocator<void>>);

int main() {} // COMPILE-ONLY
|
/**
 * Extract the 'nombre' field of every warehouse row, reindexed sequentially.
 *
 * @param array $almacenes rows, each with a 'nombre' key
 * @return array list of warehouse names
 */
function getWarehouseNames($almacenes) {
    return array_values(array_map(
        function ($almacen) { return $almacen['nombre']; },
        $almacenes
    ));
}
<reponame>KiharaTakahiro/share-media
import { PAGE_END_POINT } from '../common/const'
import { parseCookies, destroyCookie, setCookie} from 'nookies';
import { NextPageContext } from 'next';
import Router from 'next/router'
/**
 * Shape of the token pair handed back by the auth API.
 */
interface Token {
  access_token: string,
  refresh_token: string
}
/**
 * Read the access token from the cookie jar.
 *
 * @param ctx Next.js page context (server side); omitted on the client
 * @returns the access-token string, or undefined when the cookie is absent
 */
export const get_access_token = (ctx?: NextPageContext): Token | any => {
  // Parse the cookie jar once per call (the original parsed it twice).
  const { access_token } = parseCookies(ctx);
  return access_token; // undefined when the cookie is not set
}
// Persist both tokens as site-wide cookies.
export const set_token = (data: Token, ctx?: NextPageContext) =>{
  setCookie(ctx, "access_token", data.access_token , {
    maxAge: 24 * 60 * 60, // expires after 1 day
    path: '/'
  })
  setCookie(ctx, "refresh_token", data.refresh_token , {
    maxAge: 90 * 24 * 60 * 60, // expires after 90 days
    path: '/'
  })
}
/**
 * Whether the given route is reachable without authentication.
 * @param url route to test
 */
export const exclude_login_route = (url: string) => {
  const publicRoutes = [PAGE_END_POINT.REGISTER_USER, PAGE_END_POINT.LOGIN_USER]
  return publicRoutes.includes(url)
}
/**
* ログアウト処理
*/
export const logout = (ctx?: NextPageContext) => {
destroyCookie(ctx, "access_token")
destroyCookie(ctx, "refresh_token")
Router.push(PAGE_END_POINT.LOGIN_USER)
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var vscode = require('vscode');
let util = require('./util.js');
let helper = require('./helper.js');
let crTrgg = require('./triggerCommands/wsk.trigger.create.js');
let upTrgg = require('./triggerCommands/wsk.trigger.update.js');
let dlTrgg = require('./triggerCommands/wsk.trigger.delete.js');
let gtTrgg = require('./triggerCommands/wsk.trigger.get.js');
let frTrgg = require('./triggerCommands/wsk.trigger.fire.js');
// Module-level handles shared by every command in this file;
// populated once by register().
var log;
var ow;
var props;
var context;
/**
 * Wire up all `extension.wsk.trigger.*` commands.
 *
 * @param _ow      configured OpenWhisk client
 * @param _context VS Code extension context (owns the disposables)
 * @param _log     output channel wrapper
 * @param _props   credential/properties helper
 */
function register(_ow, _context, _log, _props) {
    ow = _ow;
    log = _log;
    props = _props;
    // BUG FIX: the parameter was previously named `context`, shadowing the
    // module-level variable, so `context = context` was a self-assignment
    // and the module-level `context` (used by the per-command wrappers
    // below) stayed undefined.
    context = _context;

    var defaultDisposable = vscode.commands.registerCommand('extension.wsk.trigger', defaultAction);
    var listDisposable = vscode.commands.registerCommand('extension.wsk.trigger.list', listTrigger);
    var createDisposable = vscode.commands.registerCommand('extension.wsk.trigger.create', createTrigger);
    var updateDisposable = vscode.commands.registerCommand('extension.wsk.trigger.update', updateTrigger);
    var deleteDisposable = vscode.commands.registerCommand('extension.wsk.trigger.delete', deleteTrigger);
    var getDisposable = vscode.commands.registerCommand('extension.wsk.trigger.get', getTrigger);
    var fireDisposable = vscode.commands.registerCommand('extension.wsk.trigger.fire', fireTrigger);
    context.subscriptions.push(defaultDisposable, listDisposable, createDisposable, updateDisposable, deleteDisposable, getDisposable, fireDisposable);
}
// Fallback for the bare `extension.wsk.trigger` command: show generic help.
function defaultAction(params) {
    helper.defaultAction('trigger')
}
// Command entry point: echo the CLI-equivalent line, then list triggers.
function listTrigger(params) {
    if (!props.validate()){
        return;
    }
    log.show(true);
    log.appendLine('\n$ openwsk trigger list');
    triggerList();
}
// Fetch all triggers and append them to the output log under a heading.
// Errors are logged, not rethrown.
function triggerList() {
    if (!props.validate()){
        return;
    }
    return getList().then(function (triggers) {
        util.appendHeading('triggers');
        for (var x=0; x<triggers.length; x ++){
            util.appendEntry(triggers[x]);
        }
    }).catch(function(error) {
        log.appendLine(error.toString())
    });
}
/**
 * Fetch the raw trigger list.
 *
 * BUG FIX: the original logged failures but neither fulfilled nor rejected,
 * so the returned promise hung forever on error. Now the error is logged
 * and the promise rejects so callers can react.
 *
 * @returns {Promise<Array>} resolves with the triggers from OpenWhisk
 */
function getList() {
    return new Promise(function (fulfill, reject){
        ow.triggers.list().then(function (triggers) {
            fulfill(triggers);
        }).catch(function(error) {
            log.appendLine(error.toString())
            reject(error);
        });
    });
}
// Trigger names as "namespace/name" strings, e.g. for quick-pick lists.
function getListAsStringArray() {
    return getList().then(function (triggers) {
        return triggers.map(function (trigger) {
            return util.formatQualifiedName(trigger);
        });
    })
}
// corresponding functions for Trigger commands:
// each wrapper re-registers the sub-module with the shared handles,
// then delegates to it.
function createTrigger(params) {
    crTrgg.register(ow, context, log, props);
    crTrgg.createTrigger(params);
}
// Delegate `trigger update` to its sub-module.
function updateTrigger(params) {
    upTrgg.register(ow, context, log, props);
    upTrgg.updateTrigger(params);
}
// Delegate `trigger delete` to its sub-module.
function deleteTrigger(params) {
    dlTrgg.register(ow, context, log, props);
    dlTrgg.deleteTrigger(params);
}
// Delegate `trigger get` to its sub-module.
function getTrigger(params) {
    gtTrgg.register(ow, context, log, props);
    gtTrgg.getTrigger(params);
}
// Delegate `trigger fire` to its sub-module.
function fireTrigger(params) {
    frTrgg.register(ow, context, log, props);
    frTrgg.fireTrigger(params);
}
// Public surface: command registration plus helpers reused elsewhere.
module.exports = {
    register: register,
    triggerList:triggerList,
    getListAsStringArray:getListAsStringArray
};
|
// Express router exposing a single token-check endpoint.
const express = require('express');
const router = express.Router();

// Token-validation middleware; it rejects the request before the handler
// runs when the token is invalid.
const {validateToken} = require('../middleware/auth');

// POST /validate-token — reached only after validateToken succeeded.
router.post('/validate-token', validateToken, (req, res) => {
    res.send({
        message: 'Token validation successful'
    });
});

module.exports = router;
// Define the modules and their exported functions/objects
// Registry of loadable modules and the functions/objects they export.
const modules = {
  operations: {
    rollMin: () => {},
    rollMax: () => {}
  },
  parser: {
    evaluate: () => {}
  },
  diceforge: {
    forgeDice: () => {}
  }
};

// Return the named module's exports, or an empty object for unknown names.
// BUG FIX: uses an own-property check so inherited Object.prototype members
// (e.g. loadModule('toString')) are not mistaken for registered modules.
function loadModule(moduleName) {
  if (Object.prototype.hasOwnProperty.call(modules, moduleName)) {
    return modules[moduleName];
  }
  return {};
}
#!/bin/bash
# Print usage to stderr and exit non-zero. Both flags are required (see the
# -z check below), so the message no longer brackets them as optional; the
# old text also mis-placed the brackets around -n's argument.
usage() { echo "Usage: $0 -c <channelname> -n <chaincodename>" 1>&2; exit 1; }
# Parse required options: -c channel name, -n chaincode name.
while getopts ":c:n:" o; do
  case "${o}" in
    c)
      c=${OPTARG}
      ;;
    n)
      n=${OPTARG}
      ;;
    *)
      usage
      ;;
  esac
done
shift $((OPTIND-1))

# Both options are mandatory.
if [ -z "${c}" ] || [ -z "${n}" ] ; then
  usage
fi

echo "create channel channelID ${c} chaincodeName ${n} "

# Shared fabric configuration; crypto material lives under $DATA.
DATA=/home/ubuntu/hyperledgerconfig/data
export FABRIC_CFG_PATH=$DATA/
PEER_ORGS="org1 org2"
NUM_PEERS=2
CHANNEL_NAME=${c}
CHANNEL_TX_FILE=$DATA/$CHANNEL_NAME.tx
CA_CHAINFILE=${DATA}/org0-ca-cert.pem
ORDERER_HOST=orderer1-org0
# TLS + mutual-auth connection arguments for the org0 orderer.
export ORDERER_PORT_ARGS=" -o orderer1-org0:7050 --tls --cafile $CA_CHAINFILE --clientauth"
QUERY_TIMEOUT=30
# install chaincode on peer1-org1, peer1-org2: set the admin identity and
# TLS environment for each org's peer1, then install the chaincode package.
for ORG in $PEER_ORGS; do
  #initPeerVars $ORG 1
  PEER_HOST=peer1-${ORG}
  PEER_NAME=${PEER_HOST}
  ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
  CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
  export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
  export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
  export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
  export CORE_PEER_ID=$PEER_HOST
  export CORE_PEER_ADDRESS=$PEER_HOST:7051
  export CORE_PEER_LOCALMSPID=${ORG}MSP
  export CORE_LOGGING_LEVEL=DEBUG
  export CORE_PEER_TLS_ENABLED=true
  export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
  export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
  export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
  export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
  export CORE_PEER_PROFILE_ENABLED=true
  # gossip variables
  export CORE_PEER_GOSSIP_USELEADERELECTION=true
  export CORE_PEER_GOSSIP_ORGLEADER=false
  echo "Install for $PEER_HOST ..."
  export ORDERER_PORT_ARGS=" -o orderer1-org0:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
  export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
  echo $ORDERER_CONN_ARGS
  $GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode install -n $n -v 1.0 -p github.com/deevotech/hyperledger-supplychain-chaincode/supplychain/go
  #$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode install -n ${n} -v 1.0 -p github.com/hyperledger/fabric/examples/chaincode/go/chaincode_example02
  #sleep 3
done
# Show what got installed.
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode list --installed -C $CHANNEL_NAME
#initPeerVars ${PORGS[1]} 1
#switchToAdminIdentity
# Act as org1 admin on peer1-org1 to instantiate the chaincode.
# NOTE(review): this environment stanza is duplicated three times in the
# script; it could be factored into a function like the commented-out
# initPeerVars — left as-is here.
ORG=org1
PEER_HOST=peer1-${ORG}
PEER_NAME=${PEER_HOST}
ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
export CORE_PEER_ID=$PEER_HOST
export CORE_PEER_ADDRESS=$PEER_HOST:7051
export CORE_PEER_LOCALMSPID=${ORG}MSP
export CORE_LOGGING_LEVEL=DEBUG
export CORE_PEER_TLS_ENABLED=true
export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
export CORE_PEER_PROFILE_ENABLED=true
# gossip variables
export CORE_PEER_GOSSIP_USELEADERELECTION=true
export CORE_PEER_GOSSIP_ORGLEADER=false
export ORDERER_PORT_ARGS=" -o orderer1-org0:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
echo $ORDERER_CONN_ARGS
echo "Instantiating chaincode on $PEER_HOST ..."
export ORDERER_PORT_ARGS=" -o orderer1-org0:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode instantiate -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["init"]}' $ORDERER_CONN_ARGS
# Give the instantiation transaction time to commit before invoking.
sleep 10
#initPeerVars ${PORGS[0]} 1
#switchToUserIdentity
# Re-establish the org1/peer1 environment for the invoke phase.
ORG=org1
PEER_HOST=peer1-${ORG}
PEER_NAME=${PEER_HOST}
ORG_ADMIN_HOME=$DATA/orgs/$ORG/admin
CA_CHAINFILE=${DATA}/${ORG}-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=$ORG_ADMIN_HOME
export FABRIC_CA_CLIENT_TLS_CERTFILES=$CA_CHAINFILE
export CORE_PEER_MSPCONFIGPATH=$ORG_ADMIN_HOME/msp
export CORE_PEER_ID=$PEER_HOST
export CORE_PEER_ADDRESS=$PEER_HOST:7051
export CORE_PEER_LOCALMSPID=${ORG}MSP
export CORE_LOGGING_LEVEL=DEBUG
export CORE_PEER_TLS_ENABLED=true
export CORE_PEER_TLS_CLIENTAUTHREQUIRED=true
export CORE_PEER_TLS_ROOTCERT_FILE=$CA_CHAINFILE
export CORE_PEER_TLS_CLIENTCERT_FILE=$DATA/tls/$PEER_NAME-cli-client.crt
export CORE_PEER_TLS_CLIENTKEY_FILE=$DATA/tls/$PEER_NAME-cli-client.key
export CORE_PEER_PROFILE_ENABLED=true
# gossip variables
export CORE_PEER_GOSSIP_USELEADERELECTION=true
export CORE_PEER_GOSSIP_ORGLEADER=false
# NOTE(review): the echo below announces an anchor-peer update, but no
# `peer channel update` command follows in this section — confirm whether
# the update step was dropped intentionally.
echo "Updating anchor peers for $PEER_HOST ..."
export ORDERER_PORT_ARGS=" -o orderer1-org0:7050 --tls --cafile $DATA/org0-ca-cert.pem --clientauth"
export ORDERER_CONN_ARGS="$ORDERER_PORT_ARGS --keyfile $CORE_PEER_TLS_CLIENTKEY_FILE --certfile $CORE_PEER_TLS_CLIENTCERT_FILE"
echo $ORDERER_CONN_ARGS
echo "Sending invoke transaction to $PEER_HOST ..."
echo "init orgs"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","1","supplier1","1","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","2","supplier2","1", "67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","3","farmer1","2","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","4","farmer2","2","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","5","factory1","3","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","6","factory2","3","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","7","retailer1","4","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","8","retailer3","4","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","9","consumer1","5","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","10","consumer2","5","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
#$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","11","tree1","6","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
#$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initOrg","12","tree2","6","67.0006, -70.5476"]}' $ORDERER_CONN_ARGS
echo "init trees for farmer1 and farmer2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initFarmerTree","11","tree1","1000","11", "12", "1", "3", "1000"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initFarmerTree","12","tree2","1000","13", "14", "3", "4", "1000"]}' $ORDERER_CONN_ARGS
echo "init suppliermaterials"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","1","material1","10","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","2","material2","20","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","3","material3","15","2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","4","material4","30","2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","2","material5","30","1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["initSupplierMaterial","3","material6","30","2"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action sell material1 to farmer1"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material1","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material2","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material3","4"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material4","4"]}' $ORDERER_CONN_ARGS
sleep 3 "action material to tree"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material1","11"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material2","11"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material3","12"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerMaterial","material4","12"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action get historyfor Materials 1, 2, 3, 4"
# Read back the full per-key history of materials 1-4.
# NOTE(review): these look like read-only lookups, yet they go through
# 'peer chaincode invoke' (an ordered transaction) rather than
# 'peer chaincode query' -- presumably intentional for this demo; confirm.
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material1"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material2"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForMaterial","material4"]}' $ORDERER_CONN_ARGS
sleep 3
#Rich Query (Only supported if CouchDB is used as state database):
echo "query Materials By Owner"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["queryMaterialsByOwner","3"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["queryMaterialsByOwner","4"]}' $ORDERER_CONN_ARGS
sleep 3
echo "action harvest agri product"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["harvestAgriProduct","111", "aproduct1", "11", "1000", "3"]}' $ORDERER_CONN_ARGS
# BUG FIX: the second harvest also registered "aproduct1", but every later
# step (changeOwnerAgriProduct / the product pipeline) refers to "aproduct2",
# which would otherwise never exist; register id 112 as "aproduct2".
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["harvestAgriProduct","112", "aproduct2", "12", "2000", "4"]}' $ORDERER_CONN_ARGS
echo "action sell agri product for factory 1 and factory 2"
sleep 3
# Factories 5 and 6 buy the harvested products.
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerAgriProduct","aproduct1", "5"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerAgriProduct","aproduct2", "6"]}' $ORDERER_CONN_ARGS
echo "action make product from agri product"
sleep 3
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["makeProduct","111", "221", "product1", "10000", "5"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["makeProduct","112", "222", "product2", "20000", "6"]}' $ORDERER_CONN_ARGS
echo "action change to retailer"
sleep 3
# Retailers 7 and 8 take ownership of the finished products.
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product1", "7"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product2", "8"]}' $ORDERER_CONN_ARGS
sleep 3
# Final hop in the chain: sell to customers 9 and 10, then read back each
# product's complete provenance trail.
echo "action sell to customer 1 and customer 2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product1", "9"]}' $ORDERER_CONN_ARGS
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["changeOwnerProduct","product2", "10"]}' $ORDERER_CONN_ARGS
sleep 3
echo "get history of product1"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForProduct","product1"]}' $ORDERER_CONN_ARGS
echo "get history of product2"
$GOPATH/src/github.com/hyperledger/fabric/build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -v 1.0 -c '{"Args":["getHistoryForProduct","product2"]}' $ORDERER_CONN_ARGS
echo "done test"
|
#!/bin/bash
# Format the clipboard contents as Swift code and put the result back on the
# clipboard.
#
# Dependency: requires swiftformat (https://github.com/nicklockwood/SwiftFormat).
# Install via homebrew: `brew install swiftformat`
# @raycast.title Format Swift
# @raycast.author Dean Moore
# @raycast.authorURL https://github.com/moored
# @raycast.description Use [swiftformat](https://github.com/nicklockwood/SwiftFormat) to format clipboard content.
# @raycast.icon images/swift.png
# @raycast.mode silent
# @raycast.packageName Developer Utilities
# @raycast.schemaVersion 1

# Guard clause: bail out when the formatter is not on PATH.
command -v swiftformat > /dev/null 2>&1 || {
  echo "swiftformat command is required (https://github.com/nicklockwood/SwiftFormat)."
  exit 1
}

# Round-trip the clipboard through swiftformat.
pbpaste | swiftformat --output stdout | pbcopy
echo "Swift formatted"
|
// Copyright © 2019 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"flag"
"net"
"os"
"time"
"github.com/banzaicloud/bank-vaults/operator/pkg/apis"
"github.com/banzaicloud/bank-vaults/operator/pkg/controller"
_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
"k8s.io/client-go/rest"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client/config"
"sigs.k8s.io/controller-runtime/pkg/healthz"
"sigs.k8s.io/controller-runtime/pkg/log/zap"
"sigs.k8s.io/controller-runtime/pkg/manager"
)
// log is the package-wide logger for this command, named "cmd".
var log = ctrl.Log.WithName("cmd")

const (
	// operatorNamespace is the env var holding the operator's own namespace;
	// when set, it selects the namespace to watch (see main).
	operatorNamespace = "OPERATOR_NAMESPACE"
	// watchNamespaceEnvVar is the fallback env var selecting the namespace to
	// watch; when neither variable is set, the entire cluster is watched.
	watchNamespaceEnvVar = "WATCH_NAMESPACE"
	// healthProbeBindAddress is where the manager serves the liveness ("/")
	// and readiness ("/ready") probes.
	healthProbeBindAddress = ":8080"
	// metricsBindAddress is where the manager serves metrics.
	metricsBindAddress = ":8383"
)
// main wires up and runs the vault-operator controller manager: it resolves
// the namespace to watch, builds a manager with leader election and
// health/readiness probes, registers the API scheme and controllers, and
// blocks until the manager stops.
func main() {
	syncPeriod := flag.Duration("sync_period", 30*time.Second, "SyncPeriod determines the minimum frequency at which watched resources are reconciled")
	verbose := flag.Bool("verbose", false, "enable verbose logging")
	flag.Parse()

	// The logger instantiated here can be changed to any logger
	// implementing the logr.Logger interface. This logger will
	// be propagated through the whole operator, generating
	// uniform and structured logs.
	ctrl.SetLogger(zap.New(zap.UseDevMode(*verbose)))

	// Prefer the operator's own namespace, then the explicitly watched
	// namespace; with neither set, watch the entire cluster ("").
	// (Dropped the redundant `var namespace string` / `var err error`
	// pre-declarations; `:=` introduces both below.)
	namespace, isSet := os.LookupEnv(operatorNamespace)
	if !isSet {
		namespace, isSet = os.LookupEnv(watchNamespaceEnvVar)
		if !isSet {
			log.Info("No watched namespace found, watching the entire cluster")
			namespace = ""
		}
	}
	log.Info("Watched namespace: " + namespace)

	// Get a config to talk to the apiserver
	k8sConfig, err := config.GetConfig()
	if err != nil {
		log.Error(err, "Unable to get k8s config")
		os.Exit(1)
	}

	// Outside a cluster there is no in-cluster namespace to host the leader
	// election lock, so fall back to "default".
	leaderElectionNamespace := ""
	if !isInClusterConfig(k8sConfig) {
		leaderElectionNamespace = "default"
	}

	// Create a new Cmd to provide shared dependencies and start components
	mgr, err := manager.New(k8sConfig, manager.Options{
		Namespace:               namespace,
		LeaderElection:          true,
		LeaderElectionNamespace: leaderElectionNamespace,
		LeaderElectionID:        "vault-operator-lock",
		SyncPeriod:              syncPeriod,
		HealthProbeBindAddress:  healthProbeBindAddress,
		LivenessEndpointName:    "/",      // For Chart backwards compatibility
		ReadinessEndpointName:   "/ready", // For Chart backwards compatibility
		MetricsBindAddress:      metricsBindAddress,
	})
	if err != nil {
		log.Error(err, "Unable to create manager as defined")
		os.Exit(1)
	}

	err = mgr.AddReadyzCheck("ping", healthz.Ping)
	if err != nil {
		log.Error(err, "Add Readyz Check failed")
		os.Exit(1)
	}
	err = mgr.AddHealthzCheck("ping", healthz.Ping)
	if err != nil {
		// BUG FIX: log message typo "heatlh" -> "health".
		log.Error(err, "Unable to add health check")
		os.Exit(1)
	}

	log.Info("Registering Components.")

	// Setup Scheme for all resources
	if err := apis.AddToScheme(mgr.GetScheme()); err != nil {
		log.Error(err, "Failed to use api to add scheme")
		os.Exit(1)
	}

	// Setup all Controllers
	if err := controller.AddToManager(mgr); err != nil {
		log.Error(err, "Unable to add manager to controller")
		os.Exit(1)
	}

	log.Info("Starting the Cmd.")

	// Start the Cmd; blocks until a termination signal arrives or the
	// manager fails.
	if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
		log.Error(err, "manager exited non-zero")
		os.Exit(1)
	}
}
// isInClusterConfig reports whether the given REST config points at the API
// server advertised through the in-cluster service environment variables
// (KUBERNETES_SERVICE_HOST / KUBERNETES_SERVICE_PORT).
func isInClusterConfig(k8sConfig *rest.Config) bool {
	serviceHost := os.Getenv("KUBERNETES_SERVICE_HOST")
	servicePort := os.Getenv("KUBERNETES_SERVICE_PORT")
	inClusterHost := "https://" + net.JoinHostPort(serviceHost, servicePort)
	return k8sConfig.Host == inClusterHost
}
|
import re
def extract_authors_and_copyright(code_snippet):
    """Extract an author pair and a copyright year range from a file header.

    Scans ``code_snippet`` for the first comment line of the form
    ``# <author1>, <author2>`` and for the first (up to two) four-digit
    years anywhere in the text.

    Args:
        code_snippet: Text of a commented file header.

    Returns:
        A two-line string ``"Authors: ...\\nCopyright: ..."``, using
        "Unknown" for any part that cannot be found.
    """
    # BUG FIX: the original pattern ended in a lazy group followed by '\s*',
    # so the second capture always matched the empty string. Anchoring the
    # match at end-of-line (re.MULTILINE) captures the full second name.
    match = re.search(r'#\s*(.+?)\s*,\s*(.+?)\s*$', code_snippet, re.MULTILINE)
    authors = f"{match.group(1)}, {match.group(2)}" if match else "Unknown"
    years = re.findall(r'\b\d{4}\b', code_snippet)
    # BUG FIX: the original indexed years[1] unconditionally and raised
    # IndexError whenever exactly one year was present.
    if not years:
        copyright_years = "Unknown"
    elif len(years) == 1:
        copyright_years = years[0]
    else:
        copyright_years = f"{years[0]}-{years[1]}"
    return f"Authors: {authors}\nCopyright: {copyright_years}"
# Example input: a typical commented file header with a pair of author names
# (anonymized here as <NAME> placeholders) and a copyright year range.
code_snippet = """
# <NAME>, <NAME>
# orthologue
# (c) 1998-2019 all rights reserved
#
"""
print(extract_authors_and_copyright(code_snippet))
# Reclaim disk space, otherwise we have too little free space at the start of a job
#
# Numbers as of 2022-01-26:
#
# $ df -h
# Filesystem Size Used Avail Use% Mounted on
# /dev/root 84G 52G 32G 63% /
# devtmpfs 3.4G 0 3.4G 0% /dev
# tmpfs 3.4G 4.0K 3.4G 1% /dev/shm
# tmpfs 696M 1.1M 695M 1% /run
# tmpfs 5.0M 0 5.0M 0% /run/lock
# tmpfs 3.4G 0 3.4G 0% /sys/fs/cgroup
# /dev/loop0 62M 62M 0 100% /snap/core20/1270
# /dev/sda15 105M 5.2M 100M 5% /boot/efi
# /dev/loop1 68M 68M 0 100% /snap/lxd/21835
# /dev/loop2 44M 44M 0 100% /snap/snapd/14295
# /dev/sdb1 14G 4.1G 9.0G 32% /mnt
#
# $ docker images
# REPOSITORY TAG IMAGE ID CREATED SIZE
# node 12-alpine 8a6e486e9817 2 weeks ago 91.1MB
# node 16-alpine 23990429c0d7 2 weeks ago 109MB
# node 12 44d575d74d9f 2 weeks ago 918MB
# node 14 24d97ba03bf7 2 weeks ago 944MB
# node 14-alpine 194cd0d85d8a 2 weeks ago 118MB
# node 16 842962c4b3a7 2 weeks ago 905MB
# ubuntu 20.04 d13c942271d6 2 weeks ago 72.8MB
# ubuntu 18.04 886eca19e611 2 weeks ago 63.1MB
# buildpack-deps stretch 46000751048f 5 weeks ago 835MB
# buildpack-deps buster ac4279e940f3 5 weeks ago 804MB
# buildpack-deps bullseye d724319bd076 5 weeks ago 834MB
# debian 9 c599fc96ef79 5 weeks ago 101MB
# debian 10 8a94f77c4ac3 5 weeks ago 114MB
# debian 11 6f4986d78878 5 weeks ago 124MB
# moby/buildkit latest 19340e24de14 2 months ago 144MB
# alpine 3.12 b0925e081921 2 months ago 5.59MB
# alpine 3.13 6b7b3256dabe 2 months ago 5.62MB
# alpine 3.14 0a97eee8041e 2 months ago 5.6MB
# ubuntu 16.04 b6f507652425 4 months ago 135MB
# Drop the pre-installed heavyweight Docker images listed above.
# NOTE(review): docker rmi exits non-zero if any listed image is absent --
# confirm the CI runner always ships these tags (or tolerate the failure).
time docker rmi node:12 node:14 node:16 buildpack-deps:stretch buildpack-deps:buster buildpack-deps:bullseye
# That is 4.07 GB
time sudo rm -rf /usr/share/dotnet
# That is 1.78 GB
time sudo rm -rf /usr/share/swift
|
#######################################
# Train ResNet-56 on CIFAR-10, prune it with both variant A and variant B,
# and retrain each pruned model twice: once with a one-cycle schedule and
# once with plain fine-tuning. Checkpoints go to
# prune_retrain_checkpoints/resnet56_<seed>.
# Arguments:
#   $1 - NOTE(review): currently unused; callers pass a prune variant ("B"),
#        but both variants A and B below are hard-coded -- confirm intent.
#   $2 - random seed; also names the checkpoint directory.
#######################################
prune_retrain_resnet56(){
  # train  (quoted "$2" throughout so an empty/odd seed cannot word-split)
  python train_test_split_main.py --dataset cifar10 \
    --arch resnet \
    --depth 56 \
    --save "prune_retrain_checkpoints/resnet56_$2" \
    --seed "$2" \
    --wandb resnet_56_standard_train_test_split &&
  # prune A
  python res56prune.py \
    --dataset cifar10 \
    -v A \
    --model "prune_retrain_checkpoints/resnet56_$2/resnet_56_best.pt" \
    --save "prune_retrain_checkpoints/resnet56_$2" &&
  # onecycle
  python train_test_split_finetune.py \
    --refine "prune_retrain_checkpoints/resnet56_$2/pruned.pth.tar" \
    --save "prune_retrain_checkpoints/resnet56_$2" \
    --dataset cifar10 \
    --arch resnet \
    --depth 56 \
    --epochs 40 \
    --use_onecycle \
    --seed "$2" \
    --lr 0.1 \
    --wandb_name resnet_56_A_onecycle_40epochs_train_test_split &&
  # fine-tune
  python train_test_split_finetune.py \
    --refine "prune_retrain_checkpoints/resnet56_$2/pruned.pth.tar" \
    --save "prune_retrain_checkpoints/resnet56_$2" \
    --dataset cifar10 \
    --arch resnet \
    --depth 56 \
    --epochs 40 \
    --seed "$2" \
    --wandb_name resnet_56_A_finetune_40epochs_train_test_split
  # NOTE(review): no '&&' after the A fine-tune above, so the B pipeline
  # runs even when the A fine-tune fails -- presumably intentional; confirm.
  # prune B
  python res56prune.py \
    --dataset cifar10 \
    -v B \
    --model "prune_retrain_checkpoints/resnet56_$2/resnet_56_best.pt" \
    --save "prune_retrain_checkpoints/resnet56_$2" &&
  # onecycle
  python train_test_split_finetune.py \
    --refine "prune_retrain_checkpoints/resnet56_$2/pruned.pth.tar" \
    --save "prune_retrain_checkpoints/resnet56_$2" \
    --dataset cifar10 \
    --arch resnet \
    --depth 56 \
    --epochs 40 \
    --use_onecycle \
    --seed "$2" \
    --lr 0.1 \
    --wandb_name resnet_56_B_onecycle_40epochs_train_test_split &&
  # fine-tune
  python train_test_split_finetune.py \
    --refine "prune_retrain_checkpoints/resnet56_$2/pruned.pth.tar" \
    --save "prune_retrain_checkpoints/resnet56_$2" \
    --dataset cifar10 \
    --arch resnet \
    --depth 56 \
    --epochs 40 \
    --seed "$2" \
    --wandb_name resnet_56_B_finetune_40epochs_train_test_split
}
# Run the full prune/retrain pipeline for seeds 1-5, stopping at the first
# failing seed (&&-chained).
# NOTE(review): the first argument ("B") is never read by the function,
# which runs both prune variants A and B itself -- confirm intent.
prune_retrain_resnet56 B 1 &&
prune_retrain_resnet56 B 2 &&
prune_retrain_resnet56 B 3 &&
prune_retrain_resnet56 B 4 &&
prune_retrain_resnet56 B 5
def analyze_target_apps(target_apps):
    """Derive a few boolean facts about an add-on's targeted applications.

    Returns a 4-tuple of:
      * whether the android "max" version string is purely numeric,
      * whether the detected type is a theme or an extension,
      * whether both 'firefox' and 'android' are among the targeted apps,
      * whether both 'thunderbird' and 'seamonkey' are among the targeted apps.
    """
    targeted = set(target_apps.keys())
    android_max = target_apps.get('android', {}).get('max', '')
    analyze_android_max_version = android_max.isdigit()
    is_extension_or_theme = target_apps.get('detected_type') in ('theme', 'extension')
    is_targeting_firefoxes_only = (
        (targeted & {'firefox', 'android'}) == {'firefox', 'android'}
    )
    is_targeting_thunderbird_or_seamonkey_only = (
        (targeted & {'thunderbird', 'seamonkey'}) == {'thunderbird', 'seamonkey'}
    )
    return (
        analyze_android_max_version,
        is_extension_or_theme,
        is_targeting_firefoxes_only,
        is_targeting_thunderbird_or_seamonkey_only,
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.