text stringlengths 1 1.05M |
|---|
<gh_stars>0
// Hourly slot-usage sample data in ngx-charts "multi series" shape:
// one entry per series ("usedslot" / "availableslot"), each with an
// hourly list of { name, value } points.
// `const` instead of `var`: the binding is never reassigned.
export const multi1 = [
  {
    "name": "usedslot",
    "series": [
      { "name": "7am", "value": 10 },
      { "name": "8am", "value": 16 },
      { "name": "9am", "value": 23 },
      { "name": "10am", "value": 24 },
      { "name": "11am", "value": 25 },
      { "name": "12pm", "value": 23 },
      { "name": "1pm", "value": 12 },
      { "name": "2pm", "value": 18 },
      { "name": "3pm", "value": 23 },
      { "name": "4pm", "value": 21 },
      { "name": "5pm", "value": 15 },
      { "name": "6pm", "value": 29 },
      { "name": "7pm", "value": 31 },
      { "name": "8pm", "value": 18 },
    ],
  },
  {
    "name": "availableslot",
    "series": [
      { "name": "7am", "value": 31 },
      { "name": "8am", "value": 34 },
      { "name": "9am", "value": 35 },
      { "name": "10am", "value": 33 },
      { "name": "11am", "value": 34 },
      { "name": "12pm", "value": 35 },
      { "name": "1pm", "value": 37 },
      { "name": "2pm", "value": 38 },
      { "name": "3pm", "value": 27 },
      { "name": "4pm", "value": 34 },
      { "name": "5pm", "value": 24 },
      { "name": "6pm", "value": 36 },
      { "name": "7pm", "value": 31 },
      { "name": "8pm", "value": 23 },
    ],
  },
];
// Per-hour sample data in ngx-charts "grouped" shape: one entry per hour,
// each with an Available Slot / Used Slot pair.
// `const` instead of `var`: the binding is never reassigned.
export const multi = [
  {
    "name": "7am",
    "series": [
      { "name": "Available Slot", "value": 10 },
      { "name": "Used Slot", "value": 8 },
    ],
  },
  {
    "name": "8am",
    "series": [
      { "name": "Available Slot", "value": 20 },
      { "name": "Used Slot", "value": 17 },
    ],
  },
  {
    "name": "9am",
    "series": [
      { "name": "Available Slot", "value": 8 },
      { "name": "Used Slot", "value": 4 },
    ],
  },
  {
    "name": "10am",
    "series": [
      { "name": "Available Slot", "value": 18 },
      { "name": "Used Slot", "value": 14 },
    ],
  },
  {
    "name": "11am",
    "series": [
      { "name": "Available Slot", "value": 150 },
      { "name": "Used Slot", "value": 110 },
    ],
  },
  {
    "name": "12pm",
    "series": [
      { "name": "Available Slot", "value": 130 },
      { "name": "Used Slot", "value": 125 },
    ],
  },
  {
    "name": "1pm",
    "series": [
      { "name": "Available Slot", "value": 89 },
      { "name": "Used Slot", "value": 76 },
    ],
  },
  {
    "name": "2pm",
    "series": [
      { "name": "Available Slot", "value": 68 },
      { "name": "Used Slot", "value": 59 },
    ],
  },
  {
    "name": "3pm",
    "series": [
      { "name": "Available Slot", "value": 72 },
      { "name": "Used Slot", "value": 68 },
    ],
  },
  {
    "name": "4pm",
    "series": [
      { "name": "Available Slot", "value": 88 },
      { "name": "Used Slot", "value": 82 },
    ],
  },
  {
    "name": "5pm",
    "series": [
      { "name": "Available Slot", "value": 99 },
      { "name": "Used Slot", "value": 91 },
    ],
  },
  {
    "name": "6pm",
    "series": [
      { "name": "Available Slot", "value": 18 },
      { "name": "Used Slot", "value": 9 },
    ],
  },
  {
    "name": "7pm",
    "series": [
      { "name": "Available Slot", "value": 100 },
      { "name": "Used Slot", "value": 99 },
    ],
  },
  {
    "name": "8pm",
    "series": [
      { "name": "Available Slot", "value": 80 },
      { "name": "Used Slot", "value": 80 },
    ],
  },
];
|
#include "tg078uw004a0.h"
extern s32 bsp_disp_get_panel_info(u32 screen_id, disp_panel_para *info);
static void lcd_power_on(u32 sel);
static void lcd_power_off(u32 sel);
static void lcd_backlight_open(u32 sel);
static void lcd_backlight_close(u32 sel);
static void lcd_panel_init(u32 sel);
static void lcd_panel_exit(u32 sel);
static u8 const mipi_dcs_pixel_format[4] = { 0x77, 0x66, 0x66, 0x55 };
#define panel_reset(val) sunxi_lcd_gpio_set_value(sel, 1, val)
#define power_en(val) sunxi_lcd_gpio_set_value(sel, 0, val)
#define _get_register(a) (*(volatile unsigned int *)(a))
#define _set_register(a, v) (*(volatile unsigned int *)(a) = (v))
/*
 * Fill the extend-panel info with a 256-entry gamma LUT and colour-map
 * tables. The LUT is built by linear interpolation between the anchor
 * points in lcd_gamma_tbl; entry 255 is written explicitly from the last
 * anchor so the table is fully populated.
 */
static void lcd_cfg_panel_info(panel_extend_para *info)
{
	u32 i = 0, j = 0;
	u32 items;
	/* {input value, corrected value} — identity curve by default */
	u8 lcd_gamma_tbl[][2] = {
		{ 0, 0}, { 15, 15}, { 30, 30}, { 45, 45},
		{ 60, 60}, { 75, 75}, { 90, 90}, {105, 105},
		{120, 120}, {135, 135}, {150, 150}, {165, 165},
		{180, 180}, {195, 195}, {210, 210}, {225, 225},
		{240, 240}, {255, 255},
	};
	u32 lcd_cmap_tbl[2][3][4] = {
		{
		{LCD_CMAP_G0, LCD_CMAP_B1, LCD_CMAP_G2, LCD_CMAP_B3},
		{LCD_CMAP_B0, LCD_CMAP_R1, LCD_CMAP_B2, LCD_CMAP_R3},
		{LCD_CMAP_R0, LCD_CMAP_G1, LCD_CMAP_R2, LCD_CMAP_G3},
		}, {
		{LCD_CMAP_B3, LCD_CMAP_G2, LCD_CMAP_B1, LCD_CMAP_G0},
		{LCD_CMAP_R3, LCD_CMAP_B2, LCD_CMAP_R1, LCD_CMAP_B0},
		{LCD_CMAP_G3, LCD_CMAP_R2, LCD_CMAP_G1, LCD_CMAP_R0},
		},
	};

	/*
	 * Number of anchor rows. Derived from the row size instead of the
	 * previous magic "/ 2" (both evaluate to 18 for a u8[..][2] table).
	 */
	items = sizeof(lcd_gamma_tbl) / sizeof(lcd_gamma_tbl[0]);
	for (i = 0; i < items - 1; i++) {
		u32 num = lcd_gamma_tbl[i + 1][0] - lcd_gamma_tbl[i][0];

		/* linear interpolation between anchors i and i+1 */
		for (j = 0; j < num; j++) {
			u32 value = 0;

			value = lcd_gamma_tbl[i][1] +
			    ((lcd_gamma_tbl[i + 1][1] - lcd_gamma_tbl[i][1]) * j) / num;
			/* replicate the value into the R, G and B bytes */
			info->lcd_gamma_tbl[lcd_gamma_tbl[i][0] + j] =
			    (value << 16) + (value << 8) + value;
		}
	}
	/* last entry comes straight from the final anchor */
	info->lcd_gamma_tbl[255] =
	    (lcd_gamma_tbl[items - 1][1] << 16) +
	    (lcd_gamma_tbl[items - 1][1] << 8) +
	    lcd_gamma_tbl[items - 1][1];
	memcpy(info->lcd_cmap_tbl, lcd_cmap_tbl, sizeof(lcd_cmap_tbl));
}
/*
 * Register the panel power-up sequence with the display core:
 * power rails -> DSI init commands -> TCON enable -> backlight.
 * The last LCD_OPEN_FUNC argument appears to be a delay in ms before the
 * next step (sunxi convention) — confirm against the disp driver.
 */
static s32 lcd_open_flow(u32 sel)
{
	LCD_OPEN_FUNC(sel, lcd_power_on, 10);
	LCD_OPEN_FUNC(sel, lcd_panel_init, 20);
	LCD_OPEN_FUNC(sel, sunxi_lcd_tcon_enable, 50);
	LCD_OPEN_FUNC(sel, lcd_backlight_open, 0);
	return 0;
}
/*
 * Register the power-down sequence: reverse of lcd_open_flow —
 * backlight off -> TCON off -> panel sleep commands -> power rails off.
 */
static s32 lcd_close_flow(u32 sel)
{
	LCD_CLOSE_FUNC(sel, lcd_backlight_close, 0);
	LCD_CLOSE_FUNC(sel, sunxi_lcd_tcon_disable, 0);
	LCD_CLOSE_FUNC(sel, lcd_panel_exit, 20);
	LCD_CLOSE_FUNC(sel, lcd_power_off, 50);
	return 0;
}
/*
 * Power the panel up: enable supply rails 0..2 in order with settling
 * delays, then configure the LCD pinmux. The #if 0 blocks are a disabled
 * GPIO reset sequence kept for reference.
 */
static void lcd_power_on(u32 sel)
{
#if 0
	sunxi_lcd_gpio_set_value(sel, 0, 1); /* reset = 1 */
#endif
	sunxi_lcd_power_enable(sel, 0);
	sunxi_lcd_delay_ms(50);
	sunxi_lcd_power_enable(sel, 1);
	sunxi_lcd_delay_ms(50);
	sunxi_lcd_power_enable(sel, 2);
	sunxi_lcd_delay_ms(10);
	sunxi_lcd_pin_cfg(sel, 1);
#if 0
	sunxi_lcd_gpio_set_value(sel, 0, 0); /* reset = 0 */
	sunxi_lcd_delay_ms(20);
	sunxi_lcd_gpio_set_value(sel, 0, 1); /* reset = 1 */
	sunxi_lcd_delay_ms(150);
#endif
}
/*
 * Power the panel down: release the pinmux first, then disable the supply
 * rails in reverse order (2 -> 1 -> 0) with short settling delays.
 * The commented-out GPIO write is a disabled reset-low step.
 */
static void lcd_power_off(u32 sel)
{
	sunxi_lcd_pin_cfg(sel, 0);
	sunxi_lcd_delay_ms(20);
	/*
	sunxi_lcd_gpio_set_value(sel, 0, 0);
	sunxi_lcd_delay_ms(5);
	*/
	sunxi_lcd_power_disable(sel, 2);
	sunxi_lcd_delay_ms(5);
	sunxi_lcd_power_disable(sel, 1);
	sunxi_lcd_delay_ms(5);
	sunxi_lcd_power_disable(sel, 0);
}
/*
 * Turn the backlight on: enable the PWM output, route pad PB2 to pwm0,
 * program the PWM controller directly, then enable the backlight supply.
 *
 * NOTE(review): the magic addresses look like Allwinner PIO/PWM registers
 * (0x01c20824 presumably the PB config register, 0x01c234xx the PWM
 * block) — confirm against the SoC user manual before changing values.
 */
static void lcd_backlight_open(u32 sel)
{
	unsigned int regval = 0;

	sunxi_lcd_pwm_enable(sel);
	/* PB2 set as pwm0: clear the 4-bit mux field, then select function 3 */
	regval = _get_register(0x01c20824);
	regval &= 0xfffff0ff;
	regval |= 0x00000300;
	_set_register(0x01c20824, regval);
	/* pwm config: raw period/duty and enable writes to the PWM block */
	_set_register(0x01c23420, 0x00000010);
	_set_register(0x01c23464, 0x01df010a);
	_set_register(0x01c23440, 0x00000001);
	sunxi_lcd_delay_ms(50);
	sunxi_lcd_backlight_enable(sel);
}
/* Turn the backlight off, then stop the PWM that drives it. */
static void lcd_backlight_close(u32 sel)
{
	sunxi_lcd_backlight_disable(sel);
	sunxi_lcd_pwm_disable(sel);
}
/*
 * Send the panel's DSI init command table.
 * Entries with command == 0x1FF are pseudo-commands meaning
 * "delay param[0] ms"; all other entries are written as DCS commands
 * with one or more parameters.
 */
static void load_config_to_tg078uw004a0(u32 sel)
{
	int i;
	int size = sizeof(panel_config) / sizeof(*panel_config);

	for (i = 0; i < size; i++) {
		if (panel_config[i].command == 0x1FF) {
			sunxi_lcd_delay_ms(panel_config[i].param[0]);
			/*
			 * BUGFIX: skip to the next entry. Previously the code
			 * fell through and also wrote the 0x1FF sentinel to
			 * the panel as a (truncated) DCS command.
			 */
			continue;
		}
		if (panel_config[i].para_num == 1) {
			sunxi_lcd_dsi_dcs_write_1para(sel,
				panel_config[i].command,
				panel_config[i].param[0]);
		} else if (panel_config[i].para_num > 1) {
			sunxi_lcd_dsi_dcs_write(sel,
				panel_config[i].command,
				(unsigned char *) panel_config[i].param,
				panel_config[i].para_num);
		}
	}
}
/*
 * Bring the panel out of reset state: query the panel info, enable the
 * DSI clock and send the init command table. The queried info is not
 * used beyond the lookup itself. The #if 0 block is a disabled generic
 * sleep-out/display-on sequence kept for reference.
 */
static void lcd_panel_init(u32 sel)
{
	disp_panel_para *panel_info =
		kmalloc(sizeof(disp_panel_para), GFP_KERNEL | __GFP_ZERO);

	/* BUGFIX: kmalloc() can fail; skip the query rather than pass NULL */
	if (panel_info != NULL) {
		bsp_disp_get_panel_info(sel, panel_info);
		kfree(panel_info);
	}
	sunxi_lcd_dsi_clk_enable(sel);
	load_config_to_tg078uw004a0(sel);
	sunxi_lcd_delay_ms(50);
#if 0
	sunxi_lcd_dsi_dcs_write_0para(sel, 0x11);
	sunxi_lcd_delay_ms(150);
	sunxi_lcd_dsi_dcs_write_0para(sel, 0x29);
	sunxi_lcd_delay_ms(50);
#endif
	return;
}
/*
 * Put the panel into sleep: DCS "display off" followed by "enter sleep
 * mode", with the delays the DCS spec requires between the commands.
 */
static void lcd_panel_exit(u32 sel)
{
	sunxi_lcd_dsi_dcs_write_0para(sel, DSI_DCS_SET_DISPLAY_OFF);
	sunxi_lcd_delay_ms(20);
	sunxi_lcd_dsi_dcs_write_0para(sel, DSI_DCS_ENTER_SLEEP_MODE);
	sunxi_lcd_delay_ms(80);
	return;
}
/* sel: 0:lcd0; 1:lcd1 */
/* Required driver hook; this panel has no user-defined behaviour. */
static s32 lcd_user_defined_func(u32 sel, u32 para1, u32 para2, u32 para3)
{
	return 0;
}
/* sel: 0:lcd0; 1:lcd1 */
/*
 * Brightness hook. The DCS brightness write (0x51) is disabled; this
 * panel's brightness is driven by the PWM set up in lcd_backlight_open.
 */
static s32 lcd_set_bright(u32 sel, u32 bright)
{
	/* sunxi_lcd_dsi_dcs_write_1para(sel,0x51,bright); */
	return 0;
}
/* Panel descriptor registered with the sunxi display driver. */
__lcd_panel_t tg078uw004a0_panel = {
	/* panel driver name, must mach the name of lcd_drv_name in
	   sys_config.fex */
	.name = "tg078uw004a0",
	.func = {
		.cfg_panel_info = lcd_cfg_panel_info,
		.cfg_open_flow = lcd_open_flow,
		.cfg_close_flow = lcd_close_flow,
		.lcd_user_defined_func = lcd_user_defined_func,
		.set_bright = lcd_set_bright,
	},
};
|
#!/bin/bash
# Bioconda post-link script: downloads the annotation tarball from the
# first reachable mirror, verifies its md5sum, installs it into the conda
# R library, then removes the staging copy.
FN="pd.margene.1.0.st_3.12.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/annotation/src/contrib/pd.margene.1.0.st_3.12.0.tar.gz"
  "https://bioarchive.galaxyproject.org/pd.margene.1.0.st_3.12.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-pd.margene.1.0.st/bioconductor-pd.margene.1.0.st_3.12.0_src_all.tar.gz"
)
MD5="f670b192e4b453f13bb7cae154b5de42"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  # BUGFIX: -f makes curl fail on HTTP errors (a 404 previously exited 0
  # and wrote the error page to the tarball); -L follows redirects.
  curl -fL "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # md5sum -c expects two spaces between the checksum and the filename.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
package com.java.study.zuo.vedio.basic.chapter3;
/**
* <Description>
*
* @author hushiye
* @since 2020-08-22 15:48
*/
public class MatrixPrint {

    /**
     * Prints a matrix in clockwise spiral order, one rectangular "ring"
     * at a time (each ring's four edges are printed on separate lines).
     *
     * @param arr matrix to print; null or empty input is ignored
     */
    public static void print(int[][] arr) {
        if (arr == null || arr.length == 0) {
            return;
        }
        // top-left corner of the current ring
        int lR = 0;
        int lC = 0;
        // bottom-right corner of the current ring
        int rR = arr.length - 1;
        int rC = arr[0].length - 1;
        // BUGFIX: the column bound must compare against rC, not rR;
        // the old condition (lC <= rR) only worked for square matrices.
        while (lR <= rR && lC <= rC) {
            print(arr, lR++, lC++, rR--, rC--);
        }
    }

    /**
     * Prints one rectangular ring of the matrix.
     *
     * @param arr the matrix
     * @param lR  top-left row index
     * @param lC  top-left column index
     * @param rR  bottom-right row index
     * @param rC  bottom-right column index
     */
    private static void print(int[][] arr, int lR, int lC, int rR, int rC) {
        // Degenerate ring: single row, print left to right.
        // (print() is used throughout instead of printf(): the argument is
        // data, not a format string.)
        if (lR == rR) {
            for (int i = lC; i <= rC; i++) {
                System.out.print(arr[lR][i] + " ");
            }
            System.out.println();
        } else if (lC == rC) { // single column, print top to bottom
            for (int i = lR; i <= rR; i++) {
                // BUGFIX: was arr[lR][lC], which printed the same cell
                // (the top of the column) repeatedly.
                System.out.print(arr[i][lC] + " ");
            }
            System.out.println();
        } else {
            // top edge, left to right (excluding the last column)
            for (int i = lC; i < rC; i++) {
                System.out.print(arr[lR][i] + " ");
            }
            System.out.println();
            // right edge, top to bottom (excluding the last row)
            for (int i = lR; i < rR; i++) {
                System.out.print(arr[i][rC] + " ");
            }
            System.out.println();
            // bottom edge, right to left (excluding the first column)
            for (int i = rC; i > lC; i--) {
                System.out.print(arr[rR][i] + " ");
            }
            System.out.println();
            // left edge, bottom to top (excluding the first row)
            for (int i = rR; i > lR; i--) {
                System.out.print(arr[i][lC] + " ");
            }
            System.out.println();
        }
    }

    public static void main(String[] args) {
        int[][] matrix = { { 1, 2, 3, 4 },
                { 5, 6, 7, 8 },
                { 9, 10, 11, 12 },
                { 13, 14, 15, 16 } };
        print(matrix);
    }
}
|
const constant = require("./constant.js");
const storage = require("./storage.js");
/**
 * Checks whether a string is a plausible mainland-China mobile number:
 * 11 digits starting with 1, second digit 3-9.
 * Generalized from the old 1(3|4|5|7|8) prefix list so newer number
 * ranges (16x, 19x, ...) also pass; everything previously accepted
 * is still accepted.
 * @param {string} phone - candidate phone number
 * @returns {boolean} true when the number matches the pattern
 */
function isPhone(phone) {
  return /^1[3-9]\d{9}$/.test(phone);
}
// Shows the native success toast for the shared duration.
// config: { title, success } — `success` is wx's completion callback.
function showSuccessToast(config) {
  const options = {
    title: config.title,
    icon: 'success',
    mask: true,
    duration: constant.duration,
    success: config.success,
  };
  wx.showToast(options);
}
// Shows a failure toast with the custom info image for the shared duration.
// config: { title, success } — `success` is wx's completion callback.
function showFailToast(config) {
  const options = {
    title: config.title,
    image: '/image/info.png',
    mask: true,
    duration: constant.duration,
    success: config.success,
  };
  wx.showToast(options);
}
module.exports = {
isPhone: isPhone,
showSuccessToast: showSuccessToast,
showFailToast: showFailToast
}; |
// Navtree member index for armnn::profiling::ProfilingStateMachine.
// Appears to be Doxygen-generated data (do not edit by hand): each entry
// is [display name, target page#anchor, subtree]. `var` is kept
// deliberately — the Doxygen scripts read this as a script-global.
var classarmnn_1_1profiling_1_1_profiling_state_machine =
[
    [ "ProfilingStateMachine", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#a419c19ff2c798aab55b0789e051517d7", null ],
    [ "ProfilingStateMachine", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#aa89f70de19b6fb3a6ed4cea36890d9ab", null ],
    [ "GetCurrentState", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#ad8d263669817cf577760710ff6c96bb9", null ],
    [ "IsOneOfStates", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#a4522fa83798319a209209f4d4ae34fd3", null ],
    [ "IsOneOfStates", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#a64c592cf41e009dd59cded3a7169de73", null ],
    [ "Reset", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#a372de693ad40b3f42839c8ec6ac845f4", null ],
    [ "TransitionToState", "classarmnn_1_1profiling_1_1_profiling_state_machine.xhtml#a175365c95c51fb3dad223ce3f48a3ffc", null ]
];
#!/bin/bash
# qcow2 image mount helper. Must run as root: mounting, kpartx and nbd
# device handling all require it.
if [ "$(id -u)" != "0" ]; then
echo "Please run as root" 1>&2
exit 1
fi
progname=$(basename $0)
# Print usage/help text.
function usage()
{
cat << HEREDOC
Usage:
Mount Image : $progname [--mount] [--image-name <path to qcow2 image>] [--mount-point <mount point>]
Umount Image: $progname [--umount] [--mount-point <mount point>]
Cleanup NBD : $progname [--cleanup]
arguments:
-h, --help show this help message and exit
-c, --cleanup cleanup orphaned device mappings
-m, --mount mount image
-u, --umount umount image
-i, --image-name path to qcow2 image
-p, --mount-point mount point for image
This tool will use /dev/nbd1 as default for mounting an image. If you want to use another device, execute like this:
NBD_DEV=/dev/nbd2 ./$progname --mount --image-name <your image> --mount-point <your path>
HEREDOC
}
# Option state, filled in by the argument loop below.
MOUNT=0
UMOUNT=0
IMAGE=""
MOUNTPOINT=""
# Remove kpartx device maps for nbd devices that no longer have a
# qemu-nbd process attached (orphaned after a crash or a missed umount).
nbd_cleanup() {
DEVS="$(lsblk | grep nbd | grep disk | cut -d" " -f1)"
if [ ! -z "${DEVS}" ]; then
for d in $DEVS; do
if [ ! -z "${d}" ]; then
# Any live process referencing the device means it is still in use.
QDEV="$(ps xa | grep $d | grep -v grep)"
if [ -z "${QDEV}" ]; then
kpartx -d /dev/$d && echo "Unconnected device map removed: /dev/$d"
fi
fi
done
fi
}
# As long as there is at least one more argument, keep looping
while [[ $# -gt 0 ]]; do
key="$1"
case "$key" in
-h|--help)
usage
exit
;;
-c|--cleanup)
nbd_cleanup
;;
-m|--mount)
MOUNT=1
;;
-u|--umount)
UMOUNT=1
;;
-i|--image-name)
# Value options consume the following argument.
shift
IMAGE="$1"
;;
-p|--mount-point)
shift
MOUNTPOINT="$1"
;;
*)
echo "Unknown option '$key'"
usage
exit
;;
esac
# Shift after checking all the cases to get the next option
shift
done
# Validate the option combination before doing any work.
if [ "${MOUNT}" = "1" ] && [ "${UMOUNT}" = "1" ]; then
usage
echo "Concurrent mount options not possible."
exit
fi
if [ "${MOUNT}" = "1" ] && ([ -z "${IMAGE}" ] || [ -z "${MOUNTPOINT}" ]); then
usage
echo "Can not mount image. Image path and/or mount point missing."
exit
fi
if [ "${UMOUNT}" = "1" ] && [ -z "${MOUNTPOINT}" ]; then
usage
echo "Can not umount. Mount point parameter missing."
exit
fi
export NBD_DEV="${NBD_DEV:-/dev/nbd1}"
# BUGFIX: derive the partition mapper paths from NBD_DEV instead of
# hard-coding nbd1, so the documented NBD_DEV override actually works.
NBD_NAME="$(basename "$NBD_DEV")"
export MAP_BOOT_DEV="/dev/mapper/${NBD_NAME}p1"
export MAP_ROOT_DEV="/dev/mapper/${NBD_NAME}p2"
# mount_qimage / umount_qimage come from the sourced helper script.
source scripts/qcow2_handling
if [ "${MOUNT}" = "1" ]; then
mount_qimage "${IMAGE}" "${MOUNTPOINT}"
elif [ "${UMOUNT}" = "1" ]; then
umount_qimage "${MOUNTPOINT}"
fi
|
def findSubset(arr, target):
    """Return True if some subset of ``arr`` sums to ``target``.

    Classic subset-sum dynamic program in O(len(arr) * target) time and
    space: T[i][j] is True when some subset of the first ``i`` elements
    sums to ``j``.

    Args:
        arr: sequence of non-negative integers.
        target: non-negative integer sum to reach.

    Returns:
        True when a subset with the requested sum exists. ``target == 0``
        is always True (the empty subset).
    """
    # T[i][j]: can the first i items reach sum j? The initializer already
    # makes row 0 False for every j > 0 (empty set reaches nothing but 0).
    T = [[False] * (target + 1) for _ in range(len(arr) + 1)]

    # Sum 0 is reachable with the empty subset, whatever the prefix.
    for i in range(len(arr) + 1):
        T[i][0] = True

    # Fill the table bottom-up.
    for i in range(1, len(arr) + 1):
        for j in range(1, target + 1):
            if j < arr[i - 1]:
                # Item i-1 is too big to use: inherit the answer without it.
                T[i][j] = T[i - 1][j]
            else:
                # Either skip the item, or use it and reach j - arr[i-1].
                T[i][j] = T[i - 1][j] or T[i - 1][j - arr[i - 1]]

    return T[len(arr)][target]
-- Top 10 fruits by descending count.
SELECT name, count
FROM Fruits
ORDER BY count DESC
LIMIT 10;
#!/bin/bash
# Prepares a knitted R Markdown post for Jekyll: optionally prepends a
# header file, rewrites image paths for the site layout, then copies the
# markdown and its figure directory into _posts/ and img/blog_images/.
echo "Which file do you want to post?"
read -e -p "Post:" file
file="${file}.Rmd"
post=${file//.Rmd}
markdown="${post}.md"
header="${post}-header.md"
echo "Do you want to add a header? Enter y or n"
read header_y_n
# BUGFIX: quote the answer — an empty reply made the test a syntax error.
if [ "$header_y_n" == "y" ]
then
cp "$header" temp_post.md
cat "$markdown" >> temp_post.md
mv temp_post.md "$markdown"
fi
# Strip the local image prefix, then point image links at the site URL.
perl -pi -e 's/..\/img\/blog_images\///g' "$markdown"
perl -pi -e 's/!\[]\(/!\[]\(\{\{site_url\}\}\/img\/blog_images\//g' "$markdown"
post_files="${post}_files"
date=$(date +%Y-%m-%d)
markdown_for_post="${date}-${markdown}"
wd=$PWD
cd ../_posts/ || exit 1
# Reuse an existing dated filename if this post was published before.
# markdown_for_post may hold a glob pattern here, expanded by the
# unquoted uses below — do not quote those expansions.
if [ -f *${markdown} ]
then
markdown_for_post=*${markdown}
fi
cd "$wd" || exit 1
markdown_output="../_posts/${markdown_for_post}"
cp "$markdown" $markdown_output
cp -r "$post_files" ../img/blog_images
if diff $markdown_output "$markdown" >/dev/null; then
echo "Your post was successfully moved"
else
echo "There was an error in moving your post"
fi
|
import random


def random_num(start, stop):
    """Return a random integer N with start <= N <= stop (both inclusive)."""
    return random.randint(start, stop)


# Example
num = random_num(0, 10)
print(num)
#!/bin/bash
# One-shot dev setup: install the Python backend requirements, then the
# GUI's npm dependencies.
set -e  # abort on the first failing step instead of "completing" silently
echo "=============== MINIBOT GUI SETUP ================"
pip install -r requirements.txt
cd gui
npm install
echo "================= SETUP COMPLETE ================="
package com.wgu.setcard.ump.service.impl;
import java.util.Objects;
import java.util.Optional;
import com.google.common.collect.ImmutableMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.wgu.setcard.ump.model.User;
import com.wgu.setcard.ump.repository.spec.IUserRepository;
import com.wgu.setcard.ump.service.spec.ITokenService;
import com.wgu.setcard.ump.service.spec.IUserAuthenticationService;
/**
 * Token-based user authentication: login issues an expiring token keyed
 * on the username; findByToken resolves a token back to its user.
 */
@Service
public class UserAuthenticationServiceImpl implements IUserAuthenticationService {
/* DEFINITIONS **************************************************/
private static final String USERNAME_KEY = "username";
/* MEMBERS DECLARATIONS *****************************************/
@Autowired
private ITokenService tokenService;
@Autowired
private IUserRepository userRepository;
/* CLASS CONSTRUCTORS *******************************************/
/* METHODS IMPLEMENTATIONS **************************************/
/**
 * Returns an expiring token when the credentials match, empty otherwise.
 *
 * NOTE(review): the password is compared in plain text, which implies
 * passwords are stored unhashed — confirm, and migrate to a salted hash
 * (e.g. BCrypt) if so.
 */
@Override
public Optional<String> login(final String username, final String password) {
return
userRepository
.findByUsername(username)
.filter(user -> Objects.equals(password, user.getPassword()))
.map(user -> tokenService.expiring(ImmutableMap.of(USERNAME_KEY, username)));
}
/** Verifies the token and looks up the user named in its claims. */
@Override
public Optional<User> findByToken(final String token) {
return
Optional.of(tokenService.verify(token))
.map(map -> map.get(USERNAME_KEY))
.flatMap(userRepository::findByUsername);
}
/** No server-side session state is kept, so logout is a no-op. */
@Override
public void logout(final User user) {
}
}
|
<reponame>MccreeFei/jframe
package jframe.pay.wx.http.util;
import java.util.Random;
import jframe.pay.wx.http.AccessTokenRequestHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Helpers for building WeChat pay/API requests. */
public class WxUtil {

    static Logger LOG = LoggerFactory.getLogger(WxUtil.class);

    /**
     * Generates a nonce string: the MD5 hash ("GBK" charset) of a random
     * integer below 10000.
     */
    public static String getNonceStr() {
        Random random = new Random();
        return MD5Util.MD5Encode(String.valueOf(random.nextInt(10000)), "GBK");
    }

    /** Current unix time in seconds, as a string. */
    public static String getTimeStamp() {
        return String.valueOf(System.currentTimeMillis() / 1000);
    }

    /**
     * Generates a WeChat order (trade) number from the current time of day
     * plus a 4-digit random suffix.
     *
     * NOTE(review): this is only statistically unique — two orders in the
     * same second can collide; confirm that is acceptable or add a
     * sequence component.
     *
     * @return the generated trade number
     */
    public static String genTradeNo() {
        // Current time as yyyyMMddHHmmss.
        String currTime = TenpayUtil.getCurrTime();
        // Drop the 8-digit date prefix, keeping HHmmss.
        String strTime = currTime.substring(8, currTime.length());
        // 4-digit random suffix.
        String strRandom = TenpayUtil.buildRandom(4) + "";
        return strTime + strRandom;
    }

    /**
     * Fetches a WeChat access token. A memcached-backed cache used to live
     * here; it was removed with the dead commented-out code, so every call
     * now hits the remote API directly.
     *
     * @return the token, or null when the request fails
     */
    public static String getToken() {
        Object token = AccessTokenRequestHandler.getTokenReal();
        // BUGFIX: guard against a failed request — the old code called
        // toString() on a potentially null result.
        return token == null ? null : token.toString();
    }
}
|
require "rails/generators/active_record"
# Rails generator that installs pg_audit_log: copies the rake tasks and
# creates the install migration.
module PgAuditLog
module Generators
class InstallGenerator < ::ActiveRecord::Generators::Base
# ActiveRecord::Generators::Base inherits from Rails::Generators::NamedBase which requires a NAME parameter for the
# new table name. Our generator doesn't require a name, so we just set a random name here.
argument :name, type: :string, default: "random_name"
source_root File.expand_path('../templates', __FILE__)
# Copies lib/tasks and writes the timestamped install migration.
def install
directory "lib/tasks"
migration_template "migration.rb", "db/migrate/install_pg_audit_log.rb"
end
end
end
end
|
<filename>QUBEKit/tests/ligand_tests.py
from QUBEKit.ligand import Ligand
import unittest
class TestLigands(unittest.TestCase):
    """Unit tests for the Ligand PDB reader."""

    @classmethod
    def setUpClass(cls):
        # Parse the sample acetone PDB once; the tests only read from it.
        cls.molecule = Ligand('tests/test_files/acetone.pdb')

    def test_pdb_reader(self):
        # Check all atoms are found (acetone has 10 atoms).
        self.assertEqual(10, len(self.molecule.molecule))
        # Check atom names and coords are extracted for each atom in the molecule.
        # Each record holds 4 entries — presumably name + x/y/z; confirm
        # against the Ligand parser if this changes.
        for atom in self.molecule.molecule:
            self.assertEqual(4, len(atom))


if __name__ == '__main__':
    unittest.main()
|
<gh_stars>0
import React, { useState, useRef, useEffect } from "react";
import { useNavigate, useParams } from "react-router-dom";
import avatar from "../assets/avatars/sample-6.png";
import { Document, Page, pdfjs } from "react-pdf/dist/esm/entry.webpack5";
import Button from "../components/Button";
import ButtonTransparent from "../components/ButtonTransparent";
import { isDesktop, isMobile } from "react-device-detect";
import { storage, db } from "../firebase/server";
import { doc, getDoc } from "firebase/firestore";
import { ref, getDownloadURL } from "firebase/storage";
import {
DownloadIcon,
ShareIcon,
ArrowLeftIcon,
XIcon,
ArrowsExpandIcon,
LinkIcon,
} from "@heroicons/react/outline";
import { BadgeCheckIcon } from "@heroicons/react/solid";
pdfjs.GlobalWorkerOptions.workerSrc = `//cdnjs.cloudflare.com/ajax/libs/pdf.js/${pdfjs.version}/pdf.worker.js`;
export default function Viewer() {
const nav = useNavigate();
const { key } = useParams();
const [fileData, setFileData] = useState({});
const renderSection = useRef();
const [fullScreen, setFullScreen] = useState(false);
const [nowSize, setNowSize] = useState(0);
const [renderSize, setRenderSize] = useState(0);
const [pageNumber, setPageNumber] = useState(0);
const [share, setShare] = useState(false);
const [downloading, setDownloading] = useState(false);
useEffect(() => {
if (isMobile) {
setRenderSize(renderSection.current.offsetWidth + 60);
} else setRenderSize(renderSection.current.offsetWidth - 120);
const onResize = () => {
let time;
clearTimeout(time);
time = setTimeout(() => {
if (isDesktop) {
setRenderSize(renderSection.current.offsetWidth);
}
}, 100);
};
const getFile = async () => {
const docRef = doc(db, "publishings", key);
const docSnap = await getDoc(docRef);
let course;
if (docSnap.exists()) {
const bookData = docSnap.data();
const authSnap = await getDoc(bookData.author);
if (bookData.course) {
const getCourse = await getDoc(bookData.course);
if (getCourse.exists()) {
course = getCourse.data();
}
}
if (authSnap.exists()) {
const authorData = authSnap.data();
const month = [
"Gennaio",
"Febbraio",
"Marzo",
"Aprile",
"Maggio",
"Giugno",
"Luglio",
"Agosto",
"Settembre",
"Ottobre",
"Novembre",
"Dicembre",
];
const pub = new Date(bookData.publishDate.toDate());
const last = new Date(bookData.lastEdit.toDate());
getDownloadURL(
ref(storage, `users/${bookData.authorId}/files/${bookData.file}`)
)
.then((url) => {
// `url` is the download URL for 'images/stars.jpg'
setFileData((f) => ({
...f,
title: bookData.title,
publish:
pub.getDay() +
" " +
month[pub.getMonth()] +
" " +
pub.getFullYear(),
pages: bookData.pages,
lastEdit:
last.getDay() +
" " +
month[last.getMonth()] +
" " +
last.getFullYear(),
file: url,
authorName: authorData.fname + " " + authorData.lname,
authorUser: authorData.user,
authorAvatar: authorData.avatar,
authorVerified: authorData.verified,
course: course ? course.name : false,
}));
})
.catch((error) => {
console.warn(error);
});
} else {
// doc.data() will be undefined in this case
console.log("No such document!");
}
} else {
console.log("No such document!");
nav("/home");
}
};
getFile();
window.addEventListener("resize", onResize);
}, [renderSection, key, nav]);
const downloadFile = () => {
console.log("Downloading...");
setDownloading(true);
setTimeout(() => {
const xhr = new XMLHttpRequest();
xhr.responseType = "blob";
xhr.onload = (event) => {
const blob = new Blob([xhr.response], { type: "image/pdf" });
let a = document.createElement("a");
a.style = "display: none";
document.body.appendChild(a);
let url = window.URL.createObjectURL(blob);
a.href = url;
a.download =
fileData.title +
" - @" +
fileData.authorUser +
" (from CappCamp).pdf";
a.click();
window.URL.revokeObjectURL(url);
};
xhr.open("GET", fileData.file);
xhr.send();
setDownloading(false);
}, 1500);
};
return (
<div className="w-screen h-4/5 md:overflow-hidden flex-1 flex flex-col md:flex-row md:px-10 gap-5 md:gap-32">
<div className="flex flex-col-reverse md:flex-col px-5 py-2 md:py-10">
<div className="flex-1 flex-col">
{fileData.course ? <Tag text={fileData.course} /> : null}
<h1 className="text-2xl font-semibold pt-3 pb-5 text-slate-900 md:w-72">
{fileData.title || "Document title."}
</h1>
<div className="flex flex-col gap-8">
<div className="flex justify-around md:justify-start flex-row items-center md:items-start md:flex-col gap-5 md:gap-3 ring-2 ring-slate-100 md:ring-0 p-2 md:p-0 rounded-lg">
<div className="flex flex-col items-center md:items-start text-slate-800">
<span className="uppercase text-sm font-medium opacity-50">
pages
</span>
<span className="flex items-center font-Def font-light -mt-0.5">
<span>{pageNumber || fileData.pages}</span>
{/* <span className="pl-1 opacity-50">/ 35</span> */}
</span>
</div>
<div className="flex flex-col items-center md:items-start text-slate-800">
<span className="uppercase text-sm font-medium opacity-50">
last updated
</span>
<span className="flex items-center font-Def font-light -mt-0.5">
<span>{fileData.lastEdit}</span>
</span>
</div>
{isDesktop ? (
<div className="flex flex-col items-center md:items-start text-slate-800">
<span className="uppercase text-sm font-medium opacity-50">
by
</span>
<span
className="flex items-center font-Def font-light -mt-0.5 cursor-pointer w-min"
onClick={() => {
setFullScreen(true);
setNowSize(renderSize);
}}
>
<img src={avatar} alt="Avatar" className="w-5 h-5" />
<span className="flex items-center ml-2 whitespace-nowrap">
<span>{fileData.authorName}</span>
{fileData.authorVerified && (
<BadgeCheckIcon className="w-4 h-4 ml-2 text-blue-600" />
)}
</span>
</span>
</div>
) : null}
</div>
<div className="flex items-center gap-2 justify-start">
<Button
text="Download"
icon={DownloadIcon}
dark
action={() => downloadFile()}
loading={downloading}
/>
<div className="relative">
<Button
text="Share"
icon={ShareIcon}
action={() => {
setShare(!share);
}}
/>
{share && (
<Share
link={
"https://cappcamp-beta.netlify.app/dashboard/viewer/" +
key
}
setShare={setShare}
/>
)}
</div>
{isMobile && (
<Button
text="Immersivo"
icon={ArrowsExpandIcon}
action={() => {
setNowSize(renderSize);
setFullScreen(true);
}}
/>
)}
</div>
</div>
</div>
{/* BOTTOM BUTTONS */}
<div className="pb-5 md:pb-0">
<ButtonTransparent
text="<NAME>"
icon={ArrowLeftIcon}
link="/dashboard"
/>
</div>
</div>
{/* RENDER FILE */}
<div
// className="flex flex-1 justify-end w-full md:w-3/4"
className={
fullScreen
? "fixed flex flex-1 top-0 left-0 w-screen h-screen bg-slate-900 bg-opacity-40 transition"
: "flex flex-1 justify-end w-full md:w-3/4 transition"
}
ref={renderSection}
>
{fullScreen && (
<div className="fixed top-0 left-0 w-screen flex items-center justify-end p-3 z-20">
<Button
text={isDesktop ? "Close" : null}
icon={XIcon}
dark
action={() => {
setFullScreen(false);
}}
/>
</div>
)}
<RenderFile
file={fileData.file}
size={fullScreen ? nowSize : renderSize}
setPageN={setPageNumber}
reference={renderSection}
/>
<div className="flex fixed bottom-8 right-10 z-20">
{!fullScreen && !isMobile && (
<Button
text="Fullscreen"
icon={ArrowsExpandIcon}
hoverOpacity
action={() => {
setNowSize(renderSize);
setFullScreen(true);
}}
/>
)}
</div>
</div>
</div>
);
}
// fullScreen
// ? renderSize > 900
// ? (renderSize * 75) / 100
// : renderSize < 500
// ? (renderSize * 100) / 90
// : renderSize
// : renderSize
const RenderFile = (props) => {
const [pages, setPages] = useState(0);
const [loading, setLoading] = useState(5);
const [totalPages, setTotalPages] = useState(0);
function handleSuccess({ numPages }) {
if (numPages <= 5) setPages(numPages);
else setPages(5);
props.setPageN(numPages);
setTotalPages(numPages);
}
const handleLoading = (e) => {
setLoading(0);
// console.log("Full: " + e.target.scrollHeight);
// console.log("Top: " + e.target.scrollTop);
if (e.target.scrollHeight - 800 < e.target.scrollTop) {
// console.error("Maximum reached");
if (pages + 5 > totalPages) {
setPages(totalPages);
} else setPages(pages + 5);
}
};
return (
<div
ref={props.reference}
className={`flex-1 overflow-y-auto`}
// onScroll={(e) => console.log(e)}
onScrollCapture={(e) => {
handleLoading(e);
}}
>
<div className={"relative overflow-y-auto md:px-5"}>
<Document
file={props.file}
loading={<p>Loading PDF</p>}
onLoadSuccess={handleSuccess}
className="flex flex-col gap-10 items-center"
// renderMode="svg"
>
{Array.apply(null, Array(pages))
.map((x, i) => i + 1)
.map((page) => (
<Page
pageNumber={page}
loading={<p>Loading page...</p>}
onRenderSuccess={() => {
setLoading(loading + 1);
console.log("rendered");
}}
renderAnnotationLayer={false}
renderTextLayer={false}
width={props.size - 100 || 200}
className="w-min shadow-xl shadow-slate-300 first:mt-10 last:mb-10"
key={page}
/>
))}
</Document>
</div>
</div>
);
};
const Tag = (props) => {
const nav = useNavigate();
return (
<div
className={`flex items-center py-2 px-3 bg-slate-100 text-slate-900 rounded-lg w-max max-w-xs ${
props.link || props.action ? "cursor-pointer" : null
}`}
onClick={props.link ? () => nav(props.link) : null}
>
<span className="uppercase text-xs font-medium">
{props.text || "tag"}
</span>
</div>
);
};
const Share = (props) => {
const handleCopy = () => {
window.navigator.clipboard.writeText(props.link).then(() => {
props.setShare(false);
console.log("Copied to clipboard");
});
};
return (
<div className="absolute z-30 bg-gray-50 top-10 w-60 min-w-min p-3 rounded-lg">
<div>
<h1 className="text-md font-semibold">
Share this note with your friends.
</h1>
<div className="flex items-center gap-2 pt-2">
<input
type="text"
value={props.link || "Sorry, we can't generate a link"}
className="py-1 px-2 text-sm rounded-md bg-gray-100 text-slate-900 outline-none focus:ring-2 ring-blue-500 transition"
readOnly
/>
<Button icon={LinkIcon} action={() => handleCopy()} />
</div>
</div>
</div>
);
};
|
/**
 * Builds a random password of the given length from a fixed character set.
 *
 * SECURITY NOTE: Math.random() is not cryptographically secure — for real
 * credentials use crypto.getRandomValues()/crypto.randomUUID() instead.
 *
 * @param {number} length - number of characters to generate
 * @returns {string} the generated password ("" when length is 0)
 */
function randomPassword(length) {
  const chars =
    "abcdefghijklmnopqrstuvwxyz!@#$%^&*()-+<>ABCDEFGHIJKLMNOP1234567890";
  let pass = "";
  for (let x = 0; x < length; x++) {
    pass += chars[Math.floor(Math.random() * chars.length)];
  }
  return pass;
}
console.log(randomPassword(5));
<reponame>guokaia/HJ212-Moniter<filename>src/main/java/cn/zqgx/moniter/center/server/portal/mapper/MoniterErrorMapping.java<gh_stars>1-10
package cn.zqgx.moniter.center.server.portal.mapper;
import cn.zqgx.moniter.center.server.portal.bean.po.MoniterErrorPo;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for MoniterErrorPo rows. All generic CRUD methods
 * are inherited from BaseMapper, hence the empty body.
 */
@Repository
public interface MoniterErrorMapping extends BaseMapper<MoniterErrorPo> {}
|
// NOTE(review): the condition compares `typeof obj[key]` with ITSELF, so
// it is always false and the function always returns true regardless of
// input. Presumably a second operand (e.g. an expected-types map) was
// intended — confirm the intent before fixing.
function typeCheck(obj) {
  for (let key in obj) {
    if (typeof obj[key] !== typeof obj[key]) {
      return false;
    }
  }
  return true;
}
// Prints a sentence built from a randomly chosen adjective/noun pair.
const adjectives = ["big","small","tall","dark","light","fast","slow","powerful","weak","happy","sad"];
const nouns = ["cat","dog","monkey","elephant","pigeon","sea lion","dolphin","lizard","whale","dragon"];
const pickRandom = (list) => list[Math.floor(Math.random() * list.length)];
let randomAdjective = pickRandom(adjectives);
let randomNoun = pickRandom(nouns);
console.log(`The ${randomAdjective} ${randomNoun} ran away.`);
#!/usr/bin/env bash
# Launches the service under gunicorn; every tunable can be overridden
# through the environment.
GUNICORN_WORKERS=${GUNICORN_WORKERS:-"5"}
GUNICORN_WORKER_CLASS=${GUNICORN_WORKER_CLASS:-"gevent"}
GUNICORN_WORKER_CONNECTIONS=${GUNICORN_WORKER_CONNECTIONS:-"2000"}
GUNICORN_BACKLOG=${GUNICORN_BACKLOG:-"1000"}
# Needed to allow utf8 use in the Monasca API
export PYTHONIOENCODING=utf-8
# exec replaces the shell so gunicorn receives signals directly
# (important under init systems / containers).
exec gunicorn --capture-output \
  -n dyservice \
  --worker-class="$GUNICORN_WORKER_CLASS" \
  --worker-connections="$GUNICORN_WORKER_CONNECTIONS" \
  --backlog="$GUNICORN_BACKLOG" \
  --paste /etc/dyservice/api-config.ini \
  -w "$GUNICORN_WORKERS"
|
#!/bin/bash
# Start an epaxos server bound to this host's eth0 IPv4 address.
#   $1 = master address (-maddr), $2 = application name (-app)

# Fix: fail fast if the source directory is missing instead of silently
# running `go run` from whatever the current directory happens to be.
cd /home/ram16/epaxos/src/server || exit 1

# Extract the IPv4 address of eth0 ($() instead of legacy backticks).
# NOTE(review): the field index 8 is sensitive to `ip` output formatting —
# confirm on the target distribution.
IP=$(ip addr show eth0 | grep 'inet ' | cut -d ' ' -f 8)

CMD="go run server.go -maddr $1 -addr $IP -e -exec -dreply -app $2"
echo $CMD
# Intentionally unquoted so the string word-splits into command + args.
$CMD

# To Kill Servers using pssh
# run pgrep -l server to see which processes show up (make sure they are correct)
# run pkill server
#!/bin/bash
# Read a number and report how it compares to 100.
# Fix: the original was a syntax error — the `elif` branch had no `then`
# and no body before `fi`, so bash refused to execute the script at all.
echo " Entre com numero"
read count
if [ "$count" -eq 100 ]
then
    echo " conta e 100"
elif [ "$count" -gt 100 ]
then
    # NOTE(review): the original left this branch empty; message inferred
    # from the pattern of the first branch — confirm the intended text.
    echo " conta e maior que 100"
fi
package org.terracottamc.network.packet.type;
/**
* Copyright (c) 2021, TerracottaMC
* All rights reserved.
*
* <p>
* This project is licensed under the BSD 3-Clause License which
* can be found in the root directory of this source tree
*
* @author Kaooot
* @version 1.0
*/
public class ResourcePackEntry {
private final String uuid;
private final String version;
/**
* Creates a new {@link org.terracottamc.network.packet.type.ResourcePackEntry} with given uniqueId and version
*
* @param uuid that is the uniqueId of this {@link org.terracottamc.network.packet.type.ResourcePackEntry}
* @param version that is the version of this {@link org.terracottamc.network.packet.type.ResourcePackEntry}
*/
public ResourcePackEntry(final String uuid, final String version) {
this.uuid = uuid;
this.version = version;
}
/**
* Retrieves the {@link java.util.UUID} of this {@link org.terracottamc.network.packet.type.ResourcePackEntry}
*
* @return a fresh {@link java.lang.String}
*/
public String getUuid() {
return this.uuid;
}
/**
* Returns the version of this {@link org.terracottamc.network.packet.type.ResourcePackEntry}
*
* @return a fresh {@link java.lang.String}
*/
public String getVersion() {
return this.version;
}
} |
<filename>src/test/scala/com/tzavellas/coeus/validation/vspec/constraint/CreditCardConstraintTest.scala
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Ported from Apache Jakarta Commons Validator,
* http://commons.apache.org/validator/
*
* Author: <NAME>
*/
package com.tzavellas.coeus.validation.vspec.constraint
import org.junit.Test
import org.junit.Assert._
class CreditCardConstraintTest {

  import CreditCardConstraintTest._
  import ConstraintAssertions._

  // Constraint under test, configured with its default card types.
  val ccc = new CreditCardConstraint

  @Test
  def test_using_invalid_credit_cards() {
    // Empty, too-short, too-long, bad-checksum and non-numeric inputs.
    assertInvalid(ccc, "", "123456789012", "12345678901234567890", "4417123456789112", "4417q23456w89113")
  }

  @Test
  def test_using_valid_credit_cards() {
    // Fix: the original listed VALID_SHORT_VISA twice and never exercised
    // VALID_AMEX here; the duplicate was evidently meant to be the Amex
    // fixture (matching the upstream Commons Validator test it was ported
    // from). NOTE(review): confirm Amex is enabled by default.
    assertValid(ccc, null, VALID_VISA, VALID_SHORT_VISA, VALID_AMEX,
      VALID_MASTERCARD, VALID_DISCOVER, VALID_DINERS)
  }

  @Test
  def change_allowed_card_types() {
    val dinersOnly = new CreditCardConstraint(CreditCardType.DinersClub)
    assertValid(dinersOnly, VALID_DINERS)
    // Turned off all other card types so even valid numbers should fail
    assertInvalid(dinersOnly, VALID_VISA, VALID_AMEX, VALID_MASTERCARD, VALID_DISCOVER)
  }

  @Test
  def skip_cart_type_validation() {
    val noTypeValidation = new CreditCardConstraint(CreditCardType.SkipCartTypeValidation)
    assertValid(noTypeValidation, VALID_AMEX)
  }
}
private object CreditCardConstraintTest {
// Well-known test card numbers (valid Luhn checksums), one per card type.
// These match the fixtures used by the Apache Jakarta Commons Validator
// suite this file was ported from (see the file header).
val VALID_VISA = "4417123456789113"
val VALID_SHORT_VISA = "4222222222222"
val VALID_AMEX = "378282246310005"
val VALID_MASTERCARD = "5105105105105100"
val VALID_DISCOVER = "6011000990139424"
val VALID_DINERS = "30569309025904"
}
|
//
// EEPROM emulation in FLASH for storing settings (pvvx Ver 1.0)
// Cyclically reuses a 1024-byte area (one flash page) of the MCU
// to persist a configuration block.
//
#ifndef __RW_FLASH_INI_H
#define __RW_FLASH_INI_H
#include "stm32f10x.h"
#include "stm32f10x_flash.h"
// Medium-density devices are STM32F101xx, STM32F102xx and STM32F103xx microcontrollers
// where the Flash memory density ranges between 64 and 128 Kbytes.
#define FLASH_SIZE 65536ul // total flash size
#define FLASH_PAGE_SIZE 1024ul // flash page size for FLASH_ErasePage()
// Fix: both address macros are now parenthesized so they expand safely
// inside larger expressions (the original unparenthesized `BASE + SIZE - 1`
// mis-evaluates under e.g. multiplication or pointer arithmetic).
#define FLASH_STORE_STR_ADR (FLASH_BASE + FLASH_SIZE - FLASH_PAGE_SIZE) // start address of the settings block (64k - one page)
#define FLASH_STORE_END_ADR (FLASH_BASE + FLASH_SIZE - 1) // last address of the settings block (end of flash)
void WriteIniBlk(void * blk, uint16_t blk_size); // write a settings block to flash
int ReadIniBlk(void * blk, uint16_t blk_size); // read the settings block from flash
#endif //__RW_FLASH_INI_H
|
<filename>core/repl.go<gh_stars>1000+
package core
import (
"bufio"
"errors"
"fmt"
"os"
"github.com/elsaland/quickjs"
)
// Repl runs a simple read-eval-print loop over stdin: each line is fed to
// Eval together with the accumulated history buffer and the result is
// printed. The loop ends when stdin is closed or a read fails (the JS
// close() callback exits the process directly).
func Repl() {
	stringToEval := ""
	fmt.Println("Elsa REPL")
	fmt.Println("exit using ctrl+c or close()")
	// Fix: the reader was previously recreated on every iteration, which
	// can drop input already buffered by bufio; create it once.
	reader := bufio.NewReader(os.Stdin)
	for {
		fmt.Print("> ")
		text, err := reader.ReadString('\n')
		fmt.Println(Eval(text, &stringToEval))
		stringToEval += ";undefined;"
		if err != nil {
			// Fix: the read error was discarded, so on EOF the original
			// busy-looped forever re-reading a dead stream; return instead
			// (the final partial line has already been evaluated above).
			return
		}
	}
}
// Eval evaluates `text` as JavaScript in a brand-new QuickJS runtime and
// context, prefixed by *buffer (the accumulated REPL history), and returns
// the result rendered as a string.
//
// When check() reports the input evaluated cleanly, the text is appended
// to *buffer (separated by ";undefined;") so later REPL lines can see
// earlier definitions; on a JS error the buffer is left unchanged.
func Eval(text string, buffer *string) string {
// repl close function: exposed to JS as close(); terminates the process.
// NOTE(review): exits with status 1 even for a user-requested close —
// confirm whether exit code 0 was intended.
closeEval := func(ctx *quickjs.Context, this quickjs.Value, args []quickjs.Value) quickjs.Value {
os.Exit(1)
return ctx.Null()
}
// A fresh runtime/context is created per call; state only persists across
// calls through replaying the *buffer source.
evalRuntime := quickjs.NewRuntime()
defer evalRuntime.Free()
evalContext := evalRuntime.NewContext()
defer evalContext.Free()
//TODO(buttercubz) set globals functions
globalsEval := evalContext.Globals()
globalsEval.Set("close", evalContext.Function(closeEval))
result, err := evalContext.Eval(*buffer + text)
saveBuffer := check(err)
if saveBuffer {
*buffer += fmt.Sprintf(";undefined; %s", text)
}
// NOTE(review): result is freed and stringified even when Eval returned an
// error; assumes quickjs yields a usable zero Value then — confirm.
defer result.Free()
return result.String()
}
// check reports whether an evaluation error is absent or non-fatal.
// A *quickjs.Error has its JS cause and stack printed and yields false
// (so the caller skips saving the input into the history buffer); a nil
// error — or any non-QuickJS error — yields true. Never exits the process.
func check(err error) bool {
	if err == nil {
		return true
	}
	var evalErr *quickjs.Error
	if !errors.As(err, &evalErr) {
		return true
	}
	fmt.Println(evalErr.Cause)
	fmt.Println(evalErr.Stack)
	return false
}
|
<filename>gulpfile.js<gh_stars>0
'use strict';

// Build pipeline: minify HTML/CSS/JS into dist/ and run the app under
// nodemon, re-minifying whenever sources change (gulp 3.x task API with
// dependency arrays).
const gulp = require('gulp');
const uglify = require('gulp-uglify');
const minifycss = require('gulp-minify-css');
const minifyhtml = require('gulp-minify-html');
const nodemon = require('gulp-nodemon');

// Minify the single HTML entry point into dist/.
gulp.task('minifyhtml', () =>
  gulp.src('src/dualrtc.html')
    .pipe(minifyhtml())
    .pipe(gulp.dest('dist')));

// Uglify all client scripts into dist/js.
gulp.task('uglify', () =>
  gulp.src('src/dualrtc/*.js')
    .pipe(uglify())
    .pipe(gulp.dest('dist/js')));

// Minify the stylesheet into dist/css.
gulp.task('minifycss', () =>
  gulp.src('src/css/style.css')
    .pipe(minifycss())
    .pipe(gulp.dest('dist/css')));

// Start the app with nodemon after the minify tasks have run.
gulp.task('nodemon', ['uglify', 'minifycss', 'minifyhtml'], (cb) => {
  let started = false;
  return nodemon({
    script: 'app.js'
  }).on('start', () => {
    // nodemon emits 'start' on every restart; signal gulp only once.
    if (!started) {
      cb();
      started = true;
    }
  });
});

// Re-run the matching minify task when its sources change.
gulp.task('watch', () => {
  gulp.watch('src/dualrtc.html', ['minifyhtml']);
  gulp.watch('src/dualrtc/*.js', ['uglify']);
  gulp.watch('src/css/*.css', ['minifycss']);
});

gulp.task('default', ['nodemon', 'watch']);
<gh_stars>1-10
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: complexType.proto
package com.riferrei.kafka.connect.pulsar;
public final class ProtoBufGenComplexType {
private ProtoBufGenComplexType() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface ProtoBufComplexTypeOrBuilder extends
// @@protoc_insertion_point(interface_extends:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType)
com.google.protobuf.MessageOrBuilder {
/**
* <code>string stringField = 1;</code>
*/
java.lang.String getStringField();
/**
* <code>string stringField = 1;</code>
*/
com.google.protobuf.ByteString
getStringFieldBytes();
/**
* <code>bool booleanField = 2;</code>
*/
boolean getBooleanField();
/**
* <code>bytes bytesField = 3;</code>
*/
com.google.protobuf.ByteString getBytesField();
/**
* <code>int32 intField = 4;</code>
*/
int getIntField();
/**
* <code>int64 longField = 5;</code>
*/
long getLongField();
/**
* <code>float floatField = 6;</code>
*/
float getFloatField();
/**
* <code>double doubleField = 7;</code>
*/
double getDoubleField();
/**
* <code>map<string, double> mapField = 8;</code>
*/
int getMapFieldCount();
/**
* <code>map<string, double> mapField = 8;</code>
*/
boolean containsMapField(
java.lang.String key);
/**
* Use {@link #getMapFieldMap()} instead.
*/
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.Double>
getMapField();
/**
* <code>map<string, double> mapField = 8;</code>
*/
java.util.Map<java.lang.String, java.lang.Double>
getMapFieldMap();
/**
* <code>map<string, double> mapField = 8;</code>
*/
double getMapFieldOrDefault(
java.lang.String key,
double defaultValue);
/**
* <code>map<string, double> mapField = 8;</code>
*/
double getMapFieldOrThrow(
java.lang.String key);
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
boolean hasInnerField();
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getInnerField();
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder getInnerFieldOrBuilder();
}
/**
* Protobuf type {@code com.riferrei.kafka.connect.pulsar.ProtoBufComplexType}
*/
public static final class ProtoBufComplexType extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType)
ProtoBufComplexTypeOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProtoBufComplexType.newBuilder() to construct.
private ProtoBufComplexType(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ProtoBufComplexType() {
stringField_ = "";
bytesField_ = com.google.protobuf.ByteString.EMPTY;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ProtoBufComplexType(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
stringField_ = s;
break;
}
case 16: {
booleanField_ = input.readBool();
break;
}
case 26: {
bytesField_ = input.readBytes();
break;
}
case 32: {
intField_ = input.readInt32();
break;
}
case 40: {
longField_ = input.readInt64();
break;
}
case 53: {
floatField_ = input.readFloat();
break;
}
case 57: {
doubleField_ = input.readDouble();
break;
}
case 66: {
if (!((mutable_bitField0_ & 0x00000080) != 0)) {
mapField_ = com.google.protobuf.MapField.newMapField(
MapFieldDefaultEntryHolder.defaultEntry);
mutable_bitField0_ |= 0x00000080;
}
com.google.protobuf.MapEntry<java.lang.String, java.lang.Double>
mapField__ = input.readMessage(
MapFieldDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
mapField_.getMutableMap().put(
mapField__.getKey(), mapField__.getValue());
break;
}
case 74: {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder subBuilder = null;
if (innerField_ != null) {
subBuilder = innerField_.toBuilder();
}
innerField_ = input.readMessage(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(innerField_);
innerField_ = subBuilder.buildPartial();
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 8:
return internalGetMapField();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.class, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.Builder.class);
}
public interface ProtoBufInnerTypeOrBuilder extends
// @@protoc_insertion_point(interface_extends:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType)
com.google.protobuf.MessageOrBuilder {
/**
* <code>double doubleField = 1;</code>
*/
double getDoubleField();
/**
* <code>repeated string arrayField = 2;</code>
*/
java.util.List<java.lang.String>
getArrayFieldList();
/**
* <code>repeated string arrayField = 2;</code>
*/
int getArrayFieldCount();
/**
* <code>repeated string arrayField = 2;</code>
*/
java.lang.String getArrayField(int index);
/**
* <code>repeated string arrayField = 2;</code>
*/
com.google.protobuf.ByteString
getArrayFieldBytes(int index);
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
*/
int getEnumFieldValue();
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
*/
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions getEnumField();
}
/**
* Protobuf type {@code com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType}
*/
public static final class ProtoBufInnerType extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType)
ProtoBufInnerTypeOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProtoBufInnerType.newBuilder() to construct.
private ProtoBufInnerType(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ProtoBufInnerType() {
arrayField_ = com.google.protobuf.LazyStringArrayList.EMPTY;
enumField_ = 0;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ProtoBufInnerType(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 9: {
doubleField_ = input.readDouble();
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
arrayField_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000002;
}
arrayField_.add(s);
break;
}
case 24: {
int rawValue = input.readEnum();
enumField_ = rawValue;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) != 0)) {
arrayField_ = arrayField_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.class, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder.class);
}
/**
* Protobuf enum {@code com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions}
*/
public enum ProtoBufMultipleOptions
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>FIRST_OPTION = 0;</code>
*/
FIRST_OPTION(0),
/**
* <code>SECOND_OPTION = 1;</code>
*/
SECOND_OPTION(1),
/**
* <code>THIRD_OPTION = 2;</code>
*/
THIRD_OPTION(2),
/**
* <code>FOURTH_OPTION = 3;</code>
*/
FOURTH_OPTION(3),
UNRECOGNIZED(-1),
;
/**
* <code>FIRST_OPTION = 0;</code>
*/
public static final int FIRST_OPTION_VALUE = 0;
/**
* <code>SECOND_OPTION = 1;</code>
*/
public static final int SECOND_OPTION_VALUE = 1;
/**
* <code>THIRD_OPTION = 2;</code>
*/
public static final int THIRD_OPTION_VALUE = 2;
/**
* <code>FOURTH_OPTION = 3;</code>
*/
public static final int FOURTH_OPTION_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ProtoBufMultipleOptions valueOf(int value) {
return forNumber(value);
}
public static ProtoBufMultipleOptions forNumber(int value) {
switch (value) {
case 0: return FIRST_OPTION;
case 1: return SECOND_OPTION;
case 2: return THIRD_OPTION;
case 3: return FOURTH_OPTION;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ProtoBufMultipleOptions>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
ProtoBufMultipleOptions> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ProtoBufMultipleOptions>() {
public ProtoBufMultipleOptions findValueByNumber(int number) {
return ProtoBufMultipleOptions.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDescriptor().getEnumTypes().get(0);
}
private static final ProtoBufMultipleOptions[] VALUES = values();
public static ProtoBufMultipleOptions valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ProtoBufMultipleOptions(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions)
}
private int bitField0_;
public static final int DOUBLEFIELD_FIELD_NUMBER = 1;
private double doubleField_;
/**
* <code>double doubleField = 1;</code>
*/
public double getDoubleField() {
return doubleField_;
}
public static final int ARRAYFIELD_FIELD_NUMBER = 2;
private com.google.protobuf.LazyStringList arrayField_;
/**
* <code>repeated string arrayField = 2;</code>
*/
public com.google.protobuf.ProtocolStringList
getArrayFieldList() {
return arrayField_;
}
/**
* <code>repeated string arrayField = 2;</code>
*/
public int getArrayFieldCount() {
return arrayField_.size();
}
/**
* <code>repeated string arrayField = 2;</code>
*/
public java.lang.String getArrayField(int index) {
return arrayField_.get(index);
}
/**
* <code>repeated string arrayField = 2;</code>
*/
public com.google.protobuf.ByteString
getArrayFieldBytes(int index) {
return arrayField_.getByteString(index);
}
public static final int ENUMFIELD_FIELD_NUMBER = 3;
private int enumField_;
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
*/
public int getEnumFieldValue() {
return enumField_;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
*/
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions getEnumField() {
@SuppressWarnings("deprecation")
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions result = com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.valueOf(enumField_);
return result == null ? com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (doubleField_ != 0D) {
output.writeDouble(1, doubleField_);
}
for (int i = 0; i < arrayField_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, arrayField_.getRaw(i));
}
if (enumField_ != com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.FIRST_OPTION.getNumber()) {
output.writeEnum(3, enumField_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (doubleField_ != 0D) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(1, doubleField_);
}
{
int dataSize = 0;
for (int i = 0; i < arrayField_.size(); i++) {
dataSize += computeStringSizeNoTag(arrayField_.getRaw(i));
}
size += dataSize;
size += 1 * getArrayFieldList().size();
}
if (enumField_ != com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.FIRST_OPTION.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, enumField_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType)) {
return super.equals(obj);
}
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType other = (com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType) obj;
if (java.lang.Double.doubleToLongBits(getDoubleField())
!= java.lang.Double.doubleToLongBits(
other.getDoubleField())) return false;
if (!getArrayFieldList()
.equals(other.getArrayFieldList())) return false;
if (enumField_ != other.enumField_) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DOUBLEFIELD_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getDoubleField()));
if (getArrayFieldCount() > 0) {
hash = (37 * hash) + ARRAYFIELD_FIELD_NUMBER;
hash = (53 * hash) + getArrayFieldList().hashCode();
}
hash = (37 * hash) + ENUMFIELD_FIELD_NUMBER;
hash = (53 * hash) + enumField_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType)
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder {
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point
// markers); prefer regenerating from the .proto over hand-editing.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.class, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder.class);
}
// Construct using com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Intentionally empty body: this message has no message-typed fields,
// so there are no nested field builders to force-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its proto3 default and clears the mutability
// bit (0x00000002) that guards arrayField_'s copy-on-write list.
@java.lang.Override
public Builder clear() {
super.clear();
doubleField_ = 0D;
arrayField_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
enumField_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor;
}
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getDefaultInstanceForType() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDefaultInstance();
}
// build() = buildPartial() plus an initialization check; for proto3
// messages isInitialized() is always true, so the throw is unreachable
// in practice but kept by the generator for API uniformity.
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType build() {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Snapshots the builder state into a new message instance. arrayField_
// is frozen via getUnmodifiableView() and the mutability bit dropped so
// the built message and the builder stop sharing a mutable list. The
// from_/to_bitField0_ locals are standard generator scaffolding and are
// effectively no-ops here (no optional-presence bits in this message).
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType buildPartial() {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType result = new com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.doubleField_ = doubleField_;
if (((bitField0_ & 0x00000002) != 0)) {
arrayField_ = arrayField_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000002);
}
result.arrayField_ = arrayField_;
result.enumField_ = enumField_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// The overrides below are generator boilerplate that simply delegate to
// GeneratedMessageV3.Builder; they exist to narrow the return type to
// this Builder so calls can be chained.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the strongly-typed overload when possible; otherwise
// falls back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType) {
return mergeFrom((com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Proto3 merge semantics: scalars are copied only when non-default in
// `other`; the repeated arrayField is appended (or aliased when this
// builder's list is still empty, deferring the copy until mutation).
public Builder mergeFrom(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType other) {
if (other == com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDefaultInstance()) return this;
if (other.getDoubleField() != 0D) {
setDoubleField(other.getDoubleField());
}
if (!other.arrayField_.isEmpty()) {
if (arrayField_.isEmpty()) {
arrayField_ = other.arrayField_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureArrayFieldIsMutable();
arrayField_.addAll(other.arrayField_);
}
onChanged();
}
if (other.enumField_ != 0) {
setEnumFieldValue(other.getEnumFieldValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Proto3: no required fields, so a message is always "initialized".
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a wire-format stream. On a parse failure the partially
// parsed message is still merged in (finally block) before the
// exception is rethrown as an IOException.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000002 of bitField0_ tracks whether arrayField_ is this
// builder's own mutable copy (copy-on-write guard).
private int bitField0_;
private double doubleField_ ;
/**
 * <code>double doubleField = 1;</code>
 */
public double getDoubleField() {
return doubleField_;
}
/**
 * <code>double doubleField = 1;</code>
 */
public Builder setDoubleField(double value) {
doubleField_ = value;
onChanged();
return this;
}
/**
 * <code>double doubleField = 1;</code>
 */
public Builder clearDoubleField() {
doubleField_ = 0D;
onChanged();
return this;
}
private com.google.protobuf.LazyStringList arrayField_ = com.google.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write: clones the (possibly shared) list into a private
// mutable one the first time a mutation is requested.
private void ensureArrayFieldIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
arrayField_ = new com.google.protobuf.LazyStringArrayList(arrayField_);
bitField0_ |= 0x00000002;
}
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public com.google.protobuf.ProtocolStringList
getArrayFieldList() {
return arrayField_.getUnmodifiableView();
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public int getArrayFieldCount() {
return arrayField_.size();
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public java.lang.String getArrayField(int index) {
return arrayField_.get(index);
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public com.google.protobuf.ByteString
getArrayFieldBytes(int index) {
return arrayField_.getByteString(index);
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public Builder setArrayField(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureArrayFieldIsMutable();
arrayField_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public Builder addArrayField(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureArrayFieldIsMutable();
arrayField_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public Builder addAllArrayField(
java.lang.Iterable<java.lang.String> values) {
ensureArrayFieldIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, arrayField_);
onChanged();
return this;
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public Builder clearArrayField() {
arrayField_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 * <code>repeated string arrayField = 2;</code>
 */
public Builder addArrayFieldBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureArrayFieldIsMutable();
arrayField_.add(value);
onChanged();
return this;
}
// The enum is stored as its raw wire number so unknown values survive
// round-trips; getEnumField() maps unrecognized numbers to UNRECOGNIZED.
private int enumField_ = 0;
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
 */
public int getEnumFieldValue() {
return enumField_;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
 */
public Builder setEnumFieldValue(int value) {
enumField_ = value;
onChanged();
return this;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
 */
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions getEnumField() {
@SuppressWarnings("deprecation")
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions result = com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.valueOf(enumField_);
return result == null ? com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions.UNRECOGNIZED : result;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
 */
public Builder setEnumField(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions value) {
if (value == null) {
throw new NullPointerException();
}
enumField_ = value.getNumber();
onChanged();
return this;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType.ProtoBufMultipleOptions enumField = 3;</code>
 */
public Builder clearEnumField() {
enumField_ = 0;
onChanged();
return this;
}
// Final delegating overrides for unknown-field handling (return type
// narrowed to Builder for chaining).
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType)
}
// @@protoc_insertion_point(class_scope:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType)
// Singleton default instance (all fields at proto3 defaults) plus the
// stateless PARSER shared by all parseFrom entry points.
private static final com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType();
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ProtoBufInnerType>
PARSER = new com.google.protobuf.AbstractParser<ProtoBufInnerType>() {
@java.lang.Override
public ProtoBufInnerType parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ProtoBufInnerType(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ProtoBufInnerType> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ProtoBufInnerType> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Immutable message-side storage for ProtoBufComplexType's scalar
// fields (field numbers 1-7). Values are set once during parsing /
// buildPartial and only read afterwards.
private int bitField0_;
public static final int STRINGFIELD_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; the getters below lazily
// convert and cache the other representation (hence `volatile`).
private volatile java.lang.Object stringField_;
/**
 * <code>string stringField = 1;</code>
 */
public java.lang.String getStringField() {
java.lang.Object ref = stringField_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stringField_ = s;
return s;
}
}
/**
 * <code>string stringField = 1;</code>
 */
public com.google.protobuf.ByteString
getStringFieldBytes() {
java.lang.Object ref = stringField_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
stringField_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BOOLEANFIELD_FIELD_NUMBER = 2;
private boolean booleanField_;
/**
 * <code>bool booleanField = 2;</code>
 */
public boolean getBooleanField() {
return booleanField_;
}
public static final int BYTESFIELD_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString bytesField_;
/**
 * <code>bytes bytesField = 3;</code>
 */
public com.google.protobuf.ByteString getBytesField() {
return bytesField_;
}
public static final int INTFIELD_FIELD_NUMBER = 4;
private int intField_;
/**
 * <code>int32 intField = 4;</code>
 */
public int getIntField() {
return intField_;
}
public static final int LONGFIELD_FIELD_NUMBER = 5;
private long longField_;
/**
 * <code>int64 longField = 5;</code>
 */
public long getLongField() {
return longField_;
}
public static final int FLOATFIELD_FIELD_NUMBER = 6;
private float floatField_;
/**
 * <code>float floatField = 6;</code>
 */
public float getFloatField() {
return floatField_;
}
public static final int DOUBLEFIELD_FIELD_NUMBER = 7;
private double doubleField_;
/**
 * <code>double doubleField = 7;</code>
 */
public double getDoubleField() {
return doubleField_;
}
public static final int MAPFIELD_FIELD_NUMBER = 8;
// Lazy holder for the map entry prototype (string key -> double value)
// used when (de)serializing mapField entries.
private static final class MapFieldDefaultEntryHolder {
static final com.google.protobuf.MapEntry<
java.lang.String, java.lang.Double> defaultEntry =
com.google.protobuf.MapEntry
.<java.lang.String, java.lang.Double>newDefaultInstance(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.DOUBLE,
0D);
}
// Null until populated; internalGetMapField() substitutes a shared
// empty MapField so callers never see null.
private com.google.protobuf.MapField<
java.lang.String, java.lang.Double> mapField_;
private com.google.protobuf.MapField<java.lang.String, java.lang.Double>
internalGetMapField() {
if (mapField_ == null) {
return com.google.protobuf.MapField.emptyMapField(
MapFieldDefaultEntryHolder.defaultEntry);
}
return mapField_;
}
public int getMapFieldCount() {
return internalGetMapField().getMap().size();
}
/**
 * <code>map<string, double> mapField = 8;</code>
 */
public boolean containsMapField(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
return internalGetMapField().getMap().containsKey(key);
}
/**
 * Use {@link #getMapFieldMap()} instead.
 */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.Double> getMapField() {
return getMapFieldMap();
}
/**
 * <code>map<string, double> mapField = 8;</code>
 */
public java.util.Map<java.lang.String, java.lang.Double> getMapFieldMap() {
return internalGetMapField().getMap();
}
/**
 * <code>map<string, double> mapField = 8;</code>
 */
public double getMapFieldOrDefault(
java.lang.String key,
double defaultValue) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, java.lang.Double> map =
internalGetMapField().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 * <code>map<string, double> mapField = 8;</code>
 */
public double getMapFieldOrThrow(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, java.lang.Double> map =
internalGetMapField().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public static final int INNERFIELD_FIELD_NUMBER = 9;
// Message-typed field: null means "not set" (hasInnerField() == false);
// the getter substitutes the default instance so it never returns null.
private com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType innerField_;
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
 */
public boolean hasInnerField() {
return innerField_ != null;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
 */
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getInnerField() {
return innerField_ == null ? com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDefaultInstance() : innerField_;
}
/**
 * <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
 */
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder getInnerFieldOrBuilder() {
return getInnerField();
}
// Memoized tri-state flag (-1 unknown, 0 false, 1 true); proto3 has no
// required fields so this always resolves to true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes fields in field-number order (1-9). Per proto3 rules,
// scalar fields at their default value are skipped entirely on the wire.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getStringFieldBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stringField_);
}
if (booleanField_ != false) {
output.writeBool(2, booleanField_);
}
if (!bytesField_.isEmpty()) {
output.writeBytes(3, bytesField_);
}
if (intField_ != 0) {
output.writeInt32(4, intField_);
}
if (longField_ != 0L) {
output.writeInt64(5, longField_);
}
if (floatField_ != 0F) {
output.writeFloat(6, floatField_);
}
if (doubleField_ != 0D) {
output.writeDouble(7, doubleField_);
}
com.google.protobuf.GeneratedMessageV3
.serializeStringMapTo(
output,
internalGetMapField(),
MapFieldDefaultEntryHolder.defaultEntry,
8);
if (innerField_ != null) {
output.writeMessage(9, getInnerField());
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the wire size, mirroring the
// default-skipping logic of writeTo. Map entries are sized by building
// a throwaway MapEntry per key/value pair.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getStringFieldBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stringField_);
}
if (booleanField_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, booleanField_);
}
if (!bytesField_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, bytesField_);
}
if (intField_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(4, intField_);
}
if (longField_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(5, longField_);
}
if (floatField_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(6, floatField_);
}
if (doubleField_ != 0D) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(7, doubleField_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.Double> entry
: internalGetMapField().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.Double>
mapField__ = MapFieldDefaultEntryHolder.defaultEntry.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, mapField__);
}
if (innerField_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(9, getInnerField());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field equality. Float/double are compared via their bit
// patterns (so NaN == NaN and -0.0 != 0.0, matching protobuf semantics).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType)) {
return super.equals(obj);
}
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType other = (com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType) obj;
if (!getStringField()
.equals(other.getStringField())) return false;
if (getBooleanField()
!= other.getBooleanField()) return false;
if (!getBytesField()
.equals(other.getBytesField())) return false;
if (getIntField()
!= other.getIntField()) return false;
if (getLongField()
!= other.getLongField()) return false;
if (java.lang.Float.floatToIntBits(getFloatField())
!= java.lang.Float.floatToIntBits(
other.getFloatField())) return false;
if (java.lang.Double.doubleToLongBits(getDoubleField())
!= java.lang.Double.doubleToLongBits(
other.getDoubleField())) return false;
if (!internalGetMapField().equals(
other.internalGetMapField())) return false;
if (hasInnerField() != other.hasInnerField()) return false;
if (hasInnerField()) {
if (!getInnerField()
.equals(other.getInnerField())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash mixes each field number with its value hash; result is memoized
// in memoizedHashCode (0 doubles as "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + STRINGFIELD_FIELD_NUMBER;
hash = (53 * hash) + getStringField().hashCode();
hash = (37 * hash) + BOOLEANFIELD_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getBooleanField());
hash = (37 * hash) + BYTESFIELD_FIELD_NUMBER;
hash = (53 * hash) + getBytesField().hashCode();
hash = (37 * hash) + INTFIELD_FIELD_NUMBER;
hash = (53 * hash) + getIntField();
hash = (37 * hash) + LONGFIELD_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getLongField());
hash = (37 * hash) + FLOATFIELD_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getFloatField());
hash = (37 * hash) + DOUBLEFIELD_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getDoubleField()));
if (!internalGetMapField().getMap().isEmpty()) {
hash = (37 * hash) + MAPFIELD_FIELD_NUMBER;
hash = (53 * hash) + internalGetMapField().hashCode();
}
if (hasInnerField()) {
hash = (37 * hash) + INNERFIELD_FIELD_NUMBER;
hash = (53 * hash) + getInnerField().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc-generated parse entry points: one overload per input
// source (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream;
// plus length-delimited variants), each with and without an extension
// registry. All delegate to the shared PARSER.
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() avoids a useless merge when invoked on
// the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code com.riferrei.kafka.connect.pulsar.ProtoBufComplexType}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType)
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexTypeOrBuilder {
// Protoc-generated builder plumbing for the outer ProtoBufComplexType:
// descriptor lookup, reflective map-field dispatch, and reset logic.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor;
}
// Reflection hook: routes field number 8 (the only map field) to its
// typed accessor; any other number is a generator/runtime mismatch.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 8:
return internalGetMapField();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMutableMapField(
int number) {
switch (number) {
case 8:
return internalGetMutableMapField();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.class, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.Builder.class);
}
// Construct using com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its proto3 default, clears the map, and drops
// both the innerField message and its nested builder (if any).
@java.lang.Override
public Builder clear() {
super.clear();
stringField_ = "";
booleanField_ = false;
bytesField_ = com.google.protobuf.ByteString.EMPTY;
intField_ = 0;
longField_ = 0L;
floatField_ = 0F;
doubleField_ = 0D;
internalGetMutableMapField().clear();
if (innerFieldBuilder_ == null) {
innerField_ = null;
} else {
innerField_ = null;
innerFieldBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor;
}
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType getDefaultInstanceForType() {
return com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.getDefaultInstance();
}
// build() = buildPartial() plus an initialization check; always passes
// for proto3 (no required fields).
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType build() {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Snapshots builder state into a new message: scalars are copied, the
// map view is frozen via makeImmutable(), and innerField comes from the
// nested builder when one exists.
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType buildPartial() {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType result = new com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.stringField_ = stringField_;
result.booleanField_ = booleanField_;
result.bytesField_ = bytesField_;
result.intField_ = intField_;
result.longField_ = longField_;
result.floatField_ = floatField_;
result.doubleField_ = doubleField_;
result.mapField_ = internalGetMapField();
result.mapField_.makeImmutable();
if (innerFieldBuilder_ == null) {
result.innerField_ = innerField_;
} else {
result.innerField_ = innerFieldBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Delegating overrides (boilerplate) that narrow the return type to
// this Builder for call chaining.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the strongly-typed overload when possible; otherwise
// falls back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType) {
return mergeFrom((com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Proto3 merge: scalars copied only when non-default in `other`; map
// entries merged key-by-key; innerField merged recursively when set.
public Builder mergeFrom(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType other) {
if (other == com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.getDefaultInstance()) return this;
if (!other.getStringField().isEmpty()) {
stringField_ = other.stringField_;
onChanged();
}
if (other.getBooleanField() != false) {
setBooleanField(other.getBooleanField());
}
if (other.getBytesField() != com.google.protobuf.ByteString.EMPTY) {
setBytesField(other.getBytesField());
}
if (other.getIntField() != 0) {
setIntField(other.getIntField());
}
if (other.getLongField() != 0L) {
setLongField(other.getLongField());
}
if (other.getFloatField() != 0F) {
setFloatField(other.getFloatField());
}
if (other.getDoubleField() != 0D) {
setDoubleField(other.getDoubleField());
}
internalGetMutableMapField().mergeFrom(
other.internalGetMapField());
if (other.hasInnerField()) {
mergeInnerField(other.getInnerField());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse; partial results are merged in even when parsing
// fails mid-stream (finally block) before the exception propagates.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// Dual-representation storage (String or ByteString); getters lazily
// convert and cache whichever form is requested.
private java.lang.Object stringField_ = "";
/**
 * <code>string stringField = 1;</code>
 */
public java.lang.String getStringField() {
java.lang.Object ref = stringField_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stringField_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>string stringField = 1;</code>
 */
public com.google.protobuf.ByteString
getStringFieldBytes() {
java.lang.Object ref = stringField_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
stringField_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>string stringField = 1;</code>
 */
public Builder setStringField(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
stringField_ = value;
onChanged();
return this;
}
/**
 * <code>string stringField = 1;</code>
 */
public Builder clearStringField() {
stringField_ = getDefaultInstance().getStringField();
onChanged();
return this;
}
/**
 * <code>string stringField = 1;</code>
 */
public Builder setStringFieldBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
stringField_ = value;
onChanged();
return this;
}
private boolean booleanField_ ;
/**
* <code>bool booleanField = 2;</code>
*/
public boolean getBooleanField() {
return booleanField_;
}
/**
* <code>bool booleanField = 2;</code>
*/
public Builder setBooleanField(boolean value) {
booleanField_ = value;
onChanged();
return this;
}
/**
* <code>bool booleanField = 2;</code>
*/
public Builder clearBooleanField() {
booleanField_ = false;
onChanged();
return this;
}
private com.google.protobuf.ByteString bytesField_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>bytes bytesField = 3;</code>
*/
public com.google.protobuf.ByteString getBytesField() {
return bytesField_;
}
/**
* <code>bytes bytesField = 3;</code>
*/
public Builder setBytesField(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bytesField_ = value;
onChanged();
return this;
}
/**
* <code>bytes bytesField = 3;</code>
*/
public Builder clearBytesField() {
bytesField_ = getDefaultInstance().getBytesField();
onChanged();
return this;
}
private int intField_ ;
/**
* <code>int32 intField = 4;</code>
*/
public int getIntField() {
return intField_;
}
/**
* <code>int32 intField = 4;</code>
*/
public Builder setIntField(int value) {
intField_ = value;
onChanged();
return this;
}
/**
* <code>int32 intField = 4;</code>
*/
public Builder clearIntField() {
intField_ = 0;
onChanged();
return this;
}
private long longField_ ;
/**
* <code>int64 longField = 5;</code>
*/
public long getLongField() {
return longField_;
}
/**
* <code>int64 longField = 5;</code>
*/
public Builder setLongField(long value) {
longField_ = value;
onChanged();
return this;
}
/**
* <code>int64 longField = 5;</code>
*/
public Builder clearLongField() {
longField_ = 0L;
onChanged();
return this;
}
private float floatField_ ;
/**
* <code>float floatField = 6;</code>
*/
public float getFloatField() {
return floatField_;
}
/**
* <code>float floatField = 6;</code>
*/
public Builder setFloatField(float value) {
floatField_ = value;
onChanged();
return this;
}
/**
* <code>float floatField = 6;</code>
*/
public Builder clearFloatField() {
floatField_ = 0F;
onChanged();
return this;
}
private double doubleField_ ;
/**
* <code>double doubleField = 7;</code>
*/
public double getDoubleField() {
return doubleField_;
}
/**
* <code>double doubleField = 7;</code>
*/
public Builder setDoubleField(double value) {
doubleField_ = value;
onChanged();
return this;
}
/**
* <code>double doubleField = 7;</code>
*/
public Builder clearDoubleField() {
doubleField_ = 0D;
onChanged();
return this;
}
private com.google.protobuf.MapField<
java.lang.String, java.lang.Double> mapField_;
private com.google.protobuf.MapField<java.lang.String, java.lang.Double>
internalGetMapField() {
if (mapField_ == null) {
return com.google.protobuf.MapField.emptyMapField(
MapFieldDefaultEntryHolder.defaultEntry);
}
return mapField_;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.Double>
internalGetMutableMapField() {
onChanged();;
if (mapField_ == null) {
mapField_ = com.google.protobuf.MapField.newMapField(
MapFieldDefaultEntryHolder.defaultEntry);
}
if (!mapField_.isMutable()) {
mapField_ = mapField_.copy();
}
return mapField_;
}
public int getMapFieldCount() {
return internalGetMapField().getMap().size();
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public boolean containsMapField(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
return internalGetMapField().getMap().containsKey(key);
}
/**
* Use {@link #getMapFieldMap()} instead.
*/
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.Double> getMapField() {
return getMapFieldMap();
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public java.util.Map<java.lang.String, java.lang.Double> getMapFieldMap() {
return internalGetMapField().getMap();
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public double getMapFieldOrDefault(
java.lang.String key,
double defaultValue) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, java.lang.Double> map =
internalGetMapField().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public double getMapFieldOrThrow(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, java.lang.Double> map =
internalGetMapField().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearMapField() {
internalGetMutableMapField().getMutableMap()
.clear();
return this;
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public Builder removeMapField(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
internalGetMutableMapField().getMutableMap()
.remove(key);
return this;
}
/**
* Use alternate mutation accessors instead.
*/
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.Double>
getMutableMapField() {
return internalGetMutableMapField().getMutableMap();
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public Builder putMapField(
java.lang.String key,
double value) {
if (key == null) { throw new java.lang.NullPointerException(); }
internalGetMutableMapField().getMutableMap()
.put(key, value);
return this;
}
/**
* <code>map<string, double> mapField = 8;</code>
*/
public Builder putAllMapField(
java.util.Map<java.lang.String, java.lang.Double> values) {
internalGetMutableMapField().getMutableMap()
.putAll(values);
return this;
}
private com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType innerField_;
private com.google.protobuf.SingleFieldBuilderV3<
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder> innerFieldBuilder_;
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public boolean hasInnerField() {
return innerFieldBuilder_ != null || innerField_ != null;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType getInnerField() {
if (innerFieldBuilder_ == null) {
return innerField_ == null ? com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDefaultInstance() : innerField_;
} else {
return innerFieldBuilder_.getMessage();
}
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public Builder setInnerField(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType value) {
if (innerFieldBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
innerField_ = value;
onChanged();
} else {
innerFieldBuilder_.setMessage(value);
}
return this;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public Builder setInnerField(
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder builderForValue) {
if (innerFieldBuilder_ == null) {
innerField_ = builderForValue.build();
onChanged();
} else {
innerFieldBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public Builder mergeInnerField(com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType value) {
if (innerFieldBuilder_ == null) {
if (innerField_ != null) {
innerField_ =
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.newBuilder(innerField_).mergeFrom(value).buildPartial();
} else {
innerField_ = value;
}
onChanged();
} else {
innerFieldBuilder_.mergeFrom(value);
}
return this;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public Builder clearInnerField() {
if (innerFieldBuilder_ == null) {
innerField_ = null;
onChanged();
} else {
innerField_ = null;
innerFieldBuilder_ = null;
}
return this;
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder getInnerFieldBuilder() {
onChanged();
return getInnerFieldFieldBuilder().getBuilder();
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder getInnerFieldOrBuilder() {
if (innerFieldBuilder_ != null) {
return innerFieldBuilder_.getMessageOrBuilder();
} else {
return innerField_ == null ?
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.getDefaultInstance() : innerField_;
}
}
/**
* <code>.com.riferrei.kafka.connect.pulsar.ProtoBufComplexType.ProtoBufInnerType innerField = 9;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder>
getInnerFieldFieldBuilder() {
if (innerFieldBuilder_ == null) {
innerFieldBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerType.Builder, com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType.ProtoBufInnerTypeOrBuilder>(
getInnerField(),
getParentForChildren(),
isClean());
innerField_ = null;
}
return innerFieldBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType)
}
// @@protoc_insertion_point(class_scope:com.riferrei.kafka.connect.pulsar.ProtoBufComplexType)
private static final com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType();
}
public static com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ProtoBufComplexType>
PARSER = new com.google.protobuf.AbstractParser<ProtoBufComplexType>() {
@java.lang.Override
public ProtoBufComplexType parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ProtoBufComplexType(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ProtoBufComplexType> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ProtoBufComplexType> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.riferrei.kafka.connect.pulsar.ProtoBufGenComplexType.ProtoBufComplexType getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_fieldAccessorTable;
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_fieldAccessorTable;
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\021complexType.proto\022!com.riferrei.kafka." +
"connect.pulsar\"\242\005\n\023ProtoBufComplexType\022\023" +
"\n\013stringField\030\001 \001(\t\022\024\n\014booleanField\030\002 \001(" +
"\010\022\022\n\nbytesField\030\003 \001(\014\022\020\n\010intField\030\004 \001(\005\022" +
"\021\n\tlongField\030\005 \001(\003\022\022\n\nfloatField\030\006 \001(\002\022\023" +
"\n\013doubleField\030\007 \001(\001\022V\n\010mapField\030\010 \003(\0132D." +
"com.riferrei.kafka.connect.pulsar.ProtoB" +
"ufComplexType.MapFieldEntry\022\\\n\ninnerFiel" +
"d\030\t \001(\0132H.com.riferrei.kafka.connect.pul" +
"sar.ProtoBufComplexType.ProtoBufInnerTyp" +
"e\032/\n\rMapFieldEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value" +
"\030\002 \001(\001:\0028\001\032\226\002\n\021ProtoBufInnerType\022\023\n\013doub" +
"leField\030\001 \001(\001\022\022\n\narrayField\030\002 \003(\t\022s\n\tenu" +
"mField\030\003 \001(\0162`.com.riferrei.kafka.connec" +
"t.pulsar.ProtoBufComplexType.ProtoBufInn" +
"erType.ProtoBufMultipleOptions\"c\n\027ProtoB" +
"ufMultipleOptions\022\020\n\014FIRST_OPTION\020\000\022\021\n\rS" +
"ECOND_OPTION\020\001\022\020\n\014THIRD_OPTION\020\002\022\021\n\rFOUR" +
"TH_OPTION\020\003B\030B\026ProtoBufGenComplexTypeb\006p" +
"roto3"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor,
new java.lang.String[] { "StringField", "BooleanField", "BytesField", "IntField", "LongField", "FloatField", "DoubleField", "MapField", "InnerField", });
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_descriptor =
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor.getNestedTypes().get(0);
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_MapFieldEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor =
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_descriptor.getNestedTypes().get(1);
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_com_riferrei_kafka_connect_pulsar_ProtoBufComplexType_ProtoBufInnerType_descriptor,
new java.lang.String[] { "DoubleField", "ArrayField", "EnumField", });
}
// @@protoc_insertion_point(outer_class_scope)
}
|
package com.telenav.osv.manager.network.parser;
import org.json.JSONArray;
import org.json.JSONObject;
import com.telenav.osv.item.LeaderboardData;
import com.telenav.osv.item.network.UserCollection;
/**
* JSON parser for leader board
* Created by kalmanb on 8/1/17.
*/
public class LeaderboardParser extends ApiResponseParser<UserCollection> {

    private static final String TAG = "LeaderboardParser";

    @Override
    public UserCollection getHolder() {
        return new UserCollection();
    }

    /**
     * Parses the leaderboard JSON payload into a {@link UserCollection}.
     * <p>
     * Expected shape: {@code {"osv": {"users": [{"username", "full_name",
     * "country_code", "total_user_points"}, ...]}}}. Rank is the 1-based
     * position of each entry in the server-ordered array.
     *
     * @param json raw response body; may be null or empty, in which case only
     *             the base parse result is returned
     * @return the collection holder from {@code super.parse(json)}, with one
     *         {@link LeaderboardData} appended per successfully parsed user
     */
    public UserCollection parse(String json) {
        UserCollection collectionData = super.parse(json);
        if (json != null && !json.isEmpty()) {
            try {
                JSONObject obj = new JSONObject(json);
                JSONObject osv = obj.getJSONObject("osv");
                JSONArray users = osv.getJSONArray("users");
                for (int i = 0; i < users.length(); i++) {
                    int rank = i + 1;
                    try {
                        JSONObject user = users.getJSONObject(i);
                        String userName = user.getString("username");
                        String fullName = user.getString("full_name");
                        String countryCode = user.getString("country_code");
                        // Points arrive as a string; parse per-user so a single
                        // malformed entry no longer drops the rest of the list.
                        int points = Integer.parseInt(user.getString("total_user_points"));
                        collectionData.getUserList().add(new LeaderboardData(userName, fullName, countryCode, rank, points));
                    } catch (Exception e) {
                        // Skip just this entry and continue with the others.
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                // Malformed top-level payload: return whatever super.parse() produced.
                e.printStackTrace();
            }
        }
        return collectionData;
    }
}
|
from sklearn.cluster import KMeans
import numpy as np

# Each row is one 3-dimensional data point.
data_points = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])

# Fit k-means; random_state pins the (otherwise random) centroid
# initialisation so the run is reproducible.
kmeans = KMeans(n_clusters=3, random_state=0).fit(data_points)

# fit() already assigns every training point a cluster label, so reuse
# labels_ instead of re-running predict() on the same data.
labels = kmeans.labels_

# With 3 points and 3 clusters each point gets its own cluster, but the
# particular label numbering (e.g. [0, 1, 2] vs [2, 0, 1]) is arbitrary.
print(labels)
def prime_factors(n):
    """Return the prime factorization of n as a list of int factors.

    Factors appear in non-decreasing order with multiplicity, e.g.
    prime_factors(12) == [2, 2, 3].  For n < 2 an empty list is returned.

    :param n: integer to factor
    :return: list of int prime factors
    """
    result = []
    # Strip all factors of 2 first so the main loop only needs odd candidates.
    while n % 2 == 0:
        result.append(2)
        n //= 2
    # Trial-divide by odd candidates.  Floor division (//) keeps n an int;
    # the original used true division (/), which turned n into a float,
    # could append float factors, and loses precision for large n.
    i = 3
    while i * i <= n:
        while n % i == 0:
            result.append(i)
            n //= i
        i += 2
    # Whatever remains above 2 is itself prime.
    if n > 2:
        result.append(n)
    return result
// Google Cloud client configuration for project "heigvd-cld-micha".
module.exports = {
  projectId: "heigvd-cld-micha",
  // Or the contents of the key file:
  // NOTE(review): this loads a service-account key committed to the repo.
  // Committed credentials should be rotated and supplied via the
  // environment (e.g. GOOGLE_APPLICATION_CREDENTIALS) instead.
  credentials: require('./heigvd-cld-micha-23b18f7e74aa.json')
};
|
declare module 'mxgraph' {
  /**
   *
   * @class mxCellStatePreview
   *
   * Implements a live preview for moving cells.
   */
  class mxCellStatePreview {
    /**
     * Constructs a move preview for the given graph.
     *
     * @param {mxGraph} graph Reference to the enclosing <mxGraph>.
     * @constructor
     * @memberof mxCellStatePreview
     */
    constructor(graph: mxGraph);
    /**
     * Reference to the enclosing <mxGraph>.
     */
    graph: mxGraph;
    /**
     * Dictionary holding the pending move deltas recorded per cell state
     * for the preview.  (The upstream comment here was a copy-paste of the
     * `graph` description.)
     */
    deltas: mxDictionary;
    /**
     * Contains the number of entries in the map.
     */
    count: number;
    /**
     * Returns true if this contains no entries.
     */
    isEmpty(): boolean;
    /**
     * Registers a move of the given state by (dx, dy) and returns the
     * resulting delta point.
     *
     * @param {mxCellState} state cell state to move
     * @param {number} dx horizontal offset
     * @param {number} dy vertical offset
     * @param {boolean} add presumably whether the delta is added to an existing entry — verify against mxgraph docs
     * @param {boolean} includeEdges whether connected edges are included in the move
     * @return {*} {mxPoint}
     * @memberof mxCellStatePreview
     */
    moveState(state: mxCellState, dx: number, dy: number, add: boolean, includeEdges: boolean): mxPoint;
    /**
     * Applies the recorded deltas, invoking `visitor` for each affected state.
     *
     * @param {Function} visitor
     * @memberof mxCellStatePreview
     */
    show(visitor: Function): void;
    /**
     * Shifts the given state by (dx, dy).
     *
     * @param {mxCellState} state
     * @param {number} dx
     * @param {number} dy
     * @memberof mxCellStatePreview
     */
    translateState(state: mxCellState, dx: number, dy: number): void;
    /**
     * Revalidates the given state after a move of (dx, dy), invoking
     * `visitor` on it.
     *
     * @param {mxCellState} state
     * @param {number} dx
     * @param {number} dy
     * @param {Function} visitor
     * @memberof mxCellStatePreview
     */
    revalidateState(state: mxCellState, dx: number, dy: number, visitor: Function): void;
    /**
     * Includes the edges connected to the given state in the preview.
     *
     * @param {mxCellState} state
     * @memberof mxCellStatePreview
     */
    addEdges(state: mxCellState): void;
  }
}
|
package nightmarethreatreis.com.github.mvp.events;
import javafx.event.Event;
import javafx.event.EventType;
/**
 * JavaFX event fired when a view is shown; handlers subscribe via
 * {@link #SHOW_EVENT}.
 */
public class OnShowEvent extends Event {
	// Event extends java.util.EventObject (Serializable), hence the explicit UID.
	private static final long serialVersionUID = -119655647073475242L;
	// Event type descriptor derived from Event.ANY so catch-all handlers
	// still receive it.
	public static final EventType<OnShowEvent> SHOW_EVENT = new EventType<OnShowEvent>(ANY);
	public OnShowEvent() {
		super(SHOW_EVENT);
	}
}
|
<reponame>gaunthan/design-patterns-by-golang
package facade

import (
	"design-patterns-by-golang/02_structural_patterns/10_facade/delivery"
	"fmt"
)

// ExampleOnlineShopping demonstrates the shopping facade: the expected
// output covers a success, an out-of-stock error and a delivery error.
func ExampleOnlineShopping() {
	shopping := NewOnlineShopping()
	outputResult(shopping.Buy("Joe", "apple"))
	outputResult(shopping.Buy("Joe", "orange"))
	outputResult(shopping.Buy("Void", "banana"))
	// Output:
	// ok
	// error: orange out of stock
	// error: destination unreachable
}

// ExampleDelivery bypasses the facade and calls the delivery subsystem
// directly; "Void" appears to be an unreachable destination, so
// SendExpress reports false.
func ExampleDelivery() {
	fmt.Println(delivery.SendExpress("Void", "apple"))
	// Output:
	// false
}
// outputResult prints "ok" for a successful result, otherwise the error
// prefixed with "error: ".
func outputResult(ok bool, err error) {
	if !ok {
		fmt.Printf("error: %v\n", err)
		return
	}
	fmt.Println("ok")
}
|
<gh_stars>0
// Re-export antd's Statistic (with its style side-effect import) and expose
// Statistic.Countdown under the project-wide `StatisticCountdown` alias.
import Statistic from 'antd/es/statistic'
import 'antd/es/statistic/style'

const StatisticCountdown = Statistic.Countdown

export { Statistic, StatisticCountdown }
|
package com.ufrn.embarcados.reaqua.service;
import com.ufrn.embarcados.reaqua.model.ApplicationUser;
import com.ufrn.embarcados.reaqua.model.Tower;
import com.ufrn.embarcados.reaqua.repository.ApplicationUserRepository;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;

/**
 * Service layer wrapping {@link ApplicationUserRepository} lookups and
 * persistence for {@link ApplicationUser} entities.
 */
@Service
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class ApplicationUserService {

    private final ApplicationUserRepository userRepository;

    /** Looks up a single user by e-mail address. */
    public ApplicationUser getUserByEmail(String email) {
        return userRepository.findByEmail(email);
    }

    /** Lists every user registered in the given tower. */
    public List<ApplicationUser> getAllByTower(Tower tower) {
        return userRepository.findByTower(tower);
    }

    /** Persists (inserts or updates) the given user. */
    public void saveUser(ApplicationUser user) {
        userRepository.save(user);
    }
}
|
<gh_stars>10-100
# encoding: utf-8
# CarrierWave uploader used for direct (browser -> storage) API uploads.
class ApiUploader < CarrierWave::Uploader::Base
  attr_accessor :success_action_redirect
  attr_accessor :success_action_status
  attr_accessor :user, :repo
  # Stub out the fog/S3 upload-form fields: each generated method simply
  # returns its own name as a string (placeholder values — presumably for
  # tests; confirm before relying on them in production).
  %w{ key aws_access_key_id acl policy signature }.each do |method|
    define_method method do
      method
    end
  end
  # Path the client should POST the asset to for this user/repo pair.
  def direct_fog_url
    "/api/#{@user}/#{@repo}/uploads/asset"
  end
end
|
<filename>dynomitemanager-core/src/main/java/com/netflix/dynomitemanager/storage/Bootstrap.java
package com.netflix.dynomitemanager.storage;
/**
 * Outcome states of the storage bootstrap process. As the constant names
 * suggest: one initial state (NOT_STARTED), one success state
 * (IN_SYNC_SUCCESS), and several distinct failure modes (connection,
 * warm-up error, retry exhaustion, bootstrap-time expiry).
 */
public enum Bootstrap {
    NOT_STARTED, CANNOT_CONNECT_FAIL, WARMUP_ERROR_FAIL, RETRIES_FAIL, EXPIRED_BOOTSTRAPTIME_FAIL, IN_SYNC_SUCCESS,
}
|
#!/bin/bash
set -e

# Package fuzz-crash findings into per-corpus tarballs.
# assume this is ran from the root of moov-io/infra

# Most recent (lexicographically last) entry whose name contains "fuzz".
# A glob is used instead of parsing `ls` output, which breaks on names
# containing whitespace.
last=""
for entry in *fuzz*; do
  [ -e "$entry" ] && last=$entry
done

if [ -n "$last" ];
then
  echo "Using fuzz findings from $last"
  for dir in "$last"/*/;
  do
    dir=${dir%/}
    name=$(basename "$dir")
    # Create a .tar file of the crashing inputs and outputs; skip corpora
    # that produced no crashers directory instead of failing the script.
    if [ -d "$dir/crashers" ]; then
      tar cf "$last/$name".tar "$dir"/crashers/*
    fi
  done
else
  echo "No fuzz-* directories found in moov-io/infra"
fi
|
<reponame>manas1410/Miscellaneous-Development
import cx_Freeze
# NOTE(review): json is imported but never used in this script — confirm and remove.
import json
# Build a Windows GUI executable for the mark-entry app; the Win32GUI base
# suppresses the console window.
executables=[cx_Freeze.Executable(
"D:/college/Internship(Spectrum)/3/welcome.py",
base="Win32GUI",
icon='D:/college/Internship(Spectrum)/3/logo/spectrumlogo.ico')]
# include_files ships the two SQLite databases and the icon next to the
# built executable.
cx_Freeze.setup(
name='Students Mark Entry',
options={"build_exe":{"packages":['tkinter','webbrowser','sqlite3','PIL','os'],"include_files":['D:/college/Internship(Spectrum)/3/mark_list.db','D:/college/Internship(Spectrum)/3/user_pas.db','D:/college/Internship(Spectrum)/3/logo/spectrumlogo.ico']}},
executables=executables)
|
#!/bin/bash
set -e

# Release helper: stamps each Helm chart with the release version and
# packages it as dist/<chart>.tar.gz.
# Usage: release.sh <tag>   (tag format: v<semver>, e.g. v1.2.3)

# Quote "$0" so the script works from paths containing whitespace.
source "$(dirname "$0")/../os-env.sh"

TAG=""
if [ $# -gt 0 ]; then
  TAG=$1
  echo "TAG=$TAG"
else
  echo "First parameter should be the new TAG"
  exit 1
fi

# Strip the leading "v" to obtain the bare version number.
VERSION=${TAG:1}

GIT_ROOT=$(git rev-parse --show-toplevel)
OUTPUT_FOLDER=$GIT_ROOT/dist

# Ensure the destination exists before copying the chart sources.
mkdir -p "$OUTPUT_FOLDER"
cp -Lr "$GIT_ROOT"/chart/* "$OUTPUT_FOLDER"/

for CHART in extendeddaemonset
do
  # -print0 / xargs -0 keeps the pipeline safe for paths with whitespace.
  # $SED (from os-env.sh) is intentionally unquoted in case it carries flags.
  find "$OUTPUT_FOLDER/$CHART" -name Chart.yaml -print0 | xargs -0 $SED "s/PLACEHOLDER_VERSION/$VERSION/g"
  find "$OUTPUT_FOLDER/$CHART" -name values.yaml -print0 | xargs -0 $SED "s/PLACEHOLDER_VERSION/$VERSION/g"
  tar -zcvf "$OUTPUT_FOLDER/$CHART.tar.gz" -C "$OUTPUT_FOLDER" "$CHART"
done
|
<reponame>ebdrup/nodeerrors
"use strict";
// Sanity suite: verifies the test environment exposes the global helpers
// (sinon and chai's expect) before the real suites run.
describe("When running tests", function () {
  it("should have sinon defined", function () {
    expect(sinon).to.be.ok;
  });
  it("should have expect defined", function () {
    expect(expect).to.be.ok;
  });
});
// URI Online Judge problem: read two integers from stdin (via the judge's
// gets() helper) and print their product in the exact "PROD = <n>" format.
const firstValue = parseInt(gets());
const secondValue = parseInt(gets());

// The expected result is simply the product of the two inputs.
const product = firstValue * secondValue;

console.log("PROD = " + product);
|
#!/bin/bash
CC=dpcpp
PROJECT=myproject
source /opt/intel/inteloneapi/setvars.sh
# source /opt/intel/inteloneapi/setvars.sh --dnnl-configuration=cpu_gomp --force> /dev/null 2>&1
CFLAGS="-O3 -fpic -std=c++11"
LDFLAGS="-L${DNNLROOT}/lib"
INCFLAGS="-I${DNNLROOT}/include"
GLOB_ENVS="-DDNNL_CPU_RUNTIME=SYCL -DDNNL_GPU_RUNTIME=SYCL"
PROJECT=myproject
# ${CC} ${CFLAGS} ${INCFLAGS} -c firmware/model.cpp -o model.o ${LDFLAGS} -ldnnl
# ${CC} ${CFLAGS} ${INCFLAGS} -shared model.o -o firmware/model.so ${LDFLAGS} -ldnnl
${CC} ${CFLAGS} ${INCFLAGS} -c firmware/${PROJECT}.cpp -o ${PROJECT}.o ${LDFLAGS} -ldnnl
${CC} ${CFLAGS} ${INCFLAGS} -shared ${PROJECT}.o -o firmware/${PROJECT}.so ${LDFLAGS} -ldnnl
rm -f *.o |
#!/usr/bin/env bash
# Install the project's Python dependencies (run from the repo root).
pip install -r requirements.txt
/**
 * Express middleware that attaches an empty `appShared` scratch object to
 * the request so downstream handlers can share per-request state.
 *
 * @param {object} req - incoming request; gains an `appShared` property
 * @param {object} res - response object (unused)
 * @param {Function} next - continuation callback, always invoked
 */
function createAppShared(req, res, next) {
  req.appShared = {};
  next();
}

module.exports = createAppShared;
const ENS = artifacts.require('@ensdomains/ens/ENSRegistry');
const PublicResolver = artifacts.require('@ensdomains/resolver/PublicResolver');
const BaseRegistrar = artifacts.require('./BaseRegistrarImplementation');
const ETHRegistrarController = artifacts.require('./ETHRegistrarController');
const DummyOracle = artifacts.require('./DummyOracle');
const StablePriceOracle = artifacts.require('./StablePriceOracle');
const { evm, exceptions } = require("@ensdomains/test-utils");
const namehash = require('eth-ens-namehash');
const sha3 = require('web3-utils').sha3;
const toBN = require('web3-utils').toBN;
const DAYS = 24 * 60 * 60;
const NULL_ADDRESS = "0x0000000000000000000000000000000000000000"
contract('ETHRegistrarController', function (accounts) {
let ens;
let resolver;
let baseRegistrar;
let controller;
let priceOracle;
const secret = "<KEY>";
const ownerAccount = accounts[0]; // Account that owns the registrar
const registrantAccount = accounts[1]; // Account that owns test names
before(async () => {
ens = await ENS.new();
resolver = await PublicResolver.new(ens.address);
baseRegistrar = await BaseRegistrar.new(ens.address, namehash.hash('eth'), {from: ownerAccount});
await ens.setSubnodeOwner('0x0', sha3('eth'), baseRegistrar.address);
const dummyOracle = await DummyOracle.new(toBN(100000000));
priceOracle = await StablePriceOracle.new(dummyOracle.address, [1]);
controller = await ETHRegistrarController.new(
baseRegistrar.address,
priceOracle.address,
600,
86400,
{from: ownerAccount});
await baseRegistrar.addController(controller.address, {from: ownerAccount});
await controller.setPriceOracle(priceOracle.address, {from: ownerAccount});
});
const checkLabels = {
"testing": true,
"longname12345678": true,
"sixsix": true,
"five5": true,
"four": true,
"iii": true,
"ii": false,
"i": false,
"": false,
// { ni } { hao } { ma } (chinese; simplified)
"\u4f60\u597d\u5417": true,
// { ta } { ko } (japanese; hiragana)
"\u305f\u3053": false,
// { poop } { poop } { poop } (emoji)
"\ud83d\udca9\ud83d\udca9\ud83d\udca9": true,
// { poop } { poop } (emoji)
"\ud83d\udca9\ud83d\udca9": false
};
it('should report label validity', async () => {
for (const label in checkLabels) {
assert.equal(await controller.valid(label), checkLabels[label], label);
}
});
it('should report unused names as available', async () => {
assert.equal(await controller.available(sha3('available')), true);
});
it('should permit new registrations', async () => {
var commitment = await controller.makeCommitment("newname", registrantAccount, secret);
var tx = await controller.commit(commitment);
assert.equal(await controller.commitments(commitment), (await web3.eth.getBlock(tx.receipt.blockNumber)).timestamp);
await evm.advanceTime((await controller.minCommitmentAge()).toNumber());
var balanceBefore = await web3.eth.getBalance(controller.address);
var tx = await controller.register("newname", registrantAccount, 28 * DAYS, secret, {value: 28 * DAYS + 1, gasPrice: 0});
assert.equal(tx.logs.length, 1);
assert.equal(tx.logs[0].event, "NameRegistered");
assert.equal(tx.logs[0].args.name, "newname");
assert.equal(tx.logs[0].args.owner, registrantAccount);
assert.equal((await web3.eth.getBalance(controller.address)) - balanceBefore, 28 * DAYS);
});
it('should report registered names as unavailable', async () => {
assert.equal(await controller.available('newname'), false);
});
it('should permit new registrations with config', async () => {
var commitment = await controller.makeCommitmentWithConfig("newconfigname", registrantAccount, secret, resolver.address, registrantAccount);
var tx = await controller.commit(commitment);
assert.equal(await controller.commitments(commitment), (await web3.eth.getBlock(tx.receipt.blockNumber)).timestamp);
await evm.advanceTime((await controller.minCommitmentAge()).toNumber());
var balanceBefore = await web3.eth.getBalance(controller.address);
var tx = await controller.registerWithConfig("newconfigname", registrantAccount, 28 * DAYS, secret, resolver.address, registrantAccount, {value: 28 * DAYS + 1, gasPrice: 0});
assert.equal(tx.logs.length, 1);
assert.equal(tx.logs[0].event, "NameRegistered");
assert.equal(tx.logs[0].args.name, "newconfigname");
assert.equal(tx.logs[0].args.owner, registrantAccount);
assert.equal((await web3.eth.getBalance(controller.address)) - balanceBefore, 28 * DAYS);
var nodehash = namehash.hash("newconfigname.eth");
assert.equal((await ens.resolver(nodehash)), resolver.address);
assert.equal((await ens.owner(nodehash)), registrantAccount);
assert.equal((await resolver.addr(nodehash)), registrantAccount);
});
it('should not allow a commitment with addr but not resolver', async () => {
await exceptions.expectFailure(controller.makeCommitmentWithConfig("newconfigname2", registrantAccount, secret, NULL_ADDRESS, registrantAccount));
});
// Registration with a resolver but a null addr: the resolver record is set
// but the addr record stays unset (0).
it('should permit a registration with resolver but not addr', async () => {
  const commitment = await controller.makeCommitmentWithConfig("newconfigname2", registrantAccount, secret, resolver.address, NULL_ADDRESS);
  let tx = await controller.commit(commitment);
  assert.equal(await controller.commitments(commitment), (await web3.eth.getBlock(tx.receipt.blockNumber)).timestamp);

  await evm.advanceTime((await controller.minCommitmentAge()).toNumber());
  const balanceBefore = await web3.eth.getBalance(controller.address);
  // Reassign `tx` instead of re-declaring it with a second `var` in the
  // same scope, as the original did.
  tx = await controller.registerWithConfig("newconfigname2", registrantAccount, 28 * DAYS, secret, resolver.address, NULL_ADDRESS, {value: 28 * DAYS + 1, gasPrice: 0});

  assert.equal(tx.logs.length, 1);
  assert.equal(tx.logs[0].event, "NameRegistered");
  assert.equal(tx.logs[0].args.name, "newconfigname2");
  assert.equal(tx.logs[0].args.owner, registrantAccount);
  assert.equal((await web3.eth.getBalance(controller.address)) - balanceBefore, 28 * DAYS);

  const nodehash = namehash.hash("newconfigname2.eth");
  assert.equal((await ens.resolver(nodehash)), resolver.address);
  assert.equal((await resolver.addr(nodehash)), 0);
});
// Registering with a different owner than the one committed to must fail,
// showing the owner address is part of the commitment hash.
it('should include the owner in the commitment', async () => {
  await controller.commit(await controller.makeCommitment("newname2", accounts[2], secret));
  await evm.advanceTime((await controller.minCommitmentAge()).toNumber());
  // (Removed an unused `balanceBefore` snapshot from the original.)
  await exceptions.expectFailure(controller.register("newname2", registrantAccount, 28 * DAYS, secret, {value: 28 * DAYS, gasPrice: 0}));
});
// "newname" was registered earlier in the suite; registering it again
// through a fresh commitment must revert.
it('should reject duplicate registrations', async () => {
  await controller.commit(await controller.makeCommitment("newname", registrantAccount, secret));
  await evm.advanceTime((await controller.minCommitmentAge()).toNumber());
  // (Removed an unused `balanceBefore` snapshot from the original.)
  await exceptions.expectFailure(controller.register("newname", registrantAccount, 28 * DAYS, secret, {value: 28 * DAYS, gasPrice: 0}));
});
// Advancing past maxCommitmentAge invalidates the commitment, so the
// subsequent register() must revert.
it('should reject for expired commitments', async () => {
  await controller.commit(await controller.makeCommitment("newname2", registrantAccount, secret));
  await evm.advanceTime((await controller.maxCommitmentAge()).toNumber() + 1);
  // (Removed an unused `balanceBefore` snapshot from the original.)
  await exceptions.expectFailure(controller.register("newname2", registrantAccount, 28 * DAYS, secret, {value: 28 * DAYS, gasPrice: 0}));
});
// Renewals are permissionless: any caller who pays extends the expiry, and
// the controller keeps exactly the rent (86400 here) out of the value sent.
it('should allow anyone to renew a name', async () => {
  const labelHash = sha3("newname");
  const expiresBefore = await baseRegistrar.nameExpires(labelHash);
  const balanceBefore = await web3.eth.getBalance(controller.address);

  await controller.renew("newname", 86400, {value: 86400 + 1});

  const expiresAfter = await baseRegistrar.nameExpires(labelHash);
  assert.equal(expiresAfter.toNumber() - expiresBefore.toNumber(), 86400);
  assert.equal((await web3.eth.getBalance(controller.address)) - balanceBefore, 86400);
});
// Renewing without sending any value must revert.
it('should require sufficient value for a renewal', async () => {
  const unpaidRenewal = controller.renew("name", 86400);
  await exceptions.expectFailure(unpaidRenewal);
});
// Owner withdrawal drains the controller: only the end state (zero balance)
// is asserted, not the destination of the funds.
it('should allow the registrar owner to withdraw funds', async () => {
await controller.withdraw({gasPrice: 0, from: ownerAccount});
assert.equal(await web3.eth.getBalance(controller.address), 0);
});
});
|
from bs4 import BeautifulSoup
def extract_row_content(html):
    """Return the text content of every ``<td>`` cell in *html* as a list."""
    soup = BeautifulSoup(html, 'html.parser')
    return [cell.get_text() for cell in soup.find_all('td')]
<gh_stars>0
#include <stdio.h>
// Tiny hand-rolled helpers shadowing the std equivalents (the file avoids
// <algorithm>); const-ref parameters match the original signatures.
inline int max(const int&a , const int&b) { return a > b ? a : b ; }
inline int min(const int&a , const int&b) { return a < b ? a : b ; }
// Swap two ints. Dropped `register`: deprecated since C++11 and removed in
// C++17; it had no effect on modern compilers.
inline void swap(int&a , int&b) { int c = a; a = b; b = c; }
// Fast integer reader: skips bytes until a digit or '-', then accumulates
// decimal digits from stdin and returns the signed value. bb is the sign
// flag (1 = positive). NOTE(review): assumes well-formed input; getchar()'s
// EOF (-1) also ends both scan loops.
inline int F() { register int aa , bb ,ch;
while(ch = getchar() , (ch<'0'||ch>'9') && ch != '-'); ch == '-' ? aa=bb=0 : (aa=ch-'0',bb=1);
while(ch = getchar() , ch>='0'&&ch<='9') aa = aa*10 + ch-'0'; return bb ? aa : -aa;
}
const int Maxn = 100010;  // max vertices
const int Maxm = 200010;  // max half-edges (two per undirected edge)
const int Maxt = 409610;  // segment-tree node capacity (~4 * Maxn)
// Tree / HLD state: dep = depth, fa = parent, son = heavy child, size =
// subtree size, pl = position in segment tree (dfs order), bl = head of the
// node's heavy chain, v = weight of the edge to the parent (stored on the
// child), dfn = dfs position counter.
int n , dep[Maxn] , fa[Maxn] , son[Maxn] , size[Maxn] , pl[Maxn] , bl[Maxn] , v[Maxn] , dfn = 0;
// Adjacency lists (to/next/g/w, 1-based slots); vis marks the half-edge that
// points from parent to child after dfs1.
int to[Maxm] , next[Maxm] , g[Maxn] , w[Maxm] , ecnt = 0; bool vis[Maxm];
// Segment tree over pl positions: ll/rr = node range, tree = range max,
// lazy = pending add, tag = pending assignment (-1 = none).
int ll[Maxt] , rr[Maxt] , tree[Maxt] , lazy[Maxt] , tag[Maxt];
// Append an undirected weighted edge a <-> b as two directed half-edges in
// consecutive slots of to[]/next[]/w[], pushed onto each endpoint's list.
inline void ins(int a , int b , int wi) {
    ++ecnt; to[ecnt] = b; w[ecnt] = wi; next[ecnt] = g[a]; g[a] = ecnt;
    ++ecnt; to[ecnt] = a; w[ecnt] = wi; next[ecnt] = g[b]; g[b] = ecnt;
}
// First HLD pass: computes depth, parent and subtree sizes, picks the heavy
// child (largest subtree) into son[x], copies each tree edge's weight onto
// its child endpoint (v[child]) and marks the parent->child half-edge in
// vis[]. NOTE(review): recursive — deep path-like inputs near Maxn may
// overflow the stack.
void dfs1(int x) {
dep[x] = dep[fa[x]] + 1; size[x] = 1;
for(int i=g[x]; i; i=next[i]) {
if(to[i] == fa[x]) continue;
fa[to[i]] = x;
v[to[i]] = w[i]; vis[i] = 1;
dfs1(to[i]);
size[x] += size[to[i]];
if(size[to[i]] > size[son[x]]) son[x] = to[i];
}
}
// Second HLD pass: assigns chain heads (bl[x] = y) and dfs positions pl[x].
// The heavy child is visited first so every heavy chain occupies one
// contiguous interval of positions; light children start new chains.
void dfs2(int x , int y) {
bl[x] = y; pl[x] = ++dfn;
if(!son[x]) return;
dfs2(son[x],y);
for(int i=g[x]; i; i=next[i]) {
if(to[i] == fa[x] || to[i] == son[x]) continue;
dfs2(to[i] , to[i]);
}
}
// Lowest common ancestor by chain climbing: repeatedly lift the node whose
// chain head's parent is deeper until both share a chain, then take the
// shallower endpoint.
int lca(int a , int b) {
while(bl[a] != bl[b]) {
if(dep[fa[bl[a]]] < dep[fa[bl[b]]]) swap(a,b);
a = fa[bl[a]];
} return dep[a] < dep[b] ? a : b;
}
// Recompute node x's stored max from its two children.
void update(int x) {
    const int left = x << 1;
    const int right = left | 1;
    tree[x] = max(tree[left], tree[right]);
}
// Push node x's pending operations to its children. Two kinds of pending
// state exist: tag[x] (a range assignment; -1 means "none") and lazy[x] (a
// pending addition applied on top of any assignment). The assignment is
// pushed first so the subsequent add stacks correctly.
void downpush(int x) {
// Leaves have no children: just clear the pending state.
if(ll[x] == rr[x]) { lazy[x] = 0; tag[x] = -1; return; }
if(tag[x] != -1) {
// Assignment overrides whatever the children had pending.
lazy[x<<1] = lazy[x<<1|1] = 0;
tag[x<<1] = tag[x<<1|1] = tag[x];
tree[x<<1] = tree[x<<1|1] = tag[x];
tag[x] = -1;
}
if(lazy[x]) {
tree[x<<1] += lazy[x];
tree[x<<1|1] += lazy[x];
// A child holding a pending assignment folds the add into that value;
// otherwise the add accumulates in the child's own lazy slot.
if(tag[x<<1] == -1) lazy[x<<1] += lazy[x];
else tag[x<<1] += lazy[x];
if(tag[x<<1|1] == -1) lazy[x<<1|1] += lazy[x];
else tag[x<<1|1] += lazy[x];
lazy[x] = 0;
}
}
// Initialize segment-tree node x over [l, r]: zero the value and pending
// add, set tag to -1 ("no assignment pending"), then recurse into halves.
void Build(int x , int l , int r) {
    ll[x] = l;
    rr[x] = r;
    tag[x] = -1;
    lazy[x] = 0;
    tree[x] = 0;
    if (l == r) return;
    const int mid = (l + r) >> 1;
    Build(x << 1, l, mid);
    Build(x << 1 | 1, mid + 1, r);
}
// Range-max over [l, r], clamped to this node's interval; returns 0 when
// the clamped range is empty. Pending state is flushed before descending.
int query(int x , int l , int r) {
downpush(x);
l = max(ll[x] , l); r = min(rr[x] , r);
if(l > r) return 0;
if(l == ll[x] && r == rr[x]) return tree[x];
return max(query(x<<1 , l , r) , query(x<<1|1 , l , r));
}
// Range add: add val to every position of [l, r] (clamped to this node).
// Fully covered nodes accumulate into lazy[] and adjust the max directly;
// partial covers recurse after flushing, then recompute the node max.
void modifyadd(int x , int l , int r , int val) {
downpush(x);
l = max(ll[x] , l); r = min(rr[x] , r);
if(l > r) return;
if(l == ll[x] && r == rr[x]) {
lazy[x] += val;
tree[x] += val;
return;
}
else {
modifyadd(x<<1 , l , r , val);
modifyadd(x<<1|1 , l , r , val);
update(x);
}
}
// Range assign: set every position of [l, r] (clamped to this node) to val.
// Fully covered nodes record the assignment in tag[] and clear any pending
// add; partial covers recurse after flushing, then recompute the node max.
void modifytag(int x , int l , int r , int val) {
downpush(x);
l = max(ll[x] , l); r = min(rr[x] , r);
if(l > r) return;
if(l == ll[x] && r == rr[x]) {
tag[x] = tree[x] = val;
lazy[x] = 0;
return;
}
else {
modifytag(x<<1 , l , r , val);
modifytag(x<<1|1 , l , r , val);
update(x);
}
}
// Path operations from node x up to ancestor f, chain by chain. Edge
// weights live at the child's position pl[child], so each heavy chain maps
// to the interval [pl[bl[x]], pl[x]], and on the final (shared) chain the
// interval starts at pl[f]+1 to exclude f's own edge to its parent.
// NOTE(review): callers must pass f as an ancestor of x (here f = LCA).
// Assign val to every edge on the path x..f.
void solmodify(int x , int f , int val) {
while(bl[x] != bl[f]) {
modifytag(1,pl[bl[x]],pl[x],val);
x = fa[bl[x]];
}if(pl[f]+1 <= pl[x]) modifytag(1,pl[f]+1,pl[x],val);
}
// Maximum edge weight on the path x..f.
int solquery(int x , int f) {
int ans = 0;
while(bl[x] != bl[f]) {
ans = max(ans , query(1,pl[bl[x]],pl[x]));
x = fa[bl[x]];
}if(pl[f]+1 <= pl[x]) ans = max(ans , query(1,pl[f]+1,pl[x]));
return ans;
}
// Add val to every edge weight on the path x..f.
void soladd(int x , int f , int val) {
while(bl[x] != bl[f]) {
modifyadd(1,pl[bl[x]],pl[x],val);
x = fa[bl[x]];
}if(pl[f]+1 <= pl[x]) modifyadd(1,pl[f]+1,pl[x],val);
}
// Scratch buffer for the command word read by scanf.
char op[10];
// Reads an n-vertex weighted tree, builds the HLD + segment tree over edge
// weights, then processes commands dispatched on the first letters:
//   'Ch...' k w   - set the weight of the k-th input edge to w
//   'C...' a b w  - assign w to every edge on the path a..b
//   'M...' a b    - print the max edge weight on the path a..b
//   'A...' a b w  - add w to every edge on the path a..b
// Any other word terminates the loop. NOTE(review): scanf returns EOF (-1,
// truthy) at end of input, so the trailing `break` is the only exit.
int main() {
n = F();
for(int i=1; i<n; ++i) {
int a = F() , b = F() , wi = F();
ins(a,b,wi);
}
dfs1(1);
dfs2(1,1);
Build(1,1,n);
// Seed each vertex position with the weight of its parent edge.
for(int i=1; i<=n; ++i)
modifytag(1,pl[i],pl[i],v[i]);
while(scanf("%s",op)) {
if(op[0] == 'C') {
if(op[1] == 'h') {
int k = F() , val = F();
// The k-th input edge occupies half-edge slots 2k-1 and 2k; vis
// marks the one pointing at the child, whose position holds the
// weight.
if(vis[k<<1]) k = to[k<<1];
else k = to[(k<<1)-1];
modifytag(1,pl[k],pl[k],val);
}
else{
int a = F() , b = F() , val = F();
int L = lca(a , b);
solmodify(a,L,val); solmodify(b,L,val);
}
}
else if(op[0] == 'M') {
int a = F() , b = F();
int L = lca(a , b);
printf("%d\n",max(solquery(a,L),solquery(b,L)) );
}
else if(op[0] == 'A') {
int a = F() , b = F() , val = F();
int L = lca(a,b);
soladd(a,L,val); soladd(b,L,val);
}else break;
}
}
/* X-macro parameter table: the includer must define
 * PARAM_INFO(DisplayName, Type, accessorName, memberName, DefaultValue)
 * before including this file and gets one expansion per dimension below. */
#ifndef PARAM_INFO
#error "You must define PARAM_INFO macro before include this file"
#endif
PARAM_INFO(Width, float, width, m_width, (1.0f))
PARAM_INFO(Height, float, height, m_height, (1.0f))
PARAM_INFO(Depth, float, depth, m_depth, (1.0f))
|
import isEmail from "validator/lib/isEmail";
import { create, test, enforce, only, optional } from "vest";
import { AccountGeneralDto, ChangePasswordForm } from "~/types";
// Vest validation suite for the account "general" form. `currentField`
// restricts a run to the field being edited; full_name is optional while
// left empty.
export const ACCOUNT_GENERAL_SCHEMA: any = create(
  ({ email, full_name, username }: AccountGeneralDto, currentField: string) => {
    only(currentField);
    optional({ full_name: () => !full_name });
    enforce.extend({ isEmail });
    // Username validation
    test("username", "Username must not be empty!", () => {
      enforce(username).isNotBlank();
    });
    test("username", "Username must be at least 3 characters long!", () => {
      enforce(username).longerThanOrEquals(3);
    });
    test("username", "Username must be at most 18 characters long!", () => {
      enforce(username).shorterThanOrEquals(18);
    });
    test("username", "Username must have no special characters!", () => {
      enforce(username).matches(/^[a-zA-Z0-9.\-_]+$/);
    });
    // Email validation
    test("email", "Email address must not be empty!", () => {
      enforce(email).isNotBlank();
    });
    test("email", "Email must be a valid email address!", () => {
      enforce(email).isEmail();
    });
    // Full name validation (the original comment mislabeled this "Url")
    test("full_name", "Name must not be empty!", () => {
      enforce(full_name).isNotBlank();
    });
    // Message corrected: the rule enforces a minimum of 3 characters, but the
    // original message claimed "at least a character".
    test("full_name", "Name must be at least 3 characters long!", () => {
      enforce(full_name).longerThanOrEquals(3);
    });
    test("full_name", "Name must be at most 50 characters long!", () => {
      enforce(full_name).shorterThanOrEquals(50);
    });
  }
);
// Vest validation suite for the change-password form: old password presence,
// new password strength rules, and confirmation match.
export const CHANGE_PASSWORD_SCHEMA: any = create(
  (
    { old_password, new_password, confirm_password }: ChangePasswordForm,
    currentField: string
  ) => {
    only(currentField);
    // Old password validation
    test("old_password", "Old Password must not be empty!", () => {
      enforce(old_password).isNotBlank();
    });
    // New password validation
    // Message repaired: the original contained a corrupted "<PASSWORD>"
    // placeholder instead of "New Password".
    test("new_password", "New Password must not be empty!", () => {
      enforce(new_password).isNotBlank();
    });
    test(
      "new_password",
      "New Password must be at least 8 characters long!",
      () => {
        enforce(new_password).longerThanOrEquals(8);
      }
    );
    test(
      "new_password",
      "New Password must be at most 40 characters long!",
      () => {
        enforce(new_password).shorterThanOrEquals(40);
      }
    );
    test(
      "new_password",
      "New Password must contain one special character!",
      () => {
        enforce(new_password).matches(/[*@!#%&()^~{}]+/);
      }
    );
    test(
      "new_password",
      "New Password must contain at least one uppercase character!",
      () => {
        enforce(new_password).matches(/[A-Z]+/);
      }
    );
    test(
      "new_password",
      "New Password must contain at least one lowercase character!",
      () => {
        enforce(new_password).matches(/[a-z]+/);
      }
    );
    test(
      "new_password",
      "New Password must contain at least one number!",
      () => {
        enforce(new_password).matches(/[0-9]+/);
      }
    );
    // Confirm password validation
    test("confirm_password", "Passwords do not match!", () => {
      enforce(confirm_password).equals(new_password);
    });
  }
);
|
#!/bin/bash
# Package the robots plugins and shared libraries into ../data for
# distribution. Requires BIN_DIR to point at the build output directory;
# `nounset` aborts if it is unset and `errexit` aborts on any failed copy.
set -o nounset
set -o errexit

cd "$(dirname "$0")"

# Quote all expansions so paths with spaces survive word splitting; globbed
# suffixes (.so*) are left outside the quotes so they still expand.
mkdir -p "$PWD/../data/lib/plugins/editors/"
mkdir -p "$PWD/../data/lib/plugins/tools/"
mkdir -p "$PWD/../data/bin/"
mkdir -p "$PWD/../data/resources/"

cp "$BIN_DIR/plugins/editors/librobotsMetamodel.so" "$PWD/../data/lib/plugins/editors/"
cp -pr "$BIN_DIR"/librobots-interpreter-core.so* "$PWD/../data/lib/"
cp "$BIN_DIR/plugins/tools/librobots-plugin.so" "$PWD/../data/lib/plugins/tools/"
cp -pr "$BIN_DIR"/librobots-2d-model.so* "$PWD/../data/lib/"
cp -pr "$BIN_DIR"/librobots-generator-base.so* "$PWD/../data/lib/"
cp -pr "$BIN_DIR"/librobots-kit-base.so* "$PWD/../data/lib/"
cp -pr "$BIN_DIR"/librobots-utils.so* "$PWD/../data/lib/"

# Cleaning up prebuild-common.sh results...
rm -rf "$PWD/../data/plugins/"
mv "$PWD/../data/help" "$PWD/../data/resources/"
mv "$PWD/../data/images" "$PWD/../data/resources/"
mv "$PWD/../data/splashscreen.png" "$PWD/../data/bin/"
|
<filename>pattern.js<gh_stars>0
const { hsv } = require('./lib/tools');
// Render one frame.
// Render one frame. `client` must expose setPixel(index, r, g, b); pixels
// are buffered and pushed to the device when this function returns.
function draw(client) {
  // Seconds since the epoch drive the animation phase.
  // (Date.now() replaces the older new Date().getTime() idiom.)
  const secs = Date.now() / 1000;

  // There are two cats and each is 60 pixels wide by 8 pixels high. Loop
  // over the pixels and compute the color of each based on the current time.
  for (let cat = 0; cat < 2; cat++) {
    for (let row = 0; row < 8; row++) {
      for (let col = 0; col < 60; col++) {
        // Get an (x,y) coordinate for this pixel.
        // * y goes from 0.0 at the top to 1.0 at the bottom
        // * x goes from tip to tail (0.0 near the cat's nose, 1.0 near the
        //   tail)
        //
        // The physical scale is meters. 1 unit of distance = 1 meter.
        //
        // Since x goes from tip to tail, and the two cats are mirror images,
        // the x axis on the two cats is flipped. This is usually convenient
        // since you probably want to write symmetrical patterns, but you can
        // easily override it.
        const x = col / 59;
        const y = row / 7;

        // A simple rainbow fade
        // (this is the actual pattern – put your code here!)
        const t = secs - x - y * 2;
        const [red, green, blue] = hsv(Math.sin(t) / 2 + .5, 1,
          Math.sin(t) / 2 + .5);

        // Save the computed pixel value in a buffer
        client.setPixel(cat * 60 * 8 + row * 60 + col, red, green, blue);
      }
    }
  }
  // When this function returns the frame will be drawn. Pixel values will be
  // gamma corrected to a 16-bit value, temporally dithered, and interpolated
  // between frames thanks to the FadeCandy hardware.
}

module.exports = { draw };
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.transpiler.frontend.javac;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import com.google.common.base.Predicates;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableList;
import com.google.j2cl.common.FilePosition;
import com.google.j2cl.common.SourcePosition;
import com.google.j2cl.transpiler.ast.ArrayAccess;
import com.google.j2cl.transpiler.ast.ArrayLength;
import com.google.j2cl.transpiler.ast.ArrayLiteral;
import com.google.j2cl.transpiler.ast.ArrayTypeDescriptor;
import com.google.j2cl.transpiler.ast.AssertStatement;
import com.google.j2cl.transpiler.ast.AstUtils;
import com.google.j2cl.transpiler.ast.BinaryExpression;
import com.google.j2cl.transpiler.ast.Block;
import com.google.j2cl.transpiler.ast.BooleanLiteral;
import com.google.j2cl.transpiler.ast.BreakStatement;
import com.google.j2cl.transpiler.ast.CastExpression;
import com.google.j2cl.transpiler.ast.CatchClause;
import com.google.j2cl.transpiler.ast.CompilationUnit;
import com.google.j2cl.transpiler.ast.ConditionalExpression;
import com.google.j2cl.transpiler.ast.ContinueStatement;
import com.google.j2cl.transpiler.ast.DeclaredTypeDescriptor;
import com.google.j2cl.transpiler.ast.DoWhileStatement;
import com.google.j2cl.transpiler.ast.Expression;
import com.google.j2cl.transpiler.ast.Field;
import com.google.j2cl.transpiler.ast.FieldAccess;
import com.google.j2cl.transpiler.ast.FieldDescriptor;
import com.google.j2cl.transpiler.ast.ForEachStatement;
import com.google.j2cl.transpiler.ast.ForStatement;
import com.google.j2cl.transpiler.ast.FunctionExpression;
import com.google.j2cl.transpiler.ast.IfStatement;
import com.google.j2cl.transpiler.ast.InstanceOfExpression;
import com.google.j2cl.transpiler.ast.JavaScriptConstructorReference;
import com.google.j2cl.transpiler.ast.Label;
import com.google.j2cl.transpiler.ast.LabelReference;
import com.google.j2cl.transpiler.ast.LabeledStatement;
import com.google.j2cl.transpiler.ast.Method;
import com.google.j2cl.transpiler.ast.MethodCall;
import com.google.j2cl.transpiler.ast.MethodDescriptor;
import com.google.j2cl.transpiler.ast.NewArray;
import com.google.j2cl.transpiler.ast.NewInstance;
import com.google.j2cl.transpiler.ast.NumberLiteral;
import com.google.j2cl.transpiler.ast.PostfixExpression;
import com.google.j2cl.transpiler.ast.PrefixExpression;
import com.google.j2cl.transpiler.ast.PrimitiveTypeDescriptor;
import com.google.j2cl.transpiler.ast.ReturnStatement;
import com.google.j2cl.transpiler.ast.RuntimeMethods;
import com.google.j2cl.transpiler.ast.Statement;
import com.google.j2cl.transpiler.ast.StringLiteral;
import com.google.j2cl.transpiler.ast.SuperReference;
import com.google.j2cl.transpiler.ast.SwitchCase;
import com.google.j2cl.transpiler.ast.SwitchStatement;
import com.google.j2cl.transpiler.ast.SynchronizedStatement;
import com.google.j2cl.transpiler.ast.ThisReference;
import com.google.j2cl.transpiler.ast.ThrowStatement;
import com.google.j2cl.transpiler.ast.TryStatement;
import com.google.j2cl.transpiler.ast.Type;
import com.google.j2cl.transpiler.ast.TypeDeclaration;
import com.google.j2cl.transpiler.ast.TypeDescriptor;
import com.google.j2cl.transpiler.ast.TypeDescriptors;
import com.google.j2cl.transpiler.ast.TypeLiteral;
import com.google.j2cl.transpiler.ast.UnaryExpression;
import com.google.j2cl.transpiler.ast.Variable;
import com.google.j2cl.transpiler.ast.VariableDeclarationExpression;
import com.google.j2cl.transpiler.ast.VariableDeclarationFragment;
import com.google.j2cl.transpiler.ast.Visibility;
import com.google.j2cl.transpiler.ast.WhileStatement;
import com.google.j2cl.transpiler.frontend.common.AbstractCompilationUnitBuilder;
import com.google.j2cl.transpiler.frontend.common.Nullability;
import com.google.j2cl.transpiler.frontend.common.PackageInfoCache;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.Tree.Kind;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Symbol.PackageSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCArrayAccess;
import com.sun.tools.javac.tree.JCTree.JCAssert;
import com.sun.tools.javac.tree.JCTree.JCAssign;
import com.sun.tools.javac.tree.JCTree.JCAssignOp;
import com.sun.tools.javac.tree.JCTree.JCBinary;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCBreak;
import com.sun.tools.javac.tree.JCTree.JCCatch;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCCompilationUnit;
import com.sun.tools.javac.tree.JCTree.JCConditional;
import com.sun.tools.javac.tree.JCTree.JCContinue;
import com.sun.tools.javac.tree.JCTree.JCDoWhileLoop;
import com.sun.tools.javac.tree.JCTree.JCEnhancedForLoop;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCExpressionStatement;
import com.sun.tools.javac.tree.JCTree.JCFieldAccess;
import com.sun.tools.javac.tree.JCTree.JCForLoop;
import com.sun.tools.javac.tree.JCTree.JCFunctionalExpression;
import com.sun.tools.javac.tree.JCTree.JCIdent;
import com.sun.tools.javac.tree.JCTree.JCIf;
import com.sun.tools.javac.tree.JCTree.JCInstanceOf;
import com.sun.tools.javac.tree.JCTree.JCLabeledStatement;
import com.sun.tools.javac.tree.JCTree.JCLambda;
import com.sun.tools.javac.tree.JCTree.JCLiteral;
import com.sun.tools.javac.tree.JCTree.JCMemberReference;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCMethodInvocation;
import com.sun.tools.javac.tree.JCTree.JCNewArray;
import com.sun.tools.javac.tree.JCTree.JCNewClass;
import com.sun.tools.javac.tree.JCTree.JCParens;
import com.sun.tools.javac.tree.JCTree.JCReturn;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCSwitch;
import com.sun.tools.javac.tree.JCTree.JCSynchronized;
import com.sun.tools.javac.tree.JCTree.JCThrow;
import com.sun.tools.javac.tree.JCTree.JCTry;
import com.sun.tools.javac.tree.JCTree.JCTypeCast;
import com.sun.tools.javac.tree.JCTree.JCUnary;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.JCTree.JCWhileLoop;
import com.sun.tools.javac.tree.JCTree.Tag;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ExecutableType;
/** Creates a J2CL Java AST from the AST provided by JavaC. */
public class CompilationUnitBuilder extends AbstractCompilationUnitBuilder {
private final JavaEnvironment environment;
// Maps each javac variable element to the J2CL Variable created for it.
private final Map<VariableElement, Variable> variableByVariableElement = new HashMap<>();
// Labels visible at the current conversion point, keyed by label name.
private final Map<String, Label> labelsInScope = new HashMap<>();
private JCCompilationUnit javacUnit;
private CompilationUnitBuilder(JavaEnvironment environment) {
this.environment = environment;
}
/** Creates the J2CL Type node for {@code typeElement}; returns null for a null element. */
private Type createType(ClassSymbol typeElement, JCTree sourcePositionNode) {
if (typeElement == null) {
return null;
}
Visibility visibility = JavaEnvironment.getVisibility(typeElement);
TypeDeclaration typeDeclaration = environment.createDeclarationForType(typeElement);
// Anonymous classes have no name to point at, so use the whole node's
// source position instead of the name position.
Type type =
new Type(
typeDeclaration.isAnonymous()
? getSourcePosition(sourcePositionNode)
: getNamePosition(sourcePositionNode),
visibility,
typeDeclaration);
type.setStatic(JavaEnvironment.isStatic(typeElement));
return type;
}
/**
 * Constructs a type, maintains the type stack and lets the caller do additional work by
 * supplying a {@code typeProcessor}.
 *
 * @return {T} the value returned by {@code typeProcessor}
 */
private <T> T convertAndAddType(
    ClassSymbol typeElement,
    List<JCTree> bodyDeclarations,
    JCTree sourcePositionNode,
    Function<Type, T> typeProcessor) {
  Type type = createType(typeElement, sourcePositionNode);
  getCurrentCompilationUnit().addType(type);
  // Convert the body (and run the processor) with `type` as the enclosing
  // scope so nested members resolve against it. (A stray empty statement
  // that preceded the conversion call was removed.)
  return processEnclosedBy(
      type,
      () -> {
        convertTypeBody(type, typeElement, bodyDeclarations);
        return typeProcessor.apply(type);
      });
}
/** Converts a class declaration using the declaration itself for source positions. */
private Type convertClassDeclaration(JCClassDecl classDecl) {
return convertClassDeclaration(classDecl, classDecl);
}
// Converts a class declaration; the type is added to the compilation unit
// as a side effect and null is returned (matching the typeProcessor).
private Type convertClassDeclaration(JCClassDecl classDecl, JCTree sourcePositionNode) {
return convertAndAddType(
classDecl.sym, classDecl.getMembers(), sourcePositionNode, type -> null);
}
/**
 * Converts the members of a type (fields, methods, initializer blocks and
 * nested classes), then synthesizes backing fields for captured variables
 * and, if needed, for the enclosing instance.
 */
private void convertTypeBody(Type type, ClassSymbol classSymbol, List<JCTree> bodyDeclarations) {
TypeDeclaration currentTypeDeclaration = type.getDeclaration();
propagateCapturesFromSupertype(currentTypeDeclaration);
for (JCTree bodyDeclaration : bodyDeclarations) {
if (bodyDeclaration instanceof JCVariableDecl) {
JCVariableDecl fieldDeclaration = (JCVariableDecl) bodyDeclaration;
type.addMember(convertFieldDeclaration(fieldDeclaration));
} else if (bodyDeclaration instanceof JCMethodDecl) {
JCMethodDecl methodDeclaration = (JCMethodDecl) bodyDeclaration;
if ((methodDeclaration.mods.flags & Flags.GENERATEDCONSTR) != 0
&& (methodDeclaration.mods.flags & Flags.ANONCONSTR) == 0) {
// Skip constructors that are generated by javac. This allows to differentiate between
// a user written empty constructor and an implicit (generated) constructor.
// J2CL already has logic to synthesize default constructors for JDT.
// TODO(b/135123615): When the migration is completed, the pass synthesizing default
// constructors can be removed by adding isGenerated or isSynthetic to Method.
continue;
}
type.addMember(convertMethodDeclaration(methodDeclaration));
// } else if (bodyDeclaration instanceof AnnotationTypeMemberDeclaration) {
// AnnotationTypeMemberDeclaration memberDeclaration =
// (AnnotationTypeMemberDeclaration) bodyDeclaration;
// type.addMethod(convert(memberDeclaration));
} else if (bodyDeclaration instanceof JCBlock) {
// Static or instance initializer block.
JCBlock initializer = (JCBlock) bodyDeclaration;
Block block = convertBlock(initializer);
if (initializer.isStatic()) {
type.addStaticInitializerBlock(block);
} else {
type.addInstanceInitializerBlock(block);
}
} else if (bodyDeclaration instanceof JCClassDecl) {
// Nested class
JCClassDecl nestedTypeDeclaration = (JCClassDecl) bodyDeclaration;
convertClassDeclaration(nestedTypeDeclaration);
} else {
throw internalCompilerError(
"Unimplemented translation for BodyDeclaration type: %s.",
bodyDeclaration.getClass().getName());
}
}
// Synthesize a backing field for each variable this type captures.
for (Variable capturedVariable : getCapturedVariables(currentTypeDeclaration)) {
FieldDescriptor fieldDescriptor =
AstUtils.getFieldDescriptorForCapture(type.getTypeDescriptor(), capturedVariable);
type.addMember(
Field.Builder.from(fieldDescriptor)
.setCapturedVariable(capturedVariable)
.setSourcePosition(type.getSourcePosition())
.setNameSourcePosition(capturedVariable.getSourcePosition())
.build());
}
if (JavaEnvironment.capturesEnclosingInstance(classSymbol)) {
// add field for enclosing instance.
type.addMember(
0,
Field.Builder.from(
AstUtils.getFieldDescriptorForEnclosingInstance(
type.getTypeDescriptor(),
type.getEnclosingTypeDeclaration().toUnparameterizedTypeDescriptor()))
.setSourcePosition(type.getSourcePosition())
.build());
}
}
/**
 * Converts a field declaration. Compile-time constants are materialized
 * from their constant value instead of converting the initializer
 * expression.
 */
private Field convertFieldDeclaration(JCVariableDecl fieldDeclaration) {
Expression initializer;
VariableElement variableElement = fieldDeclaration.sym;
if (variableElement.getConstantValue() == null) {
initializer = convertExpressionOrNull(fieldDeclaration.getInitializer());
} else {
initializer = convertConstantToLiteral(variableElement);
}
return Field.Builder.from(
environment.createFieldDescriptor(variableElement, fieldDeclaration.type))
.setInitializer(initializer)
.setSourcePosition(getSourcePosition(fieldDeclaration))
.setNameSourcePosition(getNamePosition(fieldDeclaration))
.build();
}
/**
 * Converts a method declaration: parameters are registered first (so the
 * body can reference them), and abstract/native methods get an empty body.
 */
private Method convertMethodDeclaration(JCMethodDecl methodDeclaration) {
MethodDescriptor methodDescriptor =
environment.createDeclarationMethodDescriptor(methodDeclaration.sym);
return processEnclosedBy(
methodDescriptor,
() -> {
List<Variable> parameters = new ArrayList<>();
for (JCVariableDecl parameter : methodDeclaration.getParameters()) {
parameters.add(createVariable(parameter, true));
}
// If a method has no body, initialize the body with an empty list of statements.
Block body =
methodDeclaration.getBody() == null
? Block.newBuilder()
.setSourcePosition(getSourcePosition(methodDeclaration))
.build()
: convertBlock(methodDeclaration.getBody());
return newMethodBuilder(methodDeclaration.sym)
.setBodySourcePosition(body.getSourcePosition())
.setSourcePosition(getNamePosition(methodDeclaration))
.setParameters(parameters)
.addStatements(body.getStatements())
.build();
});
}
/**
 * Converts a block; statements that convert to null (e.g. elided constructs)
 * are filtered out.
 */
private Block convertBlock(JCBlock block) {
return Block.newBuilder()
.setSourcePosition(getSourcePosition(block))
.setStatements(
block.getStatements().stream()
.map(this::convertStatement)
.filter(Predicates.notNull())
.collect(toImmutableList()))
.build();
}
/**
 * Creates a J2CL variable for a javac declaration and records it so later
 * references (via variableByVariableElement) resolve to the same object.
 */
private Variable createVariable(JCVariableDecl variableDeclaration, boolean isParameter) {
VariableElement variableElement = variableDeclaration.sym;
Variable variable =
environment.createVariable(
getNamePosition(variableElement.getSimpleName().toString(), variableDeclaration),
variableElement,
isParameter);
variableByVariableElement.put(variableElement, variable);
recordEnclosingType(variable, getCurrentType());
return variable;
}
/** Starts a Method builder seeded with the declaration descriptor for {@code methodElement}. */
private Method.Builder newMethodBuilder(ExecutableElement methodElement) {
  return Method.newBuilder()
      .setMethodDescriptor(environment.createDeclarationMethodDescriptor(methodElement));
}
/**
 * Materializes a compile-time constant as a literal AST node.
 *
 * <p>Handles Boolean, Number, Character and String constants; anything else
 * is an internal compiler error.
 */
private Expression convertConstantToLiteral(VariableElement variableElement) {
  Object constantValue = variableElement.getConstantValue();
  if (constantValue instanceof Boolean) {
    // Simplified from `(boolean) c ? BooleanLiteral.get(true) : BooleanLiteral.get(false)`.
    return BooleanLiteral.get((boolean) constantValue);
  }
  if (constantValue instanceof Number) {
    return new NumberLiteral(
        environment.createTypeDescriptor(variableElement.asType()).toUnboxedType(),
        (Number) constantValue);
  }
  if (constantValue instanceof Character) {
    return NumberLiteral.fromChar((Character) constantValue);
  }
  if (constantValue instanceof String) {
    return new StringLiteral((String) constantValue);
  }
  throw internalCompilerError(
      "Unimplemented translation for compile time constants of type: %s.",
      constantValue.getClass().getSimpleName());
}
//////////////////////////////////////////////////////////////////////////////////////////////
// Statements.
//////////////////////////////////////////////////////////////////////////////////////////////
/** Converts an assert statement (condition plus optional detail message). */
private AssertStatement convertAssert(JCAssert statement) {
return AssertStatement.newBuilder()
.setSourcePosition(getSourcePosition(statement))
.setExpression(convertExpression(statement.getCondition()))
.setMessage(convertExpressionOrNull(statement.getDetail()))
.build();
}
/**
 * Converts a labeled statement. The label is registered in {@code
 * labelsInScope} while its body converts (so break/continue targeting it can
 * resolve) and removed afterwards; duplicate names in scope are an error.
 */
private LabeledStatement convertLabeledStatement(JCLabeledStatement statement) {
  Label label = Label.newBuilder().setName(statement.getLabel().toString()).build();
  checkState(labelsInScope.put(label.getName(), label) == null);
  LabeledStatement labeledStatement =
      LabeledStatement.newBuilder()
          .setSourcePosition(getSourcePosition(statement))
          .setLabel(label)
          .setStatement(convertStatement(statement.getStatement()))
          .build();
  labelsInScope.remove(label.getName());
  return labeledStatement;
}
/** Converts a break statement, resolving its optional target label. */
private BreakStatement convertBreak(JCBreak statement) {
return BreakStatement.newBuilder()
.setSourcePosition(getSourcePosition(statement))
.setLabelReference(getLabelReferenceOrNull(statement.getLabel()))
.build();
}
/** Converts a continue statement, resolving its optional target label. */
private ContinueStatement convertContinue(JCContinue statement) {
return ContinueStatement.newBuilder()
.setSourcePosition(getSourcePosition(statement))
.setLabelReference(getLabelReferenceOrNull(statement.getLabel()))
.build();
}
// Resolves a label name to a reference; null label means an unlabeled
// break/continue. NOTE(review): assumes a named label is always in scope
// (javac checks this earlier); otherwise this would NPE on the map lookup.
private LabelReference getLabelReferenceOrNull(Name label) {
return label == null ? null : labelsInScope.get(label.toString()).createReference();
}
/** Converts a do-while loop; outer parentheses around the condition are stripped. */
private DoWhileStatement convertDoWhileLoop(JCDoWhileLoop statement) {
return DoWhileStatement.newBuilder()
.setSourcePosition(getSourcePosition(statement))
.setConditionExpression(convertConditionRemovingOuterParentheses(statement.getCondition()))
.setBody(convertStatement(statement.getStatement()))
.build();
}
/** Converts an expression statement by wrapping the converted expression. */
private Statement convertExpressionStatement(JCExpressionStatement statement) {
return convertExpression(statement.getExpression()).makeStatement(getSourcePosition(statement));
}
/**
 * Converts a basic for loop. A missing condition is represented by the
 * literal {@code true}.
 */
private ForStatement convertForLoop(JCForLoop statement) {
return ForStatement.newBuilder()
// The order here is important since initializers can define new variables
// These can be used in the expression, updaters or the body
// This is why we need to process initializers first
.setInitializers(convertInitializers(statement.getInitializer()))
.setConditionExpression(
statement.getCondition() == null
? BooleanLiteral.get(true)
: convertExpression(statement.getCondition()))
.setBody(convertStatement(statement.getStatement()))
.setUpdates(
convertExpressions(
statement.getUpdate().stream()
.map(JCExpressionStatement::getExpression)
.collect(toImmutableList())))
.setSourcePosition(getSourcePosition(statement))
.build();
}
/**
 * Converts for-loop initializers. In Java the initializer section is either
 * all variable declarations or all expression statements, so one VARIABLE
 * kind implies the whole list is variable declarations.
 */
private List<Expression> convertInitializers(List<JCStatement> statements) {
if (statements.stream().anyMatch(s -> s.getKind() == Kind.VARIABLE)) {
// The statements are all variable declaration statements, collect them into one
// variable declaration expression.
return convertVariableDeclarations(statements);
}
return statements.stream().map(this::convertInitializer).collect(toImmutableList());
}
/** Collapses a list of variable declarations into a single declaration expression. */
private List<Expression> convertVariableDeclarations(List<JCStatement> statements) {
return ImmutableList.of(
VariableDeclarationExpression.newBuilder()
.addVariableDeclarationFragments(
statements.stream()
.map(s -> createVariableDeclarationFragment((JCVariableDecl) s))
.collect(toImmutableList()))
.build());
}
/**
 * Converts a single non-declaration for-loop initializer; only expression
 * statements are expected at this point.
 */
private Expression convertInitializer(JCStatement statement) {
  if (statement.getKind() == Kind.EXPRESSION_STATEMENT) {
    return convertExpression(((JCExpressionStatement) statement).expr);
  }
  throw new AssertionError();
}
private ForEachStatement convertEnhancedForLoop(JCEnhancedForLoop statement) {
return ForEachStatement.newBuilder()
.setLoopVariable(createVariable(statement.getVariable(), false))
.setIterableExpression(convertExpression(statement.getExpression()))
.setBody(convertStatement(statement.getStatement()))
.setSourcePosition(getSourcePosition(statement))
.build();
}
/** Converts an {@code if} statement; the else branch may be absent (null). */
private IfStatement convertIf(JCIf statement) {
  return IfStatement.newBuilder()
      .setSourcePosition(getSourcePosition(statement))
      .setConditionExpression(convertConditionRemovingOuterParentheses(statement.getCondition()))
      .setThenStatement(convertStatement(statement.getThenStatement()))
      .setElseStatement(convertStatementOrNull(statement.getElseStatement()))
      .build();
}
/** Converts a {@code while} loop. */
private WhileStatement convertWhileLoop(JCWhileLoop statement) {
  return WhileStatement.newBuilder()
      .setSourcePosition(getSourcePosition(statement))
      .setConditionExpression(convertConditionRemovingOuterParentheses(statement.getCondition()))
      .setBody(convertStatement(statement.getStatement()))
      .build();
}
/**
 * Converts a {@code switch} statement. The {@code default} clause has a null case expression
 * (handled by convertExpressionOrNull).
 */
private SwitchStatement convertSwitch(JCSwitch switchStatement) {
  return SwitchStatement.newBuilder()
      .setSourcePosition(getSourcePosition(switchStatement))
      .setSwitchExpression(convertExpressionOrNull(switchStatement.getExpression()))
      .setCases(
          switchStatement.getCases().stream()
              .map(
                  caseClause ->
                      SwitchCase.newBuilder()
                          .setCaseExpression(convertExpressionOrNull(caseClause.getExpression()))
                          .setStatements(convertStatements(caseClause.getStatements()))
                          .build())
              .collect(toImmutableList()))
      .build();
}
/** Converts a {@code throw} statement. */
private ThrowStatement convertThrow(JCThrow statement) {
  SourcePosition sourcePosition = getSourcePosition(statement);
  Expression thrownExpression = convertExpression(statement.getExpression());
  return new ThrowStatement(sourcePosition, thrownExpression);
}
/**
 * Converts a {@code try} statement, including try-with-resources declarations, catch clauses
 * and the optional finally block (null when absent).
 */
private TryStatement convertTry(JCTry statement) {
  List<JCCatch> catchClauses = statement.getCatches();
  return TryStatement.newBuilder()
      .setSourcePosition(getSourcePosition(statement))
      .setResourceDeclarations(
          statement.getResources().stream().map(this::toResource).collect(toImmutableList()))
      .setBody(convertBlock(statement.getBlock()))
      .setCatchClauses(
          catchClauses.stream().map(this::convertCatchClause).collect(toImmutableList()))
      .setFinallyBlock((Block) convertStatementOrNull(statement.getFinallyBlock()))
      .build();
}
/**
 * Converts a try-with-resources resource, which is either an inline variable declaration or a
 * plain identifier referencing an effectively-final variable declared outside the try.
 */
private VariableDeclarationExpression toResource(JCTree resourceTree) {
  if (resourceTree.getTag() != Tag.VARDEF) {
    checkArgument(resourceTree.getTag() == Tag.IDENT);
    return toResource((JCIdent) resourceTree);
  }
  return createVariableDeclarationExpression((JCVariableDecl) resourceTree);
}
/**
 * Wraps a resource declared outside of the try statement into a fresh final temporary variable
 * so the try statement owns a declaration it can manage.
 */
private VariableDeclarationExpression toResource(JCIdent ident) {
  // Create temporary variables for resources declared outside of the try statement.
  Expression expression = convertIdent(ident);
  return VariableDeclarationExpression.newBuilder()
      .addVariableDeclaration(
          Variable.newBuilder()
              .setName("$resource")
              .setTypeDescriptor(expression.getTypeDescriptor())
              .setFinal(true)
              .build(),
          expression)
      .build();
}
/** Converts a catch clause; the exception variable is registered before the body references it. */
private CatchClause convertCatchClause(JCCatch catchClause) {
  // Order is important here, exception declaration must be converted before body.
  return CatchClause.newBuilder()
      .setExceptionVariable(createVariable(catchClause.getParameter(), false))
      .setBody(convertBlock(catchClause.getBlock()))
      .build();
}
/**
 * Converts a {@code return} statement; the return expression may be absent (null for void
 * returns).
 */
private ReturnStatement convertReturn(JCReturn statement) {
  // Grab the type of the return statement from the method declaration, not from the expression.
  return ReturnStatement.newBuilder()
      .setExpression(convertExpressionOrNull(statement.getExpression()))
      .setTypeDescriptor(getEnclosingFunctional().getReturnTypeDescriptor())
      .setSourcePosition(getSourcePosition(statement))
      .build();
}
/** Converts a {@code synchronized} statement. */
private SynchronizedStatement convertSynchronized(JCSynchronized statement) {
  Expression monitorExpression = convertExpression(statement.getExpression());
  Block body = convertBlock(statement.getBlock());
  return new SynchronizedStatement(getSourcePosition(statement), monitorExpression, body);
}
/** Converts a local variable declaration statement. */
private Statement convertVariableDeclaration(JCVariableDecl variableDeclaration) {
  return createVariableDeclarationExpression(variableDeclaration)
      .makeStatement(getSourcePosition(variableDeclaration));
}
/** Wraps a single variable declaration into a one-fragment declaration expression. */
private VariableDeclarationExpression createVariableDeclarationExpression(
    JCVariableDecl variableDeclaration) {
  return VariableDeclarationExpression.newBuilder()
      .addVariableDeclarationFragments(createVariableDeclarationFragment(variableDeclaration))
      .build();
}
/**
 * Creates a declaration fragment (variable plus optional initializer). The variable is created
 * before the initializer is converted so it is already registered when the initializer is
 * processed.
 */
private VariableDeclarationFragment createVariableDeclarationFragment(
    JCVariableDecl variableDeclaration) {
  Variable variable = createVariable(variableDeclaration, false);
  return VariableDeclarationFragment.newBuilder()
      .setVariable(variable)
      .setInitializer(convertExpressionOrNull(variableDeclaration.getInitializer()))
      .build();
}
/**
 * Dispatches a javac statement node to its specific converter.
 *
 * <p>Local class declarations are converted as types (via convertClassDeclaration) and produce
 * no statement, hence the {@code null} return for CLASS.
 */
private Statement convertStatement(JCStatement jcStatement) {
  switch (jcStatement.getKind()) {
    case ASSERT:
      return convertAssert((JCAssert) jcStatement);
    case BLOCK:
      return convertBlock((JCBlock) jcStatement);
    case BREAK:
      return convertBreak((JCBreak) jcStatement);
    case CLASS:
      convertClassDeclaration((JCClassDecl) jcStatement);
      return null;
    case CONTINUE:
      return convertContinue((JCContinue) jcStatement);
    case DO_WHILE_LOOP:
      return convertDoWhileLoop((JCDoWhileLoop) jcStatement);
    case EMPTY_STATEMENT:
      return Statement.createNoopStatement();
    case ENHANCED_FOR_LOOP:
      return convertEnhancedForLoop((JCEnhancedForLoop) jcStatement);
    case EXPRESSION_STATEMENT:
      return convertExpressionStatement((JCExpressionStatement) jcStatement);
    case FOR_LOOP:
      return convertForLoop((JCForLoop) jcStatement);
    case IF:
      return convertIf((JCIf) jcStatement);
    case LABELED_STATEMENT:
      return convertLabeledStatement((JCLabeledStatement) jcStatement);
    case RETURN:
      return convertReturn((JCReturn) jcStatement);
    case SWITCH:
      return convertSwitch((JCSwitch) jcStatement);
    case THROW:
      return convertThrow((JCThrow) jcStatement);
    case TRY:
      return convertTry((JCTry) jcStatement);
    case VARIABLE:
      return convertVariableDeclaration((JCVariableDecl) jcStatement);
    case WHILE_LOOP:
      return convertWhileLoop((JCWhileLoop) jcStatement);
    case SYNCHRONIZED:
      return convertSynchronized((JCSynchronized) jcStatement);
    default:
      throw new AssertionError("Unknown statement node type: " + jcStatement.getKind());
  }
}
/** Converts a statement, passing {@code null} through for absent optional statements. */
private Statement convertStatementOrNull(JCStatement statement) {
  if (statement == null) {
    return null;
  }
  return convertStatement(statement);
}
/** Converts a list of statements, preserving order. */
private List<Statement> convertStatements(List<JCStatement> statements) {
  ImmutableList.Builder<Statement> converted = ImmutableList.builder();
  for (JCStatement statement : statements) {
    converted.add(convertStatement(statement));
  }
  return converted.build();
}
/** Computes the unnamed source position of a node. */
private SourcePosition getSourcePosition(JCTree node) {
  return getSourcePosition(null, node);
}
/**
 * Computes a (possibly named) source position for a node, guessing the end position when javac
 * did not record one.
 */
private SourcePosition getSourcePosition(String name, JCTree node) {
  return getSourcePosition(name, node.getStartPosition(), guessEndPosition(node));
}
/**
 * Builds a SourcePosition from character offsets.
 *
 * <p>javac's LineMap reports 1-based line/column numbers; the {@code - 1} adjustments convert
 * them to the 0-based numbering used by J2CL's FilePosition.
 */
private SourcePosition getSourcePosition(
    String name, int startCharacterPosition, int endCharacterPosition) {
  int startLine = javacUnit.getLineMap().getLineNumber(startCharacterPosition) - 1;
  int startColumn = javacUnit.getLineMap().getColumnNumber(startCharacterPosition) - 1;
  int endLine = javacUnit.getLineMap().getLineNumber(endCharacterPosition) - 1;
  int endColumn = javacUnit.getLineMap().getColumnNumber(endCharacterPosition) - 1;
  return SourcePosition.newBuilder()
      .setFilePath(javacUnit.getSourceFile().getName())
      .setPackageRelativePath(getCurrentCompilationUnit().getPackageRelativePath())
      .setName(name)
      .setStartFilePosition(
          FilePosition.newBuilder()
              .setLine(startLine)
              .setColumn(startColumn)
              .setByteOffset(startCharacterPosition)
              .build())
      .setEndFilePosition(
          FilePosition.newBuilder()
              .setLine(endLine)
              // TODO(b/92372836): Document which character this should point to
              .setColumn(endColumn)
              // End byte offset is exclusive, hence the + 1.
              .setByteOffset(endCharacterPosition + 1)
              .build())
      .build();
}
/**
 * Javac does not always report the end position of a construct; when it is missing (-1), scan
 * the raw source text from the start position to the end of the identifier found there.
 */
private int guessEndPosition(JCTree node) {
  int startCharacterPosition = node.getStartPosition();
  int endCharacterPosition = node.getEndPosition(javacUnit.endPositions);
  if (endCharacterPosition == -1) {
    try {
      // Scan the source file for the end of an identifier.
      String src = javacUnit.sourcefile.getCharContent(true).toString();
      endCharacterPosition = startCharacterPosition;
      // NOTE(review): the post-increment also advances past the first non-identifier
      // character, so the result points one past the identifier's last character — confirm
      // this off-by-one is intentional given getSourcePosition adds 1 for the exclusive end.
      while (endCharacterPosition < src.length()
          && Character.isJavaIdentifierPart(src.charAt(endCharacterPosition++))) {}
    } catch (IOException e) {
      throw internalCompilerError(e, "Error getting endPosition for: %s.", node);
    }
  }
  return endCharacterPosition;
}
/** Returns the best guess for the position of a declaration node's name (unnamed variant). */
private SourcePosition getNamePosition(JCTree node) {
  return getNamePosition(null, node);
}
/**
 * Returns the best guess for the position of a declaration node's name by scanning the raw
 * source text, or null/the whole-node position when the name cannot be located.
 */
private SourcePosition getNamePosition(String name, JCTree node) {
  int start = node.getPreferredPosition();
  if (start == -1) {
    return null;
  }
  try {
    String src = javacUnit.sourcefile.getCharContent(true).toString();
    Kind kind = node.getKind();
    if (kind == Kind.ANNOTATION_TYPE
        || kind == Kind.CLASS
        || kind == Kind.ENUM
        || kind == Kind.INTERFACE) {
      // Skip the class/enum/interface token.
      // NOTE(review): this assumes the preferred position points at the keyword and that a
      // single space separates it from the name — a heuristic, acceptable for a best guess.
      while (src.charAt(start++) != ' ') {}
    } else if (kind != Kind.METHOD && kind != Kind.VARIABLE) {
      return getSourcePosition(node);
    }
    // Fall back to the whole node if we did not land on an identifier.
    if (!Character.isJavaIdentifierStart(src.charAt(start))) {
      return getSourcePosition(node);
    }
    int endPos = start + 1;
    while (Character.isJavaIdentifierPart(src.charAt(endPos))) {
      endPos++;
    }
    return getSourcePosition(name, start, endPos);
  } catch (IOException e) {
    throw internalCompilerError(e, "Error getting name Position for: %s.", node);
  }
}
// Expressions
/** Converts an array access expression, e.g. {@code a[i]}. */
private ArrayAccess convertArrayAccess(JCArrayAccess expression) {
  return ArrayAccess.newBuilder()
      .setArrayExpression(convertExpression(expression.getExpression()))
      .setIndexExpression(convertExpression(expression.getIndex()))
      .build();
}
/** Converts a simple assignment, e.g. {@code a = b}. */
private BinaryExpression convertAssignment(JCAssign expression) {
  return BinaryExpression.newBuilder()
      .setLeftOperand(convertExpression(expression.getVariable()))
      .setOperator(JavaEnvironment.getBinaryOperator(expression.getKind()))
      .setRightOperand(convertExpression(expression.getExpression()))
      .build();
}
/** Converts a compound assignment, e.g. {@code a += b}. */
private BinaryExpression convertAssignment(JCAssignOp expression) {
  return BinaryExpression.newBuilder()
      .setLeftOperand(convertExpression(expression.getVariable()))
      .setOperator(JavaEnvironment.getBinaryOperator(expression.getKind()))
      .setRightOperand(convertExpression(expression.getExpression()))
      .build();
}
/** Converts a binary expression, e.g. {@code a + b}. */
private BinaryExpression convertBinary(JCBinary expression) {
  return BinaryExpression.newBuilder()
      .setLeftOperand(convertExpression(expression.getLeftOperand()))
      .setOperator(JavaEnvironment.getBinaryOperator(expression.getKind()))
      .setRightOperand(convertExpression(expression.getRightOperand()))
      .build();
}
/** Converts a postfix unary expression, e.g. {@code a++}. */
private UnaryExpression convertPostfixUnary(JCUnary expression) {
  return PostfixExpression.newBuilder()
      .setOperand(convertExpression(expression.getExpression()))
      .setOperator(JavaEnvironment.getPostfixOperator(expression.getKind()))
      .build();
}
/** Converts a prefix unary expression, e.g. {@code !a} or {@code ++a}. */
private UnaryExpression convertPrefixUnary(JCUnary expression) {
  return PrefixExpression.newBuilder()
      .setOperand(convertExpression(expression.getExpression()))
      .setOperator(JavaEnvironment.getPrefixOperator(expression.getKind()))
      .build();
}
/** Converts a cast expression, e.g. {@code (T) a}. */
private CastExpression convertCast(JCTypeCast expression) {
  TypeDescriptor castTypeDescriptor = environment.createTypeDescriptor(expression.getType().type);
  return CastExpression.newBuilder()
      .setExpression(convertExpression(expression.getExpression()))
      .setCastTypeDescriptor(castTypeDescriptor)
      .build();
}
/** Converts a ternary conditional expression, e.g. {@code c ? a : b}. */
private ConditionalExpression convertConditional(JCConditional conditionalExpression) {
  return ConditionalExpression.newBuilder()
      .setTypeDescriptor(environment.createTypeDescriptor(conditionalExpression.type))
      .setConditionExpression(convertExpression(conditionalExpression.getCondition()))
      .setTrueExpression(convertExpression(conditionalExpression.getTrueExpression()))
      .setFalseExpression(convertExpression(conditionalExpression.getFalseExpression()))
      .build();
}
/** Converts an {@code instanceof} expression. */
private InstanceOfExpression convertInstanceOf(JCInstanceOf expression) {
  return InstanceOfExpression.newBuilder()
      .setSourcePosition(getSourcePosition(expression))
      .setExpression(convertExpression(expression.getExpression()))
      .setTestTypeDescriptor(environment.createTypeDescriptor(expression.getType().type))
      .build();
}
/**
 * Converts a lambda expression into a FunctionExpression; the body is converted within the
 * context of the functional interface method it implements (processEnclosedBy).
 */
private Expression convertLambda(JCLambda expression) {
  MethodDescriptor functionalMethodDescriptor =
      environment.getJsFunctionMethodDescriptor(expression.type);
  return processEnclosedBy(
      functionalMethodDescriptor,
      () ->
          FunctionExpression.newBuilder()
              .setTypeDescriptor(getTargetType(expression))
              .setParameters(
                  expression.getParameters().stream()
                      .map(variable -> createVariable((JCVariableDecl) variable, true))
                      .collect(toImmutableList()))
              .setStatements(
                  convertLambdaBody(
                          expression.getBody(),
                          functionalMethodDescriptor.getReturnTypeDescriptor())
                      .getStatements())
              .setSourcePosition(getSourcePosition(expression))
              .build());
}
/** Returns the type descriptor of the target type of a functional expression. */
private TypeDescriptor getTargetType(JCFunctionalExpression expression) {
  return environment.createTypeDescriptor(expression.type);
}
// Lambda expression bodies can be either an Expression or a Statement
private Block convertLambdaBody(JCTree lambdaBody, TypeDescriptor returnTypeDescriptor) {
Block body;
if (lambdaBody.getKind() == Kind.BLOCK) {
body = convertBlock((JCBlock) lambdaBody);
} else {
checkArgument(lambdaBody instanceof JCExpression);
Expression lambdaMethodBody = convertExpression((JCExpression) lambdaBody);
Statement statement =
AstUtils.createReturnOrExpressionStatement(
getSourcePosition(lambdaBody), lambdaMethodBody, returnTypeDescriptor);
body =
Block.newBuilder()
.setSourcePosition(getSourcePosition(lambdaBody))
.setStatements(statement)
.build();
}
return body;
}
/**
 * Converts method reference expressions of the form:
 *
 * <p>
 *
 * <pre> {@code A::m} into {@code (par1, ..., parN) -> A.m(par1, ..., parN) } </pre>
 *
 * <p>Array constructor references and constructor references are special-cased into array
 * creation and instantiation lambdas respectively.
 */
private Expression convertMemberReference(JCMemberReference memberReference) {
  MethodSymbol methodSymbol = (MethodSymbol) memberReference.sym;
  DeclaredTypeDescriptor expressionTypeDescriptor =
      environment.createDeclaredTypeDescriptor(memberReference.type);
  MethodDescriptor functionalMethodDescriptor =
      environment.getJsFunctionMethodDescriptor(memberReference.type);
  if (methodSymbol.getEnclosingElement().getQualifiedName().contentEquals("Array")) {
    // Arrays member references are seen as references to members on a class Array.
    return createArrayCreationLambda(
        functionalMethodDescriptor,
        environment.createTypeDescriptor(
            memberReference.getQualifierExpression().type, ArrayTypeDescriptor.class),
        getSourcePosition(memberReference));
  }
  com.sun.tools.javac.code.Type returnType =
      methodSymbol.isConstructor()
          ? methodSymbol.getEnclosingElement().asType()
          : memberReference.referentType.getReturnType();
  MethodDescriptor targetMethodDescriptor =
      environment.createMethodDescriptor(
          (ExecutableType) memberReference.referentType, returnType, methodSymbol);
  if (methodSymbol.isConstructor()) {
    Expression qualifier =
        targetMethodDescriptor
            .getEnclosingTypeDescriptor()
            .getTypeDeclaration()
            .isCapturingEnclosingInstance()
            // Inner classes may have an implicit enclosing class qualifier.
            ? resolveExplicitOuterClassReference(
                targetMethodDescriptor.getEnclosingTypeDescriptor())
            : null;
    return createInstantiationLambda(
        functionalMethodDescriptor,
        targetMethodDescriptor,
        qualifier,
        getSourcePosition(memberReference));
  }
  Expression qualifier = convertExpressionOrNull(memberReference.getQualifierExpression());
  return createMethodReferenceLambda(
      getSourcePosition(memberReference),
      qualifier,
      targetMethodDescriptor,
      expressionTypeDescriptor,
      // functional interface method that the expression implements.
      functionalMethodDescriptor);
}
/**
 * Converts an array creation expression, e.g. {@code new int[3][]} or {@code new int[] {1, 2}}.
 * Omitted dimensions are represented as nulls in the dimension expression list.
 */
private NewArray convertNewArray(JCNewArray expression) {
  ArrayTypeDescriptor typeDescriptor =
      environment.createTypeDescriptor(expression.type, ArrayTypeDescriptor.class);
  List<Expression> dimensionExpressions = convertExpressions(expression.getDimensions());
  // Pad the dimension expressions with null values to denote omitted dimensions.
  AstUtils.addNullPadding(dimensionExpressions, typeDescriptor.getDimensions());
  ArrayLiteral arrayLiteral =
      expression.getInitializers() == null
          ? null
          : new ArrayLiteral(typeDescriptor, convertExpressions(expression.getInitializers()));
  return NewArray.newBuilder()
      .setTypeDescriptor(typeDescriptor)
      .setDimensionExpressions(dimensionExpressions)
      .setArrayLiteral(arrayLiteral)
      .build();
}
/** Converts a string literal. */
private static StringLiteral convertStringLiteral(JCLiteral literal) {
  String value = (String) literal.getValue();
  return new StringLiteral(value);
}
/** Converts a boolean literal. */
private static BooleanLiteral convertBooleanLiteral(JCLiteral literal) {
  Boolean value = (Boolean) literal.getValue();
  return BooleanLiteral.get(value);
}
/** Converts a char literal; chars are modeled as number literals. */
private static NumberLiteral convertCharLiteral(JCLiteral literal) {
  Character value = (Character) literal.getValue();
  return NumberLiteral.fromChar(value);
}
/** Converts a numeric literal (int, long, float or double). */
private NumberLiteral convertNumberLiteral(JCLiteral literal) {
  PrimitiveTypeDescriptor typeDescriptor =
      (PrimitiveTypeDescriptor) environment.createTypeDescriptor(literal.type);
  Number value = (Number) literal.getValue();
  return new NumberLiteral(typeDescriptor, value);
}
/**
 * Converts a member select that targets a field or pseudo-field.
 *
 * <p>Handles the special forms {@code T.class}, {@code T.this}, {@code T.super} and
 * {@code array.length}; otherwise produces a FieldAccess. Returns null when the selected
 * member is not a variable (e.g. a method select handled elsewhere).
 */
private Expression convertFieldAccess(JCFieldAccess fieldAccess) {
  JCExpression expression = fieldAccess.getExpression();
  if (fieldAccess.name.contentEquals("class")) {
    return new TypeLiteral(
        getSourcePosition(fieldAccess), environment.createTypeDescriptor(expression.type));
  }
  if (fieldAccess.name.contentEquals("this")) {
    return resolveOuterClassReference(
        environment
            .createDeclarationForType((ClassSymbol) ((JCIdent) expression).sym)
            .toUnparameterizedTypeDescriptor(),
        true);
  }
  // NOTE(review): this branch mirrors the "this" branch exactly — confirm qualified super
  // accesses are intended to resolve like qualified this here.
  if (fieldAccess.name.contentEquals("super")) {
    return resolveOuterClassReference(
        environment
            .createDeclarationForType((ClassSymbol) ((JCIdent) expression).sym)
            .toUnparameterizedTypeDescriptor(),
        true);
  }
  Expression qualifier;
  if (fieldAccess.sym instanceof VariableElement) {
    qualifier = convertExpression(expression);
    if (qualifier instanceof JavaScriptConstructorReference) {
      // Remove qualifier if it a type. A type can only be a qualifier for a static field and
      // in such cases the actual target type is part of the field descriptor.
      checkState(fieldAccess.sym.isStatic());
      qualifier = null;
    }
    // Guard against the qualifier having been nulled out above: a static field named "length"
    // would otherwise NPE here. Array length accesses always have a non-null qualifier.
    if (qualifier != null
        && fieldAccess.name.contentEquals("length")
        && qualifier.getTypeDescriptor().isArray()) {
      return ArrayLength.newBuilder().setArrayExpression(qualifier).build();
    }
    FieldDescriptor fieldDescriptor =
        environment.createFieldDescriptor((VariableElement) fieldAccess.sym, fieldAccess.type);
    if (qualifier instanceof SuperReference) {
      qualifier = new ThisReference((DeclaredTypeDescriptor) qualifier.getTypeDescriptor());
    }
    return FieldAccess.newBuilder()
        .setQualifier(qualifier)
        .setTargetFieldDescriptor(fieldDescriptor)
        .build();
  }
  // Not a variable member; callers handle the null (e.g. method selects).
  return null;
}
/**
 * Converts a {@code new} expression; an anonymous class body, if present, is converted as a
 * type declaration first.
 */
private Expression convertNewClass(JCNewClass expression) {
  if (expression.getClassBody() != null) {
    convertClassDeclaration(expression.getClassBody(), expression);
  }
  return convertInstantiation(expression);
}
/**
 * Converts the instantiation part of a {@code new} expression into a NewInstance, resolving
 * the enclosing-instance qualifier for nested classes and threading captured variables.
 */
private Expression convertInstantiation(JCNewClass expression) {
  MethodSymbol constructorBinding = (MethodSymbol) expression.constructor;
  DeclaredTypeDescriptor targetType = environment.createDeclaredTypeDescriptor(expression.type);
  MethodDescriptor constructorMethodDescriptor =
      environment.createMethodDescriptor(
          targetType,
          (MethodSymbol)
              constructorBinding.asMemberOf(expression.type, environment.internalTypes),
          constructorBinding);
  Expression qualifier = convertExpressionOrNull(expression.getEnclosingExpression());
  List<Expression> arguments =
      convertArguments(constructorMethodDescriptor, expression.getArguments());
  DeclaredTypeDescriptor targetClassDescriptor =
      constructorMethodDescriptor.getEnclosingTypeDescriptor();
  // Instantiation implicitly references all captured variables since in the flat class model
  // captures become fields and need to be threaded through the constructors.
  // This is crucial to cover some corner cases where the capture is never referenced in the
  // class nor its superclasses but is implicitly referenced by invoking a constructor of
  // the capturing class.
  propagateAllCapturesOutward(targetClassDescriptor.getTypeDeclaration());
  if (targetClassDescriptor.getTypeDeclaration().isAnonymous() && qualifier != null) {
    // This is the qualifier for the super invocation, pass as first parameter, since the
    // constructor for the anonymous class expects it there.
    arguments.add(0, qualifier);
    qualifier = null;
  }
  boolean needsQualifier =
      constructorMethodDescriptor
          .getEnclosingTypeDescriptor()
          .getTypeDeclaration()
          .isCapturingEnclosingInstance();
  checkArgument(
      qualifier == null || needsQualifier,
      "NewInstance of non nested class should have no qualifier.");
  // Resolve the qualifier of NewInstance that creates an instance of a nested class.
  // Implicit 'this' doesn't always refer to 'this', it may refer to any enclosing instances.
  qualifier =
      needsQualifier && qualifier == null
          // find the enclosing instance in non-strict mode, which means
          // for example,
          // class A {
          //   class B {}
          //   class C extends A {
          //     // The qualifier of new B() should be C.this, not A.this.
          //     public void test() { new B(); }
          //   }
          // }
          ? resolveOuterClassReference(
              constructorMethodDescriptor
                  .getEnclosingTypeDescriptor()
                  .getEnclosingTypeDescriptor(),
              false)
          : qualifier;
  return NewInstance.Builder.from(constructorMethodDescriptor)
      .setQualifier(qualifier)
      .setArguments(arguments)
      .build();
}
/**
 * Converts a method invocation, handling super constructor calls, default-method calls through
 * super, and resolution of implicit enclosing-instance qualifiers.
 */
private Expression convertMethodInvocation(JCMethodInvocation methodInvocation) {
  JCExpression jcQualifier = getExplicitQualifier(methodInvocation);
  Expression qualifier = convertExpressionOrNull(jcQualifier);
  MethodSymbol methodSymbol = getMemberSymbol(methodInvocation.getMethodSelect());
  MethodDescriptor methodDescriptor =
      environment.createMethodDescriptor(
          (ExecutableType) methodInvocation.getMethodSelect().type,
          methodInvocation.type,
          methodSymbol);
  if (methodDescriptor.isConstructor()
      && methodDescriptor.isMemberOf(TypeDescriptors.get().javaLangEnum)) {
    // Fix inconsistencies in calls to JRE's Enum constructor calls. Enum constructor has 2
    // implicit parameters (name and ordinal) that are added by a normalization pass. This removes
    // the parameter definition from the descriptor so that they are consistent.
    checkArgument(
        methodDescriptor.getParameterDescriptors().size()
            == methodInvocation.getArguments().size() + 2);
    methodDescriptor =
        MethodDescriptor.Builder.from(methodDescriptor)
            .setParameterDescriptors(ImmutableList.of())
            .setDeclarationDescriptor(null)
            .build();
  }
  List<Expression> arguments =
      convertArguments(methodDescriptor, methodInvocation.getArguments());
  if (isSuperConstructorCall(methodInvocation)) {
    DeclaredTypeDescriptor targetTypeDescriptor = methodDescriptor.getEnclosingTypeDescriptor();
    if (qualifier == null
        && targetTypeDescriptor.getTypeDeclaration().isCapturingEnclosingInstance()) {
      qualifier =
          resolveOuterClassReference(targetTypeDescriptor.getEnclosingTypeDescriptor(), false);
    }
    return MethodCall.Builder.from(methodDescriptor)
        .setQualifier(qualifier)
        .setArguments(arguments)
        .setSourcePosition(getSourcePosition(methodInvocation))
        .build();
  }
  boolean hasSuperQualifier = isSuperExpression(jcQualifier);
  boolean isStaticDispatch = isQualifiedSuperExpression(jcQualifier);
  if (hasSuperQualifier
      && (qualifier.getTypeDescriptor().isInterface() || methodDescriptor.isDefaultMethod())) {
    // This is a default method call through super.
    qualifier = new ThisReference(methodDescriptor.getEnclosingTypeDescriptor());
    isStaticDispatch = true;
  } else if (hasSuperQualifier
      && !qualifier.getTypeDescriptor().isSameBaseType(getCurrentType().getTypeDescriptor())) {
    qualifier =
        resolveOuterClassReference((DeclaredTypeDescriptor) qualifier.getTypeDescriptor(), false);
  }
  // Implicit instance-member qualifier: resolve to this or an enclosing instance.
  if (qualifier == null && methodDescriptor.isInstanceMember()) {
    DeclaredTypeDescriptor enclosingTypeDescriptor =
        methodDescriptor.getEnclosingTypeDescriptor();
    qualifier = resolveOuterClassReference(enclosingTypeDescriptor, false);
  }
  return MethodCall.Builder.from(methodDescriptor)
      .setQualifier(qualifier)
      .setArguments(arguments)
      .setStaticDispatch(isStaticDispatch)
      .setSourcePosition(getSourcePosition(methodInvocation))
      .build();
}
/**
 * Converts call arguments and packages trailing varargs if needed. Returns a mutable list
 * (Collectors.toList) on purpose so varargs packaging and callers may rewrite it in place.
 */
private List<Expression> convertArguments(
    MethodDescriptor methodDescriptor, List<JCExpression> argumentExpressions) {
  List<Expression> arguments =
      argumentExpressions.stream().map(this::convertExpression).collect(Collectors.toList());
  return AstUtils.maybePackageVarargs(methodDescriptor, arguments);
}
/**
 * Returns the explicit qualifier expression of a method invocation, or null when the call has
 * no explicit qualifier (plain identifier calls); implicit instance qualifiers are resolved
 * later by the caller.
 */
private static JCExpression getExplicitQualifier(JCMethodInvocation methodInvocation) {
  if (methodInvocation.getMethodSelect().getKind() != Kind.IDENTIFIER) {
    return getQualifier(methodInvocation.getMethodSelect());
  }
  // No qualifier specified.
  MethodSymbol memberSymbol = getMemberSymbol(methodInvocation.getMethodSelect());
  if (memberSymbol.isStatic()) {
    return null;
  }
  // NOTE(review): getQualifier also returns null for IDENTIFIER selects, so both branches
  // below yield null for identifier calls; the static check documents intent only.
  return getQualifier(methodInvocation.getMethodSelect());
}
/**
 * Converts a plain identifier: this/super references, type references, local variables and
 * parameters, or (implicitly qualified) field accesses.
 */
private Expression convertIdent(JCIdent identifier) {
  if (isThisExpression(identifier)) {
    return new ThisReference(getCurrentType().getTypeDescriptor());
  }
  if (isSuperExpression(identifier)) {
    return new SuperReference(getCurrentType().getTypeDescriptor());
  }
  Symbol symbol = identifier.sym;
  if (symbol instanceof ClassSymbol) {
    return new JavaScriptConstructorReference(
        environment.createDeclarationForType((ClassSymbol) identifier.sym));
  }
  // Method identifiers are handled as part of method invocation conversion.
  if (symbol instanceof MethodSymbol) {
    throw new AssertionError("Unexpected symbol class: " + symbol.getClass());
  }
  VarSymbol varSymbol = (VarSymbol) symbol;
  if (symbol.getKind() == ElementKind.LOCAL_VARIABLE
      || symbol.getKind() == ElementKind.RESOURCE_VARIABLE
      || symbol.getKind() == ElementKind.PARAMETER
      || symbol.getKind() == ElementKind.EXCEPTION_PARAMETER) {
    Variable variable = variableByVariableElement.get(symbol);
    return resolveVariableReference(variable);
  }
  // A field referenced without qualifier; resolve the implicit this/outer qualifier.
  FieldDescriptor fieldDescriptor = environment.createFieldDescriptor(varSymbol, identifier.type);
  Expression qualifier =
      fieldDescriptor.isStatic()
          ? null
          : resolveOuterClassReference(fieldDescriptor.getEnclosingTypeDescriptor(), false);
  return FieldAccess.newBuilder()
      .setQualifier(qualifier)
      .setTargetFieldDescriptor(fieldDescriptor)
      .build();
}
/** Returns true if the invocation is a {@code super(...)} constructor call. */
private static boolean isSuperConstructorCall(JCMethodInvocation methodInvocation) {
  return isSuperExpression(methodInvocation.getMethodSelect());
}
/** Returns true if the expression is the bare identifier {@code super}. */
private static boolean isSuperExpression(JCExpression expression) {
  return expression instanceof JCIdent
      && ((JCIdent) expression).getName().contentEquals("super");
}
/** Returns true if the expression is a qualified super reference, e.g. {@code A.super}. */
private static boolean isQualifiedSuperExpression(JCExpression expression) {
  return expression instanceof JCFieldAccess
      && ((JCFieldAccess) expression).getIdentifier().contentEquals("super");
}
/** Returns true if the expression is the bare identifier {@code this}. */
private static boolean isThisExpression(JCExpression expression) {
  return expression instanceof JCIdent
      && ((JCIdent) expression).getName().contentEquals("this");
}
/** Returns the method symbol targeted by a method select (identifier or member select). */
private static MethodSymbol getMemberSymbol(JCTree.JCExpression node) {
  switch (node.getKind()) {
    case IDENTIFIER:
      return (MethodSymbol) ((JCTree.JCIdent) node).sym.baseSymbol();
    case MEMBER_SELECT:
      return (MethodSymbol) ((JCTree.JCFieldAccess) node).sym;
    default:
      throw new AssertionError("Unexpected tree kind: " + node.getKind());
  }
}
/** Returns the qualifier of a member select, or null for a plain identifier. */
private static JCExpression getQualifier(JCTree.JCExpression node) {
  switch (node.getKind()) {
    case IDENTIFIER:
      return null;
    case MEMBER_SELECT:
      return ((JCTree.JCFieldAccess) node).getExpression();
    default:
      throw new AssertionError("Unexpected tree kind: " + node.getKind());
  }
}
/**
 * Converts an if/while condition, stripping the mandatory outer parentheses. The cast is
 * unconditional because the Java grammar guarantees conditions are parenthesized.
 */
private Expression convertConditionRemovingOuterParentheses(JCExpression expression) {
  return convertExpression(((JCParens) expression).getExpression());
}
/** Converts a parenthesized expression by dropping the parentheses. */
private Expression convertParens(JCParens expression) {
  return convertExpression(expression.getExpression());
}
/**
 * Dispatches a javac expression node to its specific converter.
 *
 * <p>PARAMETERIZED_TYPE yields null (type arguments carry no expression value); null literals
 * are converted to the null value of their contextual type.
 */
private Expression convertExpression(JCExpression jcExpression) {
  switch (jcExpression.getKind()) {
    case ARRAY_ACCESS:
      return convertArrayAccess((JCArrayAccess) jcExpression);
    case ASSIGNMENT:
      return convertAssignment((JCAssign) jcExpression);
    case CONDITIONAL_EXPRESSION:
      return convertConditional((JCConditional) jcExpression);
    case IDENTIFIER:
      return convertIdent((JCIdent) jcExpression);
    case PARAMETERIZED_TYPE:
      return null;
    case INSTANCE_OF:
      return convertInstanceOf((JCInstanceOf) jcExpression);
    case LAMBDA_EXPRESSION:
      return convertLambda((JCLambda) jcExpression);
    case MEMBER_REFERENCE:
      return convertMemberReference((JCMemberReference) jcExpression);
    case MEMBER_SELECT:
      return convertFieldAccess((JCFieldAccess) jcExpression);
    case METHOD_INVOCATION:
      return convertMethodInvocation((JCMethodInvocation) jcExpression);
    case NEW_ARRAY:
      return convertNewArray((JCNewArray) jcExpression);
    case NEW_CLASS:
      return convertNewClass((JCNewClass) jcExpression);
    case PARENTHESIZED:
      return convertParens((JCParens) jcExpression);
    case TYPE_CAST:
      return convertCast((JCTypeCast) jcExpression);
    case BOOLEAN_LITERAL:
      return convertBooleanLiteral((JCLiteral) jcExpression);
    case CHAR_LITERAL:
      return convertCharLiteral((JCLiteral) jcExpression);
    case DOUBLE_LITERAL:
    case FLOAT_LITERAL:
    case INT_LITERAL:
    case LONG_LITERAL:
      // All numeric literal kinds share one conversion.
      return convertNumberLiteral((JCLiteral) jcExpression);
    case STRING_LITERAL:
      return convertStringLiteral((JCLiteral) jcExpression);
    case NULL_LITERAL:
      return environment.createTypeDescriptor(jcExpression.type).getNullValue();
    case AND:
    case CONDITIONAL_AND:
    case CONDITIONAL_OR:
    case DIVIDE:
    case EQUAL_TO:
    case GREATER_THAN:
    case GREATER_THAN_EQUAL:
    case LEFT_SHIFT:
    case LESS_THAN:
    case LESS_THAN_EQUAL:
    case MINUS:
    case MULTIPLY:
    case NOT_EQUAL_TO:
    case OR:
    case PLUS:
    case REMAINDER:
    case RIGHT_SHIFT:
    case UNSIGNED_RIGHT_SHIFT:
    case XOR:
      return convertBinary((JCBinary) jcExpression);
    case BITWISE_COMPLEMENT:
    case LOGICAL_COMPLEMENT:
    case PREFIX_DECREMENT:
    case PREFIX_INCREMENT:
    case UNARY_MINUS:
    case UNARY_PLUS:
      return convertPrefixUnary((JCUnary) jcExpression);
    case POSTFIX_DECREMENT:
    case POSTFIX_INCREMENT:
      return convertPostfixUnary((JCUnary) jcExpression);
    case AND_ASSIGNMENT:
    case DIVIDE_ASSIGNMENT:
    case LEFT_SHIFT_ASSIGNMENT:
    case MINUS_ASSIGNMENT:
    case MULTIPLY_ASSIGNMENT:
    case OR_ASSIGNMENT:
    case PLUS_ASSIGNMENT:
    case REMAINDER_ASSIGNMENT:
    case RIGHT_SHIFT_ASSIGNMENT:
    case UNSIGNED_RIGHT_SHIFT_ASSIGNMENT:
    case XOR_ASSIGNMENT:
      return convertAssignment((JCAssignOp) jcExpression);
    case OTHER:
      if (jcExpression.hasTag(Tag.NULLCHK)) {
        // This is an expression with an implicit null check.
        return RuntimeMethods.createCheckNotNullCall(
            convertExpression(((JCUnary) jcExpression).arg));
      }
      // fall through
    default:
      throw new AssertionError(
          "Unknown expression " + jcExpression + " (node type: " + jcExpression.getKind() + ")");
  }
}
/** Converts an expression, passing {@code null} through for absent optional expressions. */
private Expression convertExpressionOrNull(JCExpression expression) {
  if (expression == null) {
    return null;
  }
  return convertExpression(expression);
}
/**
 * Converts a list of expressions. Deliberately returns a mutable list (Collectors.toList):
 * callers pad it with nulls (convertNewArray) or insert elements (convertInstantiation).
 */
private List<Expression> convertExpressions(List<JCExpression> expressions) {
  return expressions.stream().map(this::convertExpression).collect(Collectors.toList());
}
/**
 * Builds a J2CL CompilationUnit from a javac compilation unit. package-info files additionally
 * record their package properties (JsInterop namespace, null-markedness) in the
 * PackageInfoCache before any other source is processed.
 */
private CompilationUnit build(JCCompilationUnit javacUnit) {
  this.javacUnit = javacUnit;
  if (javacUnit.getSourceFile().getName().endsWith("package-info.java")
      && javacUnit.getPackage() != null) {
    String packageName = javacUnit.getPackageName().toString();
    String packageJsNamespace = getPackageJsNamespace(javacUnit);
    boolean isNullMarked = isNullMarked(javacUnit);
    PackageInfoCache.get()
        .setPackageProperties(
            PackageInfoCache.SOURCE_CLASS_PATH_ENTRY,
            packageName,
            packageJsNamespace,
            isNullMarked);
  }
  setCurrentCompilationUnit(
      new CompilationUnit(
          javacUnit.getSourceFile().getName(),
          javacUnit.getPackageName() == null ? "" : javacUnit.getPackageName().toString()));
  for (JCTree tree : javacUnit.getTypeDecls()) {
    if (tree instanceof JCClassDecl) {
      convertClassDeclaration((JCClassDecl) tree);
    }
  }
  return getCurrentCompilationUnit();
}
/** Builds J2CL compilation units for all the given javac compilation units. */
public static List<CompilationUnit> build(
    List<CompilationUnitTree> compilationUnits, JavaEnvironment javaEnvironment) {
  CompilationUnitBuilder compilationUnitBuilder = new CompilationUnitBuilder(javaEnvironment);
  // Ensure that all source package-info classes come before all other classes so that the
  // freshness of the PackageInfoCache can be trusted.
  sortPackageInfoFirst(compilationUnits);
  return compilationUnits.stream()
      .map(JCCompilationUnit.class::cast)
      .map(compilationUnitBuilder::build)
      .collect(toImmutableList());
}
/** Returns the JsInterop namespace declared on the unit's package, or {@code null} if none. */
private static String getPackageJsNamespace(JCCompilationUnit javacUnit) {
  PackageSymbol packageSymbol = javacUnit.packge;
  return packageSymbol == null ? null : JsInteropAnnotationUtils.getJsNamespace(packageSymbol);
}
/** Returns true when the unit's package carries the JSpecify null-marked annotation. */
private static boolean isNullMarked(JCCompilationUnit javacUnit) {
  PackageSymbol packageSymbol = javacUnit.packge;
  if (packageSymbol == null) {
    return false;
  }
  // NOTE(review): the constant reads NULL_MAKED — presumably a typo in the external
  // declaration that must be matched as-is; confirm against Nullability.
  return AnnotationUtils.findAnnotationBindingByName(
          packageSymbol.getAnnotationMirrors(), Nullability.ORG_JSPECIFY_NULLNESS_NULL_MAKED)
      != null;
}
/**
 * Orders the list in place so that all source package-info units come before all other units
 * (so the freshness of the PackageInfoCache can be trusted), then by file path.
 */
private static void sortPackageInfoFirst(List<CompilationUnitTree> compilationUnits) {
  compilationUnits.sort(
      (left, right) -> {
        String leftPath = left.getSourceFile().getName();
        String rightPath = right.getSourceFile().getName();
        boolean leftIsPackageInfo = leftPath.endsWith("package-info.java");
        boolean rightIsPackageInfo = rightPath.endsWith("package-info.java");
        return ComparisonChain.start()
            .compareTrueFirst(leftIsPackageInfo, rightIsPackageInfo)
            .compare(leftPath, rightPath)
            .result();
      });
}
/**
 * Returns a nested path of field accesses {@code this.$outer_this.....$outer_this} required to
 * address a specific outer class.
 *
 * <p>The search for the enclosing class might be strict or non-strict, depending whether a
 * subclass of the enclosing class is acceptable as a qualifier. When the member's qualifier is
 * implicit, superclasses of the enclosing class are acceptable.
 */
private Expression resolveOuterClassReference(
    DeclaredTypeDescriptor targetTypeDescriptor, boolean strict) {
  // Start with `this` on the innermost (current) type.
  Expression qualifier = new ThisReference(getCurrentType().getTypeDescriptor());
  DeclaredTypeDescriptor innerTypeDescriptor = getCurrentType().getTypeDescriptor();
  // Walk outwards while the current type captures its enclosing instance.
  while (innerTypeDescriptor.getTypeDeclaration().isCapturingEnclosingInstance()) {
    boolean found =
        strict
            ? innerTypeDescriptor.hasSameRawType(targetTypeDescriptor)
            : innerTypeDescriptor.isSubtypeOf(targetTypeDescriptor);
    if (found) {
      break;
    }
    // Not the target yet: append another `.$outer_this` hop and move one level outward.
    qualifier =
        FieldAccess.Builder.from(
                AstUtils.getFieldDescriptorForEnclosingInstance(
                    innerTypeDescriptor, innerTypeDescriptor.getEnclosingTypeDescriptor()))
            .setQualifier(qualifier)
            .build();
    innerTypeDescriptor = innerTypeDescriptor.getEnclosingTypeDescriptor();
  }
  return qualifier;
}
}
|
<gh_stars>0
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/* global LOGNAME */
/**
* @module moodle-editor_atto-editor
* @submodule toolbarnav
*/
/**
* Toolbar Navigation functions for the Atto editor.
*
* See {{#crossLink "M.editor_atto.Editor"}}{{/crossLink}} for details.
*
* @namespace M.editor_atto
* @class EditorToolbarNav
*/
function EditorToolbarNav() {}

EditorToolbarNav.ATTRS = {
};

EditorToolbarNav.prototype = {
    /**
     * The current focal point for tabbing.
     *
     * @property _tabFocus
     * @type Node
     * @default null
     * @private
     */
    _tabFocus: null,

    /**
     * Set up the watchers for toolbar navigation.
     *
     * @method setupToolbarNavigation
     * @chainable
     */
    setupToolbarNavigation: function() {
        // Listen for Arrow left (37) and Arrow right (39) keys.
        this._wrapper.delegate('key',
                this.toolbarKeyboardNavigation,
                'down:37,39',
                '.' + CSS.TOOLBAR,
                this);

        // Keep the roving tabindex in sync when any toolbar button receives focus.
        this._wrapper.delegate('focus',
                function(e) {
                    this._setTabFocus(e.currentTarget);
                }, '.' + CSS.TOOLBAR + ' button', this);

        return this;
    },

    /**
     * Implement arrow key navigation for the buttons in the toolbar.
     *
     * @method toolbarKeyboardNavigation
     * @param {EventFacade} e - the keyboard event.
     */
    toolbarKeyboardNavigation: function(e) {
        // Prevent the default browser behaviour.
        e.preventDefault();

        // On cursor moves we loop through the buttons.
        var buttons = this.toolbar.all('button'),
            direction = 1,
            button,
            current = e.target.ancestor('button', true);

        if (e.keyCode === 37) {
            // Moving left so reverse the direction.
            direction = -1;
        }

        button = this._findFirstFocusable(buttons, current, direction);
        if (button) {
            button.focus();
            this._setTabFocus(button);
        } else {
            Y.log("Unable to find a button to focus on", 'debug', LOGNAME);
        }
    },

    /**
     * Find the first focusable button.
     *
     * @param {NodeList} buttons A list of nodes.
     * @param {Node} startAt The node in the list to start the search from.
     * @param {Number} direction The direction in which to search (1 or -1).
     * @return {Node | Undefined} The Node or undefined.
     * @method _findFirstFocusable
     * @private
     */
    _findFirstFocusable: function(buttons, startAt, direction) {
        var checkCount = 0,
            group,
            candidate,
            button,
            index;

        // Determine which button to start the search from.
        index = buttons.indexOf(startAt);
        if (index < 0) {
            // BUGFIX: this previously tested `index < -1`, which can never be true because
            // indexOf() returns -1 on a miss, so a missing start button was silently
            // searched from index -1 instead of being reported and reset.
            Y.log("Unable to find the button in the list of buttons", 'debug', LOGNAME);
            index = 0;
        }

        // Try to find the next focusable candidate.
        while (checkCount < buttons.size()) {
            index += direction;
            if (index < 0) {
                // Wrap backwards onto the last button.
                index = buttons.size() - 1;
            } else if (index >= buttons.size()) {
                // Wrap forwards onto the first button.
                index = 0;
            }
            candidate = buttons.item(index);

            // Add a counter to ensure we don't get stuck in a loop if there's only one
            // visible menu item.
            checkCount++;

            // Skip the candidate while:
            // * the button is hidden or disabled;
            // * the button's group is hidden.
            if (candidate.hasAttribute('hidden') || candidate.hasAttribute('disabled')) {
                continue;
            }
            group = candidate.ancestor('.atto_group');
            if (group.hasAttribute('hidden')) {
                continue;
            }

            button = candidate;
            break;
        }

        // Undefined when every button was checked and none was focusable.
        return button;
    },

    /**
     * Check the tab focus.
     *
     * When we disable or hide a button, we should call this method to ensure that the
     * focus is not currently set on an inaccessible button, otherwise tabbing to the toolbar
     * would be impossible.
     *
     * @method checkTabFocus
     * @chainable
     */
    checkTabFocus: function() {
        if (this._tabFocus) {
            if (this._tabFocus.hasAttribute('disabled') || this._tabFocus.hasAttribute('hidden')
                    || this._tabFocus.ancestor('.atto_group').hasAttribute('hidden')) {
                // Find the first available button.
                var button = this._findFirstFocusable(this.toolbar.all('button'), this._tabFocus, -1);
                if (button) {
                    if (this._tabFocus.compareTo(document.activeElement)) {
                        // We should also move the focus, because the inaccessible button also
                        // has the focus.
                        button.focus();
                    }
                    this._setTabFocus(button);
                }
            }
        }
        return this;
    },

    /**
     * Sets tab focus for the toolbar to the specified Node.
     *
     * @method _setTabFocus
     * @param {Node} button The node that focus should now be set to
     * @chainable
     * @private
     */
    _setTabFocus: function(button) {
        if (this._tabFocus) {
            // Unset the previous entry so only one button is tabbable at a time.
            this._tabFocus.setAttribute('tabindex', '-1');
        }

        // Set up the new entry.
        this._tabFocus = button;
        this._tabFocus.setAttribute('tabindex', 0);

        // And update the activedescendant to point at the currently selected button.
        this.toolbar.setAttribute('aria-activedescendant', this._tabFocus.generateID());

        return this;
    }
};
// Mix the toolbar navigation behaviour into the Atto editor class.
Y.Base.mix(Y.M.editor_atto.Editor, [EditorToolbarNav]);
|
<reponame>pip-services-archive/pip-services-runtime-go<gh_stars>0
package log
import (
"testing"
"github.com/stretchr/testify/suite"
"github.com/pip-services/pip-services-runtime-go"
"github.com/pip-services/pip-services-runtime-go/log"
)
// CompositeLogTest verifies that a CompositeLog fans log calls out to the
// loggers it wraps. It embeds testify's suite for per-test setup support.
type CompositeLogTest struct {
	suite.Suite
	log     runtime.ILog // composite logger under test
	fixture *LogFixture  // shared assertions reused across the suite
}
// SetupTest builds a composite logger (console + null) and the shared log
// fixture before each test in the suite runs.
func (suite *CompositeLogTest) SetupTest() {
	consoleConfig := runtime.DynamicMap{"level": 6}
	children := []runtime.ILog{
		log.NewConsoleLog(&consoleConfig),
		log.NewNullLog(nil),
	}
	suite.log = log.NewCompositeLog(children)
	suite.fixture = NewLogFixture(suite.log)
}
// TestLogLevel delegates to the shared fixture's log-level checks.
func (suite *CompositeLogTest) TestLogLevel() {
	suite.fixture.TestLogLevel(suite.T())
}

// TestTextOutput delegates to the shared fixture's text-output checks.
func (suite *CompositeLogTest) TestTextOutput() {
	suite.fixture.TestTextOutput(suite.T())
}

// TestMixedOutput delegates to the shared fixture's mixed-output checks.
func (suite *CompositeLogTest) TestMixedOutput() {
	suite.fixture.TestMixedOutput(suite.T())
}

// TestCompositeLogTestSuite is the `go test` entry point for this suite.
func TestCompositeLogTestSuite(t *testing.T) {
	suite.Run(t, new(CompositeLogTest))
}
<filename>docker/doc.go
/*Package docker exposes functionality to manage building and posting Docker containers */
package docker
|
import java.io.FileInputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.Socket;
import java.net.URL;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * NOTE(review): this class reads as a static-analysis test fixture enumerating taint
 * "sources" — each trailing comment marks an expression a checker is expected to flag
 * as (remote) user input. The unused locals and dead statements appear intentional;
 * confirm before "cleaning up".
 */
public class A {
  public static void main(String[] args) {
    String[] a = args; // user input
    String s = args[0]; // user input
  }

  public static void userInput() throws SQLException, IOException, MalformedURLException {
    System.getenv("test"); // user input

    class TestServlet extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp)
          throws ServletException, IOException {
        req.getParameter("test"); // remote user input
        req.getHeader("test"); // remote user input
        req.getQueryString(); // remote user input
        req.getCookies()[0].getValue(); // remote user input
      }
    }

    new Properties().getProperty("test"); // user input
    System.getProperty("test"); // user input

    new Object() {
      public void test(ResultSet rs) throws SQLException {
        rs.getString(0); // user input
      }
    };

    new URL("test").openConnection().getInputStream(); // remote user input
    new Socket("test", 1234).getInputStream(); // remote user input
    InetAddress.getByName("test").getHostName(); // remote user input
    System.in.read(); // user input
    new FileInputStream("test").read(); // user input
  }
}
|
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.annotation.WebServlet;
import java.io.*;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
@WebServlet("/form")
public class FormServlet extends HttpServlet {
protected void doGet(HttpServletRequest request, HttpServletResponse response
) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
out.println("<html>");
out.println("<body>");
out.println("<h1>Form</h1>");
out.println("<form action='submit'>");
out.println("Name: <input type='text' name='name'><br><br>");
out.println("Age: <input type='text' name='age'><br><br>");
out.println("<input type='submit' value='Submit'>");
out.println("</form>");
out.println("</body>");
out.println("</html>");
}
protected void doPost(HttpServletRequest request, HttpServletResponse response
) throws ServletException, IOException {
//get parameters
String name = request.getParameter("name");
int age = Integer.parseInt(request.getParameter("age"));
//store in database
MongoClient mongoClient = new MongoClient();
try {
MongoDatabase db = mongoClient.getDatabase("users");
MongoCollection<Document> users = db.getCollection("users");
Document document = new Document("name", name).append("age", age);
users.insertOne(document);
System.out.println("Successfully inserted document.");
} finally {
mongoClient.close();
}
response.sendRedirect("submitted.html");
}
} |
import re
import nltk
import sklearn
# define a list of stop words (requires the NLTK 'stopwords' corpus to be downloaded)
stop_words = nltk.corpus.stopwords.words('english')
# define a stemmer (Porter stemming; English-oriented)
stemmer = nltk.stem.porter.PorterStemmer()
# define a function to extract features from a given text
def extract_features(txt):
    """Tokenize ``txt``, drop English stop words, and stem what remains."""
    return [
        stemmer.stem(token)
        for token in nltk.word_tokenize(txt)
        if token not in stop_words
    ]
# define a function to create a machine learning model
def create_model(input_data, target_data):
    """Train a bag-of-words logistic-regression classifier.

    Args:
        input_data: iterable of raw text documents.
        target_data: labels aligned with ``input_data``.

    Returns:
        The fitted ``LogisticRegression`` classifier.
    """
    # Import the submodules explicitly: ``import sklearn`` alone does not make
    # ``sklearn.feature_extraction`` or ``sklearn.linear_model`` available.
    from sklearn.feature_extraction.text import CountVectorizer
    from sklearn.linear_model import LogisticRegression

    # CountVectorizer expects one *string* per document, so re-join the
    # stemmed tokens; passing raw token lists raises at fit time.
    documents = [" ".join(extract_features(txt)) for txt in input_data]
    # build the vocabulary and the document-term matrix in one pass
    vectorizer = CountVectorizer()
    X = vectorizer.fit_transform(documents)
    # fit a logistic regression classifier on the vectorized data
    clf = LogisticRegression()
    clf.fit(X, target_data)
    return clf
#!/bin/bash
set -eu

# Production build/deploy steps for the Django project, all run inside the
# pipenv virtualenv against the production settings module.
SETTINGS="--settings=config.settings.production"

# NOTE(review): `pipenv run pipenv install` reinstalls from inside the venv;
# plain `pipenv install` is the usual form — confirm the indirection is intended.
pipenv run pipenv install
pipenv run python manage.py migrate "$SETTINGS"
pipenv run python manage.py seed "$SETTINGS"
pipenv run python manage.py compilescss "$SETTINGS"
# Quote the pattern so the shell cannot glob-expand *.scss against the CWD.
pipenv run python manage.py collectstatic --ignore='*.scss' "$SETTINGS"
pipenv run python manage.py compilescss --delete-files "$SETTINGS"
pipenv run python manage.py compress "$SETTINGS"
|
require 'fog/core/model'
module Fog
module OracleCloud
class SOA
class Instance < Fog::Model
identity :service_name, :aliases=>'serviceName'
attribute :service_type
attribute :resource_count
attribute :status
attribute :description
attribute :identity_domain
attribute :creation_job_id
attribute :creation_time
attribute :last_modified_time
attribute :created_by
attribute :service_uri
attribute :provisioning_progress
attribute :db_service_name, :aliases=>'dbServiceName'
attribute :num_nodes, :aliases=>'managedServerCount'
attribute :shape
attribute :version
attribute :ssh_key, :aliases=>'vmPublicKey'
# The following are only used to create an instance and are not returned in the list action
attribute :cloud_storage_container, :aliases=>'cloudStorageContainer'
attribute :cloud_storage_user, :aliases=>'cloudStorageUser'
attribute :cloud_storage_pwd, :aliases=>'cloudStoragePassword'
attribute :level
attribute :subscription_type, :aliases=>'subscriptionType'
attribute :topology
attribute :admin_username, :aliases=>'adminUserName'
attribute :admin_password, :aliases=>'<PASSWORD>'
attribute :dba_name, :aliases=>'dbaName'
attribute :dba_password, :aliases=>'<PASSWORD>'
attribute :provision_otd, :aliases=>'provisionOtd'
# The following are used to delete an instance and are not returned in the list action
attribute :dba_password
attribute :force_delete
attribute :skip_backup
def service_name=(value)
if value.include? '_' or !(value[0] =~ /[[:alpha:]]/) or value.size > 50 or !(value[/[a-zA-Z0-9-]+/] == value)
raise ArgumentError, "Invalid service name. Names must be less than 50 characters; must start with a letter and can only contain letters, numbers and hyphens (-); can not end with a hyphen"
else
attributes[:service_name] = value
end
end
def topology=(value)
if %w(osb soa soaosb b2b mft apim insight).include? value then
attributes[:topology]=value
else
raise ArgumentError, "Invalid topology. Valid values - osb, soa, soaosb, b2b, mft, apim, insight"
end
end
def num_nodes=(value)
if value.nil? then value = 1 end
if [1, 2, 4].include? value.to_i then
attributes[:num_nodes] = value.to_i
else
raise ArgumentError, "Invalid server count (#{value}). Valid values - 1, 2 or 4"
end
end
def shape=(value)
if %w( oc1m oc2m oc3m oc4m oc5m).include? value then
attributes[:shape]=value
else
raise ArgumentError, "Invalid Shape. Valid values - oc1m, oc2m, oc3m, oc4m or oc5m"
end
end
def admin_password=(value)
if !(value[0] =~ /[[:alpha:]]/) or value.size < 8 or value.size > 30 or !(value =~ /[_#$]/) or !(value =~ /[0-9]/)
raise ArgumentError, "Invalid admin password. Password must be between 8 and 30 characters in length; must start with a letter and can only contain letters, numbers and $, \#, _"
else
attributes[:admin_password] = value
end
end
def save
#identity ? update : create
create
end
def ready?
status == "Running"
end
def stopping?
status == 'Maintenance' || status == 'Terminating'
end
def stopped?
status == 'Stopped'
end
def destroy(dba_name, dba_password)
requires :service_name
service.delete_instance(service_name, dba_name, dba_password,
:force_delete => force_delete,
:skip_backup => skip_backup).body
end
def job_status
requires :creation_job_id
service.get_job_status('create', creation_job_id)
end
private
def create
requires :service_name, :dba_name, :dba_password, :db_service_name, :shape, :version, :ssh_key, :admin_password, :admin_username, :topology
stor_user = cloud_storage_user || service.username
stor_pwd = cloud_storage_pwd || <PASSWORD>
if cloud_storage_container.nil? then
cloud_storage_container = "#{service_name}_Backup"
begin
container = Fog::Storage[:oraclecloud].containers.get(cloud_storage_container)
rescue Excon::Error::NotFound => error
# Doesn't exist, create it first
# The Oracle Cloud currently doesn't create a storage container for us, if it doesn't exist. Do it manually now
container = Fog::Storage[:oraclecloud].containers.create(
:name => cloud_storage_container,
)
end
end
params = {
:serviceName => service_name,
:cloudStorageContainer => cloud_storage_container,
:cloudStoragePassword => <PASSWORD>,
:cloudStorageUser => stor_user,
:description => description,
:provisionOTD => provision_otd.nil? ? false : provision_otd,
:subscriptionType => 'MONTHLY',
:level => 'PAAS',
:topology => topology
}
options = {
:adminPassword => <PASSWORD>,
:adminUserName => admin_username,
:dbaName => dba_name,
:dbaPassword => <PASSWORD>,
:dbServiceName => db_service_name,
:managedServerCount => num_nodes || 1,
:shape => shape,
:VMsPublicKey => ssh_key,
:version => version
}
data = service.create_instance(params, options)
end
end
end
end
end
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.monitor;
import java.io.PrintStream;
import java.util.concurrent.TimeUnit;
import org.sosy_lab.common.time.TimeSpan;
import org.sosy_lab.cpachecker.core.CPAcheckerResult.Result;
import org.sosy_lab.cpachecker.core.interfaces.Statistics;
import org.sosy_lab.cpachecker.core.reachedset.UnmodifiableReachedSet;
/** Reports timing statistics collected by the MonitorCPA transfer relation. */
class MonitorStatistics implements Statistics {

  /** The CPA whose transfer-relation timings are reported. */
  private final MonitorCPA mCpa;

  MonitorStatistics(MonitorCPA pCpa) {
    mCpa = pCpa;
  }

  @Override
  public String getName() {
    return "MonitorCPA";
  }

  @Override
  public void printStatistics(PrintStream out, Result pResult, UnmodifiableReachedSet pReached) {
    MonitorTransferRelation transfer = mCpa.getTransferRelation();
    String maxPost = transfer.totalTimeOfTransfer.getMaxTime().formatAs(TimeUnit.SECONDS);
    String avgPost = transfer.totalTimeOfTransfer.getAvgTime().formatAs(TimeUnit.SECONDS);
    String maxPath = TimeSpan.ofMillis(transfer.maxTotalTimeForPath).formatAs(TimeUnit.SECONDS);
    out.println("Max. Post Time: " + maxPost);
    out.println("Avg. Post Time: " + avgPost);
    out.println("Max Post time on a path: " + maxPath);
  }
}
|
import os
import pickle
from typing import List, Optional
import pymysql
from .interfaces import ISessionProvider, IDbSessionProvider
from .session import HttpSession
from ..util import b64
class MemorySessionProvider(ISessionProvider):
    """Keeps sessions in an in-process dict; nothing survives a restart."""

    def __init__(self, expired: int, *args, **kwargs):
        # Maps session_id -> HttpSession.
        self.sessions = {}
        super().__init__(expired, *args, **kwargs)

    def remove(self, session_id: str):
        # pop with a default: removing a missing id is a no-op, as before.
        self.sessions.pop(session_id, None)

    def get_expired_session(self, time_before: float) -> List[str]:
        return [
            session.id
            for session in self.sessions.values()
            if time_before > session.last_access_time
        ]

    def get(self, session_id: str) -> Optional[HttpSession]:
        return self.sessions.get(session_id)

    def set(self, session: HttpSession):
        self.sessions[session.id] = session

    def exists(self, session_id: str):
        return session_id in self.sessions

    def dispose(self):
        self.sessions.clear()
        super().dispose()
class FileSessionProvider(ISessionProvider):
    """Persists each session as a pickle file under ``<root>/restfx_sessions``."""

    def __init__(self, expired: int, sessions_root: str, *args, **kwargs):
        self.sessions_root = os.path.abspath(os.path.join(sessions_root, 'restfx_sessions'))
        # Create the storage directory on first use.
        if not os.path.exists(self.sessions_root):
            os.makedirs(self.sessions_root)
        super().__init__(expired, *args, **kwargs)

    def _get_session_path(self, session_id: str) -> str:
        # session_id may contain '/', which is not legal in a file name.
        return os.path.join(self.sessions_root, session_id.replace('/', '_'))

    def _load_session(self, session_id: str):
        """Unpickle the stored session; returns None if missing, empty or corrupt."""
        session_file = self._get_session_path(session_id)
        if not os.path.isfile(session_file):
            return None
        if os.path.getsize(session_file) == 0:
            return None
        # noinspection PyBroadException
        try:
            with open(session_file, mode='rb') as fp:
                session = pickle.load(fp)
        except Exception:
            # The pickle is unreadable: the file is corrupt, so delete it.
            # (The context manager has already closed the file at this point.)
            os.remove(session_file)
            return None
        # Re-attach the persistence hooks lost during pickling.
        setattr(session, '_update_watcher', self.set)
        setattr(session, '_drop_watcher', self.remove)
        return session

    def remove(self, session_id: str):
        session_file = self._get_session_path(session_id)
        # Check the resolved file path directly. The previous implementation
        # passed the full path back through exists(), which re-joined it under
        # sessions_root, so the check never matched and session files were
        # never actually deleted.
        if os.path.isfile(session_file):
            os.remove(session_file)

    def get_expired_session(self, time_before: float) -> List[str]:
        entities = os.listdir(self.sessions_root)
        sessions = []
        for entity in entities:
            last_access_time = os.path.getatime(self._get_session_path(entity))
            if time_before > last_access_time:
                sessions.append(entity)
        return sessions

    def get(self, session_id: str) -> Optional[HttpSession]:
        return self._load_session(session_id)

    def set(self, session: HttpSession):
        with open(self._get_session_path(session.id), mode='wb') as fp:
            pickle.dump(session, fp, pickle.HIGHEST_PROTOCOL)

    def exists(self, session_id: str):
        return os.path.isfile(self._get_session_path(session_id))

    def dispose(self):
        import shutil
        # os.removedirs() raises OSError on a non-empty directory; remove the
        # whole session tree regardless of contents.
        shutil.rmtree(self.sessions_root, ignore_errors=True)
        super().dispose()
class MysqlSessionProvider(IDbSessionProvider):
    """Stores sessions in a MySQL table (pickled payload, base64-encoded)."""

    def __init__(self, pool, table_name="restfx_sessions", expired=20, *args, **kwargs):
        # Table name is interpolated into SQL strings below; it comes from
        # trusted configuration, not user input.
        self.table_name = table_name
        super().__init__(pool, expired, *args, **kwargs)

    def execute(self, sql: str, *args):
        """Run ``sql`` with ``args``; returns ``(affected_rows, rows_or_None)``.

        A result set is fetched only for statements beginning with ``SELECT``.
        On any error the exception is printed and ``(0, None)`` is returned.
        """
        conn = self.connect()
        cursor = None
        try:
            cursor = conn.cursor(pymysql.cursors.DictCursor)
            rows = cursor.execute(sql, args)
            # Only SELECT statements produce rows to fetch.
            if sql.startswith('SELECT'):
                data = cursor.fetchall()
            else:
                data = None
        except Exception as e:
            # NOTE(review): errors are reduced to (0, None); callers such as
            # get_expired_session iterate `data` and would fail on None —
            # confirm this trade-off is intended.
            print(repr(e))
            return 0, None
        finally:
            # finally runs before either return, so the connection is always released.
            if cursor is not None:
                cursor.close()
            conn.close()
        return rows, data

    def table_exists(self) -> bool:
        # Probe information_schema rather than issuing a failing query.
        rows, _ = self.execute(
            """SELECT 1 FROM information_schema.tables
            WHERE table_name ='{table_name}' LIMIT 1""".format(table_name=self.table_name))
        return rows > 0

    def create_table(self):
        # `store` holds the base64-encoded pickled session payload.
        self.execute("""CREATE TABLE {table_name} (
        id VARCHAR(256) PRIMARY KEY NOT NULL,
        creation_time LONG NOT NULL,
        last_access_time LONG NOT NULL,
        store TEXT,
        INDEX last_access(last_access_time(8) ASC)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8""".format(table_name=self.table_name))

    def get_expired_session(self, time_before: float) -> List[str]:
        rows, data = self.execute(
            'SELECT id FROM {table_name} WHERE last_access_time < %s'.format(table_name=self.table_name),
            int(time_before))
        return [item['id'] for item in data]

    def get(self, session_id: str) -> Optional[HttpSession]:
        rows, data = self.execute("SELECT * FROM {table_name} WHERE id=%s LIMIT 1".format(table_name=self.table_name),
                                  session_id)
        if rows == 0:
            return None
        item = data[0]
        # Decode the payload back into bytes before reconstructing the session.
        item['store'] = b64.dec_bytes(item['store'])
        return self.parse(item)

    def upsert(self, session_id: str, creation_time: float, last_access_time: float, store: bytes):
        # Insert-or-update keyed on the primary key (session id).
        data = b64.enc_str(store)
        self.execute("""INSERT INTO {table_name} VALUES(%s, %s, %s, %s) ON DUPLICATE KEY UPDATE
        last_access_time=%s, store=%s""".format(table_name=self.table_name),
                     session_id,
                     int(creation_time),
                     int(last_access_time),
                     data,
                     int(last_access_time),
                     data)

    def exists(self, session_id: str) -> bool:
        rows, _ = self.execute("""SELECT 1 FROM {table_name} WHERE id=%s limit 1""".format(table_name=self.table_name),
                               session_id)
        return rows > 0

    def remove(self, session_id: str):
        self.execute("""DELETE FROM {table_name} WHERE id=%s""".format(table_name=self.table_name), session_id)

    def dispose(self):
        # Drops all rows but keeps the table itself.
        self.execute('TRUNCATE TABLE {table_name}'.format(table_name=self.table_name))
|
#!/bin/sh -e
# 1.
# Requires plugin https://github.com/heroku/heroku-builds
# Install via `heroku plugins:install heroku-builds`
# 2.
# Set heroku environment variables by running following script:
# private/update-environment-variables-on-heroku.mjs
# Trigger a new Heroku build of the app from the current directory's sources.
heroku builds:create -a factorio-mods-localization
|
<gh_stars>1-10
package com.twelvemonkeys.imageio.plugins.pict;
import com.twelvemonkeys.imageio.spi.ReaderWriterProviderInfo;
import com.twelvemonkeys.imageio.spi.ReaderWriterProviderInfoTest;
/**
* PICTProviderInfoTest.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @author last modified by $Author: harald.kuhr$
* @version $Id: PICTProviderInfoTest.java,v 1.0 02/06/16 harald.kuhr Exp$
*/
public class PICTProviderInfoTest extends ReaderWriterProviderInfoTest {
    /** Supplies the PICT provider info instance the inherited contract tests verify. */
    @Override
    protected ReaderWriterProviderInfo createProviderInfo() {
        return new PICTProviderInfo();
    }
}
#!/bin/bash
#
# generate_service_certificates.sh <service>
#--------------------------------------
# Script to generate a key and certificate for GitLab services, including Gitaly
# and Praefect to enable TLS support.
#
# Generates `<service>.crt` & `<service>.key` in a temporary directory, and
# places them into the current working directory.
#
# By default generates a key and certificate for `gitaly` in `default` namespace
# and `gitlab` release. Use `RELEASE_NAME` and `NAMESPACE` environment variables
# for non-default namespace and release.
#
# After generation, create a TLS secret:
#
#   kubectl create secret tls <service>-tls --cert=gitaly.crt --key=gitaly.key
#
# Then, configure the chart to use this:
#   global:
#     <service>:
#       tls:
#         enabled: true
#         secretName: <service>-tls
#--------------------------------------
VALID_DAYS=${VALID_DAYS-365}
CERT_NAME=${1-gitaly}
RELEASE_NAME=${RELEASE_NAME-gitlab}
NAMESPACE=${NAMESPACE-default}
DNS_SUFFIX=${DNS_SUFFIX:-.svc}

# $(...) instead of backticks; quote paths so spaces cannot break the script.
WORKDIR=$(pwd)
TEMP_DIR=$(mktemp -d)

pushd "${TEMP_DIR}" || exit

# Kubernetes service names are capped at 63 characters.
SERVICE_NAME="${RELEASE_NAME}-${CERT_NAME}"
SERVICE_NAME="${SERVICE_NAME:0:63}"

# Subject Alternative Names for the service and any per-pod sub-domains.
cat > san.conf <<SANDOC
[req_ext]
subjectAltName = @san
[san]
DNS.1 = ${SERVICE_NAME}.${NAMESPACE}${DNS_SUFFIX}
DNS.2 = *.${SERVICE_NAME}.${NAMESPACE}${DNS_SUFFIX}
SANDOC

openssl req -x509 -nodes -newkey rsa:4096 \
  -keyout "${CERT_NAME}.key" \
  -out "${CERT_NAME}.crt" \
  -days "${VALID_DAYS}" \
  -subj "/CN=${CERT_NAME}" \
  -reqexts req_ext -extensions req_ext \
  -config <(cat /etc/ssl/openssl.cnf san.conf)

# The glob stays unquoted on purpose: it matches <service>.crt and <service>.key.
mv "${CERT_NAME}".* "${WORKDIR}/"

popd
rm -rf "${TEMP_DIR}"
|
# Habitat build plan for OpenLDAP.
pkg_origin=core
pkg_name=openldap
pkg_version=2.4.58
pkg_description="Community developed LDAP software"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=("OLDAP-2.8")
pkg_upstream_url=http://www.openldap.org/
pkg_source=https://www.openldap.org/software/download/OpenLDAP/${pkg_name}-release/${pkg_name}-${pkg_version}.tgz
pkg_shasum=57b59254be15d0bf6a9ab3d514c1c05777b02123291533134a87c94468f8f47b
# Runtime dependencies: TLS via openssl, SASL auth, and the BDB backend (core/db).
pkg_deps=(core/glibc core/libtool core/db core/openssl core/cyrus-sasl)
pkg_build_deps=(core/gcc core/make core/groff)
pkg_bin_dirs=(bin sbin libexec)
pkg_include_dirs=(include)
pkg_lib_dirs=(lib)
# Expose the LDAP listener port to other services.
pkg_exports=(
  [port]=port
)
pkg_exposes=(port)

# Configure with BDB/HDB backends, syncrepl provider, OpenSSL TLS and Cyrus
# SASL, then build.
do_build() {
  ./configure --prefix="${pkg_prefix}" \
    --localstatedir="${pkg_svc_var_path}" \
    --enable-dynamic \
    --enable-local \
    --enable-proctitle \
    --enable-shared \
    --enable-ipv6 \
    --enable-slapd \
    --enable-crypt \
    --enable-modules \
    --enable-rewrite \
    --enable-rlookups \
    --enable-bdb \
    --enable-hdb \
    --enable-syncprov=yes \
    --enable-overlays=mod \
    --with-tls=openssl \
    --with-cyrus-sasl
  make depend
  make
}

# Run the upstream test suite.
do_check() {
  make test
}
|
// Coursera assignment metadata consumed by the sbt-coursera submission plugin.
course := "parprog1"
assignment := "reductions"

assignmentInfo := AssignmentInfo(
  // NOTE(review): "<KEY>" is a redacted placeholder — the real assignment key
  // must be restored before submitting.
  key = "<KEY>",
  itemId = "U1eU3",
  premiumItemId = Some("4rXwX"),
  partId = "gmSnR",
  styleSheet = None
)
|
package io.github.rcarlosdasilva.weixin.core.http;
/** HTTP request methods supported by the weixin HTTP client. */
public enum HttpMethod {
  GET, HEAD, POST, PUT, PATCH, DELETE;
}
|
package gv.jleon
package mirror
import shapeless.{ HNil }
import test._
import Prop._
import Mirror._
// ScalaCheck property suite for Mirror; generators come from the mixed-in traits.
object MirrorProperties extends Properties("Mirror")
    with MirrorGenerator
    with UriGenerator {

  // The baseUrl field survives HList construction unchanged.
  property("baseUrl consistency") =
    forAll { (b: BaseUrl, p: Prefix) ⇒
      (b :: p :: true :: HNil).baseUrl ?= b
    }

  // The prefix field survives HList construction unchanged.
  property("prefix consistency") =
    forAll { (b: BaseUrl, p: Prefix) ⇒
      (b :: p :: "Hello" :: HNil).prefix ?= p
    }

  // Every generated URL is rooted at the mirror's base URL.
  property("urls begin with baseUrl") =
    forAll { (mirror: Mirror, path: Uri.Path) ⇒
      mirror.urlFor(path).toString startsWith mirror.baseUrl.toString
    }
}
|
# algorithm to optimize a given data model
def optimize_model(model):
    """Return an optimized version of ``model``.

    The model is lowered to a graph representation, the graph is optimized,
    and the result is converted back into a model.
    """
    return convert_graph_to_model(optimize_graph(create_model_graph(model)))
package io.opensphere.mantle.util.columnanalyzer;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import org.apache.commons.lang3.StringUtils;
/**
 * An XmlAdapter used to marshal and unmarshal a
 * typeKey-to-list-of-DataTypeColumnAnalyzerDataSet map.
 */
public class DataTypeAnalysisDataXmlAdapter
    extends XmlAdapter<DataTypeAnalysisXmlDataMap, Map<String, DataTypeColumnAnalyzerDataSet>>
{
    /**
     * Converts the in-memory map into its XML-serializable wrapper, skipping
     * entries with an empty key or a null data set.
     */
    @Override
    public DataTypeAnalysisXmlDataMap marshal(Map<String, DataTypeColumnAnalyzerDataSet> map)
    {
        DataTypeAnalysisXmlDataMap xmlMap = new DataTypeAnalysisXmlDataMap();
        for (Entry<String, DataTypeColumnAnalyzerDataSet> mapping : map.entrySet())
        {
            if (StringUtils.isEmpty(mapping.getKey()) || mapping.getValue() == null)
            {
                continue;
            }
            DataTypeAnalaysMapXmMapEntry xmlEntry = new DataTypeAnalaysMapXmMapEntry();
            xmlEntry.setTypeKey(mapping.getKey());
            xmlEntry.setData(mapping.getValue());
            xmlMap.addLayer(xmlEntry);
        }
        return xmlMap;
    }

    /** Rebuilds the in-memory map from its XML-serializable wrapper. */
    @Override
    public Map<String, DataTypeColumnAnalyzerDataSet> unmarshal(DataTypeAnalysisXmlDataMap value)
    {
        Map<String, DataTypeColumnAnalyzerDataSet> result = new HashMap<>();
        for (DataTypeAnalaysMapXmMapEntry xmlEntry : value.getDataList())
        {
            result.put(xmlEntry.getTypeKey(), xmlEntry.getData());
        }
        return result;
    }
}
|
<gh_stars>0
package ff.camaro;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.gradle.api.Project;
import org.gradle.api.tasks.SourceSet;
import org.snakeyaml.engine.v2.api.Dump;
import org.snakeyaml.engine.v2.api.DumpSettings;
import org.snakeyaml.engine.v2.api.Load;
import org.snakeyaml.engine.v2.api.LoadSettings;
import org.snakeyaml.engine.v2.common.FlowStyle;
/**
* Camaro configuration files are yaml files. there are two types of stores:
* <br>
*
* 1. plugins: they define folder structure, dependencies, configurations,
* project, the configuration files reside into a package
* "ff.camaro.plugins.models" <br>
* 2. files: they define files like metadata configuration files, camaro.json
* files. The configuration files reside into a package "ff.camaro.files.models"
* <br>
*
* @author fernandojerez
*
*/
public class ConfigLoader {

    /** Loader for plugin configuration models under /ff/camaro/plugin. */
    public static final ConfigLoader plugin = new ConfigLoader("/ff/camaro/plugin");

    /** Loader for file configuration models under /ff/camaro/files. */
    public static final ConfigLoader files = new ConfigLoader("/ff/camaro/files");

    /** Eclipse-style output folder for a compiled source set. */
    public static String eclipse_output_path(final String name, final String sourceSet) {
        return "build/classes/" + name + "/" + sourceSet;
    }

    /** Output folder for the main source set. */
    public static String output_main_path(final org.gradle.api.Project project, final String name) {
        return ConfigLoader.output_path(project, name, SourceSet.MAIN_SOURCE_SET_NAME);
    }

    /** Output folder for an arbitrary source set. */
    public static String output_path(final org.gradle.api.Project project, final String name, final String sourceSet) {
        return "classes/" + name + "/" + sourceSet;
    }

    /** Output folder for the test source set. */
    public static String output_test_path(final Project project, final String name) {
        return ConfigLoader.output_path(project, name, SourceSet.TEST_SOURCE_SET_NAME);
    }

    /** Source folder for the main source set. */
    public static String src_main_path(final String name) {
        return ConfigLoader.src_path(name, SourceSet.MAIN_SOURCE_SET_NAME);
    }

    /** Source folder for an arbitrary source set. */
    public static String src_path(final String name, final String sourceSet) {
        return "src/" + sourceSet + "/" + name;
    }

    /** Source folder for the test source set. */
    public static String src_test_path(final String name) {
        return ConfigLoader.src_path(name, SourceSet.TEST_SOURCE_SET_NAME);
    }

    /** Matches "{key}" and "{test?trueKey:falseKey}" placeholders inside $format strings. */
    private final Pattern pattern = Pattern.compile("\\{([a-zA-Z\\?\\:\\_]+)\\}");

    /** Resource path prefix the yaml model files are resolved against. */
    private final String pck_resolver;

    public ConfigLoader(final String pck_resolver) {
        super();
        this.pck_resolver = pck_resolver;
    }

    /**
     * Load a configuration file and the use section and merge the files
     *
     * if the file is a fragment merge the process is: <br/>
     * 1. load the configuration <br/>
     * 2. merge the use <br/>
     * 3. merge the configuration again <br/>
     *
     * if the file is a project model: <br/>
     * 1. create a empty configuration<br/>
     * 2. merge the use section <br/>
     * 3. merge the configuration <br/>
     *
     * @param prj the gradle project reference
     * @param resolver the collection of configurations loaded
     * @param name the name of the configuration file
     * @param fragment if a fragment file or model file
     * @return A map with properties merged
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> load(final Project prj, final KeyResolver resolver, final String name,
            final boolean fragment) {
        final LoadSettings settings = LoadSettings.builder().setLabel("Camaro").build();
        final Load load = new Load(settings);
        final Map<String, Object> cfg = (Map<String, Object>) load.loadFromInputStream(ConfigLoader.class
                .getResourceAsStream(pck_resolver + "/models/" + (fragment ? "fragment/" : "") + name + ".yml"));
        // "use" names other configuration files whose content is merged in first.
        final List<String> uses = value(cfg, "use", true);
        final Map<String, Object> first = fragment ? new LinkedHashMap<>() : new HashMap<>();
        if (fragment) {
            first.putAll(cfg);
        }
        for (final String use : uses) {
            if (fragment) {
                first.putAll(load(prj, resolver, use, fragment));
            } else {
                merge(prj, resolver, first, load(prj, resolver, use, fragment));
            }
        }
        // Re-apply the file's own content so it wins over anything it "use"d.
        if (fragment) {
            first.putAll(cfg);
        } else {
            merge(prj, resolver, first, cfg);
        }
        return first;
    }

    /**
     * Load a model configuration file
     *
     * @param prj the gradle project
     * @param name the name of the file
     * @return A map with properties merged
     */
    public Map<String, Object> load(final Project prj, final String name) {
        final KeyResolver resolver = new KeyResolver();
        return load(prj, resolver, name, false);
    }

    /**
     * Deep-merges {@code second} into {@code first}, honouring the special
     * "$remove" (delete key) and list-append semantics.
     */
    @SuppressWarnings("unchecked")
    private void merge(final Project prj, final KeyResolver resolver, final Map<String, Object> first,
            final Map<String, Object> second) {
        resolver.push(second);
        resolver.push(first);
        for (final Map.Entry<String, Object> entry : second.entrySet()) {
            final Object value = process_object(prj, resolver, entry.getValue());
            final String key = entry.getKey();
            if (value instanceof List) {
                first.put(key, merge_list(prj, resolver, value(first, key, false), (List<Object>) value));
                continue;
            }
            if (value instanceof Map) {
                final Object result = first.get(key);
                if (result instanceof Map) {
                    // Both sides are maps: recurse with both pushed on the resolver stack.
                    resolver.push((Map<String, Object>) result);
                    resolver.push((Map<String, Object>) value);
                    merge(prj, resolver, (Map<String, Object>) result, (Map<String, Object>) value);
                    resolver.pop();
                    resolver.pop();
                    continue;
                }
                first.put(key, value);
                continue;
            }
            if (value instanceof String) {
                if ("$remove".equals(value)) {
                    first.remove(key);
                    continue;
                }
            }
            final Object fvalue = first.get(key);
            if (fvalue instanceof List) {
                // Scalars merged onto an existing list are appended to it.
                ((List<Object>) fvalue).add(process_object(prj, resolver, value));
            } else {
                // Covers fvalue == null and any non-list value (the original
                // "fvalue == null || !(fvalue instanceof List)" condition).
                first.put(key, process_object(prj, resolver, value));
            }
        }
        resolver.pop();
        resolver.pop();
    }

    /**
     * Merges {@code list} onto {@code object}; the "$clear" marker empties the
     * accumulated result before further items are appended.
     */
    private Object merge_list(final Project prj, final KeyResolver resolver, final List<Object> object,
            final List<Object> list) {
        final List<Object> result = new LinkedList<>();
        result.addAll(object);
        for (final Object value : list) {
            if (value.equals("$clear")) {
                result.clear();
                continue;
            }
            result.add(process_object(prj, resolver, value));
        }
        return result;
    }

    /**
     * Recursively expands a loaded yaml value: resolves nested "use" sections,
     * and interprets the "$output_main_path", "$output_test_path" and
     * "$format" string directives.
     */
    @SuppressWarnings("unchecked")
    private Object process_object(final Project prj, final KeyResolver resolver, final Object value) {
        if (value instanceof Map) {
            final Map<String, Object> result = (Map<String, Object>) value;
            final List<String> uses = value(result, "use", true);
            final Map<String, Object> first = new LinkedHashMap<>();
            first.putAll(result);
            for (final String use : uses) {
                first.putAll(load(prj, resolver, use, true));
            }
            first.putAll(result);
            final Map<String, Object> nresult = new LinkedHashMap<>();
            resolver.push(nresult);
            for (final Map.Entry<String, Object> entry : first.entrySet()) {
                nresult.put(entry.getKey(), process_object(prj, resolver, entry.getValue()));
            }
            resolver.pop();
            return nresult;
        }
        if (value instanceof List) {
            final List<Object> result = new LinkedList<>();
            for (final Object val : (List<Object>) value) {
                result.add(process_object(prj, resolver, val));
            }
            return result;
        }
        if (value instanceof String) {
            final String str = ((String) value).trim();
            if (str.startsWith("$output_main_path ")) {
                return ConfigLoader.output_main_path(prj,
                        toString(resolver.get(str.substring("$output_main_path ".length()).trim())));
            }
            if (str.startsWith("$output_test_path ")) {
                // BUG FIX: this branch stripped the prefix using
                // "$output_main_path ".length() — it only worked because both
                // prefixes happen to have the same length.
                return ConfigLoader.output_test_path(prj,
                        toString(resolver.get(str.substring("$output_test_path ".length()).trim())));
            }
            if (str.startsWith("$format ")) {
                final Matcher matcher = pattern.matcher(str.substring("$format".length()).trim());
                final StringBuilder result = new StringBuilder();
                while (true) {
                    if (!matcher.find()) {
                        matcher.appendTail(result);
                        break;
                    } else {
                        String group = matcher.group(1).trim();
                        int tix = group.indexOf('?');
                        if (tix != -1) {
                            // Conditional placeholder: {test?trueKey:falseKey}
                            final String test = group.substring(0, tix);
                            group = group.substring(tix + 1);
                            tix = group.indexOf(':');
                            String trueValue = group;
                            String falseValue = "";
                            if (tix != -1) {
                                trueValue = group.substring(0, tix).trim();
                                falseValue = group.substring(tix + 1).trim();
                            }
                            final Object testValue = resolver.get(test);
                            // BUG FIX: the original tested "value != null", which is
                            // always true here; the resolved test value was meant.
                            // (Behaviour is unchanged: String.valueOf(null) is "null",
                            // which never equals "true".)
                            if (testValue != null && "true".equals(String.valueOf(testValue))) {
                                if (trueValue.length() > 0) {
                                    matcher.appendReplacement(result, String.valueOf(resolver.get(trueValue)));
                                } else {
                                    matcher.appendReplacement(result, "");
                                }
                            } else {
                                if (falseValue.length() > 0) {
                                    matcher.appendReplacement(result, String.valueOf(resolver.get(falseValue)));
                                } else {
                                    matcher.appendReplacement(result, "");
                                }
                            }
                            continue;
                        }
                        matcher.appendReplacement(result, String.valueOf(resolver.get(group)));
                    }
                }
                return result.toString();
            }
            return str;
        }
        return value;
    }

    /** Null-safe String.valueOf: returns null for null instead of "null". */
    private String toString(final Object object) {
        if (object == null) {
            return null;
        }
        return String.valueOf(object);
    }

    /** Serialises a configuration map back to block-style yaml. */
    public String toYaml(final Map<String, Object> config) {
        final Dump dump = new Dump(
                DumpSettings.builder().setCanonical(false).setDefaultFlowStyle(FlowStyle.BLOCK).build());
        return dump.dumpToString(config);
    }

    /**
     * Reads {@code key} from {@code result} as a list, wrapping a scalar in a
     * singleton list; optionally removes the key from the map.
     */
    @SuppressWarnings("unchecked")
    private <T> List<T> value(final Map<String, Object> result, final String key, final boolean remove) {
        final Object val = remove ? result.remove(key) : result.get(key);
        if (val == null) {
            return Collections.emptyList();
        }
        if (val instanceof List) {
            return (List<T>) val;
        }
        return Collections.singletonList((T) val);
    }
}
|
#!/usr/bin/env bash
# Installs ccm and boots a single-node Cassandra cluster, choosing the
# Cassandra version based on the locally installed JDK.

# Detects the local java binary and returns 0 if its version is 1.8 or
# newer, 1 otherwise. Progress messages go to stderr so they do not
# pollute command substitution.
# BUG FIX: the original used "return -1", "return true" and "return false";
# bash `return` only accepts a numeric status in 0-255, so all three forms
# failed at runtime, and the caller compared captured stdout against the
# literal string "true".
function check_java_version {
    local _java
    if type -p java >/dev/null; then
        echo "found java executable in PATH" >&2
        _java=java
    elif [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
        echo "found java executable in JAVA_HOME" >&2
        _java="$JAVA_HOME/bin/java"
    else
        echo "no java" >&2
        return 1
    fi
    local version
    version=$("$_java" -version 2>&1 | awk -F '"' '/version/ {print $2}')
    echo "version $version" >&2
    # Compare major.minor numerically instead of lexicographically so that
    # versions such as "11.0.2" or "1.10" are handled correctly.
    local major minor
    major=${version%%.*}
    minor=${version#*.}
    minor=${minor%%.*}
    if [[ "${major:-0}" -gt 1 ]] || { [[ "${major:-0}" -eq 1 ]] && [[ "${minor:-0}" -ge 8 ]]; }; then
        echo "version is more than 1.8" >&2
        return 0
    else
        echo "version is less than 1.8" >&2
        return 1
    fi
}

# Pick a Cassandra release compatible with the detected JDK.
if check_java_version; then
    cassandra_version="3.2"
else
    cassandra_version="2.2.9"
fi
pip install 'requests[security]'
pip install ccm
ccm create test -v ${cassandra_version} -n 1 -s
exit $?
|
<gh_stars>0
// Subgraph endpoints on The Graph's hosted service, grouped by protocol.
const THEGRAPH = 'https://api.thegraph.com/subgraphs/name';

// Aave lending protocol (v1 raw feed and v2).
export const aave = `${THEGRAPH}/aave/protocol-multy-raw`;
export const aavev2 = `${THEGRAPH}/aave/protocol-v2`;

// Uniswap v2 AMM.
export const uniswapV2 = `${THEGRAPH}/uniswap/uniswap-v2`;

// Synthetix core subgraph.
export const synthetixSnx = `${THEGRAPH}/synthetixio-team/synthetix`;

// synthetix exchanges != synthetix exchanger
export const synthetixExchanges = `${THEGRAPH}/synthetixio-team/synthetix-exchanges`;
export const synthetixExchanger = `${THEGRAPH}/synthetixio-team/synthetix-exchanger`;
|
<gh_stars>0
package app;
import org.jooby.Jooby;
import org.jooby.banner.Banner;
import org.jooby.crash.Crash;
import org.jooby.crash.HttpShellPlugin;
import org.jooby.json.Jackson;
/**
 * Demo Jooby application wiring up the Crash shell plugin together with
 * JSON support and a banner. Route handlers are registered in the
 * instance initializer, as is conventional for Jooby apps.
 */
public class CrashApp extends Jooby {
  {
    // Extra configuration for the crash shell.
    conf("crash.conf");
    // JSON (de)serialization via Jackson.
    use(new Jackson());
    // Startup banner text.
    use(new Banner("crash me!"));
    // Crash shell with HTTP access and custom authentication.
    use(new Crash()
        .plugin(HttpShellPlugin.class)
        .plugin(AuthPlugin.class));
    // Lifecycle hooks for /path (currently no-ops that pass the value through).
    before("/path", (req, rsp) -> {
    });
    after("/path", (req, rsp, v) -> {
      return v;
    });
    complete("/path", (req, rsp, v) -> {
    });
    // Health-check style root route.
    get("/", () -> "OK");
  }

  /**
   * Application entry point.
   *
   * @param args command-line arguments forwarded to Jooby
   */
  public static void main(final String[] args) {
    run(CrashApp::new, args);
  }
}
|
import { SocketIoAdapter } from './modules/websocket/socketio.adapter';
import 'module-alias/register';
import { NestFactory } from '@nestjs/core';
import { Logger } from '@nestjs/common';
import { NestExpressApplication } from '@nestjs/platform-express';
import * as helmet from 'helmet';
import * as rateLimit from 'express-rate-limit';
import * as compression from 'compression';
import { setupSwagger } from './swagger';
import { AppModule } from './app.module';
// Middleware setup: registers global HTTP hardening and performance
// middleware on the application instance.
async function initPlugins(app) {
  app.setGlobalPrefix('');
  app.use(helmet()); // protects against a number of well-known web vulnerabilities
  // app.use(csurf()); // cross-site request forgery (CSRF/XSRF) protection
  // Rate-limit incoming requests
  app.use(
    new rateLimit({
      windowMs: 15 * 60 * 1000, // 15-minute window
      max: 100, // at most 100 requests per window per client
    }),
  );
  app.use(compression()); // enable response compression
}
/**
 * Application entry point: creates the Nest application, installs the
 * websocket adapter, swagger docs, CORS and global middleware, then
 * starts the HTTP server on port 3000.
 */
async function bootstrap() {
  const app = await NestFactory.create<NestExpressApplication>(AppModule, {
    // logger: false,
  });
  app.useWebSocketAdapter(new SocketIoAdapter(app));
  setupSwagger(app); // initialize the swagger docs
  app.enableCors(); // allow cross-origin requests
  // app.useGlobalPipes(new ValidationPipe());
  // app.useGlobalFilters(new HttpExceptionFilterFilter()); // global exception filter
  // app.useGlobalInterceptors(new ReturnBodyInterceptor()); // auto-wrap response bodies
  // BUG FIX: initPlugins is async but was fired without await, so the
  // server could start listening before the middleware was installed.
  await initPlugins(app);
  await app.listen(3000, () => {
    Logger.log(`Server run at port 3000`);
  });
}
// Surface bootstrap failures instead of leaving a floating promise.
bootstrap().catch((err) => {
  Logger.error(`Bootstrap failed: ${err}`);
  process.exit(1);
});
|
#!/bin/bash
# SLURM batch script: runs the "tanhrev" activation-function experiment
# (run 1) for the argument-mining sequence-tagging task.
#SBATCH -J Act_tanhrev_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1      # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation, seed, optimizer, run-id, dropout?, learning
# rate?, init scheme, final arg — NOTE(review): exact meanings are defined
# by PE-my.py; confirm there before editing.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py tanhrev 81 Adamax 1 0.7085139240848437 0.0016453806372394488 varscaling 0.3
|
#include <stdio.h>
#include <stdlib.h>
#include <oqs/kem_kyber.h>
#ifdef OQS_ENABLE_KEM_kyber_512_cca_kem
/* Allocates and populates an OQS_KEM descriptor for Kyber-512 (IND-CCA).
 * Returns NULL on allocation failure; the caller owns the returned struct. */
OQS_KEM *OQS_KEM_kyber_512_cca_kem_new() {
	OQS_KEM *kem = malloc(sizeof(OQS_KEM));
	if (kem == NULL) {
		return NULL;
	}
	kem->method_name = OQS_KEM_alg_kyber_512_cca_kem;
	kem->alg_version = "https://github.com/pq-crystals/kyber/commit/ab996e7460e5356b0e23aa034e7c2fe6922e60e6";
	/* NIST security category 1 (comparable to AES-128). */
	kem->claimed_nist_level = 1;
	kem->ind_cca = true;
	kem->length_public_key = OQS_KEM_kyber_512_cca_kem_length_public_key;
	kem->length_secret_key = OQS_KEM_kyber_512_cca_kem_length_secret_key;
	kem->length_ciphertext = OQS_KEM_kyber_512_cca_kem_length_ciphertext;
	kem->length_shared_secret = OQS_KEM_kyber_512_cca_kem_length_shared_secret;
	kem->keypair = OQS_KEM_kyber_512_cca_kem_keypair;
	kem->encaps = OQS_KEM_kyber_512_cca_kem_encaps;
	kem->decaps = OQS_KEM_kyber_512_cca_kem_decaps;
	return kem;
}
#endif
#ifdef OQS_ENABLE_KEM_kyber_768_cca_kem
/* Allocates and populates an OQS_KEM descriptor for Kyber-768 (IND-CCA).
 * Returns NULL on allocation failure; the caller owns the returned struct. */
OQS_KEM *OQS_KEM_kyber_768_cca_kem_new() {
	OQS_KEM *kem = malloc(sizeof(OQS_KEM));
	if (kem == NULL) {
		return NULL;
	}
	kem->method_name = OQS_KEM_alg_kyber_768_cca_kem;
	kem->alg_version = "https://github.com/pq-crystals/kyber/commit/ab996e7460e5356b0e23aa034e7c2fe6922e60e6";
	/* NIST security category 3 (comparable to AES-192). */
	kem->claimed_nist_level = 3;
	kem->ind_cca = true;
	kem->length_public_key = OQS_KEM_kyber_768_cca_kem_length_public_key;
	kem->length_secret_key = OQS_KEM_kyber_768_cca_kem_length_secret_key;
	kem->length_ciphertext = OQS_KEM_kyber_768_cca_kem_length_ciphertext;
	kem->length_shared_secret = OQS_KEM_kyber_768_cca_kem_length_shared_secret;
	kem->keypair = OQS_KEM_kyber_768_cca_kem_keypair;
	kem->encaps = OQS_KEM_kyber_768_cca_kem_encaps;
	kem->decaps = OQS_KEM_kyber_768_cca_kem_decaps;
	return kem;
}
/* Prototypes for the reference (PQClean) Kyber-768 implementation. */
int PQCLEAN_KYBER768_CLEAN_crypto_kem_keypair(unsigned char *pk, unsigned char *sk);
int PQCLEAN_KYBER768_CLEAN_crypto_kem_enc(unsigned char *ct, unsigned char *ss, const unsigned char *pk);
int PQCLEAN_KYBER768_CLEAN_crypto_kem_dec(unsigned char *ss, const unsigned char *ct, const unsigned char *sk);

/* Thin OQS_STATUS-returning adapters around the PQClean entry points. */
OQS_API OQS_STATUS OQS_KEM_kyber_768_cca_kem_keypair(uint8_t *public_key,
                                                     uint8_t *secret_key) {
	return (OQS_STATUS) PQCLEAN_KYBER768_CLEAN_crypto_kem_keypair(public_key, secret_key);
}

OQS_API OQS_STATUS OQS_KEM_kyber_768_cca_kem_encaps(uint8_t *ciphertext,
                                                    uint8_t *shared_secret,
                                                    const uint8_t *public_key) {
	return (OQS_STATUS) PQCLEAN_KYBER768_CLEAN_crypto_kem_enc(ciphertext, shared_secret, public_key);
}

/* CONSISTENCY FIX: ciphertext was declared "const unsigned char *" while every
 * sibling wrapper uses "const uint8_t *"; uint8_t is unsigned char on all
 * supported platforms, so this only normalises the declared type. */
OQS_API OQS_STATUS OQS_KEM_kyber_768_cca_kem_decaps(uint8_t *shared_secret,
                                                    const uint8_t *ciphertext,
                                                    const uint8_t *secret_key) {
	return (OQS_STATUS) PQCLEAN_KYBER768_CLEAN_crypto_kem_dec(shared_secret, ciphertext, secret_key);
}
#endif
#ifdef OQS_ENABLE_KEM_kyber_1024_cca_kem
/* Allocates and populates an OQS_KEM descriptor for Kyber-1024 (IND-CCA).
 * Returns NULL on allocation failure; the caller owns the returned struct. */
OQS_KEM *OQS_KEM_kyber_1024_cca_kem_new() {
	OQS_KEM *kem = malloc(sizeof(OQS_KEM));
	if (kem == NULL) {
		return NULL;
	}
	kem->method_name = OQS_KEM_alg_kyber_1024_cca_kem;
	kem->alg_version = "https://github.com/pq-crystals/kyber/commit/ab996e7460e5356b0e23aa034e7c2fe6922e60e6";
	/* NIST security category 5 (comparable to AES-256). */
	kem->claimed_nist_level = 5;
	kem->ind_cca = true;
	kem->length_public_key = OQS_KEM_kyber_1024_cca_kem_length_public_key;
	kem->length_secret_key = OQS_KEM_kyber_1024_cca_kem_length_secret_key;
	kem->length_ciphertext = OQS_KEM_kyber_1024_cca_kem_length_ciphertext;
	kem->length_shared_secret = OQS_KEM_kyber_1024_cca_kem_length_shared_secret;
	kem->keypair = OQS_KEM_kyber_1024_cca_kem_keypair;
	kem->encaps = OQS_KEM_kyber_1024_cca_kem_encaps;
	kem->decaps = OQS_KEM_kyber_1024_cca_kem_decaps;
	return kem;
}
#endif
|
<filename>opencl/precision/read_cl.c
/*
* read_cl.c
*
* Created on: Apr 3, 2015
* Author: panhao
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/*
 * Reads the entire contents of a file into a freshly malloc'd,
 * NUL-terminated buffer. Exits the process if the file cannot be opened
 * (preserving the original behaviour); returns NULL on any other error.
 * The caller owns (and must free) the returned buffer.
 */
char*
read_file (char *file_name)
{
  FILE *fp;
  char *p;
  long flen;
  size_t nread;

  /* "rb": binary mode so ftell reports the exact byte size on all
     platforms (no newline translation). */
  fp = fopen (file_name, "rb");
  if (fp == NULL)
    {
      perror ("Error while opening the file.\n");
      exit (EXIT_FAILURE);
    }
  /* BUG FIX: check fseek/ftell for failure instead of trusting them. */
  if (fseek (fp, 0L, SEEK_END) != 0 || (flen = ftell (fp)) < 0)
    {
      fclose (fp);
      return NULL;
    }
  p = (char *) malloc ((size_t) flen + 1);
  if (p == NULL)
    {
      fclose (fp);
      return NULL;
    }
  fseek (fp, 0L, SEEK_SET);
  /* BUG FIX: the original ignored fread's return value and unconditionally
     wrote the terminator at flen; terminate after the bytes actually read
     so a short read cannot leave uninitialised memory in the string. */
  nread = fread (p, 1, (size_t) flen, fp);
  p[nread] = 0;
  fclose (fp);
  return p;
}
|
import random

def generate_password(length):
    """Return a pseudo-random alphanumeric password of ``length`` characters.

    A non-positive ``length`` yields an empty string.

    NOTE(review): ``random`` is not cryptographically secure; for real
    credentials use ``secrets.choice`` instead.
    """
    characters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    # random.choice is the idiomatic (and equivalent) form of indexing with
    # random.randint(0, len(characters) - 1).
    return "".join(random.choice(characters) for _ in range(length))
#Steps of the official ros installation for kinetic distro as 7/03/2018
#Set up sources
sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
#Set up keys
# NOTE(review): the sks-keyservers pool has since been retired; if this key
# fetch fails, use the keyserver currently recommended by the ROS wiki.
sudo apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
#Installation
sudo apt-get update
#Desktop full install
sudo apt-get install ros-kinetic-desktop-full
#init rosdep
sudo rosdep init
rosdep update
#environment setup (sources ROS in every new shell)
echo "source /opt/ros/kinetic/setup.bash" >> ~/.bashrc
#dependencies for building packages
sudo apt-get install python-rosinstall python-rosinstall-generator python-wstool build-essential
<reponame>NajibAdan/kitsu-server
require 'flipper/adapters/redis'

# Feature-flag configuration: back Flipper with Redis and declare the
# user groups that flags can be enabled for.
Flipper.configure do |config|
  config.default do
    # Connect to Redis and initialize Flipper
    adapter = Flipper::Adapters::Redis.new(Redis.new)
    Flipper.new(adapter)
  end
end

# Site staff: admins only.
Flipper.register(:staff) do |user|
  user.try(:has_role?, :admin)
end

# Paying "pro" subscribers.
Flipper.register(:pro) do |user|
  user.try(:pro?)
end

# Moderators, or admins scoped to Anime.
Flipper.register(:mod) do |user|
  user.try(:has_role?, :mod) || user.try(:has_role?, :admin, Anime)
end

# Users migrated from Aozora (identified by an ao_id).
Flipper.register(:aozora) do |user|
  user.try(:ao_id)
end
|
# Train and evaluate a decision-tree classifier that predicts a patient's
# disease from four symptom features.
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split

# Load the patient records
df = pd.read_csv('patients_data.csv')

# Feature matrix (symptoms) and target vector (diagnosis)
features = df[['fever', 'headache', 'sore throat', 'nausea']]
target = df['disease']

# Hold out 20% of the rows for evaluation
features_train, features_test, target_train, target_test = train_test_split(
    features, target, test_size=0.2, random_state=0)

# Fit the decision tree on the training split
classifier = DecisionTreeClassifier()
classifier.fit(features_train, target_train)

# Predict on the held-out split
predictions = classifier.predict(features_test)

# Report mean accuracy on the test split
accuracy = classifier.score(features_test, target_test)
print('Accuracy: ', accuracy)
import React from 'react';
import clsx from 'clsx';
import styles from './styles.module.css';
// Content for the three homepage feature cards. Each entry supplies the
// card title, its SVG icon component, and a short JSX description.
const FeatureList = [
  {
    title: 'StarkNet in your hands',
    Svg: require('@site/static/img/starknet.svg').default,
    description: (
      <>
        Get StarkNet data directly from Juno.
      </>
    ),
  },
  {
    title: 'Service Orientated Architecture',
    Svg: require('@site/static/img/cloud.svg').default,
    description: (
      <>
        Different parts of Juno's functionality are encapsulated as separate services.
      </>
    ),
  },
  {
    title: 'Powered by Golang',
    Svg: require('@site/static/img/go_saiyan.svg').default,
    description: (
      <>
        Go makes it easy for us to write maintainable, testable, lightweight and performant code.
      </>
    ),
  },
];
function Feature({Svg, title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img"/>
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build phase script.
# NOTE(review): this file is normally regenerated by `pod install`; manual
# edits will be overwritten.
set -e
set -u
set -o pipefail

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Locate the framework in the build products dir (by full path, then by
  # basename), falling back to the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/PublicModule/PublicModule.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SDWebImage/SDWebImage.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/PublicModule/PublicModule.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SDWebImage/SDWebImage.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
package ru.autometry.obd.commands.listener;
import ru.autometry.obd.commands.Command;
import ru.autometry.obd.commands.Response;
import ru.autometry.obd.processing.CommandDispatcher;
import ru.autometry.utils.common.ByteUtils;
/**
* Created by jeck on 13/08/14
*/
/**
 * Listener that logs OBD command traffic to stdout: responses (with a raw
 * hex dump) and errors; requests are currently silenced.
 */
public class LogListener implements Listener {
    /** Request logging is disabled; uncomment the line below to trace outgoing commands. */
    @Override
    public void onRequest(Command command, CommandDispatcher dispatcher) {
        //System.out.println(System.currentTimeMillis()+":: "+command.getId() + ": " + ByteUtils.debugHexString(command.getBytes()));
    }

    /** Logs each response with its timestamp, command id, parsed answers and raw hex dump. */
    @Override
    public void onResponse(Response response, CommandDispatcher dispatcher) {
        System.out.println(response.getTime().getTime() + ":: " + response.getCommand().getId() + "=" + response.getAnswers() + ", dump=" + ByteUtils.debugHexString(response.getRawResponse()));
    }

    /** Logs a failure for the given command together with the exception message. */
    @Override
    public void onError(Exception e, Command command, CommandDispatcher dispatcher) {
        System.out.println("ERROR on " + command.getId() + ", " + e.getLocalizedMessage());
    }
}
|
<reponame>JonathanO/phonehome
// Code formatting (scalariform) and style checking (scalastyle) plugins.
addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.3.0")
// Builds native OS packages (deb/rpm/zip) for the service.
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.0.6")
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")
// jdeb lets sbt-native-packager build Debian packages without a local dpkg toolchain.
libraryDependencies += "org.vafer" % "jdeb" % "1.3" artifacts (Artifact("jdeb", "jar", "jar"))
def levenshtein_distance(s1, s2):
    """Return the Levenshtein (edit) distance between ``s1`` and ``s2``.

    The distance is the minimum number of single-character insertions,
    deletions and substitutions needed to turn one string into the other.
    """
    # Trivial cases: identical strings, or one side empty.
    if s1 == s2:
        return 0
    if len(s1) == 0:
        return len(s2)
    if len(s2) == 0:
        return len(s1)

    # dp[r][c] holds the distance between s2[:r] and s1[:c].
    rows, cols = len(s2) + 1, len(s1) + 1
    dp = [[0] * cols for _ in range(rows)]

    # Converting to/from the empty prefix costs its length.
    for r in range(rows):
        dp[r][0] = r
    dp[0] = list(range(cols))

    for r in range(1, rows):
        for c in range(1, cols):
            substitution = 0 if s2[r - 1] == s1[c - 1] else 1
            dp[r][c] = min(
                dp[r - 1][c] + 1,                 # deletion
                dp[r][c - 1] + 1,                 # insertion
                dp[r - 1][c - 1] + substitution,  # substitution / match
            )

    return dp[-1][-1]
# Demo: one insertion turns 'Apple' into 'Apples', so the distance is 1.
string1 = 'Apple'
string2 = 'Apples'
score = levenshtein_distance(string1, string2)
print(score)  # Output: 1
// Simple model object holding a triangle's base and height and computing
// its area.
@interface Triangle : NSObject
@property (nonatomic) float base;   // length of the base
@property (nonatomic) float height; // perpendicular height
// Returns the area: 0.5 * base * height.
-(float) calculateArea;
@end

@implementation Triangle
-(float) calculateArea {
    return 0.5 * self.base * self.height;
}
@end
def fibonacci(n):
    """Return the n-th Fibonacci number (0-indexed: F(0)=0, F(1)=1).

    For negative ``n`` a message is printed and ``None`` is returned,
    preserving the original behaviour.
    """
    if n < 0:
        print("Incorrect input")
        return None
    a, b = 0, 1
    if n == 0:
        return a
    if n == 1:
        return b
    # BUG FIX: the original looped over range(2, n), which returned F(n-1)
    # for n >= 3 (e.g. fibonacci(3) == 1 instead of 2) while returning F(n)
    # for n in {0, 1, 2}. Looping through n inclusive yields F(n) for all n.
    for _ in range(2, n + 1):
        a, b = b, a + b
    return b
using System.Xml;
/// <summary>
/// Cursor-style wrapper over an <see cref="XmlNode"/> that supports moving
/// through an XML tree (parent, first child, next sibling) and inspecting
/// the current node.
/// </summary>
public class XmlNavigator
{
    // The node the navigator currently points at; never null after construction.
    private XmlNode currentNode;

    /// <summary>Creates a navigator positioned at <paramref name="node"/>.</summary>
    public XmlNavigator(XmlNode node)
    {
        currentNode = node;
    }

    /// <summary>Moves to the parent node; returns false (without moving) at the root.</summary>
    public bool MoveToParent()
    {
        if (currentNode.ParentNode != null)
        {
            currentNode = currentNode.ParentNode;
            return true;
        }
        return false;
    }

    /// <summary>Moves to the first child; returns false (without moving) on a leaf.</summary>
    public bool MoveToFirstChild()
    {
        if (currentNode.FirstChild != null)
        {
            currentNode = currentNode.FirstChild;
            return true;
        }
        return false;
    }

    /// <summary>Moves to the next sibling; returns false (without moving) on the last sibling.</summary>
    public bool MoveToNextSibling()
    {
        if (currentNode.NextSibling != null)
        {
            currentNode = currentNode.NextSibling;
            return true;
        }
        return false;
    }

    /// <summary>Returns the qualified name of the current node.</summary>
    public string GetCurrentElementName()
    {
        return currentNode.Name;
    }

    /// <summary>Returns the concatenated text content of the current node and its children.</summary>
    public string GetInnerText()
    {
        return currentNode.InnerText;
    }

    /// <summary>Returns the current element's attributes, or null when the node is not an element.</summary>
    public XmlAttributeCollection GetAttributes()
    {
        if (currentNode.NodeType == XmlNodeType.Element)
        {
            return ((XmlElement)currentNode).Attributes;
        }
        return null;
    }
}
// NOTE(review): this method is declared outside any class, which is not
// valid C# — it presumably belongs to an enclosing static class not shown
// here; confirm and relocate. The `document` parameter is unused.
public static XmlNavigator CreateNavigator(XmlDocument document, XmlNode node)
{
    return new XmlNavigator(node);
}
<gh_stars>0
# Capistrano namespace.
module Capistrano
  # GitCopy deployment strategy namespace.
  module GitCopy
    # Version of the capistrano-git-copy gem.
    VERSION = '1.0.0'
  end
end
|
/*
* COMMAND LINE INTERFACE: CREATE ASSET
*/
// DEFINE DEPENDENCIES
const fs = require('fs');
const ops = require('../operations/ops');
// LOCAL VARIABLES
/**
 * Creates a single inventory role from the JSON template, overriding the
 * name/description/parent fields with the supplied item's values.
 * Errors are logged and swallowed so one failure does not abort a batch.
 *
 * @param {{name: string, description: string, parent: string}} item
 * @returns {Promise<void>}
 */
async function addInventoryRole(item) {
  // Load the role template fresh for each item so mutations cannot leak
  // between calls.
  const templateFile = fs.readFileSync('./server/models/inventoryRoleTemplate.json', 'utf8');
  const role = JSON.parse(templateFile);

  // CUSTOMIZED ELEMENTS
  role.name = item.name;
  role.description = item.description;
  role.parent = item.parent;

  // EXECUTE
  try {
    await ops.Inventory.Roles.create(role);
    console.log('role added'); // FIX: was the typo "roll added"
  } catch (error) {
    console.log('addInventoryRole error: ');
    console.error(error);
  }
}
// Inventory roles to create; `parent` is the Firebase push-id of the
// parent inventory node each role attaches to.
var allItems = [
  { name: 'Pint Lids', description: "50x pint lids.", parent: "-MX8jNAp8ynkyxgLVUbE" },
  { name: 'Half Pint Lids', description: "50x half pint lids.", parent: "-MX8jNAwukh34erN2ZqZ" },
  { name: 'Platter Lids', description: "10x platter lids.", parent: "-MX8jNAxTar3jJEh0KFa" },
  { name: 'Platter Bottoms', description: "10x platter bottoms.", parent: "-MX8jNAxTar3jJEh0KFa" }
];
// EXECUTE
// FIX: addInventoryRole is async, and Array#forEach ignores returned
// promises — the original fired every creation concurrently as floating
// promises, so the process could report success before any finished.
// Run them sequentially inside an async IIFE and surface failures.
(async () => {
  for (const item of allItems) {
    await addInventoryRole(item);
  }
})().catch(console.error);
|
<reponame>socialsensor/social-event-detection
package clustering.louvain;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import models.MultimodalItem;
import org.gephi.data.attributes.api.AttributeColumn;
import org.gephi.data.attributes.api.AttributeController;
import org.gephi.data.attributes.api.AttributeModel;
import org.gephi.graph.api.GraphController;
import org.gephi.graph.api.GraphFactory;
import org.gephi.graph.api.GraphModel;
import org.gephi.graph.api.Node;
import org.gephi.graph.api.UndirectedGraph;
import org.gephi.io.exporter.api.ExportController;
import org.gephi.partition.api.Partition;
import org.gephi.partition.api.PartitionController;
import org.gephi.partition.plugin.NodeColorTransformer;
import org.gephi.project.api.ProjectController;
import org.gephi.project.api.Workspace;
import org.gephi.statistics.plugin.Modularity;
import org.openide.util.Lookup;
import clustering.SameClassLink;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.Pair;
/**
 * Louvain community detection over a JUNG graph of {@code MultimodalItem}s
 * connected by {@code SameClassLink} edges, delegated to Gephi's
 * {@link Modularity} statistic.
 *
 * NOTE(review): a single Gephi workspace/graph model is kept for the
 * object's lifetime and the {@code nodes} map persists across calls, so
 * repeated {@link #partition} calls accumulate nodes in the same graph —
 * confirm whether instances are meant to be single-use.
 */
public class LouvainClustering {
    @SuppressWarnings("unused")
    private Workspace workspace; // held so the Gephi project stays open
    private ProjectController pc;
    private GraphModel graphModel;
    // Maps MultimodalItem.id -> Gephi node so each item is inserted once.
    private Map<String, Node> nodes = new HashMap<String, Node>();
    private GraphFactory factory;
    private PartitionController partitionController;

    /**
     * Boots a fresh Gephi project and caches the controllers needed for
     * graph construction and partitioning.
     */
    public LouvainClustering() {
        this.pc = Lookup.getDefault().lookup(ProjectController.class);
        // newProject() must precede getModel(): the model belongs to the
        // newly created workspace.
        pc.newProject();
        this.workspace = pc.getCurrentWorkspace();
        this.graphModel = Lookup.getDefault().lookup(GraphController.class).getModel();
        this.factory = graphModel.factory();
        this.partitionController = Lookup.getDefault().lookup(PartitionController.class);
    }

    /**
     * Copies the JUNG item graph into Gephi's undirected graph model,
     * logging progress for large inputs.
     *
     * @param itemsGraph source graph of items and same-class links
     * @return Gephi undirected graph containing the copied nodes/edges
     */
    private UndirectedGraph getGraph(Graph<MultimodalItem, SameClassLink> itemsGraph) {
        UndirectedGraph graph = graphModel.getUndirectedGraph();
        int v = 0; // node-insertion progress counter
        Collection<MultimodalItem> vertices = itemsGraph.getVertices();
        for(MultimodalItem vertex : vertices) {
            // Skip ids already inserted (possibly by a previous call).
            if(!nodes.containsKey(vertex.id)) {
                Node node = factory.newNode(vertex.id);
                nodes.put(vertex.id, node);
                graph.addNode(node);
            }
            if(++v%10000==0) {
                System.out.println(v + " nodes inserted into graph");
            }
        }
        int e = 0; // edge-insertion progress counter
        long t1 = 0, t2 = 0; // cumulative ms: endpoint lookup vs. edge insert
        Collection<SameClassLink> edges = itemsGraph.getEdges();
        for(SameClassLink edge : edges) {
            long t = System.currentTimeMillis();
            Pair<MultimodalItem> endpoints = itemsGraph.getEndpoints(edge);
            MultimodalItem item1 = endpoints.getFirst();
            MultimodalItem item2 = endpoints.getSecond();
            t1 += (System.currentTimeMillis() - t);
            Node n1 = nodes.get(item1.id);
            Node n2 = nodes.get(item2.id);
            // Only unit-weight links become Gephi edges (undirected) —
            // presumably a deliberate threshold; TODO confirm why non-unit
            // weights are dropped.
            if(edge.weight == 1) {
                t = System.currentTimeMillis();
                graph.addEdge(factory.newEdge(n1, n2, edge.weight, false));
                t2 += (System.currentTimeMillis() - t);
            }
            if(++e%100000==0) {
                System.out.println(e + " edges inserted into graph. Load:" + t1 + ", Insert:" + t2);
            }
        }
        return graph;
    }

    /**
     * Runs Louvain community detection on the given item graph.
     *
     * @param itemsGraph source graph to cluster
     * @return partition of Gephi nodes keyed by their Modularity class
     */
    public Partition<Node> partition(Graph<MultimodalItem, SameClassLink> itemsGraph) {
        UndirectedGraph graph = getGraph(itemsGraph);
        System.out.println("Nodes: " + graph.getNodeCount());
        System.out.println("Edges: " + graph.getEdgeCount());
        AttributeModel attributeModel = Lookup.getDefault().lookup(AttributeController.class).getModel();
        // Run modularity algorithm - community detection
        Modularity modularity = new Modularity();
        modularity.setUseWeight(true);
        modularity.setResolution(1.); // default resolution: no size bias
        modularity.setRandom(true);   // randomized order — results may vary per run
        modularity.execute(graphModel, attributeModel);
        // Modularity writes each node's community id into this column.
        AttributeColumn modColumn = attributeModel.getNodeTable().getColumn(Modularity.MODULARITY_CLASS);
        @SuppressWarnings("unchecked")
        Partition<Node> p = partitionController.buildPartition(modColumn, graph);
        return p;
    }

    /**
     * Colors nodes per partition and exports the current graph to a file.
     * Gephi infers the export format from the filename extension.
     *
     * @param filename output path
     * @param p        partition whose classes determine node colors
     */
    public void save(String filename, Partition<Node> p) {
        NodeColorTransformer nodeColorTransformer = new NodeColorTransformer();
        nodeColorTransformer.randomizeColors(p);
        partitionController.transform(p, nodeColorTransformer);
        ExportController ec = Lookup.getDefault().lookup(ExportController.class);
        // Export
        try {
            ec.exportFile(new File(filename));
        } catch (IOException ex) {
            // Best-effort: log and continue rather than propagate.
            ex.printStackTrace();
        }
    }
}
<filename>src/swipe-up/debug/buttons/close-me-button.js
import $ from '../../utils/dom'
/**
 * Appends an "x" close button to the debug widget's container.
 * Clicking the button hides the owning debug widget.
 */
export default class CloseMeButton {
    constructor(debugWidget) {
        this._debugWidget = debugWidget;
        this._selfName = 'debugWidgetCloseBtn';
        const buttonEl = document.createElement('button');
        buttonEl.className = this._selfName;
        $(buttonEl).html('x');
        this._debugWidget._debugWidgetContainer.appendChild(buttonEl);
    }

    /**
     * Wires the click handler. Must run after the button is in the DOM,
     * because the element is located by its class-name selector.
     */
    attachClickAfterButtonAddedToDom() {
        const hideWidget = () => this._debugWidget.hide();
        $(`.${this._selfName}`).click(hideWidget);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.