text stringlengths 1 1.05M |
|---|
# Convenience tooling for interactive development on the node.
DEV_PACKAGES="
git
build-essential
curl
emacs24-nox
htop
nmon
slurm
tcpdump
unzip
vim
"
# Packages the node needs to function (time sync + shared storage client).
ESSENTIAL_PACKAGES="
ntp
nfs-common
"
# Refresh the package index, then install everything in one transaction.
# The variables are intentionally unquoted so each newline-separated
# entry expands to a separate package argument.
sudo apt-get update
sudo apt-get -y install $ESSENTIAL_PACKAGES $DEV_PACKAGES
|
#!/bin/sh
# Status-bar widget: prints the total number of pending Arch Linux package
# updates (official repos via checkupdates + AUR via yay), or an empty
# line when everything is current.
#
# NOTE(review): each `if ! var=$(cmd | wc -l)` guard tests the exit status
# of the pipeline, which is wc's — wc almost always succeeds, so the `=0`
# fallback rarely fires. A missing/failing checker still yields 0 because
# wc counts zero lines; confirm before relying on the guard.
if ! updates_arch=$(checkupdates 2> /dev/null | wc -l ); then
    updates_arch=0
fi
# Alternative AUR helpers are kept below — uncomment the one you use.
if ! updates_aur=$(yay -Qum 2> /dev/null | wc -l); then
    # if ! updates_aur=$(paru -Qum 2> /dev/null | wc -l); then
    # if ! updates_aur=$(cower -u 2> /dev/null | wc -l); then
    # if ! updates_aur=$(trizen -Su --aur --quiet | wc -l); then
    # if ! updates_aur=$(pikaur -Qua 2> /dev/null | wc -l); then
    # if ! updates_aur=$(rua upgrade --printonly 2> /dev/null | wc -l); then
    updates_aur=0
fi
updates=$((updates_arch + updates_aur))
if [ "$updates" -gt 0 ]; then
    echo " $updates"
else
    echo ""
fi
|
/*******************************************************************************
* Copyright (c) 2018-07-03 @author <a href="mailto:<EMAIL>"><NAME></a>.
* All rights reserved.
*
* Contributors:
* <a href="mailto:<EMAIL>"><NAME></a> - initial API and implementation.
* Auto Generate By foreveross.com Quick Deliver Platform.
******************************************************************************/
package com.foreveross.qdp.application.system.auth.rs.impl;
import com.foreveross.common.ResultBean;
import com.foreveross.qdp.application.system.auth.AuthAccountApplication;
import com.foreveross.qdp.infra.vo.system.auth.AuthAccountVO;
import com.foreveross.qdp.infra.vo.system.auth.EditPasswordVO;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.lang3.StringUtils;
import org.iff.infra.util.BeanHelper;
import org.iff.infra.util.NumberHelper;
import org.iff.infra.util.PreCheckHelper;
import org.iff.infra.util.mybatis.plugin.Page;
import org.springframework.web.bind.annotation.*;
import javax.inject.Inject;
import java.util.HashMap;
import java.util.Map;
/**
* AuthAccount
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @since 2018-07-03
* @version 1.0.0
* auto generate by qdp v5.0.
*/
@RestController
@RequestMapping("/api/AuthAccount")
@Api("AuthAccount Api")
public class AuthAccountRSApplicationImpl {

    @Inject
    AuthAccountApplication authAccountApplication;

    /**
     * GET /api/AuthAccount/get/{id} — fetch one AuthAccountVO by primary key.
     *
     * @param id account primary key
     * @return success with the AuthAccountVO body, or error with the exception message
     */
    @ApiOperation(value = "get AuthAccount by id", notes = "get AuthAccount by id")
    @GetMapping("/get/{id}")
    public ResultBean getAuthAccountById(@PathVariable String id) {
        try {
            return ResultBean.success().setBody(authAccountApplication.getAuthAccountById(id));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * GET /api/AuthAccount/page/... — page AuthAccountVO rows.
     * Trailing path segments are optional; the literal "-" marks an absent value.
     *
     * @param loginEmail optional filter ("-" = none)
     * @param status optional filter ("-" = none)
     * @param currentPage 1-based page number, defaults to 1
     * @param pageSize rows per page, defaults to 10
     * @param asc column to order ascending ("-" = none)
     * @param desc column to order descending ("-" = none)
     * @return success with the page result, or error with the exception message
     */
    @ApiOperation(value = "page find AuthAccount", notes = "page find AuthAccount")
    @GetMapping({"/page/{loginEmail}/{status}/{currentPage}/{pageSize}/{asc}/{desc}",
            "/page/{loginEmail}/{status}/{currentPage}/{pageSize}",
            "/page/{loginEmail}/{status}/{currentPage}",
            "/page/{loginEmail}/{status}",
            "/page/{loginEmail}",
            "/page"})
    public ResultBean pageFindAuthAccount(
            @PathVariable(required = false, value = "loginEmail") String loginEmail,
            @PathVariable(required = false, value = "status") String status,
            @PathVariable(required = false, value = "currentPage") Integer currentPage,
            @PathVariable(required = false, value = "pageSize") Integer pageSize,
            @PathVariable(required = false, value = "asc") String asc,
            @PathVariable(required = false, value = "desc") String desc) {
        try {
            AuthAccountVO vo = toQueryVO(loginEmail, status);
            Page page = toPage(currentPage, pageSize, asc, desc);
            return ResultBean.success().setBody(authAccountApplication.pageFindAuthAccount(vo, page));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * GET /api/AuthAccount/pageMap/... — same query as pageFindAuthAccount but
     * the rows are returned as maps. See pageFindAuthAccount for parameter rules.
     *
     * @return success with the page result, or error with the exception message
     */
    @ApiOperation(value = "page find AuthAccount", notes = "page find AuthAccount")
    @GetMapping({"/pageMap/{loginEmail}/{status}/{currentPage}/{pageSize}/{asc}/{desc}",
            "/pageMap/{loginEmail}/{status}/{currentPage}/{pageSize}",
            "/pageMap/{loginEmail}/{status}/{currentPage}",
            "/pageMap/{loginEmail}/{status}",
            "/pageMap/{loginEmail}",
            "/pageMap"})
    public ResultBean pageFindAuthAccountMap(
            @PathVariable(required = false, value = "loginEmail") String loginEmail,
            @PathVariable(required = false, value = "status") String status,
            @PathVariable(required = false, value = "currentPage") Integer currentPage,
            @PathVariable(required = false, value = "pageSize") Integer pageSize,
            @PathVariable(required = false, value = "asc") String asc,
            @PathVariable(required = false, value = "desc") String desc) {
        try {
            AuthAccountVO vo = toQueryVO(loginEmail, status);
            Page page = toPage(currentPage, pageSize, asc, desc);
            return ResultBean.success().setBody(authAccountApplication.pageFindAuthAccountMap(vo, page));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * POST /api/AuthAccount/ — create a new AuthAccount from the JSON body.
     *
     * @param vo the account to create
     * @return success with the created AuthAccountVO, or error with the exception message
     */
    @ApiOperation(value = "add AuthAccount", notes = "add AuthAccount")
    @PostMapping("/")
    public ResultBean addAuthAccount(@RequestBody AuthAccountVO vo) {
        try {
            return ResultBean.success().setBody(authAccountApplication.addAuthAccount(vo));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * PUT /api/AuthAccount/ — update an existing AuthAccount from the JSON body.
     *
     * @param vo the account to update
     * @return success with the updated AuthAccountVO, or error with the exception message
     */
    @ApiOperation(value = "update AuthAccount", notes = "update AuthAccount")
    @PutMapping("/")
    public ResultBean updateAuthAccount(@RequestBody AuthAccountVO vo) {
        try {
            return ResultBean.success().setBody(authAccountApplication.updateAuthAccount(vo));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * DELETE /api/AuthAccount/{id} — remove one account, or several when
     * {id} is a comma-joined list of ids.
     *
     * @param id single id or comma-separated ids
     * @return success with empty body, or error with the exception message
     */
    @ApiOperation(value = "delete AuthAccount", notes = "delete AuthAccount")
    @DeleteMapping("/{id}")
    public ResultBean removeAuthAccountById(@PathVariable String id) {
        try {
            if (id.indexOf(',') > -1) {
                authAccountApplication.removeAuthAccountByIds(StringUtils.split(id, ','));
            } else {
                authAccountApplication.removeAuthAccountById(id);
            }
            return ResultBean.success();
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * GET /api/AuthAccount/get/loginEmail/{loginEmail} — fetch one account by
     * its unique login email.
     *
     * @param loginEmail unique login email
     * @return success with the AuthAccountVO, or error with the exception message
     */
    @ApiOperation(value = "get AuthAccount by loginEmail", notes = "get AuthAccount by loginEmail")
    @GetMapping("/get/loginEmail/{loginEmail}")
    public ResultBean getByLoginEmail(@PathVariable String loginEmail) {
        try {
            return ResultBean.success().setBody(authAccountApplication.getByLoginEmail(loginEmail));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * GET /api/AuthAccount/pageAssignAuthRole/... — page the AuthRole rows
     * assignable to an account. See pageFindAuthAccount for parameter rules.
     *
     * @return success with the page of AuthRoleVO rows, or error with the exception message
     */
    @ApiOperation(value = "page find assign AuthRole", notes = "page find assign AuthRole")
    @GetMapping({"/pageAssignAuthRole/{loginEmail}/{status}/{currentPage}/{pageSize}/{asc}/{desc}",
            "/pageAssignAuthRole/{loginEmail}/{status}/{currentPage}/{pageSize}",
            "/pageAssignAuthRole/{loginEmail}/{status}/{currentPage}",
            "/pageAssignAuthRole/{loginEmail}/{status}",
            "/pageAssignAuthRole/{loginEmail}",
            "/pageAssignAuthRole"})
    public ResultBean pageFindAssignAuthRole(
            @PathVariable(required = false, value = "loginEmail") String loginEmail,
            @PathVariable(required = false, value = "status") String status,
            @PathVariable(required = false, value = "currentPage") Integer currentPage,
            @PathVariable(required = false, value = "pageSize") Integer pageSize,
            @PathVariable(required = false, value = "asc") String asc,
            @PathVariable(required = false, value = "desc") String desc) {
        try {
            AuthAccountVO vo = toQueryVO(loginEmail, status);
            Page page = toPage(currentPage, pageSize, asc, desc);
            return ResultBean.success().setBody(authAccountApplication.pageFindAssignAuthRole(vo, page));
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * POST /api/AuthAccount/assignAuthRole — assign roles to an account.
     *
     * NOTE(review): unlike add/update, this parameter is NOT annotated with
     * {@code @RequestBody}, so Spring binds it from query/form parameters
     * rather than a JSON body — confirm this is intentional before changing.
     *
     * @param vo account/role assignment data
     * @return success with empty body, or error with the exception message
     */
    @ApiOperation(value = "assign AuthRole by id(s)", notes = "assign AuthRole by id(s)")
    @PostMapping("/assignAuthRole")
    public ResultBean assignAuthRole(AuthAccountVO vo) {
        try {
            authAccountApplication.assignAuthRole(vo);
            return ResultBean.success();
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * POST /api/AuthAccount/editPassword — change an account password.
     *
     * NOTE(review): bound from query/form parameters (no {@code @RequestBody});
     * confirm intended, and prefer HTTPS since credentials travel in the request.
     *
     * @param vo old/new password payload
     * @return success with empty body, or error with the exception message
     */
    @ApiOperation(value = "operation editPassword", notes = "operation editPassword")
    @PostMapping("/editPassword")
    public ResultBean editPassword(EditPasswordVO vo) {
        try {
            authAccountApplication.editPassword(vo);
            return ResultBean.success();
        } catch (Exception e) {
            return ResultBean.error().setBody(e.getMessage());
        }
    }

    /**
     * Builds the query VO from the optional path segments shared by all page
     * endpoints. The "-" placeholder is mapped to null by PreCheckHelper.
     */
    private AuthAccountVO toQueryVO(String loginEmail, String status) {
        Map<String, String> map = new HashMap<String, String>();
        map.put("loginEmail", PreCheckHelper.equalsToNull(loginEmail, "-"));
        map.put("status", PreCheckHelper.equalsToNull(status, "-"));
        return BeanHelper.copyProperties(AuthAccountVO.class, map);
    }

    /**
     * Builds paging/ordering info shared by all page endpoints.
     * Defaults: page 1, size 10; "-" disables the asc/desc ordering column.
     */
    private Page toPage(Integer currentPage, Integer pageSize, String asc, String desc) {
        Page page = new Page();
        page.setCurrentPage(NumberHelper.getInt(currentPage, 1));
        page.setPageSize(NumberHelper.getInt(pageSize, 10));
        String ascField = PreCheckHelper.equalsToNull(asc, "-");
        String descField = PreCheckHelper.equalsToNull(desc, "-");
        if (StringUtils.isNotBlank(ascField)) {
            page.addAscOrderBy(ascField);
        }
        if (StringUtils.isNotBlank(descField)) {
            page.addDescOrderBy(descField);
        }
        return page;
    }
}
|
#!/bin/bash
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# "These are common commands used in various situations:"
# " source-bringup.sh -s aosp -b c6 -t android-6.0.1_61 -m"
# " source-bringup.sh -s caf -b c6 -t LA.BF64.1.2.2_rb4.44 -m"
# " source-bringup.sh -b c6 -t android-6.0.1_r61 -m -s"
# " source-bringup.sh -u <Gerrit Username> -g <Gerrit URL> -P 29418 -b c6 -p"
# " source-bringup.sh -u <Gerrit Username> -b c6 -p -g -r"
# Hardcode the name of the rom here
# This is only used when pushing merges to Github
# See function push
custom_rom="CandyRoms"
# This is the array of upstream repos we track (populated by get_repos)
upstream=()
# This is the array of repos to blacklist and not merge
# Add or remove repos as you see fit
blacklist=('manifest' 'prebuilt' 'packages/apps/DeskClock')
# ANSI escape sequences for colored terminal output
COLOR_RED='\033[0;31m'
COLOR_BLANK='\033[0m'
COLOR_GREEN='\033[0;32m'
# Returns 0 (success) when the repo path given as $1 is in the global
# `blacklist` array, 1 otherwise. The expansion is quoted so entries
# containing spaces are compared as single words.
function is_in_blacklist() {
    for j in "${blacklist[@]}"
    do
        if [ "$j" == "$1" ]; then
            return 0;
        fi
    done
    return 1;
}
# Populates the global `upstream` array and the aosp-list file with every
# repo that (a) exists upstream at $REPO, (b) appears in our candy.xml
# manifest on remote $REMOTE, and (c) is not blacklisted.
function get_repos() {
    if [ -f aosp-list ]; then
        rm -f aosp-list
    fi
    touch aosp-list
    declare -a repos=( $(repo list | cut -d: -f1) )
    curl --output /tmp/rebase.tmp $REPO --silent # Download the html source of the Android source page
    # Since their projects are listed, we can grep for them
    for i in ${repos[@]}
    do
        if grep -qw "$i" /tmp/rebase.tmp; then # If Google/CAF has it and
            if grep -qw "$i" ./.repo/manifests/candy.xml; then # If we have it in our manifest and
                if grep -w "$i" ./.repo/manifests/candy.xml | grep -qe "remote=\"$REMOTE\""; then # If we track our own copy of it
                    if ! is_in_blacklist $i; then # If it's not in our blacklist
                        upstream+=("$i") # Then we need to update it
                        echo $i >> aosp-list
                    else
                        echo "================================================"
                        echo " "
                        echo "$i is in blacklist"
                    fi
                fi
            fi
        fi
    done
    echo " "
    echo "I have found a total of ${#upstream[@]} repositories being tracked"
    echo "that will be checked for $TAG and merged if applicable."
    echo " "
    rm /tmp/rebase.tmp
}
# Removes the working tree of every tracked upstream repo so the next
# `repo sync` checks them out fresh. The path is quoted so entries with
# unusual characters are treated as a single argument to rm.
function delete_upstream() {
    for i in "${upstream[@]}"
    do
        rm -rf "$i"
    done
}
# Force-syncs the whole tree so every repo matches the manifest before
# merging; exits the script with status 1 when the sync fails.
function force_sync() {
    echo "================================================"
    echo " "
    echo " Force Syncing all your repos "
    echo " and deleting all upstream repos "
    echo " This is done so we make sure you're up to date "
    echo " "
    echo "================================================"
    echo " "
    echo "Repo Syncing........."
    sleep 10
    # Test the command directly instead of inspecting $? afterwards.
    if repo sync -c --force-sync >/dev/null 2>&1; then # Silence!
        echo "Repo Sync success"
    else
        echo "Repo Sync failure"
        exit 1
    fi
}
# Prints the overall merge outcome: a success banner when the global
# `failed` array is empty, otherwise the list of repos whose merge
# produced conflicts (in red).
function print_result() {
    if [ ${#failed[@]} -eq 0 ]; then
        echo " "
        # Fixed typo in the user-facing message ("sucessfully").
        echo "========== "$TAG" is merged successfully =========="
        echo "========= Compile and test before pushing to github ========="
        echo " "
    else
        echo -e $COLOR_RED
        echo -e "These repos have merge errors: \n"
        for i in "${failed[@]}"
        do
            echo -e "$i"
        done
        echo -e $COLOR_BLANK
    fi
}
# Merges $TAG into every repo listed in aosp-list. For each repo it
# aborts any stale merge, re-syncs, (re)creates a scratch merge branch,
# ensures the aosp/caf remote exists and is fetched, then merges the tag.
# Repos whose merge fails are appended to the global `failed` array,
# which print_result reports afterwards.
function merge() {
    while read path; do
        project=`echo ${path} | sed -e 's/\//\_/g'`
        echo " "
        echo "====================================================================="
        echo " "
        echo " PROJECT: ${project} -> [ ${path}/ ]"
        echo " "
        cd $path;
        git merge --abort >/dev/null 2>&1; # Silence!
        repo sync -d .
        if [ "$aosp" = "1" ]; then
            if git branch | grep "android-aosp-merge" > /dev/null; then
                echo -e $COLOR_GREEN
                echo "Deleting branch android-aosp-merge"
                git branch -D android-aosp-merge > /dev/null
                echo "Recreating branch android-aosp-merge"
                repo start android-aosp-merge .
                echo -e $COLOR_BLANK
            else
                echo -e $COLOR_GREEN
                echo "Creating branch android-aosp-merge"
                repo start android-aosp-merge .
                echo -e $COLOR_BLANK
            fi
        fi
        if [ "$aosp" = "1" ]; then
            if ! git remote | grep "aosp" > /dev/null; then
                git remote add aosp https://android.googlesource.com/platform/$path > /dev/null
                git fetch --tags aosp
            else
                git fetch --tags aosp
            fi
        fi
        if [ "$caf" = "1" ]; then
            if git branch | grep "android-caf-merge" > /dev/null; then
                echo -e $COLOR_GREEN
                echo "Deleting branch android-caf-merge"
                git branch -D android-caf-merge > /dev/null
                echo "Recreating branch android-caf-merge"
                repo start android-caf-merge .
                # Fix: every sibling uses `echo -e` so the color-reset escape
                # is interpreted; this one printed the literal '\033[0m'.
                echo -e $COLOR_BLANK
            else
                echo -e $COLOR_GREEN
                echo "Creating branch android-caf-merge"
                repo start android-caf-merge .
                echo -e $COLOR_BLANK
            fi
        fi
        if [ "$caf" = "1" ]; then
            if ! git remote | grep "caf" > /dev/null; then
                git remote add caf https://source.codeaurora.org/quic/la/platform/$path > /dev/null
                git fetch --tags caf
            else
                git fetch --tags caf
            fi
        fi
        if [ "$aosp" = "1" ]; then
            git merge $TAG;
        else
            git merge caf/$TAG;
        fi
        if [ $? -ne 0 ]; then # If merge failed
            failed+=($path/) # Add to the list of failed repos
        fi
        cd - > /dev/null
    done < aosp-list
}
# Pushes each merged repo from aosp-list to Gerrit at
# ssh://$USERNAME@$GERRIT:$PORT/$custom_rom/<project>, targeting
# refs/heads/$BRANCH. Failures are reported per project but do not
# abort the loop.
function push () {
    while read path;
    do
        project=`echo ${path} | sed -e 's/\//\_/g'`
        echo ""
        echo "====================================================================="
        echo " PROJECT: ${project} -> [ ${path}/ ]"
        echo ""
        cd $path;
        echo " Pushing..."
        git push --no-thin ssh://${USERNAME}@${GERRIT}:${PORT}/${custom_rom}/${project} HEAD:refs/heads/${BRANCH} >/dev/null 2>&1; # Silence!
        if [ $? -ne 0 ]; then # If the push failed
            echo " "
            echo "Failed to push ${project} to HEAD:refs/heads/${BRANCH}"
        else
            echo " Success!"
        fi
        cd - > /dev/null
    done < aosp-list
}
# Let's parse the users commands so that their order is not required.
# Credits to Noah Hoffman for making it possible to use Python's argparse module in shell scripts.
# See, https://github.com/nhoffman/argparse-bash, for more details.
# Python 2.6+ or 3.2+ is required for this to work.
# TODO: Rewrite this entire script in Python.
source $(dirname $0)/scripts/argparse.bash || exit 1
argparse "$@" <<EOF || exit 1
parser.add_argument('-s', dest='source', help='Target AOSP or CAF [AOSP is default]', nargs='?', const="aosp",
                    default="aosp")
parser.add_argument('-t', dest='tag', help='The tag from AOSP or CAF that we are merging')
parser.add_argument('-b', dest='branch', help='Your default branch name', required=True)
parser.add_argument('-r', dest='remote', help='Your default remote name', required=True)
parser.add_argument('-u', dest='username', help='Your username on Gerrit')
parser.add_argument('-g', dest='gerrit', help='URL Gerrit '
                    '[gerrit.bbqdroid.org is default]', nargs='?', const="gerrit.bbqdroid.org",
                    default="gerrit.bbqdroid.org")
parser.add_argument('-P', dest='port', help='Which port SSH listens on for Gerrit '
                    '[29418 is default]', nargs='?', const="29418", default="29418")
parser.add_argument('-m', dest='merge', help='Merge the specified tag '
                    '[No arg required]', nargs='?', const="merge")
parser.add_argument('-p', dest='push', help='Push merge to Github through Gerrit '
                    '[No arg required]', nargs='?', const="push")
EOF
# Bug fix: the expansions must be quoted — with an unset variable,
# `[ -n $MERGE ]` collapses to the one-argument form `[ -n ]`, which is
# always TRUE, so these requirement checks fired even when the flag was
# absent. Also exit non-zero so callers can detect the usage error.
if [ -z "$REMOTE" ] && [ -n "$MERGE" ]; then
    echo ""
    echo "source-bringup.sh: error: argument -r is required"
    echo ""
    exit 1
fi
if [ -z "$USERNAME" ] && [ -n "$PUSH" ]; then
    echo ""
    echo "source-bringup.sh: error: argument -u is required"
    echo ""
    exit 1
fi
if [ -z "$TAG" ] && [ -n "$MERGE" ]; then
    echo ""
    echo "source-bringup.sh: error: argument -t is required"
    echo ""
    exit 1
fi
# Pick the upstream to track (case-insensitive) and collect its repos.
case "${SOURCE}" in
    # Google source
    [aA][oO][sS][pP]) REPO=https://android.googlesource.com/platform/; aosp=1; caf=0; get_repos ;;
    # Code Aurora source
    [cC][aA][fF]) REPO=https://source.codeaurora.org/quic/la/platform/; aosp=0; caf=1; get_repos ;;
    # Wrong entry, try again
    *) echo " "; echo "Did you mean AOSP or CAF? I am confused!"; sleep 1 ;;
esac
if [[ $MERGE =~ ^([mM][eE][rR][gG][eE])$ ]]; then
    delete_upstream # Clean up sources
    force_sync # Force sync sources
    merge # Bringup sources to latest code
    print_result # Print any repos that failed, so we can fix merge issues
fi
if [[ $PUSH =~ ^([pP][uU][sS][hH])$ ]]; then
    push # Push latest changes through gerrit straight to github
fi
|
import java.util.*;
import java.lang.*;
/**
 * Pursuit simulation. Reads five doubles from stdin — vp (prey speed),
 * vd (pursuer speed), t (pursuer's head-start delay in hours), f (extra
 * delay after each return trip) and c (distance to safety) — and prints
 * how many times the pursuer catches the prey before it covers distance c.
 *
 * NOTE(review): a meaningful chase assumes vd > vp; when vd <= vp the
 * computed catch point is non-positive (or infinite for vd == vp), the
 * loop breaks immediately and 0 is printed.
 */
public class escape {
    public static void main(String args[]) {
        Scanner sc = new Scanner(System.in);
        double vp = sc.nextDouble();
        double vd = sc.nextDouble();
        double t = sc.nextDouble();
        double f = sc.nextDouble();
        double c = sc.nextDouble();
        // Distance the prey covers during the pursuer's head start.
        double p = vp * t;
        double b = 0; // number of interceptions so far
        while (p < c) {
            // Time for the pursuer to close the current gap, and the point
            // where it overtakes the prey (both previously recomputed three
            // times per iteration; hoisted for clarity).
            double closeTime = p / (vd - vp);
            double catchPoint = vd * closeTime;
            if (catchPoint < c && catchPoint > 0) {
                b++;
                // Prey position after the pursuer flies back and waits f hours.
                p = catchPoint + (closeTime + f) * vp;
            } else {
                break;
            }
        }
        System.out.print((int) b);
    }
}
<reponame>VladimirHumeniuk/keystone
import { css } from 'glamor';
import React, { PropTypes } from 'react';
import octicons from './octicons';
import colors from './colors';
import sizes from './sizes';
import classes from './styles';
// FIXME static octicon classes leaning on Elemental to avoid duplicate
// font and CSS; inflating the project size

// Renders an octicon glyph. Composes the base glyph class, a color
// modifier (only when `color` names a known palette entry), a size
// modifier and any caller-provided glamor styles with the octicon's own
// class; color strings not in the palette fall through to an inline
// `style.color` so arbitrary CSS colors still work.
function Glyph ({
  cssStyles,
  className,
  color,
  component: Component,
  name,
  size,
  style,
  ...props
}) {
  const colorIsValidType = Object.keys(colors).includes(color);
  props.className = css(
    classes.glyph,
    colorIsValidType && classes['color__' + color],
    classes['size__' + size],
    cssStyles
  ) + ` ${octicons[name]}`;
  if (className) {
    props.className += (' ' + className);
  }
  // support random color strings
  props.style = {
    color: !colorIsValidType ? color : null,
    ...style,
  };
  return <Component {...props} />;
};
Glyph.propTypes = {
  // Shape of a glamor style object (internal fields of css() results).
  cssStyles: PropTypes.shape({
    _definition: PropTypes.object,
    _name: PropTypes.string,
  }),
  color: PropTypes.oneOfType([
    PropTypes.oneOf(Object.keys(colors)),
    PropTypes.string, // support random color strings
  ]),
  name: PropTypes.oneOf(Object.keys(octicons)).isRequired,
  size: PropTypes.oneOf(Object.keys(sizes)),
};
// Defaults: render as an <i> element, inherit color, small size.
Glyph.defaultProps = {
  component: 'i',
  color: 'inherit',
  size: 'small',
};
module.exports = Glyph;
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build-phase script.
# NOTE(review): despite the /bin/sh shebang this file uses bash features
# (`set -o pipefail`, arrays, [[ ]]); Xcode invokes it with a shell where
# these work — confirm before running it standalone under a strict sh.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default to serial code signing unless the integrator opted in to parallel.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework's location: built-products dir (full path, then
  # basename), falling back to the literal path given in $1.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked framework so rsync copies the real payload.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL == 1 means strip_invalid_archs processed the binary.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is set and signing is neither disallowed
  # nor explicitly not required by the build settings.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled the command is backgrounded; the
    # script's final `wait` collects the jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  # Signal "processed" to callers (0 above = no usable architectures).
  STRIP_BINARY_RETVAL=1
}
# Embed the vendored framework for both configurations, then wait for any
# backgrounded (parallel) codesign jobs before the build phase ends.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/LTJSBridge/LTJSBridge.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/LTJSBridge/LTJSBridge.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>db/migrate/20170930163035_add_deposit_info_to_proposals.rb
# Adds deposit-tracking columns to proposals: an external deposit id,
# whether the deposit has been paid, and the date it was paid on.
class AddDepositInfoToProposals < ActiveRecord::Migration[5.0]
  def change
    add_column :proposals, :deposit_id, :string
    add_column :proposals, :deposit_paid, :boolean
    add_column :proposals, :deposit_paid_on, :date
  end
end
|
import React from 'react';
// Stick-figure arm drawn as five stroked line segments in a fixed
// 40x100 viewBox, rendered at 140x200 px.
const Arm = () => {
  return (
    <svg className="arm" x="0px" y="0px" width="140px" height="200px" viewBox="0 0 40 100">
      <line fill="none" stroke="#000000" strokeMiterlimit="10" x1="11.415" y1="15.61" x2="8.69" y2="66.685"/>
      <line fill="none" stroke="#000000" strokeMiterlimit="10" x1="11.415" y1="15.61" x2="34.661" y2="69.707"/>
      <line fill="none" stroke="#000000" strokeMiterlimit="10" x1="8.69" y1="66.685" x2="23.038" y2="88.144"/>
      <line fill="none" stroke="#000000" strokeMiterlimit="10" x1="23.038" y1="88.144" x2="34.844" y2="85.726"/>
      <line fill="none" stroke="#000000" strokeMiterlimit="10" x1="34.661" y1="69.707" x2="34.661" y2="85.726"/>
    </svg>
  );
};
export default Arm;
|
<reponame>holwech/robogamer
// Whether a match runs in the simulator or against real hardware.
export enum Mode {
  Sim,
  Real,
}
/**
 * Immutable match configuration: the red/blue team scripts plus the run
 * mode. Every mutator returns a fresh Options instance, leaving the
 * receiver untouched.
 */
export default class Options {
  /** Default match-up: local team vs. the random opponent, simulated. */
  public static readonly default = new Options("./localTeam.js", "./opponents/random.js", Mode.Sim);

  public constructor(
    public readonly red: string,
    public readonly blue: string,
    public readonly mode: Mode,
  ) {}

  /** Copy of this configuration with the red team script replaced. */
  public setRed(redTeam: string): Options {
    return this.set({ red: redTeam });
  }

  /** Copy of this configuration with the blue team script replaced. */
  public setBlue(blueTeam: string): Options {
    return this.set({ blue: blueTeam });
  }

  /** Copy of this configuration switched to real-hardware mode. */
  public setReal(): Options {
    return this.set({ mode: Mode.Real });
  }

  /** Copy of this configuration switched to simulator mode. */
  public setSim(): Options {
    return this.set({ mode: Mode.Sim });
  }

  /** Copy with any subset of fields replaced; omitted fields carry over. */
  public set({ red = this.red, blue = this.blue, mode = this.mode }): Options {
    return new Options(red, blue, mode);
  }
}
|
from collections import Counter

arr = [1, 1, 2, 3, 4, 1]
# Tally occurrences of each value; materialize as a plain dict.
arr_dict = {value: count for value, count in Counter(arr).items()}
package app.services;
public class SearchParams {
private String name;
private String genre;
private Integer fromYear;
private Integer toYear;
public Integer getFromYear() {
return fromYear;
}
public void setFromYear(Integer fromYear) {
this.fromYear = fromYear;
}
public String getGenre() {
return genre;
}
public void setGenre(String genre) {
this.genre = genre;
}
public Integer getToYear() {
return toYear;
}
public void setToYear(Integer toYear) {
this.toYear = toYear;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SearchParams params = (SearchParams) o;
if (fromYear != null ? !fromYear.equals(params.fromYear) : params.fromYear != null) return false;
if (genre != null ? !genre.equals(params.genre) : params.genre != null) return false;
if (name != null ? !name.equals(params.name) : params.name != null) return false;
if (toYear != null ? !toYear.equals(params.toYear) : params.toYear != null) return false;
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (genre != null ? genre.hashCode() : 0);
result = 31 * result + (fromYear != null ? fromYear.hashCode() : 0);
result = 31 * result + (toYear != null ? toYear.hashCode() : 0);
return result;
}
}
|
// Shape of one stay/listing record as rendered by the UI.
export type Data = {
  beds: number
  city: string
  country: string
  maxGuests: number
  photo: string // presumably an image URL — confirm against the data source
  rating: number
  superHost: boolean
  title: string
  type: string
}
|
#!/usr/bin/env bash
# Copyright (c) 2020-2021 Eli Aloni a.k.a elix22.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Parse command-line flags. -g generates a keystore, -k names one explicitly,
# -i/-o/-n locate and name the APK; -b/-d are stored (BUILD/DEPLOY) for
# later use — their consumers are outside this script's visible portion.
while getopts b:d:g:k:i:o:n: option
do
    case "${option}"
    in
        b) BUILD=${OPTARG};;
        d) DEPLOY=${OPTARG};;
        g) GENERATE_KEY=${OPTARG};;
        k) KEY_STORE=${OPTARG};;
        i) APK_INPUT_PATH=${OPTARG};;
        o) APK_OUTPUT_PATH=${OPTARG};;
        n) APK_NAME=${OPTARG};;
    esac
done

# Strip spaces from the keystore argument; a bare "." means "not provided".
KEY_STORE=$(echo "$KEY_STORE" | tr -d ' ')
if [[ "${KEY_STORE}" == "." ]]; then
    KEY_STORE=""
fi

CWD=$(pwd)
# File that remembers the keystore location across runs.
KEY_STORE_PATH_INFO=${CWD}/keystore_path.txt

# zipalign is mandatory; abort if it is not on PATH.
path_to_executable=$(which zipalign)
if [ -x "$path_to_executable" ]; then
    echo "found zipalign: $path_to_executable"
else
    echo "No zipalign in path. usually this can be found in ANDROID-SDK/build-tools/[....]/zipalign"
    exit 1
fi

# Prefer apksigner; fall back to jarsigner, abort if neither exists.
path_to_executable=$(which apksigner)
if [ -x "$path_to_executable" ]; then
    echo "found apksigner: $path_to_executable"
    APK_SIGNER=apksigner
else
    echo "No apksigner in path. usually this can be found in ANDROID-SDK/build-tools/[....]/apksigner"
    path_to_executable=$(which jarsigner)
    if [ -x "$path_to_executable" ]; then
        echo "found jarsigner: $path_to_executable"
        JAR_SIGNER=jarsigner
    else
        echo "No jarsigner in path. usually this can be found in JDK/bin/jarsigner"
        exit 1
    fi
fi
# Interactively generates a release keystore (android-release-key.jks).
# Prompts for an output directory (defaults to the current directory) and
# leaves the resulting absolute path in the global KEY_STORE variable.
generate_keystore()
{
    path_to_executable=$(which keytool)
    if [ -x "$path_to_executable" ]; then
        echo "found keytool: $path_to_executable"
    else
        echo "No keytool in path. usually this can be found in /usr/bin/keytool"
        exit 1
    fi
    echo "Enter path for output generated keystore."
    read -p "keystore path: " key_store_path
    if [[ "$key_store_path" == "" ]]; then
        echo
        echo "keystore path not specified , will generate in the current directory"
        key_store_path=$(pwd)
    fi
    # Quote the user-supplied path so directories containing spaces work.
    mkdir -p "${key_store_path}"
    cd "${key_store_path}"
    rm -f android-release-key.jks
    keytool -genkey -v -storepass Android -keypass Android -keystore android-release-key.jks -keyalg RSA -keysize 2048 -validity 10000 -alias my-alias
    KEY_STORE=$(pwd)/android-release-key.jks
    cd "${CWD}"
}
# If a previous run recorded a keystore path in KEY_STORE_PATH_INFO and that
# keystore file still exists, reuse it by overwriting the global KEY_STORE.
check_keystore_path_info()
{
    if [ -f "${KEY_STORE_PATH_INFO}" ]; then
        TMP_KEY_STORE=$(cat "${KEY_STORE_PATH_INFO}")
        # -f is true only for regular files, so the previous nested
        # "not a directory" check was redundant and has been removed.
        if [ -f "${TMP_KEY_STORE}" ]; then
            echo "keystore taken from ${KEY_STORE_PATH_INFO} , keystore file is ${TMP_KEY_STORE}"
            KEY_STORE="${TMP_KEY_STORE}"
        fi
    fi
}
# Prints the absolute form of the (possibly relative) path given in $1.
# The file itself need not exist; its parent directory must.
set_relative_to_full_path()
{
    # The former echo-of-a-command-substitution was a no-op wrapper.
    echo "$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
}
# Optionally generate a fresh keystore before anything else.
if [[ "$GENERATE_KEY" == "1" ]]; then
    generate_keystore
fi

# Default the APK name and the input/output directories when not provided.
if [[ "$APK_NAME" == "" ]]; then
    echo "apk name was not provided , using default app-release-unsigned.apk."
    APK_NAME="app-release-unsigned.apk"
fi
if [[ "$APK_INPUT_PATH" == "" ]]; then
    echo "apk source path was not specified , using current directory"
    APK_INPUT_PATH=${CWD}
fi
if [[ "$APK_OUTPUT_PATH" == "" ]]; then
    echo "apk destination path was not specified , using current directory"
    APK_OUTPUT_PATH=${CWD}
fi

# Keystore resolution order: remembered path file, then the current
# directory, finally fall back to generating a brand-new keystore.
if [[ "$KEY_STORE" == "" || "$KEY_STORE" == " " ]]; then
    check_keystore_path_info
    if [ ! -f "${KEY_STORE}" ]; then
        KEY_STORE=${CWD}/android-release-key.jks
        echo "searching keystore in ${KEY_STORE}"
        if [ -f "${KEY_STORE}" ]; then
            echo "keystore path was not specified , using ${KEY_STORE}"
        else
            echo "keystore was not found generate"
            generate_keystore
        fi
    fi
fi
# A keystore path was supplied but the file is missing: try the remembered
# path before resorting to generating a new one.
if [ ! -f "${KEY_STORE}" ]; then
    check_keystore_path_info
    if [ ! -f ${KEY_STORE} ]; then
        echo "keystore was not found generate"
        generate_keystore
    fi
fi
echo "KEY_STORE=${KEY_STORE}"
# always create a new KEY_STORE_PATH_INFO , overwrite older path
if [ -f ${KEY_STORE} ]; then
    KEY_STORE=$(set_relative_to_full_path ${KEY_STORE})
    rm -f ${KEY_STORE_PATH_INFO}
    touch ${KEY_STORE_PATH_INFO}
    echo ${KEY_STORE} > ${KEY_STORE_PATH_INFO}
fi
# Base name of the APK, with and without the .apk extension.
filename=$(basename -- "${APK_INPUT_PATH}/${APK_NAME}" .apk)
filename_with_ext=$(basename -- "${APK_INPUT_PATH}/${APK_NAME}")
# Align the APK on 4-byte boundaries (required before signing with apksigner).
# NOTE: the corrupted $(unknown) references below have been restored to
# ${filename}, the only variable that makes the align/sign/cleanup chain coherent.
zipalign -f -p 4 "${APK_INPUT_PATH}/${APK_NAME}" "${APK_OUTPUT_PATH}/${filename}-aligned.apk"
# Derive the output name: drop "unsigned", tidy leftover dashes/dots, add -signed.
out_file_name=$(echo "${filename}" | sed 's/unsigned//g' | sed 's/--/-/g' | sed 's/\-\././g')
out_file_name=${out_file_name}-signed.apk
out_file_name=$(echo ${out_file_name} | sed 's/--/-/g' | sed 's/\-\././g')
rm -f "${APK_OUTPUT_PATH}/${out_file_name}"
if [ -n "$APK_SIGNER" ]; then
    ${APK_SIGNER} sign --ks "${KEY_STORE}" --ks-pass pass:Android --out "${APK_OUTPUT_PATH}/${out_file_name}" "${APK_OUTPUT_PATH}/${filename}-aligned.apk"
elif [ -n "$JAR_SIGNER" ]; then
    ${JAR_SIGNER} -keystore "${KEY_STORE}" -storepass Android "${APK_OUTPUT_PATH}/${filename}-aligned.apk" -signedjar "${APK_OUTPUT_PATH}/${out_file_name}" my-alias
fi
# Remove the intermediate aligned (but unsigned) APK.
rm "${APK_OUTPUT_PATH}/${filename}-aligned.apk"
|
from .code import Code
from .commands import Commands
from .environment import Environment
from .labels import Labels
from .registers import Registers
from .types import Types
from .vars import Vars
from .config import ASM_COMMANDS_SEPARATOR
class Compiler:
    """Drives code generation: owns the environment, code buffer, labels,
    variables and types, and assembles the final NASM-style program text."""

    # Label the generated program starts at.
    entry_point_label = '_main'
    # Interrupt number used by exit() — 0x80 is the Linux syscall gate.
    exit_interrupt = 0x80

    def __init__(self):
        self.environment = Environment()
        self.code = Code(self)
        self.labels = Labels()
        self.vars = Vars(self, self.code, self.environment)
        self.types = Types(self)
        # Register that should receive the result of the current expression.
        self.target_register = None
        # Runtime helper code fragments collected via add_extern().
        self.runtime = []
        # External symbol names to declare with EXTERN.
        self.externs = []

    def exit(self):
        """Emits the program-exit sequence: pushes 0, selects syscall 1,
        adjusts ESP and triggers the exit interrupt."""
        self.code.add(Commands.PUSH, [0])
        self.code.add(Commands.MOV, [Registers.EAX, 1])
        self.code.add(Commands.SUB, [Registers.ESP, 1])
        self.code.add(Commands.INT, [self.exit_interrupt])

    def add_extern(self, code, names):
        """Registers runtime helper `code` and the extern name(s) it provides
        (`names` may be a single name or a list)."""
        if isinstance(names, list):
            self.externs += names
        else:
            self.externs.append(names)
        self.runtime.append(code)

    def get_externs(self, externs):
        """Renders one EXTERN declaration per symbol, separator-terminated."""
        return ''.join(map(lambda f: 'EXTERN %s%s' % (f, ASM_COMMANDS_SEPARATOR), externs))

    def get_section(self, section_name, content):
        """Renders a `SECTION .<name>` header followed by its content lines."""
        return '%sSECTION .%s %s%s' % (ASM_COMMANDS_SEPARATOR, section_name, ASM_COMMANDS_SEPARATOR,
                                       ASM_COMMANDS_SEPARATOR.join(content))

    def get_content(self, content):
        """Joins lines with the command separator, padded on both sides."""
        return ASM_COMMANDS_SEPARATOR + ASM_COMMANDS_SEPARATOR.join(content) + ASM_COMMANDS_SEPARATOR

    def get_result(self):
        """Assembles the full program: externs, bss, globals, labels, code.
        Must be called once; it appends the exit sequence as a side effect."""
        self.exit()
        externs = self.get_externs(self.externs)
        global_decl = '%sglobal %s' % (ASM_COMMANDS_SEPARATOR, self.entry_point_label)
        labels = self.get_content(self.labels)
        # Reserve stack space for root-scope variables and prepend the entry
        # label in front of everything already emitted (place=0).
        self.code.allocate_stack_memory(self.environment.list['root']['memory'], place=0)
        self.code.add_label(self.entry_point_label, place=0)
        code = self.get_content(self.code.assemble())
        bss = "" if len(self.vars.bss) == 0 else self.get_section("bss", self.vars.bss)
        if len(self.vars.bss) != 0:
            # A .bss section was emitted, so switch back to .text before globals.
            global_decl = "%sSECTION .text%s" % (ASM_COMMANDS_SEPARATOR, ASM_COMMANDS_SEPARATOR) + global_decl
        return externs + bss + global_decl + labels + code

    def get_runtime(self):
        """Returns all collected runtime helper fragments as one blob."""
        return ASM_COMMANDS_SEPARATOR.join(self.runtime)
|
// Repository: ErVincit/meetbox-server
const { Pool } = require("pg");

// Single shared connection pool; the connection string comes from the
// DATABASE_URL environment variable and TLS is always enabled.
const poolConfig = {
  connectionString: process.env.DATABASE_URL,
  ssl: true,
};
const pool = new Pool(poolConfig);

// Log idle-client errors instead of letting them go unobserved.
pool.on("error", (error) => console.error("Error:", error));

// NOTE: pg-cursor can be used to stream large result sets.
module.exports = pool;
|
#!/usr/bin/env bash
#################################################################################
# Parameters
#################################################################################
# The rootfs image file
ROOTFS_IMAGE="$PWD/images/web-server-rootfs.ext4"
# Temporary mount folder
MOUNT_FOLDER=/tmp/microvm-web-server
# Rust Executable
EXECUTABLE_NAME=web-server
EXECUTABLE_SOURCE=./web-server/target/x86_64-unknown-linux-musl/release/$EXECUTABLE_NAME
#################################################################################
# Rebuild the executable and copy it to the rootfs image
#################################################################################
# Switch to the Rust program folder
cd ./web-server
# Rebuild the Rust executable using the x86_64-unknown-linux-musl target in order to run inside the Micro VM
cargo build --target x86_64-unknown-linux-musl --release
# Switch back to script folder
cd ..
# Try to unmount folder (it may legitimately not be mounted, so failure is fine)
echo 'Umount image in case is still mounted'
sudo umount "$MOUNT_FOLDER"
# Remove mount folder
echo 'Removing old mount folder'
sudo rm -Rf "$MOUNT_FOLDER"
# Create mount folder
echo 'Creating the mount folder'
sudo mkdir "$MOUNT_FOLDER"
# Mount image on mount folder
echo 'Mounting the rootfs image'
sudo mount "$ROOTFS_IMAGE" "$MOUNT_FOLDER"
# Remove old executable
echo 'Removing old executable'
sudo rm -f "$MOUNT_FOLDER/bin/$EXECUTABLE_NAME"
# Copy executable into rootfs bin folder
echo 'Copying new executable to rootfs image'
sudo cp "$EXECUTABLE_SOURCE" "$MOUNT_FOLDER/bin"
# Make the copied binary executable.
# BUG FIX: the previous version chmod'ed the bin DIRECTORY, not the file.
echo 'Making the new file executable'
sudo chmod +x "$MOUNT_FOLDER/bin/$EXECUTABLE_NAME"
# Unmount the rootfs image
echo 'Unmount the rootfs image'
sudo umount "$MOUNT_FOLDER"
echo 'Rootfs image updated with new executable'
|
package webapp.controller;
import org.noear.solon.annotation.Controller;
import org.noear.solon.annotation.Inject;
import org.noear.solon.annotation.Mapping;
import org.noear.solon.data.annotation.Tran;
import webapp.dso.service.App2Service;
import webapp.dso.service.AppService;
/**
 * Multi-datasource transaction demo.
 */
@Mapping("/tran2/")
@Controller
public class Tran2Controller {
    @Inject
    AppService appService;

    @Inject
    App2Service app2Service;

    /**
     * Declares a multi-datasource transaction (this may sit at the outermost
     * level of any transaction; it manages the child transactions below).
     */
    @Tran
    @Mapping("test")
    public void test() throws Throwable {
        // Internally declares its own transaction on db2.
        app2Service.add();
        // Internally declares its own transaction on db1.
        appService.add();
    }
}
|
/*
* Copyright (c) 2020. The Kathra Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* IRT SystemX (https://www.kathra.org/)
*
*/
package org.kathra;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Mockito {@link Answer} that echoes back the first argument of the stubbed
 * invocation, enabling fluent/chaining-style mocks. Subclasses may override
 * {@link #processReturnObject(Object)} to transform the echoed value.
 */
public class MockitoWhenChainingMethod<T> implements Answer<T> {

    /**
     * Hook for subclasses; the default implementation returns the value
     * unchanged.
     *
     * @param object the value about to be returned from the stubbed call
     * @return the (possibly transformed) value to return
     */
    public T processReturnObject(T object) {
        return object;
    }

    @Override
    @SuppressWarnings("unchecked")
    public T answer(InvocationOnMock invocation) throws Throwable {
        Object[] args = invocation.getArguments();
        Object objectToReturn = args[0];
        // BUG FIX: the previous code called getClass() on the argument before
        // the null check below, throwing an NPE for null first arguments.
        String argClassName = objectToReturn == null ? "null" : objectToReturn.getClass().getName();
        Logger logger = LoggerFactory.getLogger("MockitoWhenChainingMethod_" + argClassName);
        logger.debug("Returning answer");
        logger.debug(objectToReturn == null ? "NULL" : objectToReturn.toString());
        return processReturnObject((T) objectToReturn);
    }
}
|
// Back-End/SiteService/SITE/Model.go
package main
import (
"net/http"
)
//Routing model
// Route describes one HTTP route: its name, method, URL pattern and handler.
type Route struct {
	Name        string
	Method      string
	Pattern     string
	HandlerFunc http.HandlerFunc
}

// Routes is the routing table for the service.
type Routes []Route

// Response is the generic JSON response envelope (message plus status code).
type Response struct {
	State State `json:"state"`
	Code  int   `json:"code"`
}

// State carries the human-readable message of a Response.
type State struct {
	Message string `json:"message"`
}

// Request is a request identified only by a key.
type Request struct {
	Key string `json:"key"`
}

// RequestingProject is a keyed request that also carries a project payload.
type RequestingProject struct {
	Key             string  `json:"key"`
	ProjectInstance Project `json:"project_instance"`
}

// Project is the top-level site model: its layers plus map metadata
// (name, zoom level and a reference point).
type Project struct {
	Layers []Layer `json:"layers"`
	// Information about project
	Name      string `json:"name"`
	Zoom      int    `json:"zoom"`
	Reference Point  `json:"reference"`
}

// Layer groups vector stages at a given level.
type Layer struct {
	Stages []VectorSequence `json:"stages"`
	Level  int              `json:"level"`
}

// VectorSequence is a functional area: an identified polyline/polygon with
// attached variables.
// Nature types: 1. Dynamic Area, 2. Temporal Area, 3. Locked Area,
// 4. Machinery path, 5. Workers path.
type VectorSequence struct {
	Id              string     `json:"id"`
	VectorsSequence []Point    `json:"vectors_sequence"`
	Variables       []Variable `json:"variables"`
}

// Variable is a named value attached to an area; VarType flags its data type.
type Variable struct {
	Name    string `json:"name"`
	Content string `json:"content"`
	VarType bool   `json:"var_type"`
}

// Point is a 2D coordinate.
type Point struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
}
#!/bin/bash
# Extracts robustness and timing figures from MCTS experiment logs into
# per-directory .dat files for later processing/plotting.
algo="RS HC SA"
CC="0500000"
dir_pre="test_2_2/MCTS_"
dir_post="_2_"
sub="elab"
# Create `elab` dirs and copy txt files into them
# for C in $CC
# do
#     for a in $algo
#     do
#         dir=${dir_pre}${a}${dir_post}${C}
#         mkdir ${dir}/${sub}
#         cp ${dir}/*txt ${dir}/${sub}
#     done
# done
# Collect robustness
out_file="BestRob.dat"
for C in $CC
do
    for a in $algo
    do
        dir=${dir_pre}${a}${dir_post}${C}/${sub}
        # grep -h reads the files directly (no useless cat) and suppresses
        # file-name prefixes, matching the old `cat | grep` output exactly.
        grep -h 'Best Rob' ${dir}/*txt | cut -d: -f2 > ${dir}/${out_file}
    done
done
# Collect time
out_file1="Time.dat"
out_file2="TraceTime.dat"
for C in $CC
do
    for a in $algo
    do
        dir=${dir_pre}${a}${dir_post}${C}/${sub}
        grep -h 'Simulation time' ${dir}/*txt | cut -d: -f2 | cut -d ' ' -f 1 > ${dir}/${out_file1}
        grep -h 'time.*trace' ${dir}/*txt | cut -d: -f2 | cut -d ' ' -f 1 > ${dir}/${out_file2}
    done
done
|
// Repository: smagill/opensphere-desktop
package io.opensphere.mantle.crust;
import java.awt.Color;
import java.util.function.Consumer;
import io.opensphere.core.Toolbox;
import io.opensphere.core.event.DynamicService;
import io.opensphere.core.geometry.renderproperties.DefaultTileRenderProperties;
import io.opensphere.core.geometry.renderproperties.TileRenderProperties;
import io.opensphere.core.order.OrderCategory;
import io.opensphere.core.order.impl.DefaultOrderCategory;
import io.opensphere.core.order.impl.DefaultOrderParticipantKey;
import io.opensphere.core.util.ColorUtilities;
import io.opensphere.core.util.Service;
import io.opensphere.core.util.lang.PhasedTaskCanceller;
import io.opensphere.mantle.GroupService;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.TypeService;
import io.opensphere.mantle.data.AbstractActivationListener;
import io.opensphere.mantle.data.ActivationListener;
import io.opensphere.mantle.data.ActivationState;
import io.opensphere.mantle.data.DataGroupActivationProperty;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.LoadsTo;
import io.opensphere.mantle.data.MapVisualizationType;
import io.opensphere.mantle.data.MetaDataInfo;
import io.opensphere.mantle.data.impl.DefaultBasicVisualizationInfo;
import io.opensphere.mantle.data.impl.DefaultDataGroupInfo;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.mantle.data.impl.DefaultMapFeatureVisualizationInfo;
import io.opensphere.mantle.data.impl.DefaultMapTileVisualizationInfo;
/**
* EXPERIMENTAL. Simplifies some common Mantle use cases.
*/
/**
 * EXPERIMENTAL. Simplifies some common Mantle use cases.
 */
public abstract class AbstractMantleController extends DynamicService<String, Service>
{
    /** The toolbox. */
    private final Toolbox myToolbox;

    /** The mantle toolbox. */
    private final MantleToolbox myMantleToolbox;

    /** The provider type. */
    private final String myProviderType;

    /** The root group. */
    private volatile DefaultDataGroupInfo myRootGroup;

    /** The group activation listener. */
    private final ActivationListener myActivationListener = new AbstractActivationListener()
    {
        @Override
        public void commit(DataGroupActivationProperty property, ActivationState state, PhasedTaskCanceller canceller)
        {
            handleGroupActivation(property, state, canceller);
        }
    };

    /**
     * Constructor.
     *
     * @param toolbox The toolbox
     * @param providerType The provider type
     */
    public AbstractMantleController(Toolbox toolbox, String providerType)
    {
        super(toolbox.getEventManager());
        myToolbox = toolbox;
        myProviderType = providerType;
        myMantleToolbox = toolbox.getPluginToolboxRegistry().getPluginToolbox(MantleToolbox.class);
    }

    @Override
    public void open()
    {
        super.open();
        myRootGroup = createRootGroup();
        myMantleToolbox.getDataGroupController().addRootDataGroupInfo(myRootGroup, this);
    }

    @Override
    public void close()
    {
        myMantleToolbox.getDataGroupController().removeDataGroupInfo(myRootGroup, this);
        myRootGroup = null;
        super.close();
    }

    /**
     * Adds a folder/layer to the root group.
     *
     * @param folderName the folder name
     * @param layerName the layer name
     * @param layerId the unique layer ID
     * @param orderCategory the order category
     * @return the data group that was created, or the existing one
     */
    public DataGroupInfo add1stLevelLayer(String folderName, String layerName, String layerId, OrderCategory orderCategory)
    {
        return add1stLevelLayer(folderName, layerName, layerId, orderCategory, null);
    }

    /**
     * Adds a folder/layer to the root group.
     *
     * @param folderName the folder name
     * @param layerName the layer name
     * @param layerId the unique layer ID
     * @param orderCategory the order category
     * @param metadataInfo If a feature layer this is required, null if a tile
     *            layer.
     * @return the data group that was created, or the existing one
     */
    public DataGroupInfo add1stLevelLayer(String folderName, String layerName, String layerId, OrderCategory orderCategory,
            MetaDataInfo metadataInfo)
    {
        return add1stLevelLayer(folderName, layerName, layerId, orderCategory, metadataInfo, null);
    }

    /**
     * Adds a folder/layer to the root group.
     *
     * @param folderName the folder name
     * @param layerName the layer name
     * @param layerId the unique layer ID
     * @param orderCategory the order category
     * @param metadataInfo If a feature layer this is required, null if a tile
     *            layer.
     * @param deleteListener An object wanting notification when the group is
     *            deleted. This can be null if the layer can not be deleted by
     *            the user.
     * @return the data group that was created, or the existing one
     */
    public DataGroupInfo add1stLevelLayer(String folderName, String layerName, String layerId, OrderCategory orderCategory,
            MetaDataInfo metadataInfo, Consumer<DataGroupInfo> deleteListener)
    {
        return add1stLevelLayer(folderName, layerName, layerId, orderCategory, metadataInfo, deleteListener,
                new DefaultDataGroupAndTypeFactory());
    }

    /**
     * Adds a folder/layer to the root group.
     *
     * @param folderName the folder name
     * @param layerName the layer name
     * @param layerId the unique layer ID
     * @param orderCategory the order category
     * @param metadataInfo If a feature layer this is required, null if a tile
     *            layer.
     * @param deleteListener An object wanting notification when the group is
     *            deleted. This can be null if the layer can not be deleted by
     *            the user.
     * @param factory Instantiates new {@link DefaultDataGroupInfo} and
     *            {@link DefaultDataTypeInfo} or child classes of those.
     * @return the data group that was created, or the existing one
     */
    public DataGroupInfo add1stLevelLayer(String folderName, String layerName, String layerId, OrderCategory orderCategory,
            MetaDataInfo metadataInfo, Consumer<DataGroupInfo> deleteListener, DataGroupAndTypeFactory factory)
    {
        DataGroupInfo group = myRootGroup.getGroupById(folderName);
        GroupService groupService = null;
        if (group == null)
        {
            groupService = createGroup(folderName, deleteListener, factory);
            group = groupService.getGroup();
        }
        DataTypeInfo dataType = group.getMemberById(layerId, false);
        if (dataType == null)
        {
            dataType = newDataType(layerName, layerId, orderCategory, metadataInfo, factory);
            // Reuse the just-created service, or look up the one registered
            // for an existing folder.
            GroupService theGroupService = groupService != null ? groupService : (GroupService)getDynamicService(folderName);
            TypeService typeService = new TypeService(myToolbox, group, dataType);
            if (groupService == null)
            {
                typeService.open();
            }
            theGroupService.addService(typeService);
        }
        if (groupService != null)
        {
            addDynamicService(folderName, groupService);
        }
        return group;
    }

    /**
     * Creates a group service.
     *
     * @param folderName the folder name
     * @param deleteListener An object wanting notification when the group is
     *            deleted. This can be null if the layer can not be deleted by
     *            the user.
     * @param factory Instantiates new {@link DefaultDataGroupInfo} and
     *            {@link DefaultDataTypeInfo} or child classes of those.
     * @return the group service
     */
    public GroupService createGroup(String folderName, Consumer<DataGroupInfo> deleteListener, DataGroupAndTypeFactory factory)
    {
        DataGroupInfo group = newDataGroup(folderName, deleteListener, factory);
        group.activationProperty().addListener(myActivationListener);
        GroupService groupService = new GroupService(myRootGroup, group);
        return groupService;
    }

    /**
     * Creates a data type service.
     *
     * @param layerName the layer name
     * @param id the layer ID
     * @param orderCategory the order category
     * @param metaDataInfo If a feature layer this is required, null if a tile
     *            layer.
     * @param factory Instantiates new {@link DefaultDataGroupInfo} and
     *            {@link DefaultDataTypeInfo} or child classes of those.
     * @param groupService the group service
     * @return the data type
     */
    public TypeService createType(String layerName, String id, OrderCategory orderCategory, MetaDataInfo metaDataInfo,
            DataGroupAndTypeFactory factory, GroupService groupService)
    {
        DataTypeInfo dataType = newDataType(layerName, id, orderCategory, metaDataInfo, factory);
        TypeService typeService = new TypeService(myToolbox, groupService.getGroup(), dataType);
        groupService.addService(typeService);
        return typeService;
    }

    /**
     * Removes a layer.
     *
     * @param id the unique layer ID (same as group ID)
     * @return the removed data group, or null
     */
    public DataGroupInfo removeLayer(String id)
    {
        DataGroupInfo removedGroup = null;
        Service service = removeDynamicService(id);
        if (service instanceof GroupService)
        {
            removedGroup = ((GroupService)service).getGroup();
        }
        return removedGroup;
    }

    /**
     * Creates a root group.
     *
     * @return The root group.
     */
    protected DefaultDataGroupInfo createRootGroup()
    {
        return new DefaultDataGroupInfo(true, myToolbox, myProviderType, myProviderType);
    }

    /**
     * Handles group de/activation.
     *
     * @param activationProperty the activation property
     * @param state the activation state
     * @param canceller the canceller
     */
    protected abstract void handleGroupActivation(DataGroupActivationProperty activationProperty, ActivationState state,
            PhasedTaskCanceller canceller);

    /**
     * Creates a new data group.
     *
     * @param folderName the folder name
     * @param deleteListener An object wanting notification when the group is
     *            deleted. This can be null if the layer can not be deleted by
     *            the user.
     * @param factory Instantiates new {@link DefaultDataGroupInfo} and
     *            {@link DefaultDataTypeInfo} or child classes of those.
     * @return the data group
     */
    protected DataGroupInfo newDataGroup(String folderName, Consumer<DataGroupInfo> deleteListener,
            DataGroupAndTypeFactory factory)
    {
        return factory.createGroup(myToolbox, myProviderType, folderName, deleteListener);
    }

    /**
     * Creates a new data type.
     *
     * @param layerName the layer name
     * @param id the layer ID
     * @param orderCategory the order category
     * @param metaDataInfo If a feature layer this is required, null if a tile
     *            layer.
     * @param factory Instantiates new {@link DefaultDataGroupInfo} and
     *            {@link DefaultDataTypeInfo} or child classes of those.
     * @return the data type
     */
    protected DataTypeInfo newDataType(String layerName, String id, OrderCategory orderCategory, MetaDataInfo metaDataInfo,
            DataGroupAndTypeFactory factory)
    {
        DefaultDataTypeInfo dataType = factory.createType(myToolbox, myProviderType, id, layerName, layerName);
        if (orderCategory == DefaultOrderCategory.FEATURE_CATEGORY)
        {
            dataType.setBasicVisualizationInfo(new DefaultBasicVisualizationInfo(LoadsTo.TIMELINE,
                    DefaultBasicVisualizationInfo.LOADS_TO_STATIC_AND_TIMELINE, Color.ORANGE, true));
            dataType.applyColorPreferences();
            dataType.setMapVisualizationInfo(new DefaultMapFeatureVisualizationInfo(MapVisualizationType.POINT_ELEMENTS));
            dataType.setOrderKey(new DefaultOrderParticipantKey(DefaultOrderCategory.DEFAULT_FEATURE_LAYER_FAMILY, orderCategory,
                    dataType.getTypeKey()));
            dataType.setMetaDataInfo(metaDataInfo);
        }
        else if (orderCategory == DefaultOrderCategory.IMAGE_DATA_CATEGORY)
        {
            dataType.setBasicVisualizationInfo(new DefaultBasicVisualizationInfo(LoadsTo.TIMELINE,
                    DefaultBasicVisualizationInfo.LOADS_TO_BASE_AND_TIMELINE, Color.WHITE, false));
            dataType.applyColorPreferences();
            TileRenderProperties props = new DefaultTileRenderProperties(0, true, false);
            float opacity = myMantleToolbox.getDataTypeInfoPreferenceAssistant().getOpacityPreference(dataType.getTypeKey(),
                    ColorUtilities.COLOR_COMPONENT_MAX_VALUE) / (float)ColorUtilities.COLOR_COMPONENT_MAX_VALUE;
            props.setOpacity(opacity);
            dataType.setMapVisualizationInfo(new DefaultMapTileVisualizationInfo(MapVisualizationType.IMAGE_TILE, props, false));
            dataType.setOrderKey(new DefaultOrderParticipantKey(DefaultOrderCategory.DEFAULT_IMAGE_LAYER_FAMILY, orderCategory,
                    dataType.getTypeKey()));
        }
        // Consistency fix: this branch was a bare `if`, unlike the two
        // mutually-exclusive branches above. Behavior is unchanged because at
        // most one category comparison can be true.
        else if (orderCategory == DefaultOrderCategory.AREAS_CATEGORY)
        {
            dataType.setBasicVisualizationInfo(new DefaultBasicVisualizationInfo(LoadsTo.STATIC,
                    DefaultBasicVisualizationInfo.LOADS_TO_STATIC_ONLY, Color.DARK_GRAY, true));
            dataType.applyColorPreferences();
            dataType.setMapVisualizationInfo(new DefaultMapFeatureVisualizationInfo(MapVisualizationType.POLYGON_ELEMENTS));
            dataType.setOrderKey(new DefaultOrderParticipantKey(DefaultOrderCategory.DEFAULT_AREAS_LAYER_FAMILY, orderCategory,
                    dataType.getTypeKey()));
            dataType.setMetaDataInfo(metaDataInfo);
        }
        return dataType;
    }

    /**
     * Gets the toolbox.
     *
     * @return the toolbox
     */
    protected Toolbox getToolbox()
    {
        return myToolbox;
    }

    /**
     * Gets the mantleToolbox.
     *
     * @return the mantleToolbox
     */
    public MantleToolbox getMantleToolbox()
    {
        return myMantleToolbox;
    }

    /**
     * Gets the providerType.
     *
     * @return the providerType
     */
    protected String getProviderType()
    {
        return myProviderType;
    }

    /**
     * Gets the rootGroup.
     *
     * @return the rootGroup
     */
    public DefaultDataGroupInfo getRootGroup()
    {
        return myRootGroup;
    }
}
|
// Source file: bootstrapped/ideal/runtime/texts/text_visitor.java
// Autogenerated from runtime/texts/text_visitor.i
package ideal.runtime.texts;
import ideal.library.elements.*;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
import ideal.library.channels.output;
/**
 * Visitor over the text_fragment hierarchy: process() dispatches on the
 * concrete fragment type and forwards to the matching abstract hook.
 *
 * NOTE(review): this file is autogenerated from runtime/texts/text_visitor.i;
 * change the generator input rather than this file.
 */
public abstract class text_visitor<result_type> {
  public result_type process(final text_fragment fragment) {
    if (fragment instanceof string) {
      return process_string(((string) fragment));
    } else if (fragment instanceof text_element) {
      return process_element(((text_element) fragment));
    } else if (fragment instanceof list_text_node) {
      return process_nodes(((list_text_node) fragment));
    } else if (fragment instanceof special_text) {
      return process_special(((special_text) fragment));
    } else {
      // Unreachable for known subtypes; fail loudly on anything unexpected.
      utilities.panic(ideal.machine.elements.runtime_util.concatenate(new base_string("Unknown fragment: "), fragment));
      return null;
    }
  }

  /** Handles a plain string fragment. */
  protected abstract result_type process_string(string s);

  /** Handles a markup element fragment. */
  protected abstract result_type process_element(text_element element);

  /** Handles a list of child text nodes. */
  protected abstract result_type process_nodes(list_text_node nodes);

  /** Handles a special text fragment. */
  protected abstract result_type process_special(special_text t);
}
|
#
# Defines tmux aliases and provides for auto launching it at start-up.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
# Colin Hebert <hebert.colin@gmail.com>
# Georges Discry <georges@discry.be>
# Xavier Cambar <xcambar@gmail.com>
#
# Return if requirements are not found.
# Bail out of the module when tmux is not installed.
if (( ! $+commands[tmux] )); then
  return 1
fi

#
# Auto Start
#

# Use iTerm2's native tmux integration (-CC) when running under iTerm.app
# and the user opted in via the zstyle below.
if ([[ "$TERM_PROGRAM" = 'iTerm.app' ]] && \
  zstyle -t ':prezto:module:tmux:iterm' integrate \
); then
  _tmux_iterm_integration='-CC'
fi

# Auto-start only in a plain interactive terminal (not already inside tmux,
# Emacs or Vim), honoring separate remote/local opt-in styles.
if [[ -z "$TMUX" && -z "$EMACS" && -z "$VIM" && -z "$INSIDE_EMACS" ]] && ( \
  ( [[ -n "$SSH_TTY" ]] && zstyle -t ':prezto:module:tmux:auto-start' remote ) ||
  ( [[ -z "$SSH_TTY" ]] && zstyle -t ':prezto:module:tmux:auto-start' local ) \
); then
  tmux start-server

  # Create a 'prezto' session if no session has been defined in tmux.conf.
  if ! tmux has-session 2> /dev/null; then
    zstyle -s ':prezto:module:tmux:session' name tmux_session || tmux_session='prezto'
    tmux -2 \
      new-session -d -s "$tmux_session" \; \
      set-option -t "$tmux_session" destroy-unattached off &> /dev/null
  fi

  # Attach to the 'prezto' session or to the last session used. (detach first)
  exec tmux -2 $_tmux_iterm_integration attach-session -d
fi

#
# Aliases
#

alias tmuxa="tmux $_tmux_iterm_integration new-session -A"
alias tmuxl='tmux list-sessions'
|
// commands/log_writer.go
package commands
import (
"io"
"strings"
)
type LogWriter struct {
writer io.Writer
offset int64
}
func NewLogWriter(writer io.Writer) *LogWriter {
return &LogWriter{
writer: writer,
}
}
func (lw *LogWriter) Flush(logs string) error {
reader := strings.NewReader(logs)
_, err := reader.Seek(lw.offset, 0)
if err != nil {
return err
}
written, err := io.Copy(lw.writer, reader)
if err != nil {
return err
}
lw.offset += written
return nil
}
|
#!/usr/bin/env bash
set -e

# Get all server logs
export CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL="trace"

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. $CURDIR/../shell_config.sh

# Per-test server log file: filled via --server_logs_file and inspected below.
cur_name=$(basename "${BASH_SOURCE[0]}")
server_logs_file=${CLICKHOUSE_TMP}/$cur_name"_server.logs"
server_logs="--server_logs_file=$server_logs_file"
rm -f "$server_logs_file"

# Common client settings: capture server logs and enable query-, thread- and
# profile-event logging for every statement in this test.
settings="$server_logs --log_queries=1 --log_query_threads=1 --log_profile_events=1 --log_query_settings=1 --experimental_use_processors=0"

# Test insert logging on each block and checkPacket() method
$CLICKHOUSE_CLIENT $settings -n -q "
DROP TABLE IF EXISTS null_00634;
CREATE TABLE null_00634 (i UInt8) ENGINE = MergeTree PARTITION BY tuple() ORDER BY tuple();"

# Tiny block-size limits force the 1000-byte insert to be split into many
# blocks, each of which should produce server log lines.
head -c 1000 /dev/zero | $CLICKHOUSE_CLIENT $settings --max_insert_block_size=10 --min_insert_block_size_rows=1 --min_insert_block_size_bytes=1 -q "INSERT INTO null_00634 FORMAT RowBinary"

$CLICKHOUSE_CLIENT $settings -n -q "
SELECT count() FROM null_00634;
DROP TABLE null_00634;"

# Expect at least ~110 log lines from the per-block insert logging above.
(( `cat "$server_logs_file" | wc -l` >= 110 )) || echo Fail

# Check ProfileEvents in query_log
heavy_cpu_query="SELECT ignore(sum(sipHash64(hex(sipHash64(hex(sipHash64(hex(number)))))))) FROM (SELECT * FROM system.numbers_mt LIMIT 1000000)"
$CLICKHOUSE_CLIENT $settings --max_threads=1 -q "$heavy_cpu_query"
$CLICKHOUSE_CLIENT $settings -q "SYSTEM FLUSH LOGS"

# Sanity checks: real time spent in threads should roughly cover the query
# duration and dominate the summed user/system/IO/CPU-wait times.
$CLICKHOUSE_CLIENT $settings -q "
WITH
    any(query_duration_ms*1000) AS duration,
    sumIf(PV, PN = 'RealTimeMicroseconds') AS threads_realtime,
    sumIf(PV, PN IN ('UserTimeMicroseconds', 'SystemTimeMicroseconds', 'OSIOWaitMicroseconds', 'OSCPUWaitMicroseconds')) AS threads_time_user_system_io
SELECT
    -- duration, threads_realtime, threads_time_user_system_io,
    threads_realtime >= 0.99 * duration,
    threads_realtime >= threads_time_user_system_io,
    any(length(thread_ids)) >= 1
FROM
    (SELECT * FROM system.query_log PREWHERE query='$heavy_cpu_query' WHERE event_date >= today()-1 AND type=2 ORDER BY event_time DESC LIMIT 1)
ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV"

# Check per-thread and per-query ProfileEvents consistency
# NOTE(review): $query_id is interpolated below but never assigned anywhere
# in this script, so both subqueries filter on query_id='' — confirm whether
# an assignment (e.g. from a --query_id run above) was lost.
$CLICKHOUSE_CLIENT $settings --any_join_distinct_right_table_keys=1 -q "
SELECT PN, PVq, PVt FROM
(
    SELECT PN, sum(PV) AS PVt
    FROM system.query_thread_log
    ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV
    WHERE event_date >= today()-1 AND query_id='$query_id'
    GROUP BY PN
) js1
ANY INNER JOIN
(
    SELECT PN, PV AS PVq
    FROM system.query_log
    ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV
    WHERE event_date >= today()-1 AND query_id='$query_id'
) js2
USING PN
WHERE
    NOT PN IN ('ContextLock') AND
    NOT (PVq <= PVt AND PVt <= 1.1 * PVq)
"

# Clean
rm "$server_logs_file"
|
#!/bin/bash
# Slurm batch job: runs one double-DDPG training run (seed 2, run 4) on the
# RoboschoolInvertedPendulumSwingup-v1 environment with action-noise
# exploration and hard target-network copies.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M                     # memory per node
#SBATCH --time=23:00:00                 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolInvertedPendulumSwingup-v1_doule_ddpg_hardcopy_action_noise_seed2_run4_%N-%j.out # %N for node name, %j for jobID

# Load the toolchain and CUDA stack the virtualenv was built against.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
# Activate the (CPU) TensorFlow virtualenv.
source ~/tf_cpu/bin/activate

python ./ddpg_discrete_action.py --env RoboschoolInvertedPendulumSwingup-v1 --random-seed 2 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolInvertedPendulumSwingup-v1/doule_ddpg_hardcopy_action_noise_seed2_run4 --continuous-act-space-flag --target-hard-copy-flag
|
import React, { createRef, ChangeEvent } from "react";
import { Spinner } from "../components/shared/Spinner";
import { Navbar } from "../components/ui/Navbar";
import Image from "next/image";
import { useMeQuery, useUpdateProfileMutation } from "../generated/graphql";
import { Form, Formik } from "formik";
import { Button } from "../components/ui/Button";
import { InputField } from "../components/ui/InputField";
import { useApolloClient } from "@apollo/client";
import Axios from "axios";
import { toErrorMap } from "../utils/toErrorMap";
interface PrefProps {}

/**
 * Profile/preferences page.
 *
 * Shows the signed-in user's details (from the `me` query), lets them upload
 * a new avatar by clicking the avatar image, and edit their profile fields
 * through a Formik form backed by the updateProfile mutation.
 */
const Pref: React.FC<PrefProps> = ({}) => {
    const { data, loading } = useMeQuery();
    const client = useApolloClient();
    const fileInputRef = createRef<HTMLInputElement>();
    const [updateProfile] = useUpdateProfileMutation();

    // Forward clicks on the avatar image to the hidden file input.
    // Optional chaining: the ref is only attached while the input is rendered.
    const openFileInput = () => {
        fileInputRef.current?.click();
    };

    // Upload the selected avatar image, then reset the Apollo store so the
    // new avatar URL is refetched everywhere.
    const uploadImage = async (event: ChangeEvent<HTMLInputElement>) => {
        event.preventDefault();
        // The user may cancel the file dialog — bail out when nothing was
        // selected, and guard against a missing `me` payload.
        const file = event.target.files?.[0];
        if (!file || !data?.me) {
            return;
        }
        const formData = new FormData();
        formData.append("file", file);
        formData.append("id", data.me.id.toString());
        // Separate axios instance so the session cookie is sent along.
        const transport = Axios.create({
            withCredentials: true,
        });
        try {
            await transport.post(
                "http://localhost:4000/upload-profile",
                formData,
                {
                    headers: { "Content-Type": "multipart/form-data" },
                }
            );
            await client.resetStore();
        } catch (err) {
            console.error(err);
        }
    };

    return (
        <>
            <Navbar />
            {data && !loading && (
                <div className={"pref_page"} style={{ padding: "2vh 3vw" }}>
                    <h1 style={{ marginBottom: "3vh" }}>😐 Your Information</h1>
                    <div className={"pref_container"}>
                        <div>
                            <input
                                type="file"
                                hidden={true}
                                ref={fileInputRef}
                                onChange={uploadImage}
                            />
                            <Image
                                alt={"Avatar Image"}
                                width={200}
                                className={"avatar_img"}
                                height={200}
                                src={data.me.avatarUrl}
                                onClick={openFileInput}
                            />
                            <div style={{ marginTop: "12px" }}></div>
                            <p className={"profile_info"}>
                                <span>😃</span> {data.me.name}
                            </p>
                            <p className={"profile_info"}>
                                <span>💬 </span>
                                {data.me.username}
                            </p>
                            <p className={"profile_info"}>
                                <span>✉️</span> {data.me.email}
                            </p>
                            <p className={"profile_info"}>
                                <span>🤖</span>
                                {data.me.bio}
                            </p>
                        </div>
                        <Formik
                            initialValues={{
                                name: data.me.name,
                                email: data.me.email,
                                bio: data.me.bio,
                                username: data.me.username,
                            }}
                            onSubmit={async (values, { setErrors }) => {
                                const res = await updateProfile({
                                    variables: {
                                        ...values,
                                    },
                                });
                                // Map field-level errors back onto the form.
                                if (res.data?.updateProfile.errors) {
                                    return setErrors(
                                        toErrorMap(
                                            res.data.updateProfile.errors
                                        )
                                    );
                                }
                                await client.resetStore();
                            }}
                        >
                            {({ isSubmitting }) => (
                                <Form className={"edit_profile_form"}>
                                    <InputField
                                        name="name"
                                        placeholder="Name"
                                        label="Name"
                                        style={{ width: "350px" }}
                                    />
                                    <InputField
                                        name="email"
                                        placeholder="Email"
                                        label="Email"
                                        style={{ width: "350px" }}
                                    />
                                    <InputField
                                        name="username"
                                        placeholder="Username"
                                        label="Username"
                                        style={{ width: "350px" }}
                                    />
                                    <InputField
                                        name="bio"
                                        placeholder="Bio"
                                        label="Bio"
                                        style={{ width: "350px" }}
                                    />
                                    <Button
                                        type="submit"
                                        isLoading={isSubmitting}
                                        style={{
                                            border: "1px solid var(--gray-300)",
                                        }}
                                    >
                                        Update Profile
                                    </Button>
                                </Form>
                            )}
                        </Formik>
                    </div>
                </div>
            )}
            {loading && <Spinner />}
        </>
    );
};

export default Pref;
|
#!/bin/bash
# Copyright 2017 The Openstack-Helm Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -xe

#NOTE: Lint and package chart
: ${OSH_INFRA_PATH:="../openstack-helm-infra"}
make -C ${OSH_INFRA_PATH} ingress

# Values for the OpenStack-namespace ingress: enable the network policy and
# only admit traffic from the listed OpenStack application pods.
tee /tmp/ingress.yaml <<EOF
manifests:
  network_policy: true
network_policy:
  ingress:
    ingress:
      - from:
        - podSelector:
            matchLabels:
              application: keystone
        - podSelector:
            matchLabels:
              application: heat
        - podSelector:
            matchLabels:
              application: glance
        - podSelector:
            matchLabels:
              application: cinder
        - podSelector:
            matchLabels:
              application: congress
        - podSelector:
            matchLabels:
              application: barbican
        - podSelector:
            matchLabels:
              application: ceilometer
        - podSelector:
            matchLabels:
              application: horizon
        - podSelector:
            matchLabels:
              application: ironic
        - podSelector:
            matchLabels:
              application: magnum
        - podSelector:
            matchLabels:
              application: mistral
        - podSelector:
            matchLabels:
              application: nova
        - podSelector:
            matchLabels:
              application: neutron
        - podSelector:
            matchLabels:
              application: senlin
EOF

#NOTE: Deploy command
: ${OSH_INFRA_PATH:="../openstack-helm-infra"}
: ${OSH_EXTRA_HELM_ARGS:=""}

# kube-system ingress runs as a host-networked DaemonSet cluster-wide.
tee /tmp/ingress-kube-system.yaml << EOF
deployment:
  mode: cluster
  type: DaemonSet
network:
  host_namespace: true
EOF
helm upgrade --install ingress-kube-system ${OSH_INFRA_PATH}/ingress \
  --namespace=kube-system \
  --values=/tmp/ingress-kube-system.yaml \
  ${OSH_EXTRA_HELM_ARGS} \
  ${OSH_EXTRA_HELM_ARGS_INGRESS_KUBE_SYSTEM}

#NOTE: Wait for deploy
./tools/deployment/common/wait-for-pods.sh kube-system

#NOTE: Display info
helm status ingress-kube-system

#NOTE: Deploy namespace ingress
helm upgrade --install ingress-openstack ${OSH_INFRA_PATH}/ingress \
  --namespace=openstack \
  --values=/tmp/ingress.yaml \
  ${OSH_EXTRA_HELM_ARGS} \
  ${OSH_EXTRA_HELM_ARGS_INGRESS_OPENSTACK}

#NOTE: Wait for deploy
./tools/deployment/common/wait-for-pods.sh openstack

#NOTE: Display info
helm status ingress-openstack

# Ceph-namespace ingress uses chart defaults (no extra values file).
helm upgrade --install ingress-ceph ${OSH_INFRA_PATH}/ingress \
  --namespace=ceph \
  ${OSH_EXTRA_HELM_ARGS} \
  ${OSH_EXTRA_HELM_ARGS_INGRESS_OPENSTACK}

#NOTE: Wait for deploy
./tools/deployment/common/wait-for-pods.sh ceph

#NOTE: Display info
helm status ingress-ceph
|
#!/bin/sh
# Regenerates hphp/runtime/ir-opcode-generated.h from the IR specification.
#
# This fallback is for the cmake build, which won't have an FBCODE_DIR
# environment variable, and runs this from the runtime subdir.
DIR="$( cd "$( dirname "$0" )" && pwd )"
if [ x"$FBCODE_DIR" = x"" ] ; then
  FBCODE_DIR="$DIR/../.."
  INSTALL_DIR="$FBCODE_DIR/hphp/runtime"
fi
# NOTE(review): when FBCODE_DIR is preset (fbcode build), INSTALL_DIR is
# expected to come from the environment as well — confirm with the build
# system before relying on it.
SCRIPT="$FBCODE_DIR/hphp/doc/generate-ir-opcodes.pl"
SRC="$FBCODE_DIR/hphp/doc/ir.specification"
OUTPUT="$INSTALL_DIR/ir-opcode-generated.h"
# Quote every expansion so paths containing spaces survive word splitting.
perl "$SCRIPT" "$SRC" > "$OUTPUT"
|
#!/bin/bash
# Sets up an Apache reverse proxy with Let's Encrypt TLS in front of a
# supervisor-managed Kibana instance for the given host.

# Pass the host name
if [ ! -z "$1" ]; then
  host=$1
# Use the stack name env variable
elif [ ! -z "$STACK_ID" ]; then
  host=$STACK_ID
  echo "$host"
else
  echo 'usage: install.sh host_name'
  exit 1
fi

# Substitute the host into the Apache vhost template.
# NOTE(review): sed uses '/' as delimiter, so a host containing '/' would
# break the substitution — hosts are assumed to be plain names.
sed -i -e "s/myhost/$host/g" apache/kibana.conf

apt-get install apache2-utils supervisor -y
a2enmod proxy_html
cp apache/kibana.conf /etc/apache2/sites-available/
# Obtain a certificate non-interactively and configure HTTPS redirect/HSTS.
certbot -q --apache --redirect --hsts --uir --agree-tos -m presales@nuxeo.com -d kibana-$host.cloud.nuxeo.com
a2ensite kibana
service apache2 restart

# Run Kibana under supervisor with logs in /var/log/kibana.
sudo cp supervisor/kibana.conf /etc/supervisor/conf.d/
mkdir /var/log/kibana
chown ubuntu:ubuntu /var/log/kibana
service supervisor restart
|
#!/bin/sh
################################################################################
# benchmarks/chaining/run.sh
#
# Part of Project Thrill - http://project-thrill.org
#
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
################################################################################

# Resolve build/benchmark/Spark/Flink directories relative to this script.
this="${BASH_SOURCE-$0}"
build=$(cd -P -- "$(dirname -- "$this")" && pwd -P)/../../build/benchmarks/chaining
bench=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
spark=$(cd -P -- "$(dirname -- "$this")" && pwd -P)/../../../../../Projects/Spark
flink=$(cd -P -- "$(dirname -- "$this")" && pwd -P)/../../../../../Projects/Flink

set -e

# Truncate any log left over from a previous run.
if [ -f ${build}/bench.log ]; then
    > ${build}/bench.log
fi

export THRILL_LOCAL="1"

# Runs all chaining benchmarks (Thrill cache/collapse/chain, Spark, Flink)
# ten times for the given setup, appending to the shared bench.log, then
# summarizes the cumulative log into ${setup}.out. Extracted because the
# original script duplicated this entire body verbatim for both setups.
run_setup() {
    setup="$1"
    for i in `seq 1 10`;
    do
        echo "CACHE" >> ${build}/bench.log
        ${build}/cache_count_${setup} 10000 >> ${build}/bench.log
        echo "COLLAPSE" >> ${build}/bench.log
        ${build}/collapse_count_${setup} 10000 >> ${build}/bench.log
        echo "CHAIN" >> ${build}/bench.log
        ${build}/chain_count_${setup} 10000 >> ${build}/bench.log
        echo "SPARK" >> ${build}/bench.log
        ${spark}/bin/spark-submit --class org.apache.spark.examples.LocalCount --master local[1] ${spark}/apps/LocalCounter/local-count-${setup}.jar 10000 >> ${build}/bench.log
        echo "FLINK" >> ${build}/bench.log
        # Flink needs a local cluster started/stopped around each run.
        ${flink}/bin/start-local.sh
        ${flink}/bin/flink run ${flink}/apps/LocalCounter/local-count-${setup}.jar 10000 >> ${build}/bench.log
        ${flink}/bin/stop-local.sh
    done
    python ${bench}/evaluate.py ${build}/bench.log ${build}/${setup}.out
}

run_setup "1in10"
run_setup "10in1"

################################################################################
|
#!/usr/bin/env bash
# Copyright (C) 2013 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
test_description="ensure pvmove works with all common segment types"
SKIP_WITH_LVMLOCKD=1

. lib/inittest

# md5sum is used below to verify data integrity across moves.
which md5sum || skip

aux prepare_vg 5 20

# Each of the following tests does:
# 1) Create two LVs - one linear and one other segment type
#    The two LVs will share a PV.
# 2) Move both LVs together
# 3) Move only the second LV by name

# Testing pvmove of linear LV
lvcreate -aey -l 2 -n ${lv1}_foo $vg "$dev1"
lvcreate -aey -l 2 -n $lv1 $vg "$dev1"
# Extend on the same PV (creating adjacent segments) and then onto a
# second PV, so each LV ends up with 3 segments.
lvextend -l+2 $vg/${lv1}_foo "$dev1"
lvextend -l+2 $vg/${lv1} "$dev1"
lvextend -l+2 $vg/${lv1}_foo "$dev2"
lvextend -l+2 $vg/${lv1} "$dev3"
check lv_tree_on $vg ${lv1}_foo "$dev1" "$dev2"
check lv_tree_on $vg $lv1 "$dev1" "$dev3"
check lv_field $vg/${lv1}_foo seg_count 3
check lv_field $vg/$lv1 seg_count 3
aux mkdev_md5sum $vg $lv1
dmsetup table
pvmove --atomic "$dev1" "$dev5"
check lv_tree_on $vg ${lv1}_foo "$dev2" "$dev5"
check lv_tree_on $vg $lv1 "$dev3" "$dev5"
# Also check 2 segments from $dev1 were merged on $dev5
check lv_field $vg/${lv1}_foo seg_count 2
check lv_field $vg/$lv1 seg_count 2
check dev_md5sum $vg $lv1
# Move only $lv1 by name; ${lv1}_foo must stay where it is.
pvmove -n $lv1 "$dev5" "$dev4"
check lv_tree_on $vg $lv1 "$dev3" "$dev4"
check lv_tree_on $vg ${lv1}_foo "$dev2" "$dev5"
check dev_md5sum $vg $lv1
lvremove -ff $vg

# Testing pvmove of stripe LV
lvcreate -aey -l 2 -n ${lv1}_foo $vg "$dev1"
lvcreate -aey -l 4 -i 2 -n $lv1 $vg "$dev1" "$dev2"
check lv_tree_on $vg ${lv1}_foo "$dev1"
check lv_tree_on $vg $lv1 "$dev1" "$dev2"
aux mkdev_md5sum $vg $lv1
pvmove "$dev1" "$dev5"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check lv_tree_on $vg $lv1 "$dev2" "$dev5"
check dev_md5sum $vg $lv1
pvmove -n $lv1 "$dev5" "$dev4"
check lv_tree_on $vg $lv1 "$dev2" "$dev4"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check dev_md5sum $vg $lv1
lvremove -ff $vg

if test -e LOCAL_CLVMD ; then
#FIXME these tests currently fail end require cmirrord
echo "$(should false)FIXME!!! pvmove in clustered VG not fully supported!"
else

# Testing pvmove of mirror LV
lvcreate -aey -l 2 -n ${lv1}_foo $vg "$dev1"
lvcreate -aey -l 2 --type mirror -m 1 -n $lv1 $vg "$dev1" "$dev2"
check lv_tree_on $vg ${lv1}_foo "$dev1"
check lv_tree_on $vg $lv1 "$dev1" "$dev2"
aux mkdev_md5sum $vg $lv1
pvmove "$dev1" "$dev5"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check lv_tree_on $vg $lv1 "$dev2" "$dev5"
check dev_md5sum $vg $lv1
pvmove -n $lv1 "$dev5" "$dev4"
check lv_tree_on $vg $lv1 "$dev2" "$dev4"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check dev_md5sum $vg $lv1
lvremove -ff $vg

# Dummy LV and snap share dev1, while origin is on dev2
# Testing pvmove of snapshot LV
lvcreate -aey -l 2 -n ${lv1}_foo $vg "$dev1"
lvcreate -aey -l 2 -n $lv1 $vg "$dev2"
lvcreate -s $vg/$lv1 -l 2 -n snap "$dev1"
check lv_tree_on $vg ${lv1}_foo "$dev1"
check lv_tree_on $vg snap "$dev1"
aux mkdev_md5sum $vg snap
pvmove "$dev1" "$dev5"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check lv_tree_on $vg snap "$dev5"
check dev_md5sum $vg snap
pvmove -n snap "$dev5" "$dev4"
check lv_tree_on $vg snap "$dev4"
check lv_tree_on $vg ${lv1}_foo "$dev5"
check dev_md5sum $vg snap
lvremove -ff $vg

fi

vgremove -ff $vg
|
import time

sentence = "Hello World"


def main() -> None:
    """Print the sentence once every ten seconds, forever."""
    while True:
        print(sentence)
        time.sleep(10)


main()
<gh_stars>0
package io.cattle.platform.inator.launchconfig.impl;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.model.VolumeTemplate;
import io.cattle.platform.inator.Unit;
import io.cattle.platform.inator.UnitRef;
import io.cattle.platform.inator.factory.InatorServices;
import io.cattle.platform.inator.unit.VolumeUnit;
import io.cattle.platform.util.type.CollectionUtils;
import java.util.HashMap;
import java.util.Map;
/**
 * Reads the data-volume declarations of a launch config and turns every
 * entry that is backed by a known {@link VolumeTemplate} into an inator
 * {@link Unit}.
 */
public class DataVolumes {

    Map<String, Object> lc;
    InatorServices svc;

    public DataVolumes(Map<String, Object> lc, InatorServices svc) {
        this.lc = lc;
        this.svc = svc;
    }

    /**
     * Builds a {@link VolumeUnit} for every named "volume:path" mapping in
     * the launch config whose volume name matches one of the supplied
     * templates.
     *
     * @param templates volume templates keyed by volume name
     * @return units keyed by their {@link UnitRef}; empty when nothing matches
     */
    public Map<UnitRef, Unit> getVolumes(Map<String, VolumeTemplate> templates) {
        Map<UnitRef, Unit> units = new HashMap<>();
        for (Object mapping : CollectionUtils.toList(lc.get(InstanceConstants.FIELD_DATA_VOLUMES))) {
            // Entries look like "<name>:<container-path>"; split once only so
            // the path may itself contain ':'.
            String[] pieces = mapping.toString().split(":", 2);
            if (isNamedVolume(pieces) && templates.containsKey(pieces[0])) {
                VolumeUnit unit = new VolumeUnit(templates.get(pieces[0]), svc);
                units.put(unit.getRef(), unit);
            }
        }
        return units;
    }

    /**
     * A mapping counts as a named volume when both name and path parts are
     * present and the name is non-empty and not an absolute host path.
     */
    protected boolean isNamedVolume(String[] parts) {
        if (parts.length != 2 || parts[0].isEmpty()) {
            return false;
        }
        return parts[0].charAt(0) != '/';
    }
}
|
(self["webpackChunk"] = self["webpackChunk"] || []).push([["resources_js_pages_Admin_ClientProject_index_vue"],{
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js&":
/*!****************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js& ***!
\****************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _layouts_nav_ProfileMenu__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @/layouts/nav/ProfileMenu */ "./resources/js/layouts/nav/ProfileMenu.vue");
/* harmony import */ var _layouts_nav_MainNav__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @/layouts/nav/MainNav */ "./resources/js/layouts/nav/MainNav.vue");
/* harmony import */ var _layouts_Admin_nav_AdminNav__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @/layouts/Admin/nav/AdminNav */ "./resources/js/layouts/Admin/nav/AdminNav.vue");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
data: function data() {
return {};
},
components: {
AdminNav: _layouts_Admin_nav_AdminNav__WEBPACK_IMPORTED_MODULE_2__["default"],
MainNav: _layouts_nav_MainNav__WEBPACK_IMPORTED_MODULE_1__["default"],
ProfileMenu: _layouts_nav_ProfileMenu__WEBPACK_IMPORTED_MODULE_0__["default"]
}
});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js&":
/*!**********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js& ***!
\**********************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
name: 'AdminNav',
data: function data() {
return {
sidebarDrawer: true,
toggleMini: false,
items: [{
title: 'Dashboard',
link: '/admin/dashboard',
icon: 'mdi-view-dashboard'
}, {
title: 'Users',
link: '/admin/users',
icon: 'mdi-account'
}, {
title: 'Clients',
active: true,
link: '/admin/clients',
icon: 'mdi-account-box-multiple'
}, {
title: 'Invoices',
link: '/admin/invoices',
icon: 'mdi-receipt'
}, {
title: 'Pages',
link: '/admin/pages',
icon: 'mdi-file-document-multiple-outline'
}, {
title: 'Settings',
link: '/admin/settings',
icon: 'mdi-card-bulleted-settings-outline'
}]
};
},
computed: {
mini: {
get: function get() {
return (this.toggleMini || this.$vuetify.breakpoint.smAndDown) && this.toggleMini;
},
set: function set(value) {
return value;
}
}
}
});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js&":
/*!***************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js& ***!
\***************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _layouts_Admin_Layout__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @/layouts/Admin/Layout */ "./resources/js/layouts/Admin/Layout.vue");
/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @/helper */ "./resources/js/helper.js");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
layout: _layouts_Admin_Layout__WEBPACK_IMPORTED_MODULE_0__["default"],
props: ['client', 'project', 'templates'],
data: function data() {
return {
currentDocument: this.templates[0],
form: this.$inertia.form({
project: this.project.id,
project_data: this.project.project_data
}),
showDelete: false
};
},
computed: {
documentTemplateType: function documentTemplateType() {
var _this = this;
return function () {
return __webpack_require__("./resources/js/pages/Admin/ClientProject/Types lazy recursive ^\\.\\/.*\\/index\\.vue$")("./".concat(_this.project.type, "/index.vue"));
};
}
},
methods: {
pascalToTitleCase: _helper__WEBPACK_IMPORTED_MODULE_1__.pascalToTitleCase,
onPrint: function onPrint() {
window.open("/admin/client/".concat(this.client.id, "/project/").concat(this.project.id, "/print"), '_blank');
},
onConfirmDelete: function onConfirmDelete() {
this.showDelete = false;
this.form["delete"](route('admin.projects.destroy', {
project: this.project
}));
}
}
});
/***/ }),
/***/ "./resources/js/helper.js":
/*!********************************!*\
!*** ./resources/js/helper.js ***!
\********************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toTitleCase": () => (/* binding */ toTitleCase),
/* harmony export */ "toKebabCase": () => (/* binding */ toKebabCase),
/* harmony export */ "score": () => (/* binding */ score),
/* harmony export */ "toPascalCase": () => (/* binding */ toPascalCase),
/* harmony export */ "pascalToTitleCase": () => (/* binding */ pascalToTitleCase)
/* harmony export */ });
// NOTE(review): this module is webpack/babel output compiled from
// resources/js/helper.js — fix the source file, not this bundle.

// Capitalizes the first letter of every word, lower-casing the rest.
var toTitleCase = function toTitleCase(str) {
  return str.replace(/\w\S*/g, function (txt) {
    return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();
  });
};
// camelCase / whitespace-separated text -> kebab-case.
var toKebabCase = function toKebabCase(str) {
  return str.replace(/([a-z])([A-Z])/g, '$1-$2').replace(/\s+/g, '-').toLowerCase();
};
// Fuzzy-match score of needle `a` against the string bound as `this`,
// returning a value in [0, 1]. `b` is an optional fuzziness factor that
// penalizes missed characters instead of failing with 0.
// NOTE(review): relies on being invoked with a string `this` (via
// call/apply or a String.prototype extension) — confirm against callers.
var score = function score(a, b) {
  if (this === a) return 1;
  if (a === '') return 0;
  var c = 0;
  var d = a.length;
  var e = this;
  var f = e.length;
  var g;
  var h;
  var i = 1;
  var j;
  // Walk the needle; for each char find its earliest (case-insensitive)
  // occurrence in the remaining haystack and accumulate a weighted score
  // (bonuses for exact case, start-of-string, and word boundaries).
  for (var k = 0, l, m, n, o, p, q; k < d; ++k) {
    n = a.charAt(k), o = e.indexOf(n.toLowerCase()), p = e.indexOf(n.toUpperCase()), q = Math.min(o, p), m = q > -1 ? q : Math.max(o, p);
    if (m === -1) {
      if (b) {
        i += 1 - b;
        continue;
      }
      return 0;
    }
    l = 0.1, e[m] === n && (l += 0.1), m === 0 ? (l += 0.6, k === 0 && (g = 1)) : e.charAt(m - 1) === ' ' && (l += 0.8), e = e.substring(m + 1, f), c += l;
  }
  // Normalize by needle/haystack lengths, apply the fuzziness penalty, and
  // add a small bonus when the match started at the very beginning.
  h = c / d, j = (h * (d / f) + h) / 2, j /= i, g && j + 0.15 < 1 && (j += 0.15);
  return j;
};
// "some words" / "some-words" / "some_words" -> "SomeWords".
var toPascalCase = function toPascalCase(str) {
  return str.replace(/[-_\s]+/g, ' ').split(' ').map(function (word) {
    return word[0].toUpperCase() + word.slice(1);
  }).join('');
};
// "PascalCase" -> "Pascal Case".
var pascalToTitleCase = function pascalToTitleCase(str) {
  return str.replace(/([A-Z])/g, ' $1').replace(/^\s/, '');
};
/***/ }),
/***/ "./resources/js/layouts/Admin/Layout.vue":
/*!***********************************************!*\
!*** ./resources/js/layouts/Admin/Layout.vue ***!
\***********************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Layout.vue?vue&type=template&id=93bfcd24& */ "./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24&");
/* harmony import */ var _Layout_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Layout.vue?vue&type=script&lang=js& */ "./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js&");
/* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! !../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
;
var component = (0,_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Layout_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__.render,
_Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns,
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/js/layouts/Admin/Layout.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (component.exports);
/***/ }),
/***/ "./resources/js/layouts/Admin/nav/AdminNav.vue":
/*!*****************************************************!*\
!*** ./resources/js/layouts/Admin/nav/AdminNav.vue ***!
\*****************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./AdminNav.vue?vue&type=template&id=5057c904& */ "./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904&");
/* harmony import */ var _AdminNav_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./AdminNav.vue?vue&type=script&lang=js& */ "./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js&");
/* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! !../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
;
var component = (0,_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_AdminNav_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__.render,
_AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns,
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/js/layouts/Admin/nav/AdminNav.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (component.exports);
/***/ }),
/***/ "./resources/js/pages/Admin/ClientProject/index.vue":
/*!**********************************************************!*\
!*** ./resources/js/pages/Admin/ClientProject/index.vue ***!
\**********************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./index.vue?vue&type=template&id=ac35b890& */ "./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890&");
/* harmony import */ var _index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./index.vue?vue&type=script&lang=js& */ "./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js&");
/* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! !../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
;
var component = (0,_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__.render,
_index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns,
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/js/pages/Admin/ClientProject/index.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (component.exports);
/***/ }),
/***/ "./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js&":
/*!************************************************************************!*\
!*** ./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js& ***!
\************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Layout_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./Layout.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=script&lang=js&");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Layout_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js&":
/*!******************************************************************************!*\
!*** ./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js& ***!
\******************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_AdminNav_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!../../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./AdminNav.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=script&lang=js&");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_AdminNav_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js&":
/*!***********************************************************************************!*\
!*** ./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js& ***!
\***********************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!../../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./index.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5[0].rules[0].use[0]!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=script&lang=js&");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_node_modules_babel_loader_lib_index_js_clonedRuleSet_5_0_rules_0_use_0_node_modules_vue_loader_lib_index_js_vue_loader_options_index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24&":
/*!******************************************************************************!*\
!*** ./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24& ***!
\******************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__.render),
/* harmony export */ "staticRenderFns": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns)
/* harmony export */ });
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Layout_vue_vue_type_template_id_93bfcd24___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./Layout.vue?vue&type=template&id=93bfcd24& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24&");
/***/ }),
/***/ "./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904&":
/*!************************************************************************************!*\
!*** ./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904& ***!
\************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__.render),
/* harmony export */ "staticRenderFns": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns)
/* harmony export */ });
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_AdminNav_vue_vue_type_template_id_5057c904___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./AdminNav.vue?vue&type=template&id=5057c904& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904&");
/***/ }),
/***/ "./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890&":
/*!*****************************************************************************************!*\
!*** ./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890& ***!
\*****************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__.render),
/* harmony export */ "staticRenderFns": () => (/* reexport safe */ _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__.staticRenderFns)
/* harmony export */ });
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_index_vue_vue_type_template_id_ac35b890___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib/index.js??vue-loader-options!./index.vue?vue&type=template&id=ac35b890& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890&");
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24&":
/*!*********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/Layout.vue?vue&type=template&id=93bfcd24& ***!
\*********************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render),
/* harmony export */ "staticRenderFns": () => (/* binding */ staticRenderFns)
/* harmony export */ });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c(
"div",
[
_c(
"v-app",
[
_c(
"v-app-bar",
{ attrs: { "clipped-left": "", app: "" } },
[
_c("MainNav"),
_vm._v(" "),
_c("v-spacer"),
_vm._v(" "),
_c("ProfileMenu")
],
1
),
_vm._v(" "),
_c("AdminNav"),
_vm._v(" "),
_c(
"v-main",
{ staticClass: "warm", staticStyle: { "padding-top": "64px" } },
[
_c(
"v-fade-transition",
{ attrs: { mode: "out-in" } },
[_vm._t("default")],
2
)
],
1
)
],
1
),
_vm._v(" "),
_c("portal-target", { attrs: { name: "modals", multiple: "" } })
],
1
)
}
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904&":
/*!***************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/layouts/Admin/nav/AdminNav.vue?vue&type=template&id=5057c904& ***!
\***************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render),
/* harmony export */ "staticRenderFns": () => (/* binding */ staticRenderFns)
/* harmony export */ });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c(
"v-navigation-drawer",
{
staticStyle: { "padding-top": "64px" },
attrs: {
"mini-variant": _vm.mini,
app: "",
dark: "",
permanent: "",
stateless: "",
clipped: ""
},
on: {
"update:miniVariant": function($event) {
_vm.mini = $event
},
"update:mini-variant": function($event) {
_vm.mini = $event
}
},
model: {
value: _vm.sidebarDrawer,
callback: function($$v) {
_vm.sidebarDrawer = $$v
},
expression: "sidebarDrawer"
}
},
[
_c(
"v-list",
{ attrs: { dense: "", flat: "", outlined: "" } },
[
_c(
"v-list-item",
{
staticClass: "pointer",
on: {
click: function($event) {
_vm.toggleMini = !_vm.toggleMini
}
}
},
[
!_vm.mini
? _c("v-icon", [_vm._v("mdi-menu-open")])
: _c("v-icon", [_vm._v("mdi-menu-open mdi-rotate-180")])
],
1
),
_vm._v(" "),
_vm._l(_vm.items, function(item) {
return _c(
"div",
{ key: item.title, staticClass: "pointer" },
[
item.items
? _c("v-list-group", {
attrs: { "prepend-icon": item.icon },
scopedSlots: _vm._u(
[
{
key: "activator",
fn: function() {
return [
_c(
"v-list-item-content",
[
_c(
"v-list-item-title",
[
_c(
"inertia-link",
{
attrs: {
href: item.link || "",
as: "span"
}
},
[_vm._v(_vm._s(item.title))]
)
],
1
)
],
1
)
]
},
proxy: true
},
{
key: "default",
fn: function() {
return _vm._l(item.items, function(child) {
return _c(
"v-list-item",
{
key: child.title,
staticClass: "grey darken-1 "
},
[
child.link
? _c(
"inertia-link",
{
attrs: {
as: "div",
href: child.link
}
},
[
_c("span", [
_vm._v(_vm._s(child.title))
])
]
)
: _c(
"v-list-item-content",
[
_c("v-list-item-title", {
domProps: {
textContent: _vm._s(child.title)
}
})
],
1
)
],
1
)
})
},
proxy: true
}
],
null,
true
),
model: {
value: item.active,
callback: function($$v) {
_vm.$set(item, "active", $$v)
},
expression: "item.active"
}
})
: _c(
"inertia-link",
{
staticClass: "pointer",
attrs: { href: item.link || "", as: "span" }
},
[
_c(
"v-list-item",
[
_c(
"v-list-item-icon",
[
_c("v-icon", {
domProps: { textContent: _vm._s(item.icon) }
})
],
1
),
_vm._v(" "),
_c(
"v-list-item-content",
[
_c("v-list-item-title", [
_vm._v(
"\n " +
_vm._s(item.title) +
"\n "
)
])
],
1
)
],
1
)
],
1
)
],
1
)
})
],
2
)
],
1
)
}
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890&":
/*!********************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib/index.js??vue-loader-options!./resources/js/pages/Admin/ClientProject/index.vue?vue&type=template&id=ac35b890& ***!
\********************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render),
/* harmony export */ "staticRenderFns": () => (/* binding */ staticRenderFns)
/* harmony export */ });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c(
"div",
{ staticStyle: { "margin-top": "64px" } },
[
_c(
"v-app-bar",
{
staticClass: "primary rounded-0 text--white",
staticStyle: { top: "64px" },
attrs: { dark: "", flat: "", app: "", fixed: "" }
},
[
_c(
"v-row",
[
_c(
"v-col",
{ staticClass: "d-flex align-center", attrs: { xs3: "" } },
[
_c(
"inertia-link",
{ attrs: { href: "/admin/clients", as: "button" } },
[
_c("v-icon", { attrs: { color: "white" } }, [
_vm._v("mdi-menu-left")
])
],
1
),
_vm._v(" "),
_c("span", { staticClass: "heading" }, [
_vm._v(_vm._s(_vm.project.project_data.name))
])
],
1
),
_vm._v(" "),
_c(
"v-col",
{ attrs: { xs3: "" } },
[
_c(
"v-btn",
{ attrs: { icon: "" }, on: { click: _vm.onPrint } },
[
_c("v-icon", [_vm._v("mdi-printer")]),
_vm._v(" "),
_c("div", [_vm._v("Print")])
],
1
)
],
1
),
_vm._v(" "),
_c(
"v-col",
{ attrs: { xs3: "" } },
[
_c("v-select", {
attrs: {
"hide-details": "auto",
"prepend-icon": "mdi-file-document-multiple-outline",
dark: "",
dense: "",
filled: "",
"max-width": "200",
items: _vm.templates
},
model: {
value: _vm.currentDocument,
callback: function($$v) {
_vm.currentDocument = $$v
},
expression: "currentDocument"
}
})
],
1
),
_vm._v(" "),
_c(
"v-col",
{ staticClass: "d-flex justify-end", attrs: { xs3: "" } },
[
_c(
"v-dialog",
{
attrs: { "max-width": "500" },
scopedSlots: _vm._u([
{
key: "activator",
fn: function(ref) {
var on = ref.on
var attrs = ref.attrs
return [
_c(
"v-btn",
_vm._g(
_vm._b(
{
attrs: { dark: "", text: "" },
on: {
click: function($event) {
_vm.showDelete = !_vm.showDelete
}
}
},
"v-btn",
attrs,
false
),
on
),
[_c("v-icon", [_vm._v("mdi-delete")])],
1
)
]
}
}
]),
model: {
value: _vm.showDelete,
callback: function($$v) {
_vm.showDelete = $$v
},
expression: "showDelete"
}
},
[
_vm._v(" "),
_c(
"v-card",
[
_c("v-card-title", [
_c("span", { staticClass: "headline" }, [
_vm._v(
_vm._s("Are you sure you want to delete this?")
)
])
]),
_vm._v(" "),
_c(
"v-card-text",
[
_c(
"v-btn",
{
attrs: { color: "error", text: "" },
on: {
click: function($event) {
$event.preventDefault()
_vm.showDelete = false
}
}
},
[_vm._v(_vm._s("Cancel"))]
),
_vm._v(" "),
_c(
"v-btn",
{
attrs: { color: "error" },
nativeOn: {
click: function($event) {
return _vm.onConfirmDelete.apply(
null,
arguments
)
}
}
},
[_vm._v("Delete")]
)
],
1
)
],
1
)
],
1
)
],
1
)
],
1
)
],
1
),
_vm._v(" "),
_c(_vm.documentTemplateType, {
tag: "component",
attrs: {
project: _vm.project,
client: _vm.client,
"current-document": _vm.currentDocument
},
on: {
"update:project": function($event) {
_vm.project = $event
},
"update:client": function($event) {
_vm.client = $event
},
"update:currentDocument": function($event) {
_vm.currentDocument = $event
},
"update:current-document": function($event) {
_vm.currentDocument = $event
}
}
})
],
1
)
}
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ "./resources/js/pages/Admin/ClientProject/Types lazy recursive ^\\.\\/.*\\/index\\.vue$":
/*!**************************************************************************************************!*\
!*** ./resources/js/pages/Admin/ClientProject/Types/ lazy ^\.\/.*\/index\.vue$ namespace object ***!
\**************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var map = {
"./DeclarationOfStatus/Documents/index.vue": [
"./resources/js/pages/Admin/ClientProject/Types/DeclarationOfStatus/Documents/index.vue",
"resources_js_pages_Admin_ClientProject_Types_DeclarationOfStatus_Documents_index_vue"
],
"./DeclarationOfStatus/index.vue": [
"./resources/js/pages/Admin/ClientProject/Types/DeclarationOfStatus/index.vue",
"resources_js_pages_Admin_ClientProject_Types_DeclarationOfStatus_index_vue"
],
"./Trust/Documents/index.vue": [
"./resources/js/pages/Admin/ClientProject/Types/Trust/Documents/index.vue",
"resources_js_pages_Admin_ClientProject_Types_Trust_Documents_index_vue"
],
"./Trust/index.vue": [
"./resources/js/pages/Admin/ClientProject/Types/Trust/index.vue",
"resources_js_pages_Admin_ClientProject_Types_Trust_index_vue"
]
};
function webpackAsyncContext(req) {
if(!__webpack_require__.o(map, req)) {
return Promise.resolve().then(() => {
var e = new Error("Cannot find module '" + req + "'");
e.code = 'MODULE_NOT_FOUND';
throw e;
});
}
var ids = map[req], id = ids[0];
return __webpack_require__.e(ids[1]).then(() => {
return __webpack_require__(id);
});
}
webpackAsyncContext.keys = () => (Object.keys(map));
webpackAsyncContext.id = "./resources/js/pages/Admin/ClientProject/Types lazy recursive ^\\.\\/.*\\/index\\.vue$";
module.exports = webpackAsyncContext;
/***/ })
}]); |
import re
def parse_requirements(cron_string):
    """Tokenize a cron-requirement string.

    The string is a whitespace-separated list of requirement tokens such as
    ``CHECKHOURS0-6`` or ``WEEKDAYS``; each token is returned unchanged.

    :param cron_string: raw requirement string
    :return: list of requirement tokens (empty for a blank string)
    """
    return cron_string.split()
# Test cases
# Three sample requirement strings: weekday and weekend schedules with
# differing hour/minute windows, record-count bounds and look-back periods.
cron_string_1 = "CHECKHOURS0-6 CHECKMINUTES0-30 WEEKDAYS MINNUM10 MAXNUM50 LOOKBACKSECONDS1800"
cron_string_2 = "CHECKHOURS8-12 CHECKMINUTES15-45 WEEKENDS MINNUM5 MAXNUM20 LOOKBACKSECONDS3600"
cron_string_3 = "CHECKHOURS12-18 CHECKMINUTES50-55 WEEKDAYS MINNUM5 MAXNUM20 LOOKBACKSECONDS3600"
# Each call simply splits the string into its individual requirement tokens.
requirements_1 = parse_requirements(cron_string_1)
requirements_2 = parse_requirements(cron_string_2)
requirements_3 = parse_requirements(cron_string_3)
print(requirements_1) # Output: ['CHECKHOURS0-6', 'CHECKMINUTES0-30', 'WEEKDAYS', 'MINNUM10', 'MAXNUM50', 'LOOKBACKSECONDS1800']
print(requirements_2) # Output: ['CHECKHOURS8-12', 'CHECKMINUTES15-45', 'WEEKENDS', 'MINNUM5', 'MAXNUM20', 'LOOKBACKSECONDS3600']
print(requirements_3) # Output: ['CHECKHOURS12-18', 'CHECKMINUTES50-55', 'WEEKDAYS', 'MINNUM5', 'MAXNUM20', 'LOOKBACKSECONDS3600']
#!/bin/bash
# Generate a PostgreSQL DDL schema from the wwPDB validation XML schema
# using the xsd2pgschema converter.
set -e

# Converter jar; build/download it via update_extlibs.sh if missing.
XSD2PGSCHEMA=../extlibs/xsd2pgschema.jar

if [ ! -e "$XSD2PGSCHEMA" ] ; then
    # '&&' so the script does not run update_extlibs.sh in the wrong
    # directory if the cd fails.
    ( cd .. && ./scripts/update_extlibs.sh )
fi

XML_SCHEMA=wwpdb_validation_v6.00.xsd
DB_SCHEMA=wwpdb_validation_v6.00.sql

# Use the variable (the original duplicated the jar path here, so changing
# XSD2PGSCHEMA above would silently not take effect).
java -classpath "$XSD2PGSCHEMA" xsd2pgschema --xsd "$XML_SCHEMA" --ddl "$DB_SCHEMA"

echo "Generated: $DB_SCHEMA"
|
def post(self, request, *args, **kwargs):
    """
    Decline a membership. In addition deletes now unnecessary information.

    The request payload is validated with ``MembershipDeclineSerializer``;
    the decline/cleanup itself is assumed to be handled by the serializer
    and related logic implemented elsewhere.

    :param request: HTTP request object containing the data to process
    :param args: additional positional arguments
    :param kwargs: additional keyword arguments
    :return: HTTP response with status code 200 if data is valid, else 403
    """
    serializer = MembershipDeclineSerializer(
        data=request.data, context=self.get_serializer_context()
    )
    # Guard clause: reject invalid payloads outright.
    if not serializer.is_valid():
        return Response(status=status.HTTP_403_FORBIDDEN)
    return Response(status=status.HTTP_200_OK)
<filename>src/len.js
const callable = require('./callable')
const enumerate = require('./enumerate')
module.exports = object => {
if (callable(object.__len__)) {
return object.__len__()
}
// E.g. built-in 'Array'
if (typeof(object.length) === 'number') {
return object.length
}
// E.g. built-in 'Map'
if (typeof(object.size) === 'number') {
return object.size
}
// assume object
return enumerate(object).length
}
|
<gh_stars>1-10
package io.github.vampirestudios.obsidian.api.obsidian.enchantments;
import net.minecraft.entity.EntityGroup;
/**
 * Enchantment attack-damage entry mapping a named entity group to a damage
 * bonus. Fields are mutable/public because they are populated externally —
 * presumably by JSON deserialization of enchantment definitions (TODO
 * confirm against the loader).
 */
public class AttackDamage {
    /** Enchantment level this entry applies to (defaults to 1). */
    public int level = 1;
    /** Name of the target entity group, e.g. "undead" or "aquatic". */
    public String entity_group;
    /** Extra attack damage granted against the configured group. */
    public float attack_damage;

    /**
     * Resolves {@link #entity_group} to a Minecraft {@link EntityGroup}.
     * Unknown names fall back to {@code EntityGroup.DEFAULT}; a missing
     * (null) name also yields DEFAULT instead of the NullPointerException
     * that switching on a null String would throw.
     *
     * @return the matching entity group, never null
     */
    public EntityGroup getEntityGroup() {
        if (entity_group == null) {
            return EntityGroup.DEFAULT;
        }
        return switch (entity_group) {
            case "undead" -> EntityGroup.UNDEAD;
            case "arthropod" -> EntityGroup.ARTHROPOD;
            case "illager" -> EntityGroup.ILLAGER;
            case "aquatic" -> EntityGroup.AQUATIC;
            default -> EntityGroup.DEFAULT;
        };
    }
}
|
<reponame>ArcanjoQueiroz/keycloak-authentication-service<gh_stars>1-10
import axios from 'axios';
// Connection settings for the auth server, each overridable through an
// environment variable. The inline fallbacks are local-development defaults
// only — real credentials must come from the environment, not source code.
const authServerBaseUrl = process.env.AUTH_SERVER_BASE_URL || 'http://localhost:9999/auth';
const realm = process.env.REALM || 'test';
const clientId = process.env.CLIENT_ID || 'test';
const clientSecret = process.env.CLIENT_SECRET || 'a167e1f1-870d-4926-89d8-738a8d214817';
const username = process.env.USERNAME || 'alexandre';
const password = process.env.PASSWORD || '<PASSWORD>';
/**
 * Requests an access token via the OAuth2 resource-owner-password grant.
 *
 * @param {string} clientId     OAuth client id
 * @param {string} clientSecret OAuth client secret
 * @param {string} username     resource-owner username
 * @param {string} password     resource-owner password
 * @returns {Promise<Object>} the token response body (access_token, token_type, ...)
 */
const getAccessToken = async (clientId, clientSecret, username, password) => {
  const url = `${authServerBaseUrl}/realms/${realm}/protocol/openid-connect/token`;
  // URLSearchParams percent-encodes every value, so credentials containing
  // reserved characters ('&', '=', '%', '+', ...) no longer corrupt the
  // hand-concatenated form body the previous version built.
  const form = new URLSearchParams({
    client_id: clientId,
    client_secret: clientSecret,
    username: username,
    password: password,
    grant_type: 'password',
  }).toString();
  console.log(`Access Token Request URL: ${url}`);
  // Do not log the form body: it contains the client secret and password.
  const res = await axios.post(url, form, {
    headers: {'Content-Type': 'application/x-www-form-urlencoded', 'Cache-Control': 'no-cache'},
  });
  return res.data;
};
/**
 * Fetches the OpenID Connect userinfo for the given access token.
 *
 * @param {Object} accessToken token response with token_type and access_token
 * @returns {Promise<Object>} the userinfo response body
 */
const getUserInfo = async (accessToken) => {
  const url = `${authServerBaseUrl}/realms/${realm}/protocol/openid-connect/userinfo`;
  const authorization = `${accessToken.token_type} ${accessToken.access_token}`;
  console.log(`User Info Request URL: ${url}`);
  // The bearer token is a credential — log only its type, never the value.
  console.log(`User Info Authorization: ${accessToken.token_type} <redacted>`);
  const res = await axios.get(url, {
    headers: {'Authorization': authorization, 'Cache-Control': 'no-cache'},
  });
  return res.data;
};
/**
 * Introspects an access token (RFC 7662) to check validity and claims.
 *
 * @param {string} clientId     OAuth client id
 * @param {string} clientSecret OAuth client secret
 * @param {Object} accessToken  token response containing access_token
 * @returns {Promise<Object>} the introspection response body
 */
const introspectAccessToken = async (clientId, clientSecret, accessToken) => {
  const url = `${authServerBaseUrl}/realms/${realm}/protocol/openid-connect/token/introspect`;
  // URLSearchParams percent-encodes values so a secret or token containing
  // reserved characters cannot break the form body (the old concatenation
  // could).
  const form = new URLSearchParams({
    client_id: clientId,
    client_secret: clientSecret,
    token: accessToken.access_token,
  }).toString();
  console.log(`Introspect Request URL: ${url}`);
  // Do not log the form body: it contains the client secret and the token.
  const res = await axios.post(url, form, {
    headers: {'Content-Type': 'application/x-www-form-urlencoded', 'Cache-Control': 'no-cache'},
  });
  return res.data;
};
// Demo flow: obtain a token, then introspect it and fetch the userinfo.
getAccessToken(clientId, clientSecret, username, password)
  .then((accessToken) => {
    // Fire-and-forget: the introspection promise is deliberately not
    // returned, so it runs concurrently with the userinfo request below
    // and its failures are handled by its own .catch only.
    introspectAccessToken(clientId, clientSecret, accessToken)
      .then( (i) => console.log('Introspect: ' + JSON.stringify(i)))
      .catch((e) => console.log(e));
    return accessToken;
  })
  .then((accessToken) => getUserInfo(accessToken))
  .then((userInfo) => console.log('UserInfo: ' + JSON.stringify(userInfo)))
  // Any failure in token acquisition or userinfo ends up here.
  .catch((e) => console.log(e.message));
|
import React from 'react'
import Routes from './routes'
import GlobalStyles from './global/styles.global'
const App = () => (
<>
<GlobalStyles />
<Routes />
</>
)
export default App
|
def remove_duplicates(my_list):
    """Return a new list keeping only the first occurrence of each element.

    Input order is preserved. Hashable elements are tracked in a set so the
    common case runs in O(n) instead of the original O(n^2) list scan;
    unhashable elements (e.g. nested lists) fall back to a linear scan of
    the result, matching the original behaviour exactly.

    :param my_list: iterable of elements to de-duplicate
    :return: new list with duplicates removed, original order kept
    """
    uniques = []
    seen = set()  # O(1) membership test for hashable elements
    for element in my_list:
        try:
            if element in seen:
                continue
            seen.add(element)
        except TypeError:
            # Unhashable element: fall back to scanning the output list.
            if element in uniques:
                continue
        uniques.append(element)
    return uniques
<reponame>smagill/opensphere-desktop
/**
* Package for the WMS URL builder.
*/
package io.opensphere.wms.util;
|
<reponame>parkingomat/namecheap<filename>types/typedefs/users/index.js<gh_stars>1-10
export {}
/* typal types/api/users/get-pricing.xml noSuppress */
/**
* @typedef {_namecheap.GetPricing} GetPricing Options to get pricing info. https://www.namecheap.com/support/api/methods/users/get-pricing.aspx
*/
/**
* @typedef {Object} _namecheap.GetPricing Options to get pricing info. https://www.namecheap.com/support/api/methods/users/get-pricing.aspx
* @prop {string} type Product Type to get pricing information.
* One of `DOMAIN`, `SSLCERTIFICATE`, `WHOISGUARD`.
* @prop {string} [category] Specific category within a product type, e.g., `DOMAINS`, `COMODO`, `WHOISGUARD`.
* @prop {string} [promoCode] Promotional (coupon) code for the user.
* @prop {string} [action] Specific action within a product type.
* One of `REGISTER`, `PURCHASE`, `RENEW`, `REACTIVATE`, `TRANSFER`.
* @prop {string} [product] The name of the product within a product type, e.g., `COM`, `INSTANTSSL`, `WHOISGUARD-PROTECT-ONE`.
*/
/**
* @typedef {_namecheap.Pricing} Pricing The pricing information returned as an object. The data is split into 3 types: `Domain`, `SSL` and `Whois`.
*/
/**
* @typedef {Object} _namecheap.Pricing The pricing information returned as an object. The data is split into 3 types: `Domain`, `SSL` and `Whois`.
* @prop {_namecheap.DomainPricing} domains The pricing of domains.
* @prop {_namecheap.SSLPricing} ssl The pricing of certificates.
* @prop {_namecheap.WhoisPricing} whoisguard The pricing of the Whois Guard.
*/
/**
* @typedef {_namecheap.DomainPricing} DomainPricing The pricing of domains as an object, where the requested zone is a key.
*/
/**
* @typedef {Object} _namecheap.DomainPricing The pricing of domains as an object, where the requested zone is a key.
* @prop {Object<string, _namecheap.Product>} register The pricing to register domains.
* @prop {Object<string, _namecheap.Product>} renew The pricing to renew domains.
* @prop {Object<string, _namecheap.Product>} reactivate The pricing to reactivate domains.
* @prop {Object<string, _namecheap.Product>} transfer The pricing to transfer domains.
*/
/**
* @typedef {_namecheap.SSLPricing} SSLPricing The pricing of certificates.
*/
/**
* @typedef {Object} _namecheap.SSLPricing The pricing of certificates.
* @prop {_namecheap.SSLPurchase} purchase The pricing to purchase certificates.
* @prop {_namecheap.SSLRenew} renew The pricing to renew certificates.
*/
/**
* @typedef {_namecheap.WhoisPricing} WhoisPricing The pricing of the Whois Guard.
*/
/**
* @typedef {Object} _namecheap.WhoisPricing The pricing of the Whois Guard.
* @prop {_namecheap.WhoisPurchase} purchase The pricing to purchase WHOIS guards.
* @prop {_namecheap.WhoisRenew} renew The pricing to renew WHOIS guards.
*/
/**
* @typedef {_namecheap.Product} Product A product consists of an array of prices for different durations.
*/
/**
* @typedef {!Array<!_namecheap.Price>} _namecheap.Product A product consists of an array of prices for different durations.
*/
/**
 * @typedef {_namecheap.Price} Price Price data for a product according to the duration of an action.
*/
/**
 * @typedef {Object} _namecheap.Price Price data for a product according to the duration of an action.
* @prop {number} Duration The duration of the product, e.g., `1`.
* @prop {string} DurationType The duration type of the product, e.g., `YEAR`.
* @prop {string} Price Indicates Final price (it can be from regular, userprice, special price,promo price, tier price), e.g., `20.88`.
* @prop {string} PricingType Either `MULTIPLE` or `ABSOULTE`.
* @prop {string} [AdditionalCost] Any additional costs, such as ICANN fee for a domain registration, e.g., `0.18`.
* @prop {string} RegularPrice Indicates regular price, e.g., `39.00`.
* @prop {string} RegularPriceType Either `MULTIPLE` or `ABSOULTE`.
* @prop {string} [RegularAdditionalCost] Any additional costs, such as ICANN fee for a domain registration, e.g., `0.18`.
* @prop {string} [RegularAdditionalCostType] Either `MULTIPLE` or `ABSOULTE`.
* @prop {string} YourPrice The user’s price for the product, e.g., `20.88`.
* @prop {string} YourPriceType Either `MULTIPLE` or `ABSOULTE`.
* @prop {string} [YourAdditonalCost] Any additional costs, such as ICANN fee for a domain registration, e.g., `0.18`.
* @prop {string} [YourAdditonalCostType] Either `MULTIPLE` or `ABSOULTE`.
* @prop {string} PromotionPrice Price with coupon enabled.
* @prop {string} Currency Currency in which the price is listed, e.g., `USD`.
*/
/* typal types/api/users/pricing/ssl.xml noSuppress */
/**
* @typedef {_namecheap.SSLPurchase} SSLPurchase `@record` The pricing to purchase certificates.
*/
/**
* @typedef {Object} _namecheap.SSLPurchase `@record` The pricing to purchase certificates.
* @prop {_namecheap.Product} instantssl _InstantSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/instantssl.aspx. 1-year purchase: `20.88 USD`
* @prop {_namecheap.Product} positivessl _PositiveSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl.aspx. 1-year purchase: `8.88 USD`
* @prop {_namecheap.Product} positivesslWildcard _PositiveSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl-wildcard.aspx. 1-year purchase: `76.88 USD`
* @prop {_namecheap.Product} premiumssl _PremiumSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/premiumssl.aspx. 1-year purchase: `79.00 USD`
* @prop {_namecheap.Product} quicksslPremium 1-year purchase: `56.88 USD`
* @prop {_namecheap.Product} rapidssl 1-year purchase: `10.95 USD`
* @prop {_namecheap.Product} rapidsslWildcard 1-year purchase: `148.88 USD`
* @prop {_namecheap.Product} secureSite 1-year purchase: `285.88 USD`
* @prop {_namecheap.Product} secureSitePro 1-year purchase: `675.88 USD`
* @prop {_namecheap.Product} secureSiteProWithEv 1-year purchase: `961.88 USD`
* @prop {_namecheap.Product} secureSiteWithEv 1-year purchase: `666.88 USD`
* @prop {_namecheap.Product} trueBusinessid 1-year purchase: `98.00 USD`
* @prop {_namecheap.Product} trueBusinessidWildcard 1-year purchase: `389.00 USD`
* @prop {_namecheap.Product} trueBusinessidWithEv 1-year purchase: `179.00 USD`
* @prop {_namecheap.Product} premiumsslWildcard _PremiumSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/premiumssl-wildcard.aspx. 1-year purchase: `169.00 USD`
* @prop {_namecheap.Product} essentialssl _EssentialSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/essentialssl.aspx. 1-year purchase: `18.88 USD`
* @prop {_namecheap.Product} essentialsslWildcard _EssentialSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/essentialssl-wildcard.aspx. 1-year purchase: `74.88 USD`
* @prop {_namecheap.Product} evSsl _EV SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/ev.aspx. 1-year purchase: `78.88 USD`
* @prop {_namecheap.Product} instantsslPro _InstantSSL Pro_ https://www.namecheap.com/security/ssl-certificates/comodo/instantssl-pro.aspx. 1-year purchase: `38.88 USD`
* @prop {_namecheap.Product} ssl123 1-year purchase: `39.00 USD`
* @prop {_namecheap.Product} sslWebServer 1-year purchase: `88.88 USD`
* @prop {_namecheap.Product} sslWebserverEv 1-year purchase: `163.88 USD`
* @prop {_namecheap.Product} comodossl 1-year purchase: `35.00 USD`
* @prop {_namecheap.Product} comodosslWildcard 1-year purchase: `170.00 USD`
* @prop {_namecheap.Product} comodosslMultiDomainSsl _Multi-Domain SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/multi-domain-ssl.aspx. 1-year purchase: `89.88 USD`
* @prop {_namecheap.Product} comodosslMultiDomainSslMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} comodosslEvMultiDomainSsl _EV Multi-Domain SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/ev-multi-domain-ssl.aspx. 1-year purchase: `168.88 USD`
* @prop {_namecheap.Product} comodosslEvMultiDomainSslMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} positivesslMultiDomain _PositiveSSL Multi-Domain_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl-multi-domain.aspx. 1-year purchase: `29.88 USD`
* @prop {_namecheap.Product} positivesslMultiDomainMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} trueBusinessidMultiDomain 1-year purchase: `179.88 USD`
* @prop {_namecheap.Product} trueBusinessidMultiDomainMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} trueBusinessidWithEvMultiDomain 1-year purchase: `237.88 USD`
* @prop {_namecheap.Product} trueBusinessidWithEvMultiDomainMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} unifiedCommunications _Unified Communications_ https://www.namecheap.com/security/ssl-certificates/comodo/unified-communications.aspx. 1-year purchase: `89.88 USD`
* @prop {_namecheap.Product} unifiedCommunicationsMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} secureSiteMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} quicksslPremiumMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} secureSiteProMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} secureSiteProWithEvMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} secureSiteWithEvMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} sgcSuperCertsMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} sslWebServerMoresans 1-year purchase: `0.00 USD`
* @prop {_namecheap.Product} sslWebserverEvMoresans 1-year purchase: `0.00 USD`
*/
/**
* @typedef {_namecheap.SSLRenew} SSLRenew `@record` The pricing to renew certificates.
*/
/**
* @typedef {Object} _namecheap.SSLRenew `@record` The pricing to renew certificates.
* @prop {_namecheap.Product} instantssl _InstantSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/instantssl.aspx. 1-year renewal: `31.98 USD`
* @prop {_namecheap.Product} positivessl _PositiveSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl.aspx. 1-year renewal: `7.28 USD`
* @prop {_namecheap.Product} positivesslWildcard _PositiveSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl-wildcard.aspx. 1-year renewal: `77.08 USD`
* @prop {_namecheap.Product} premiumssl _PremiumSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/premiumssl.aspx. 1-year renewal: `64.78 USD`
* @prop {_namecheap.Product} quicksslPremium 1-year renewal: `46.64 USD`
* @prop {_namecheap.Product} rapidssl 1-year renewal: `8.98 USD`
* @prop {_namecheap.Product} rapidsslWildcard 1-year renewal: `122.08 USD`
* @prop {_namecheap.Product} secureSite 1-year renewal: `234.42 USD`
* @prop {_namecheap.Product} secureSitePro 1-year renewal: `554.22 USD`
* @prop {_namecheap.Product} secureSiteProWithEv 1-year renewal: `788.74 USD`
* @prop {_namecheap.Product} secureSiteWithEv 1-year renewal: `546.84 USD`
* @prop {_namecheap.Product} trueBusinessid 1-year renewal: `80.36 USD`
* @prop {_namecheap.Product} trueBusinessidWildcard 1-year renewal: `318.98 USD`
* @prop {_namecheap.Product} trueBusinessidWithEv 1-year renewal: `146.78 USD`
* @prop {_namecheap.Product} ssl123 1-year renewal: `31.98 USD`
* @prop {_namecheap.Product} sslWebServer 1-year renewal: `72.88 USD`
* @prop {_namecheap.Product} sslWebserverEv 1-year renewal: `134.38 USD`
* @prop {_namecheap.Product} essentialssl _EssentialSSL_ https://www.namecheap.com/security/ssl-certificates/comodo/essentialssl.aspx. 1-year renewal: `18.88 USD`
* @prop {_namecheap.Product} essentialsslWildcard _EssentialSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/essentialssl-wildcard.aspx. 1-year renewal: `74.88 USD`
* @prop {_namecheap.Product} evSsl _EV SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/ev.aspx. 1-year renewal: `118.90 USD`
* @prop {_namecheap.Product} instantsslPro _InstantSSL Pro_ https://www.namecheap.com/security/ssl-certificates/comodo/instantssl-pro.aspx. 1-year renewal: `48.38 USD`
* @prop {_namecheap.Product} premiumsslWildcard _PremiumSSL Wildcard_ https://www.namecheap.com/security/ssl-certificates/comodo/premiumssl-wildcard.aspx. 1-year renewal: `138.58 USD`
* @prop {_namecheap.Product} comodossl 1-year renewal: `28.70 USD`
* @prop {_namecheap.Product} comodosslMultiDomainSsl _Multi-Domain SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/multi-domain-ssl.aspx. 1-year renewal: `73.70 USD`
* @prop {_namecheap.Product} comodosslEvMultiDomainSsl _EV Multi-Domain SSL_ https://www.namecheap.com/security/ssl-certificates/comodo/ev-multi-domain-ssl.aspx. 1-year renewal: `203.26 USD`
* @prop {_namecheap.Product} positivesslMultiDomain _PositiveSSL Multi-Domain_ https://www.namecheap.com/security/ssl-certificates/comodo/positivessl-multi-domain.aspx. 1-year renewal: `24.50 USD`
* @prop {_namecheap.Product} trueBusinessidMultiDomain 1-year renewal: `147.50 USD`
* @prop {_namecheap.Product} trueBusinessidWithEvMultiDomain 1-year renewal: `195.06 USD`
* @prop {_namecheap.Product} unifiedCommunications _Unified Communications_ https://www.namecheap.com/security/ssl-certificates/comodo/unified-communications.aspx. 1-year renewal: `73.70 USD`
*/
/* typal types/api/users/pricing/whois.xml noSuppress */
/**
* @typedef {_namecheap.WhoisPurchase} WhoisPurchase `@record` The pricing to purchase WHOIS guards.
*/
/**
* @typedef {Object} _namecheap.WhoisPurchase `@record` The pricing to purchase WHOIS guards.
* @prop {_namecheap.Product} whoisguard5Pack 1-year purchase: `7.88 USD`
* @prop {_namecheap.Product} whoisguardDualPack 1-year purchase: `4.88 USD`
* @prop {_namecheap.Product} whoisguardProtectOne 1-year purchase: `0.00 USD`
*/
/**
* @typedef {_namecheap.WhoisRenew} WhoisRenew `@record` The pricing to renew WHOIS guards.
*/
/**
* @typedef {Object} _namecheap.WhoisRenew `@record` The pricing to renew WHOIS guards.
* @prop {_namecheap.Product} whoisguardProtectOne 1-year renewal: `0.00 USD`
*/
|
<filename>v1x1/orm/accountEx.go
package orm
import (
"errors"
)
// AddFav adds post pid to the favourite list of account id and persists the
// change. The operation is idempotent: if the post is already a favourite, the
// record is returned unchanged instead of appending a duplicate entry (the
// previous implementation appended unconditionally, so repeated calls grew
// the list with the same pid).
// Returns the updated (or unchanged) record, or an error if the account row
// cannot be loaded, does not exist, or the update fails.
func AddFav(id, pid int64) (*AccountEx, error) {
	a := &AccountEx{}
	has, e := engine_account.Table("account_ex").Where("account_id = ?", id).Get(a)
	if e != nil {
		return nil, e
	} else if !has {
		return nil, errors.New("account not found")
	}
	// Already favourited: nothing to do, and no duplicate row data written.
	for _, f := range a.FavPosts {
		if f == pid {
			return a, nil
		}
	}
	if a.FavPosts == nil {
		a.FavPosts = []int64{}
	}
	a.FavPosts = append(a.FavPosts, pid)
	if _, e = engine_account.Table("account_ex").Where("account_id = ?", id).AllCols().Update(a); e != nil {
		return nil, e
	}
	return a, nil
}
// RemoveFav deletes the first occurrence of post pid from the favourite list
// of account id and persists the change. Removing a pid that is not present
// is a no-op apart from the write-back.
// Returns the updated record, or an error if the account row cannot be
// loaded, does not exist, or the update fails.
func RemoveFav(id, pid int64) (*AccountEx, error) {
	acc := &AccountEx{}
	found, err := engine_account.Table("account_ex").Where("account_id = ?", id).Get(acc)
	if err != nil {
		return nil, err
	}
	if !found {
		return nil, errors.New("account not found")
	}
	// Splice out the first matching entry, preserving the order of the rest.
	for idx, post := range acc.FavPosts {
		if post == pid {
			acc.FavPosts = append(acc.FavPosts[:idx], acc.FavPosts[idx+1:]...)
			break
		}
	}
	if _, err = engine_account.Table("account_ex").Where("account_id = ?", id).AllCols().Update(acc); err != nil {
		return nil, err
	}
	return acc, nil
}
// IsPostFav reports whether post pid is in the favourite list of account id.
// Returns an error if the account row cannot be loaded or does not exist.
func IsPostFav( id, pid int64 ) ( isFav bool, e error ) {
	a := &AccountEx{}
	has, e := engine_account.Table("account_ex").Where("account_id = ?",id).Get(a)
	if e != nil {
		return false, e
	} else if !has {
		return false, errors.New("account not found")
	}
	// Linear membership scan; the favourites list is expected to be small.
	// (The original comment here said "remove from the list" — a copy-paste
	// leftover from RemoveFav; this function only checks membership.)
	for _, f := range a.FavPosts {
		if f == pid {
			return true, nil
		}
	}
	return false, nil
}
func UpdateFav( id int64, mdFavList []int64 ) (*AccountEx, error) {
a := &AccountEx{}
has, e := engine_account.Table("account_ex").Where("account_id = ?",id).Get(a)
if e != nil {
return nil, e
} else if !has {
return nil, errors.New("account not found")
}
a.FavPosts = mdFavList
if _, e = engine_account.Table("account_ex").Where("account_id = ?",id).AllCols().Update( a ); e != nil {
return nil, e
}
return a, nil
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common.keyprovider;
import java.util.ArrayList;
import java.util.List;
import org.apache.sshd.util.test.JUnit4ClassRunnerWithParametersFactory;
import org.apache.sshd.util.test.JUnitTestSupport;
import org.apache.sshd.util.test.NoIoTestCase;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import org.mockito.Mockito;
/**
 * Verifies the precedence rules of
 * {@code KeyIdentityProvider.resolveKeyIdentityProvider(p1, p2)}: a
 * {@code null}/empty provider yields to the other argument, and a real
 * provider always wins over {@code null} or the empty provider.
 *
 * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@RunWith(Parameterized.class) // see https://github.com/junit-team/junit/wiki/Parameterized-tests
@UseParametersRunnerFactory(JUnit4ClassRunnerWithParametersFactory.class)
@Category({ NoIoTestCase.class })
public class KeyIdentityProviderResolutionTest extends JUnitTestSupport {
    private final KeyIdentityProvider p1;
    private final KeyIdentityProvider p2;
    private final KeyIdentityProvider expected;

    public KeyIdentityProviderResolutionTest(
            KeyIdentityProvider p1, KeyIdentityProvider p2, KeyIdentityProvider expected) {
        this.p1 = p1;
        this.p2 = p2;
        this.expected = expected;
    }

    @Parameters(name = "p1={0}, p2={1}, expected={2}")
    public static List<Object[]> parameters() {
        // Plain list building replaces the previous double-brace initialization
        // (anonymous ArrayList subclass), which created a needless extra class
        // and dragged serialization concerns (serialVersionUID) into test code.
        List<Object[]> params = new ArrayList<>();
        params.add(new Object[] { null, null, null });
        params.add(new Object[] { null, KeyIdentityProvider.EMPTY_KEYS_PROVIDER, KeyIdentityProvider.EMPTY_KEYS_PROVIDER });
        params.add(new Object[] { KeyIdentityProvider.EMPTY_KEYS_PROVIDER, null, KeyIdentityProvider.EMPTY_KEYS_PROVIDER });
        params.add(new Object[] {
            KeyIdentityProvider.EMPTY_KEYS_PROVIDER, KeyIdentityProvider.EMPTY_KEYS_PROVIDER,
            KeyIdentityProvider.EMPTY_KEYS_PROVIDER });

        KeyIdentityProvider p = createKeyIdentityProvider("MOCK");
        params.add(new Object[] { null, p, p });
        params.add(new Object[] { KeyIdentityProvider.EMPTY_KEYS_PROVIDER, p, p });
        params.add(new Object[] { p, null, p });
        params.add(new Object[] { p, KeyIdentityProvider.EMPTY_KEYS_PROVIDER, p });
        return params;
    }

    /**
     * Creates a mock provider whose {@code toString()} yields the given name,
     * so the parameterized test labels are readable.
     */
    private static KeyIdentityProvider createKeyIdentityProvider(String name) {
        KeyIdentityProvider p = Mockito.mock(KeyIdentityProvider.class);
        Mockito.when(p.toString()).thenReturn(name);
        return p;
    }

    @Test
    public void testResolveKeyIdentityProvider() {
        assertSame(expected, KeyIdentityProvider.resolveKeyIdentityProvider(p1, p2));
    }
}
|
<filename>elasta-composer/src/main/java/jpatest/models/Ac.java
package jpatest.models;
import javax.persistence.*;
/**
 * JPA entity for the "Ac" employee subtype, stored in the shared employee
 * table via single-table inheritance. Holds a one-to-one association to the
 * {@link Area} it is assigned to.
 *
 * <p>Created by Jango on 10/2/2016.</p>
 */
@Entity
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
public class Ac extends Employee {

    /** Assigned area; all persistence operations cascade to it. */
    @OneToOne(cascade = CascadeType.ALL)
    private Area area;

    /** @return the area this employee is assigned to, or {@code null}. */
    public Area getArea() {
        return this.area;
    }

    /** @param area the area to assign to this employee. */
    public void setArea(Area area) {
        this.area = area;
    }

    @Override
    public String toString() {
        return new StringBuilder("Ac{")
                .append("area=").append(area)
                .append('}')
                .toString();
    }
}
|
<reponame>liamdawson/DIM<filename>src/app/search/search-filters.ts<gh_stars>0
import _ from 'lodash';
import idx from 'idx';
import { compareBy, chainComparator, reverseComparator } from '../comparators';
import { DimItem, D1Item, D2Item } from '../inventory/item-types';
import { DimStore } from '../inventory/store-types';
import { Loadout, dimLoadoutService } from '../loadout/loadout.service';
import { DestinyAmmunitionType, DestinyCollectibleState } from 'bungie-api-ts/destiny2';
import { createSelector } from 'reselect';
import { destinyVersionSelector } from '../accounts/reducer';
import { D1Categories } from '../destiny1/d1-buckets.service';
import { D2Categories } from '../destiny2/d2-buckets.service';
import { querySelector } from '../shell/reducer';
import { sortedStoresSelector } from '../inventory/reducer';
import { maxLightLoadout } from '../loadout/auto-loadouts';
import { itemTags } from '../inventory/dim-item-info';
import { characterSortSelector } from '../settings/character-sort';
import store from '../store/store';
import { loadoutsSelector } from '../loadout/reducer';
import { InventoryCuratedRoll } from '../curated-rolls/curatedRollService';
import { inventoryCuratedRollsSelector } from '../curated-rolls/reducer';
import { D2SeasonInfo } from '../inventory/d2-season-info';
import { D2EventPredicateLookup } from 'data/d2/d2-event-info';
import memoizeOne from 'memoize-one';
import { getRating, ratingsSelector, ReviewsState, shouldShowRating } from '../item-review/reducer';
import { RootState } from '../store/reducers';
import Sources from 'data/d2/source-info';
/** Make a Regexp that searches starting at a word boundary */
const startWordRegexp = memoizeOne((predicate: string) =>
  // Only some languages effectively use the \b regex word boundary:
  // \b only recognizes ASCII word characters, so for other languages
  // (e.g. CJK scripts) we fall back to a plain substring match instead.
  ['de', 'en', 'es', 'es-mx', 'fr', 'it', 'pl', 'pt-br'].includes(
    store.getState().settings.language
  )
    ? new RegExp(`\\b${escapeRegExp(predicate)}`, 'i')
    : new RegExp(escapeRegExp(predicate), 'i')
);
/**
 * Selects the search config (keywords and category mappings) for the
 * currently-selected account's Destiny version.
 */
export const searchConfigSelector = createSelector(
  destinyVersionSelector,
  buildSearchConfig
);
/**
 * A selector for the search config for a particular destiny version.
 * Combines the search config with the inventory/review state that the filter
 * functions close over (stores, loadouts, curated rolls, ratings, new items).
 */
export const searchFiltersConfigSelector = createSelector(
  searchConfigSelector,
  sortedStoresSelector,
  loadoutsSelector,
  inventoryCuratedRollsSelector,
  ratingsSelector,
  (state: RootState) => state.inventory.newItems,
  characterSortSelector,
  // NOTE(review): characterSortSelector is an input but its result is not
  // consumed by the combiner below — presumably listed only to force
  // recomputation when the character sort changes; confirm before removing.
  (searchConfig, stores, loadouts, curations, ratings, newItems) => {
    return searchFilters(searchConfig, stores, loadouts, curations, ratings, newItems);
  }
);
/**
 * A selector for a predicate function for searching items, given the current search query.
 * The returned function answers "does this item match the current query?".
 */
// TODO: this also needs to depend on:
// * settings
// * loadouts
// * current character
// * all items (for dupes)
// * itemInfo
// * ratings
// * newItems
// * and maybe some other stuff?
export const searchFilterSelector = createSelector(
  querySelector,
  searchFiltersConfigSelector,
  // Compile the query text into a per-item predicate using the filter set.
  (query, filters) => filters.filterFunction(query)
);
/** Describes the available search keywords and category mappings for one Destiny version. */
export interface SearchConfig {
  /** Which game's item database these filters apply to. */
  destinyVersion: 1 | 2;
  /** Every valid search keyword (including `is:`/`not:` variants), for autocomplete. */
  keywords: string[];
  /** Maps category filter names (e.g. 'handcannon') to ItemCategoryHash values. */
  categoryHashFilters: { [key: string]: number };
  /** Inverse mapping from each keyword to the filter-function name implementing it. */
  keywordToFilter: { [key: string]: string };
}
/**
 * Builds an object that describes the available search keywords and category mappings.
 *
 * @param destinyVersion which game (D1 or D2) to build the keyword set for —
 *     the two versions differ in item categories, stats, and event filters.
 * @returns the SearchConfig consumed by `searchFilters` and the autocompleter.
 */
export function buildSearchConfig(destinyVersion: 1 | 2): SearchConfig {
  const categories = destinyVersion === 1 ? D1Categories : D2Categories;
  // Lower-cased bucket names (e.g. 'kinetic', 'helmet') become `type:` values.
  const itemTypes = Object.values(categories).flatMap((l: string[]) =>
    l.map((v) => v.toLowerCase())
  );

  // Add new ItemCategoryHash hashes to this (or down below in the D2 area) to add new category searches
  let categoryHashFilters: { [key: string]: number } = {
    autorifle: 5,
    handcannon: 6,
    pulserifle: 7,
    scoutrifle: 8,
    fusionrifle: 9,
    sniperrifle: 10,
    shotgun: 11,
    machinegun: 12,
    rocketlauncher: 13,
    sidearm: 14,
    sword: 54
  };

  // Stat names usable in `stat:<name><op><value>` range filters.
  const stats = [
    'charge',
    'impact',
    'range',
    'stability',
    'reload',
    'magazine',
    'aimassist',
    'equipspeed',
    'handling',
    'blastradius',
    'recoildirection',
    'velocity',
    'zoom'
  ];

  const source = Sources.SourceList;

  if (destinyVersion === 1) {
    stats.push('rof');
  } else {
    // D2-only item categories, merged over the shared set above.
    categoryHashFilters = {
      ...categoryHashFilters,
      grenadelauncher: 153950757,
      tracerifle: 2489664120,
      linearfusionrifle: 1504945536,
      submachine: 3954685534,
      bow: 3317538576,
      transmat: 208981632,
      weaponmod: 610365472,
      armormod: 4104513227,
      reptoken: 2088636411
    };
    stats.push('rpm', 'mobility', 'recovery', 'resilience', 'drawtime', 'inventorysize');
  }

  /**
   * Filter translation sets. Left-hand is the filter to run from filterFns, right side are possible filterResult
   * values that will set the left-hand to the "match."
   */
  const filterTrans: {
    [key: string]: string[];
  } = {
    dmg: ['arc', 'solar', 'void', 'kinetic', 'heroic'],
    type: itemTypes,
    tier: [
      'common',
      'uncommon',
      'rare',
      'legendary',
      'exotic',
      'white',
      'green',
      'blue',
      'purple',
      'yellow'
    ],
    classType: ['titan', 'hunter', 'warlock'],
    dupe: ['dupe', 'duplicate'],
    dupelower: ['dupelower'],
    locked: ['locked'],
    unlocked: ['unlocked'],
    stackable: ['stackable'],
    weapon: ['weapon'],
    armor: ['armor'],
    categoryHash: Object.keys(categoryHashFilters),
    inloadout: ['inloadout'],
    maxpower: ['maxpower'],
    new: ['new'],
    tag: ['tagged'],
    level: ['level'],
    equipment: ['equipment', 'equippable'],
    postmaster: ['postmaster', 'inpostmaster'],
    equipped: ['equipped'],
    transferable: ['transferable', 'movable'],
    infusable: ['infusable', 'infuse'],
    owner: ['invault', 'incurrentchar'],
    location: ['inleftchar', 'inmiddlechar', 'inrightchar'],
    cosmetic: ['cosmetic']
  };

  if (destinyVersion === 1) {
    // D1-only filters; the misspelled variants (e.g. 'unassended') are
    // deliberate aliases for common user typos.
    Object.assign(filterTrans, {
      hasLight: ['light', 'haslight'],
      tracked: ['tracked'],
      untracked: ['untracked'],
      sublime: ['sublime'],
      incomplete: ['incomplete'],
      complete: ['complete'],
      xpcomplete: ['xpcomplete'],
      xpincomplete: ['xpincomplete', 'needsxp'],
      upgraded: ['upgraded'],
      unascended: ['unascended', 'unassended', 'unasscended'],
      ascended: ['ascended', 'assended', 'asscended'],
      reforgeable: ['reforgeable', 'reforge', 'rerollable', 'reroll'],
      ornament: ['ornamentable', 'ornamentmissing', 'ornamentunlocked'],
      engram: ['engram'],
      stattype: ['intellect', 'discipline', 'strength'],
      glimmer: ['glimmeritem', 'glimmerboost', 'glimmersupply'],
      vendor: [
        'fwc',
        'do',
        'nm',
        'speaker',
        'variks',
        'shipwright',
        'vanguard',
        'osiris',
        'xur',
        'shaxx',
        'cq',
        'eris',
        'ev',
        'gunsmith'
      ],
      activity: [
        'vanilla',
        'trials',
        'ib',
        'qw',
        'cd',
        'srl',
        'vog',
        'ce',
        'ttk',
        'kf',
        'roi',
        'wotm',
        'poe',
        'coe',
        'af',
        'dawning',
        'aot'
      ]
    });
  } else {
    // D2-only filters.
    Object.assign(filterTrans, {
      reacquirable: ['reacquirable'],
      hasLight: ['light', 'haslight', 'haspower'],
      complete: ['goldborder', 'yellowborder', 'complete'],
      curated: ['curated'],
      wishlist: ['wishlist'],
      wishlistdupe: ['wishlistdupe'],
      masterwork: ['masterwork', 'masterworks'],
      hasShader: ['shaded', 'hasshader'],
      hasMod: ['modded', 'hasmod'],
      ikelos: ['ikelos'],
      randomroll: ['randomroll'],
      ammoType: ['special', 'primary', 'heavy'],
      event: ['dawning', 'crimsondays', 'solstice', 'fotl', 'revelry']
    });
  }

  if ($featureFlags.reviewsEnabled) {
    filterTrans.hasRating = ['rated', 'hasrating'];
  }

  // Every `is:`/`not:` keyword pair from the translation table above.
  const keywords: string[] = Object.values(filterTrans)
    .flat()
    .flatMap((word) => [`is:${word}`, `not:${word}`]);

  itemTags.forEach((tag) => {
    if (tag.type) {
      keywords.push(`tag:${tag.type}`);
    } else {
      keywords.push('tag:none');
    }
  });

  // Filters that operate on ranges (>, <, >=, <=)
  const comparisons = [':<', ':>', ':<=', ':>=', ':='];

  stats.forEach((word) => {
    const filter = `stat:${word}`;
    comparisons.forEach((comparison) => {
      keywords.push(filter + comparison);
    });
  });

  source.forEach((word) => {
    const filter = `source:${word}`;
    keywords.push(filter);
  });

  // Keywords that take a comparison suffix directly (e.g. `light:>=750`).
  const ranges = ['light', 'power', 'level', 'stack', 'count', 'year'];
  if (destinyVersion === 1) {
    ranges.push('quality', 'percentage');
  }

  if (destinyVersion === 2) {
    ranges.push('masterwork');
    ranges.push('season');
    keywords.push('source:');
  }

  if ($featureFlags.reviewsEnabled) {
    ranges.push('rating');
    ranges.push('ratingcount');
  }

  ranges.forEach((range) => {
    comparisons.forEach((comparison) => {
      keywords.push(range + comparison);
    });
  });

  // free form notes on items
  keywords.push('notes:');
  keywords.push('perk:');
  keywords.push('perkname:');
  keywords.push('name:');
  keywords.push('description:');

  // Build an inverse mapping of keyword to function name
  const keywordToFilter: { [key: string]: string } = {};
  _.forIn(filterTrans, (keywords, functionName) => {
    for (const keyword of keywords) {
      keywordToFilter[keyword] = functionName;
    }
  });

  return {
    keywordToFilter,
    // De-duplicate — several filters share alias words.
    keywords: [...new Set(keywords)],
    destinyVersion,
    categoryHashFilters
  };
}
// The comparator for sorting dupes - the first item will be the "best" and all others are "dupelower".
// reverseComparator flips the whole chain to descending order, so the copy
// with the highest primary stat (then masterworked, locked, favorite/keep-
// tagged) sorts first; ID is the final deterministic tiebreak.
const dupeComparator = reverseComparator(
  chainComparator(
    // primary stat
    compareBy((item: DimItem) => item.primStat && item.primStat.value),
    compareBy((item: DimItem) => item.masterwork),
    compareBy((item: DimItem) => item.locked),
    compareBy(
      (item: DimItem) =>
        item.dimInfo && item.dimInfo.tag && ['favorite', 'keep'].includes(item.dimInfo.tag)
    ),
    compareBy((i: DimItem) => i.id) // tiebreak by ID
  )
);
/** The set of named filter functions plus a query compiler, produced by `searchFilters`. */
export interface SearchFilters {
  filters: {
    // Each named filter receives the item and (optionally) the predicate text
    // after the colon, and returns a truthy value when the item matches.
    [predicate: string]: (
      item: DimItem,
      predicate?: string
    ) => boolean | '' | null | undefined | false | number;
  };
  /** Compiles a full query string into a single per-item predicate. */
  filterFunction(query: string): (item: DimItem) => boolean;
}
/** Trivial predicate used when the query is empty — matches every item. */
const alwaysTrue = () => true;
/**
* This builds an object that can be used to generate filter functions from search queried.
*
*/
function searchFilters(
searchConfig: SearchConfig,
stores: DimStore[],
loadouts: Loadout[],
inventoryCuratedRolls: { [key: string]: InventoryCuratedRoll },
ratings: ReviewsState['ratings'],
newItems: Set<string>
): SearchFilters {
let _duplicates: { [hash: number]: DimItem[] } | null = null; // Holds a map from item hash to count of occurrances of that hash
const _maxPowerItems: string[] = [];
const _lowerDupes = {};
let _loadoutItemIds: Set<string> | undefined;
const getLoadouts = _.once(() => dimLoadoutService.getLoadouts());
function initDupes() {
if (_duplicates === null) {
_duplicates = {};
for (const store of stores) {
for (const i of store.items) {
if (!_duplicates[i.hash]) {
_duplicates[i.hash] = [];
}
_duplicates[i.hash].push(i);
}
}
_.forIn(_duplicates, (dupes: DimItem[]) => {
if (dupes.length > 1) {
dupes.sort(dupeComparator);
const bestDupe = dupes[0];
for (const dupe of dupes) {
if (
dupe.bucket &&
(dupe.bucket.sort === 'Weapons' || dupe.bucket.sort === 'Armor') &&
!dupe.notransfer
) {
_lowerDupes[dupe.id] = dupe !== bestDupe;
}
}
}
});
}
}
const curatedPlugsWhitelist = [
7906839, // frames
683359327, // guards
1041766312, // blades
1202604782, // tubes
1257608559, // arrows
1757026848, // batteries
1806783418, // magazines
2619833294, // scopes
2718120384, // magazines_gl
2833605196, // barrels
3809303875 // bowstring
];
const statHashes = new Set([
1480404414, // D2 Attack
3897883278, // D1 & D2 Defense
368428387 // D1 Attack
]);
const cosmeticTypes = new Set([
'Shader',
'Shaders',
'Ornaments',
'Modifications',
'Emote',
'Emotes',
'Emblem',
'Emblems',
'Vehicle',
'Horn',
'Ship',
'Ships',
'ClanBanners'
]);
const D2Sources = Sources.Sources;
const ikelosHash = new Set([847450546, 1723472487, 1887808042, 3866356643, 4036115577]);
// This refactored method filters items by stats
// * statType = [aa|impact|range|stability|rof|reload|magazine|equipspeed|mobility|resilience|recovery]
const filterByStats = (statType) => {
const statHash = {
rpm: 4284893193,
rof: 4284893193,
charge: 2961396640,
impact: 4043523819,
range: 1240592695,
stability: 155624089,
reload: 4188031367,
magazine: 3871231066,
aimassist: 1345609583,
equipspeed: 943549884,
mobility: 2996146975,
resilience: 392767087,
recovery: 1943323491,
velocity: 2523465841,
blastradius: 3614673599,
recoildirection: 2715839340,
drawtime: 447667954,
zoom: 3555269338,
inventorysize: 1931675084
}[statType];
return (item: DimItem, predicate: string) => {
const foundStatHash = item.stats && item.stats.find((s) => s.statHash === statHash);
return foundStatHash && compareByOperand(foundStatHash.value, predicate);
};
};
function compareByOperand(compare = 0, predicate: string) {
if (predicate.length === 0) {
return false;
}
const operands = ['<=', '>=', '=', '>', '<'];
let operand = 'none';
operands.forEach((element) => {
if (predicate.substring(0, element.length) === element) {
operand = element;
predicate = predicate.substring(element.length);
return false;
} else {
return true;
}
});
const predicateValue = parseFloat(predicate);
switch (operand) {
case 'none':
case '=':
return compare === predicateValue;
case '<':
return compare < predicateValue;
case '<=':
return compare <= predicateValue;
case '>':
return compare > predicateValue;
case '>=':
return compare >= predicateValue;
}
return false;
}
// reset, filterFunction, and filters
return {
/**
* Build a complex predicate function from a full query string.
*/
filterFunction(query: string): (item: DimItem) => boolean {
if (!query.length) {
return alwaysTrue;
}
query = query.trim().toLowerCase();
// http://blog.tatedavies.com/2012/08/28/replace-microsoft-chars-in-javascript/
query = query.replace(/[\u2018|\u2019|\u201A]/g, "'");
query = query.replace(/[\u201C|\u201D|\u201E]/g, '"');
// could probably tidy this regex, just a quick hack to support multi term:
// [^\s]*?"[^"]+?" -> match is:"stuff here"
// [^\s]*?'[^']+?' -> match is:'stuff here'
// [^\s"']+' -> match is:stuff
const searchTerms = query.match(/[^\s]*?"[^"]+?"|[^\s]*?'[^']+?'|[^\s"']+/g) || [];
interface Filter {
invert: boolean;
value: string;
predicate: string;
orFilters?: Filter[];
}
const filters: Filter[] = [];
// The entire implementation of "or" is a dirty hack - we should really
// build an expression tree instead. But here, we flip a flag when we see
// an "or" token, and then on the next filter we instead combine the filter
// with the previous one in a hacked-up "or" node that we'll handle specially.
let or = false;
function addPredicate(predicate: string, filter: string, invert: boolean) {
const filterDef: Filter = { predicate, value: filter, invert };
if (or && filters.length) {
const lastFilter = filters.pop();
filters.push({
predicate: 'or',
invert: false,
value: '',
orFilters: [...(lastFilter!.orFilters! || [lastFilter]), filterDef]
});
} else {
filters.push(filterDef);
}
or = false;
}
for (const search of searchTerms) {
const invert = search.startsWith('-');
const term = search.replace(/^-/, '');
if (term === 'or') {
or = true;
} else if (term.startsWith('is:')) {
const filter = term.replace('is:', '');
const predicate = searchConfig.keywordToFilter[filter];
if (predicate) {
addPredicate(predicate, filter, invert);
}
} else if (term.startsWith('not:')) {
const filter = term.replace('not:', '');
const predicate = searchConfig.keywordToFilter[filter];
if (predicate) {
addPredicate(predicate, filter, !invert);
}
} else if (term.startsWith('tag:')) {
const filter = term.replace('tag:', '');
addPredicate('itemtags', filter, invert);
} else if (term.startsWith('notes:')) {
const filter = term.replace('notes:', '').replace(/(^['"]|['"]$)/g, '');
addPredicate('notes', filter, invert);
} else if (term.startsWith('perk:')) {
const filter = term.replace('perk:', '').replace(/(^['"]|['"]$)/g, '');
addPredicate('perk', filter, invert);
} else if (term.startsWith('perkname:')) {
const filter = term.replace('perkname:', '').replace(/(^['"]|['"]$)/g, '');
addPredicate('perkname', filter, invert);
} else if (term.startsWith('name:')) {
const filter = term.replace('name:', '').replace(/(^['"]|['"]$)/g, '');
addPredicate('name', filter, invert);
} else if (term.startsWith('description:')) {
const filter = term.replace('description:', '').replace(/(^['"]|['"]$)/g, '');
addPredicate('description', filter, invert);
} else if (term.startsWith('light:') || term.startsWith('power:')) {
const filter = term.replace('light:', '').replace('power:', '');
addPredicate('light', filter, invert);
} else if (term.startsWith('masterwork:')) {
const filter = term.replace('masterwork:', '');
addPredicate('masterworkValue', filter, invert);
} else if (term.startsWith('season:')) {
const filter = term.replace('season:', '');
addPredicate('seasonValue', filter, invert);
} else if (term.startsWith('year:')) {
const filter = term.replace('year:', '');
addPredicate('yearValue', filter, invert);
} else if (term.startsWith('stack:')) {
const filter = term.replace('stack:', '');
addPredicate('stack', filter, invert);
} else if (term.startsWith('count:')) {
const filter = term.replace('count:', '');
addPredicate('count', filter, invert);
} else if (term.startsWith('level:')) {
const filter = term.replace('level:', '');
addPredicate('level', filter, invert);
} else if (term.startsWith('quality:') || term.startsWith('percentage:')) {
const filter = term.replace('quality:', '').replace('percentage:', '');
addPredicate('quality', filter, invert);
} else if (term.startsWith('rating:')) {
const filter = term.replace('rating:', '');
addPredicate('rating', filter, invert);
} else if (term.startsWith('ratingcount:')) {
const filter = term.replace('ratingcount:', '');
addPredicate('ratingcount', filter, invert);
} else if (term.startsWith('id:')) {
const filter = term.replace('id:', '');
addPredicate('id', filter, invert);
} else if (term.startsWith('hash:')) {
const filter = term.replace('hash:', '');
addPredicate('hash', filter, invert);
} else if (term.startsWith('stat:')) {
// Avoid console.error by checking if all parameters are typed
const pieces = term.split(':');
if (pieces.length === 3) {
const filter = pieces[1];
addPredicate(filter, pieces[2], invert);
}
} else if (term.startsWith('source:')) {
const filter = term.replace('source:', '');
addPredicate('source', filter, invert);
} else if (!/^\s*$/.test(term)) {
addPredicate('keyword', term.replace(/(^['"]|['"]$)/g, ''), invert);
}
}
return (item: DimItem) => {
return filters.every((filter) => {
let result;
if (filter.orFilters) {
result = filter.orFilters.some((filter) => {
const result =
this.filters[filter.predicate] &&
this.filters[filter.predicate](item, filter.value);
return filter.invert ? !result : result;
});
} else {
result =
this.filters[filter.predicate] && this.filters[filter.predicate](item, filter.value);
}
return filter.invert ? !result : result;
});
};
},
/**
* Each entry in this map is a filter function that will be provided the normalized
* query term and an item, and should return whether or not it matches the filter.
* @param predicate The predicate - for example, is:arc gets the 'elemental' filter function, with predicate='arc'
* @param item The item to test against.
* @return Returns true for a match, false for a non-match
*/
filters: {
id(item: DimItem, predicate: string) {
return item.id === predicate;
},
hash(item: DimItem, predicate: string) {
return item.hash.toString() === predicate;
},
dmg(item: DimItem, predicate: string) {
return item.dmg === predicate;
},
type(item: DimItem, predicate: string) {
return item.type && item.type.toLowerCase() === predicate;
},
tier(item: DimItem, predicate: string) {
const tierMap = {
white: 'common',
green: 'uncommon',
blue: 'rare',
purple: 'legendary',
yellow: 'exotic'
};
return item.tier.toLowerCase() === (tierMap[predicate] || predicate);
},
sublime(item: DimItem) {
const sublimeEngrams = [
1986458096, // -gauntlet
2218811091,
2672986950, // -body-armor
779347563,
3497374572, // -class-item
808079385,
3592189221, // -leg-armor
738642122,
3797169075, // -helmet
838904328
];
return sublimeEngrams.includes(item.hash);
},
// Incomplete will show items that are not fully leveled.
incomplete(item: DimItem) {
return item.talentGrid && !item.complete;
},
// Complete shows items that are fully leveled.
complete(item: DimItem) {
return item.complete;
},
// Upgraded will show items that have enough XP to unlock all
// their nodes and only need the nodes to be purchased.
upgraded(item: D1Item) {
return item.talentGrid && item.talentGrid.xpComplete && !item.complete;
},
xpincomplete(item: D1Item) {
return item.talentGrid && !item.talentGrid.xpComplete;
},
xpcomplete(item: D1Item) {
return item.talentGrid && item.talentGrid.xpComplete;
},
ascended(item: D1Item) {
return item.talentGrid && item.talentGrid.hasAscendNode && item.talentGrid.ascended;
},
unascended(item: D1Item) {
return item.talentGrid && item.talentGrid.hasAscendNode && !item.talentGrid.ascended;
},
reforgeable(item: DimItem) {
return item.talentGrid && item.talentGrid.nodes.some((n) => n.hash === 617082448);
},
ornament(item: D1Item, predicate: string) {
const complete = item.talentGrid && item.talentGrid.nodes.some((n) => n.ornament);
const missing = item.talentGrid && item.talentGrid.nodes.some((n) => !n.ornament);
if (predicate === 'ornamentunlocked') {
return complete;
} else if (predicate === 'ornamentmissing') {
return missing;
} else {
return complete || missing;
}
},
untracked(item: D1Item) {
return item.trackable && !item.tracked;
},
tracked(item: D1Item) {
return item.trackable && item.tracked;
},
unlocked(item: DimItem) {
return !item.locked;
},
locked(item: DimItem) {
return item.locked;
},
masterwork(item: DimItem) {
return item.masterwork;
},
maxpower(item: DimItem) {
if (!_maxPowerItems.length) {
stores.forEach((store) => {
_maxPowerItems.push(
..._.flatten(
Object.values(maxLightLoadout(store.getStoresService(), store).items)
).map((i) => i.id)
);
});
}
return _maxPowerItems.includes(item.id);
},
dupelower(item: DimItem) {
initDupes();
return _lowerDupes[item.id];
},
reacquirable(item: DimItem) {
if (
item.isDestiny2() &&
item.collectibleState !== null &&
!(item.collectibleState & DestinyCollectibleState.NotAcquired) &&
!(item.collectibleState & DestinyCollectibleState.PurchaseDisabled)
) {
return true;
}
return false;
},
dupe(item: DimItem) {
initDupes();
// We filter out the "Default Shader" because everybody has one per character
return (
_duplicates &&
item.hash !== 4248210736 &&
_duplicates[item.hash] &&
_duplicates[item.hash].length > 1
);
},
count(item: DimItem, predicate: string) {
initDupes();
return (
_duplicates &&
compareByOperand(_duplicates[item.hash] ? _duplicates[item.hash].length : 0, predicate)
);
},
owner(item: DimItem, predicate: string) {
let desiredStore = '';
switch (predicate) {
case 'invault':
desiredStore = 'vault';
break;
case 'incurrentchar': {
const activeStore = stores[0].getStoresService().getActiveStore();
if (activeStore) {
desiredStore = activeStore.id;
} else {
return false;
}
}
}
return item.owner === desiredStore;
},
location(item: DimItem, predicate: string) {
let storeIndex = 0;
switch (predicate) {
case 'inleftchar':
storeIndex = 0;
break;
case 'inmiddlechar':
if (stores.length === 4) {
storeIndex = 1;
}
break;
case 'inrightchar':
if (stores.length > 2) {
storeIndex = stores.length - 2;
}
break;
default:
return false;
}
return item.bucket.accountWide
? item.owner !== 'vault'
: item.owner === stores[storeIndex].id;
},
classType(item: DimItem, predicate: string) {
const classes = ['titan', 'hunter', 'warlock'];
if (item.classified) {
return false;
}
return item.classType === classes.indexOf(predicate);
},
glimmer(item: DimItem, predicate: string) {
const boosts = [
1043138475, // -black-wax-idol
1772853454, // -blue-polyphage
3783295803, // -ether-seeds
3446457162 // -resupply-codes
];
const supplies = [
269776572, // -house-banners
3632619276, // -silken-codex
2904517731, // -axiomatic-beads
1932910919 // -network-keys
];
switch (predicate) {
case 'glimmerboost':
return boosts.includes(item.hash);
case 'glimmersupply':
return supplies.includes(item.hash);
case 'glimmeritem':
return boosts.includes(item.hash) || supplies.includes(item.hash);
}
return false;
},
itemtags(item: DimItem, predicate: string) {
return (
item.dimInfo &&
(item.dimInfo.tag === predicate ||
(item.dimInfo.tag === undefined && predicate === 'none'))
);
},
notes(item: DimItem, predicate: string) {
return (
item.dimInfo &&
item.dimInfo.notes &&
item.dimInfo.notes.toLocaleLowerCase().includes(predicate.toLocaleLowerCase())
);
},
stattype(item: DimItem, predicate: string) {
return (
item.stats &&
item.stats.some((s) =>
Boolean(s.name.toLowerCase() === predicate && s.value && s.value > 0)
)
);
},
stackable(item: DimItem) {
return item.maxStackSize > 1;
},
stack(item: DimItem, predicate: string) {
return compareByOperand(item.amount, predicate);
},
engram(item: DimItem) {
return item.isEngram;
},
infusable(item: DimItem) {
return item.infusable;
},
categoryHash(item: D2Item, predicate: string) {
const categoryHash =
searchConfig.categoryHashFilters[predicate.toLowerCase().replace(/\s/g, '')];
if (!categoryHash) {
return false;
}
return item.itemCategoryHashes.includes(categoryHash);
},
keyword(item: DimItem, predicate: string) {
return (
item.name.toLowerCase().includes(predicate) ||
item.description.toLowerCase().includes(predicate) ||
// Search notes field
(item.dimInfo &&
item.dimInfo.notes &&
item.dimInfo.notes.toLocaleLowerCase().includes(predicate.toLocaleLowerCase())) ||
// Search for typeName (itemTypeDisplayName of modifications)
item.typeName.toLowerCase().includes(predicate) ||
// Search perks as well
this.perk(item, predicate)
);
},
// name and description searches to narrow search down from "keyword"
name(item: DimItem, predicate: string) {
return item.name.toLowerCase().includes(predicate);
},
description(item: DimItem, predicate: string) {
return item.description.toLowerCase().includes(predicate);
},
perk(item: DimItem, predicate: string) {
const regex = startWordRegexp(predicate);
return (
(item.talentGrid &&
item.talentGrid.nodes.some((node) => {
// Fixed #798 by searching on the description too.
return regex.test(node.name) || regex.test(node.description);
})) ||
(item.isDestiny2() &&
item.sockets &&
item.sockets.sockets.some((socket) =>
socket.plugOptions.some(
(plug) =>
regex.test(plug.plugItem.displayProperties.name) ||
regex.test(plug.plugItem.displayProperties.description) ||
plug.perks.some((perk) =>
Boolean(
(perk.displayProperties.name && regex.test(perk.displayProperties.name)) ||
(perk.displayProperties.description &&
regex.test(perk.displayProperties.description))
)
)
)
))
);
},
perkname(item: DimItem, predicate: string) {
const regex = startWordRegexp(predicate);
return (
(item.talentGrid &&
item.talentGrid.nodes.some((node) => {
return regex.test(node.name);
})) ||
(item.isDestiny2() &&
item.sockets &&
item.sockets.sockets.some((socket) =>
socket.plugOptions.some(
(plug) =>
regex.test(plug.plugItem.displayProperties.name) ||
plug.perks.some((perk) =>
Boolean(perk.displayProperties.name && regex.test(perk.displayProperties.name))
)
)
))
);
},
light(item: DimItem, predicate: string) {
if (!item.primStat) {
return false;
}
return compareByOperand(item.primStat.value, predicate);
},
masterworkValue(item: D2Item, predicate: string) {
if (!item.masterworkInfo) {
return false;
}
return compareByOperand(
item.masterworkInfo.statValue && item.masterworkInfo.statValue < 11
? item.masterworkInfo.statValue
: 10,
predicate
);
},
seasonValue(item: D2Item, predicate: string) {
return compareByOperand(item.season, predicate);
},
yearValue(item: DimItem, predicate: string) {
if (item.isDestiny1()) {
return compareByOperand(item.year, predicate);
} else if (item.isDestiny2()) {
return compareByOperand(D2SeasonInfo[item.season].year, predicate);
}
},
level(item: DimItem, predicate: string) {
return compareByOperand(item.equipRequiredLevel, predicate);
},
quality(item: D1Item, predicate: string) {
if (!item.quality) {
return false;
}
return compareByOperand(item.quality.min, predicate);
},
hasRating(item: DimItem, predicate: string) {
const dtrRating = getRating(item, ratings);
return predicate.length !== 0 && dtrRating && dtrRating.overallScore;
},
randomroll(item: D2Item) {
return item.sockets && item.sockets.sockets.some((s) => s.hasRandomizedPlugItems);
},
rating(item: DimItem, predicate: string) {
const dtrRating = getRating(item, ratings);
const showRating = dtrRating && shouldShowRating(dtrRating) && dtrRating.overallScore;
return showRating && compareByOperand(dtrRating && dtrRating.overallScore, predicate);
},
ratingcount(item: DimItem, predicate: string) {
const dtrRating = getRating(item, ratings);
return (
dtrRating && dtrRating.ratingCount && compareByOperand(dtrRating.ratingCount, predicate)
);
},
event(item: D2Item, predicate: string) {
if (!item || !D2EventPredicateLookup[predicate] || !item.event) {
return false;
}
return D2EventPredicateLookup[predicate] === item.event;
},
// filter on what vendor an item can come from. Currently supports
// * Future War Cult (fwc)
// * Dead Orbit (do)
// * New Monarchy (nm)
// * Speaker (speaker)
// * Variks (variks)
// * Shipwright (shipwright)
// * Osiris: (osiris)
// * Xur: (xur)
// * Shaxx: (shaxx)
// * Crucible Quartermaster (cq)
// * <NAME> (eris)
// * Eververse (ev)
vendor(item: D1Item, predicate: string) {
const vendorHashes = {
// identifier
required: {
fwc: [995344558], // SOURCE_VENDOR_FUTURE_WAR_CULT
do: [103311758], // SOURCE_VENDOR_DEAD_ORBIT
nm: [3072854931], // SOURCE_VENDOR_NEW_MONARCHY
speaker: [4241664776], // SOURCE_VENDOR_SPEAKER
variks: [512830513], // SOURCE_VENDOR_FALLEN
shipwright: [3721473564], // SOURCE_VENDOR_SHIPWRIGHT
vanguard: [1482793537], // SOURCE_VENDOR_VANGUARD
osiris: [3378481830], // SOURCE_VENDOR_OSIRIS
xur: [2179714245], // SOURCE_VENDOR_BLACK_MARKET
shaxx: [4134961255], // SOURCE_VENDOR_CRUCIBLE_HANDLER
cq: [1362425043], // SOURCE_VENDOR_CRUCIBLE_QUARTERMASTER
eris: [1374970038], // SOURCE_VENDOR_CROTAS_BANE
ev: [3559790162], // SOURCE_VENDOR_SPECIAL_ORDERS
gunsmith: [353834582] // SOURCE_VENDOR_GUNSMITH
},
restricted: {
fwc: [353834582], // remove motes of light & strange coins
do: [353834582],
nm: [353834582],
speaker: [353834582],
cq: [353834582, 2682516238] // remove ammo synths and planetary materials
}
};
if (!item) {
return false;
}
if (vendorHashes.restricted[predicate]) {
return (
vendorHashes.required[predicate].some((vendorHash) =>
item.sourceHashes.includes(vendorHash)
) &&
!vendorHashes.restricted[predicate].some((vendorHash) =>
item.sourceHashes.includes(vendorHash)
)
);
} else {
return vendorHashes.required[predicate].some((vendorHash) =>
item.sourceHashes.includes(vendorHash)
);
}
},
source(item: D2Item, predicate: string) {
if (!item || !item.source || !D2Sources[predicate]) {
return false;
}
return (
D2Sources[predicate].sourceHashes.includes(item.source) ||
D2Sources[predicate].itemHashes.includes(item.hash)
);
},
// filter on what activity an item can come from. Currently supports
// * Vanilla (vanilla)
// * Trials (trials)
// * Iron Banner (ib)
// * Queen's Wrath (qw)
// * Crimson Doubles (cd)
// * Sparrow Racing League (srl)
// * Vault of Glass (vog)
// * Crota's End (ce)
// * The Taken King (ttk)
// * King's Fall (kf)
// * Rise of Iron (roi)
// * Wrath of the Machine (wotm)
// * Prison of Elders (poe)
// * Challenge of Elders (coe)
// * Archon Forge (af)
activity(item: D1Item, predicate: string) {
const activityHashes = {
// identifier
required: {
trials: [2650556703], // SOURCE_TRIALS_OF_OSIRIS
ib: [1322283879], // SOURCE_IRON_BANNER
qw: [1983234046], // SOURCE_QUEENS_EMISSARY_QUEST
cd: [2775576620], // SOURCE_CRIMSON_DOUBLES
srl: [1234918199], // SOURCE_SRL
vog: [440710167], // SOURCE_VAULT_OF_GLASS
ce: [2585003248], // SOURCE_CROTAS_END
ttk: [2659839637], // SOURCE_TTK
kf: [1662673928], // SOURCE_KINGS_FALL
roi: [2964550958], // SOURCE_RISE_OF_IRON
wotm: [4160622434], // SOURCE_WRATH_OF_THE_MACHINE
poe: [2784812137], // SOURCE_PRISON_ELDERS
coe: [1537575125], // SOURCE_POE_ELDER_CHALLENGE
af: [3667653533], // SOURCE_ARCHON_FORGE
dawning: [3131490494], // SOURCE_DAWNING
aot: [3068521220, 4161861381, 440710167] // SOURCE_AGES_OF_TRIUMPH && SOURCE_RAID_REPRISE
},
restricted: {
trials: [2179714245, 2682516238, 560942287], // remove xur exotics and patrol items
ib: [3602080346], // remove engrams and random blue drops (Strike)
qw: [3602080346], // remove engrams and random blue drops (Strike)
cd: [3602080346], // remove engrams and random blue drops (Strike)
kf: [2179714245, 2682516238, 560942287], // remove xur exotics and patrol items
wotm: [2179714245, 2682516238, 560942287], // remove xur exotics and patrol items
poe: [3602080346, 2682516238], // remove engrams
coe: [3602080346, 2682516238], // remove engrams
af: [2682516238], // remove engrams
dawning: [2682516238, 1111209135], // remove engrams, planetary materials, & chroma
aot: [2964550958, 2659839637, 353834582, 560942287] // Remove ROI, TTK, motes, & glimmer items
}
};
if (!item) {
return false;
}
if (predicate === 'vanilla') {
return item.year === 1;
} else if (activityHashes.restricted[predicate]) {
return (
activityHashes.required[predicate].some((sourceHash) =>
item.sourceHashes.includes(sourceHash)
) &&
!activityHashes.restricted[predicate].some((sourceHash) =>
item.sourceHashes.includes(sourceHash)
)
);
} else {
return activityHashes.required[predicate].some((sourceHash) =>
item.sourceHashes.includes(sourceHash)
);
}
},
inloadout(item: DimItem) {
// Lazy load loadouts and re-trigger
if (!_loadoutItemIds) {
if (loadouts.length === 0) {
getLoadouts();
return false;
}
_loadoutItemIds = new Set<string>();
for (const loadout of loadouts) {
if (loadout.destinyVersion === searchConfig.destinyVersion) {
_.forIn(loadout.items, (items) => {
for (const item of items) {
_loadoutItemIds!.add(item.id);
}
});
}
}
}
return _loadoutItemIds && _loadoutItemIds.has(item.id);
},
new(item: DimItem) {
return newItems.has(item.id);
},
tag(item: DimItem) {
return item.dimInfo && item.dimInfo.tag !== undefined;
},
hasLight(item: DimItem) {
return item.primStat && statHashes.has(item.primStat.statHash);
},
curated(item: D2Item) {
if (!item) {
return false;
}
// TODO: remove if there are no false positives, as this precludes maintaining a list for curatedNonMasterwork
// const masterWork = item.masterworkInfo && item.masterworkInfo.statValue === 10;
// const curatedNonMasterwork = [792755504, 3356526253, 2034817450].includes(item.hash); // Nightshade, Wishbringer, Distant Relation
const legendaryWeapon =
item.bucket && item.bucket.sort === 'Weapons' && item.tier.toLowerCase() === 'legendary';
const oneSocketPerPlug =
item.sockets &&
item.sockets.sockets
.filter((socket) =>
curatedPlugsWhitelist.includes(
idx(socket, (s) => s.plug.plugItem.plug.plugCategoryHash) || 0
)
)
.every((socket) => socket && socket.plugOptions.length === 1);
return (
legendaryWeapon &&
// (masterWork || curatedNonMasterwork) && // checks for masterWork(10) or on curatedNonMasterWork list
oneSocketPerPlug
);
},
weapon(item: DimItem) {
return item.bucket && item.bucket.sort === 'Weapons';
},
armor(item: DimItem) {
return item.bucket && item.bucket.sort === 'Armor';
},
ikelos(item: D2Item) {
return ikelosHash.has(item.hash);
},
cosmetic(item: DimItem) {
return cosmeticTypes.has(item.type);
},
equipment(item: DimItem) {
return item.equipment;
},
postmaster(item: DimItem) {
return item.location && item.location.inPostmaster;
},
equipped(item: DimItem) {
return item.equipped;
},
transferable(item: DimItem) {
return !item.notransfer;
},
hasShader(item: D2Item) {
return (
item.sockets &&
item.sockets.sockets.some((socket) => {
return Boolean(
socket.plug &&
socket.plug.plugItem.plug &&
socket.plug.plugItem.plug.plugCategoryHash === 2973005342 &&
socket.plug.plugItem.hash !== 4248210736
);
})
);
},
hasMod(item: D2Item) {
return (
item.sockets &&
item.sockets.sockets.some((socket) => {
return !!(
socket.plug &&
![2323986101, 2600899007, 1835369552, 3851138800, 791435474].includes(
socket.plug.plugItem.hash
) &&
socket.plug.plugItem.plug &&
socket.plug.plugItem.plug.plugCategoryIdentifier.match(
/(v400.weapon.mod_(guns|damage|magazine)|enhancements.)/
)
);
})
);
},
wishlist(item: D2Item) {
return Boolean(inventoryCuratedRolls[item.id]);
},
wishlistdupe(item: D2Item) {
if (!this.dupe(item) || !_duplicates) {
return false;
}
const itemDupes = _duplicates[item.hash];
return itemDupes.some(this.wishlist);
},
ammoType(item: D2Item, predicate: string) {
return (
item.ammoType ===
{
primary: DestinyAmmunitionType.Primary,
special: DestinyAmmunitionType.Special,
heavy: DestinyAmmunitionType.Heavy
}[predicate]
);
},
rpm: filterByStats('rpm'),
charge: filterByStats('charge'),
rof: filterByStats('rof'),
impact: filterByStats('impact'),
range: filterByStats('range'),
stability: filterByStats('stability'),
reload: filterByStats('reload'),
magazine: filterByStats('magazine'),
aimassist: filterByStats('aimassist'),
equipspeed: filterByStats('equipspeed'),
handling: filterByStats('equipspeed'), // Synonym
mobility: filterByStats('mobility'),
recovery: filterByStats('recovery'),
resilience: filterByStats('resilience'),
blastradius: filterByStats('blastradius'),
drawtime: filterByStats('drawtime'),
inventorysize: filterByStats('inventorysize'),
recoildirection: filterByStats('recoildirection'),
velocity: filterByStats('velocity'),
zoom: filterByStats('zoom')
}
};
}
/** Escape a string so it can be embedded literally inside a RegExp. */
function escapeRegExp(s: string) {
  // Prefix every regex metacharacter with a backslash.
  return s.replace(/[.*+?^${}()|[\]\\]/g, (match) => `\\${match}`);
}
|
package dev.webfx.kit.mapper.peers.javafxgraphics.markers;
import javafx.beans.property.DoubleProperty;
/**
 * Marker interface for mapped JavaFX nodes that expose a {@code minHeight}
 * property, with convenience accessors delegating to the property.
 *
 * @author <NAME>
 */
public interface HasMinHeightProperty {

    /** The underlying JavaFX property backing the accessors below. */
    DoubleProperty minHeightProperty();

    default void setMinHeight(Number minHeight) {
        minHeightProperty().setValue(minHeight);
    }

    default Double getMinHeight() {
        return minHeightProperty().getValue();
    }
}
|
<filename>Objective-Gems/KSMemoryPool.c
//
// KSMemoryPool.c
//
#include "KSMemoryPool.h"
/* Initialize a memory pool over a caller-supplied buffer.
 *
 * The bookkeeping tables (allocation records and per-chunk flags) are carved
 * out of the END of the buffer, so the usable chunk count is reduced below
 * size / chunkSize to make room for them.
 *
 * pool      : pool structure to initialize.
 * memory    : backing buffer the pool hands out chunks from.
 * size      : total buffer size in bytes.
 * chunkSize : allocation granularity in bytes.
 */
void ksmempool_init(KSMemoryPool* pool,
                    void* memory,
                    unsigned int size,
                    unsigned int chunkSize)
{
    pool->memory = memory;
    pool->chunkSize = chunkSize;
    /* First approximation: chunk count if the whole buffer held data. */
    unsigned int totalChunks = size / chunkSize;
    /* Re-derive the count after reserving one allocation record plus one
     * chunk flag per chunk. NOTE(review): the overhead is sized from the
     * first approximation, so a few trailing bytes may go unused — presumably
     * intentional slack; confirm against the header's documentation. */
    totalChunks = (size - totalChunks *
                   (sizeof(*pool->memoryAllocations) +
                    sizeof(*pool->allocatedChunks))) / chunkSize;
    pool->totalChunks = totalChunks;
    /* Lay the two tables out back-to-front from the end of the buffer.
     * (void* arithmetic on pool->memory is a GNU C extension.) */
    unsigned char* ptr = pool->memory + size;
    ptr -= sizeof(*pool->memoryAllocations) * totalChunks;
    pool->memoryAllocations = (void*)ptr;
    ptr -= sizeof(*pool->allocatedChunks) * totalChunks;
    pool->allocatedChunks = (void*)ptr;
    /* Start with every record free and every chunk unallocated. */
    KSMemoryAllocation empty = {0};
    for(unsigned int i = 0; i < totalChunks; i++)
    {
        pool->memoryAllocations[i] = empty;
        pool->allocatedChunks[i] = 0;
    }
}
/* Return a previously allocated block to the pool.
 * A pointer that does not match any live allocation is silently ignored
 * (preserving the original best-effort behavior).
 */
void ksmempool_free(KSMemoryPool* pool, void* memory)
{
    // Check all allocation blocks
    KSMemoryAllocation* alloc = pool->memoryAllocations;
    KSMemoryAllocation* allocEnd = alloc + pool->totalChunks;
    for(; alloc < allocEnd; alloc++)
    {
        /* Skip free records: an empty record has firstChunk == 0, which would
         * otherwise address-match an allocation at the start of the pool and
         * cause this loop to "free" the wrong (already-free) record while the
         * real allocation's chunks stayed marked as in use. */
        if(alloc->numChunks == 0)
        {
            continue;
        }
        if(pool->memory + (alloc->firstChunk * pool->chunkSize) == memory)
        {
            // Found a match. Clear out allocation chunk map.
            unsigned int endChunk = alloc->firstChunk + alloc->numChunks;
            for(unsigned int chunk = alloc->firstChunk; chunk < endChunk; chunk++)
            {
                pool->allocatedChunks[chunk] = 0;
            }
            // Mark this allocation block free.
            alloc->numChunks = 0;
            break;
        }
    }
}
/* Allocate `size` bytes from the pool, rounded up to whole chunks.
 * First-fit linear scan. Returns a pointer into the pool's buffer, or 0 when
 * size is 0, the request exceeds the pool, no allocation record is free, or
 * no contiguous run of free chunks is large enough.
 */
void* ksmempool_alloc(KSMemoryPool* pool, unsigned int size)
{
    if(size == 0)
    {
        return 0;
    }
    /* Round the byte request up to a whole number of chunks. */
    unsigned int requiredChunks = size / pool->chunkSize +
        (size % pool->chunkSize > 0 ? 1 : 0);
    if(requiredChunks > pool->totalChunks)
    {
        return 0;
    }
    // Find a free allocation block.
    /* numChunks == 0 marks a record as unused. */
    KSMemoryAllocation* allocation = 0;
    for(unsigned int i = 0; i < pool->totalChunks; i++)
    {
        if(pool->memoryAllocations[i].numChunks == 0)
        {
            allocation = &pool->memoryAllocations[i];
            break;
        }
    }
    if(allocation == 0)
    {
        return 0;
    }
    // Look for enough contiguous memory chunks
    unsigned int chunk = 0;
    /* Last start index from which `requiredChunks` chunks can still fit. */
    unsigned int lastChunk = pool->totalChunks - requiredChunks;
    for(; chunk <= lastChunk; chunk++)
    {
        if(pool->allocatedChunks[chunk] == 0)
        {
            // Found an unallocated chunk. Look for a big enough run.
            unsigned int chunkEnd = chunk + requiredChunks;
            unsigned int tstChunk = chunk;
            for(; tstChunk < chunkEnd; tstChunk++)
            {
                if(pool->allocatedChunks[tstChunk] != 0)
                {
                    break;
                }
            }
            if(tstChunk >= chunkEnd)
            {
                // Found it!
                break;
            }
            // Not enough free chunks. Skip ahead and try again.
            /* tstChunk is the allocated chunk that ended the run; the loop's
             * chunk++ advances past it before the next probe. */
            chunk = tstChunk;
        }
    }
    /* Scan ran off the end without finding a run: out of contiguous space. */
    if(chunk > lastChunk)
    {
        return 0;
    }
    // Mark the chunks as "allocated".
    for(unsigned int i = chunk; i < chunk + requiredChunks; i++)
    {
        pool->allocatedChunks[i] = 1;
    }
    // Mark the allocation block as "in use".
    allocation->firstChunk = chunk;
    allocation->numChunks = requiredChunks;
    /* Convert the chunk index back to an address (void* arithmetic, GNU C). */
    return pool->memory + allocation->firstChunk * pool->chunkSize;
}
|
package mock;
import com.linkedin.data.schema.RecordDataSchema;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.RelationshipFieldSpec;
import com.linkedin.metadata.models.SearchScoreFieldSpec;
import com.linkedin.metadata.models.SearchableFieldSpec;
import com.linkedin.metadata.models.TimeseriesFieldCollectionSpec;
import com.linkedin.metadata.models.TimeseriesFieldSpec;
import com.linkedin.metadata.models.annotation.AspectAnnotation;
import java.util.List;
import javax.annotation.Nonnull;
/**
 * Test-only {@link AspectSpec} that widens the visibility of the
 * {@code AspectSpec} constructor so tests can assemble a spec directly from
 * pre-built field-spec lists. Adds no behavior of its own.
 */
public class MockAspectSpec extends AspectSpec {
  /**
   * Delegates every argument unchanged to {@link AspectSpec}'s constructor.
   */
  public MockAspectSpec(@Nonnull AspectAnnotation aspectAnnotation,
      @Nonnull List<SearchableFieldSpec> searchableFieldSpecs,
      @Nonnull List<SearchScoreFieldSpec> searchScoreFieldSpecs,
      @Nonnull List<RelationshipFieldSpec> relationshipFieldSpecs,
      @Nonnull List<TimeseriesFieldSpec> timeseriesFieldSpecs,
      @Nonnull List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs, RecordDataSchema schema,
      Class<RecordTemplate> aspectClass) {
    super(aspectAnnotation, searchableFieldSpecs, searchScoreFieldSpecs, relationshipFieldSpecs, timeseriesFieldSpecs,
        timeseriesFieldCollectionSpecs, schema, aspectClass);
  }
}
|
<reponame>tangem/tangem-web<filename>src/components/Home/SectionFaq/index.js<gh_stars>0
import React, {useEffect, useRef, useState} from 'react'
import classNames from 'classnames'
import styles from './faq.module.scss'
import {t} from "i18next";
const Accordion = ({ head, body }) => {
const [isActive, setIsActive] = useState(false);
const ref = useRef();
useEffect(() => {
if(!ref.current) {
return function empty() {
//
}
}
ref.current.style.maxHeight = isActive ? ref.current.scrollHeight + "px" : null;
}, [isActive]);
return (
<>
<div className={classNames(styles.item, isActive && styles.active )} onClick={() => setIsActive((v) => !v)}>
<div className={styles.head}>
<span>{head}</span>
<button className={styles.button}></button>
</div>
<div ref={ref} className={styles.body}>{body}</div>
</div>
</>
);
};
const SectionFaq = () => {
const accordionData = [...Array(5)].map((e, i) => {
return {
head: t(`faq.items.${i}.question`),
body: t(`faq.items.${i}.answer`)
}
});
return (
<section className={styles.faq}>
<h2 className={styles.title}>{ t('faq.title') }</h2>
<div className={styles.qustions}>
{accordionData?.map(({ head, body }, idx) => (
<Accordion key={idx} head={head} body={body} />
))}
</div>
</section>
)
}
export default SectionFaq
|
#!/usr/bin/bash
#
# error.sh.in - error variable definitions for makepkg
#
# Copyright (c) 2006-2018 Pacman Development Team <pacman-dev@archlinux.org>
# Copyright (c) 2002-2006 by Judd Vinet <jvinet@zeroflux.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Include guard: do nothing if this file has already been sourced.
[[ -n "$LIBMAKEPKG_UTIL_ERROR_SH" ]] && return
LIBMAKEPKG_UTIL_ERROR_SH=1
# Exit codes used by makepkg; names describe the failure they signal.
E_OK=0
E_FAIL=1 # Generic error
E_CONFIG_ERROR=2
E_INVALID_OPTION=3
E_USER_FUNCTION_FAILED=4
E_PACKAGE_FAILED=5
E_MISSING_FILE=6
E_MISSING_PKGDIR=7
E_INSTALL_DEPS_FAILED=8
E_REMOVE_DEPS_FAILED=9
E_ROOT=10 # Refused to run as root
E_FS_PERMISSIONS=11
E_PKGBUILD_ERROR=12
E_ALREADY_BUILT=13
E_INSTALL_FAILED=14
E_MISSING_MAKEPKG_DEPS=15
E_PRETTY_BAD_PRIVACY=16 # GPG/signing failure
|
#!/bin/bash
# Root of the lab checkout; defaults to the current working directory.
LAB_HOME=${LAB_HOME:-`pwd`}
# Presumably provides the helpers used below (target::step, target::log,
# target::command, get_insecure_registries*) — confirm in install/funcs.sh.
source $LAB_HOME/install/funcs.sh
# All compose commands in this script operate on the registry-proxy stack.
docker_compose="sudo docker-compose -f docker-compose-registry-proxy.yml"
# One-time initialization: configure Docker's insecure registries, create
# the shared network/volume, then bring the registry proxy stack up.
function registry-proxy::init {
  insecure_registries=($(get_insecure_registries))
  my_registry=${insecure_registries[0]}
  # Only write daemon.json on Linux and only if it does not already exist,
  # so an operator-managed configuration is never clobbered.
  if ensure_os_linux && [ ! -f /etc/docker/daemon.json ]; then
    target::step "Set up insecure registries"
    cat << EOF | sudo tee /etc/docker/daemon.json
{
  "insecure-registries" : [$(get_insecure_registries_text)]
}
EOF
    sudo systemctl daemon-reload
    sudo systemctl restart docker
    sudo systemctl show --property=Environment docker
  fi
  target::step "Set up registries network and volume"
  # Create the network only when missing; a bare `create` fails if it exists.
  sudo docker network inspect net-registries &>/dev/null || \
    sudo docker network create net-registries
  sudo docker volume create vol-registries
  pushd $LAB_HOME
  # Default the upstream registry host to this machine's default-route
  # gateway. NOTE(review): `cut -f10` is routing-table-format specific —
  # confirm the field index matches the target OS's netstat output.
  REGISTRY_REMOTE=${REGISTRY_REMOTE:-$(netstat -rn | grep "^0.0.0.0 " | cut -d " " -f10)}
  [ -z $REGISTRY_REMOTE ] && target::log '$REGISTRY_REMOTE must not be empty' && exit 1
  echo "REGISTRY_PROXY_REMOTEURL=http://$REGISTRY_REMOTE:5000" >.env
  target::step "Take all registry proxies up"
  $docker_compose up -d
  popd
}
# Start (or restart) the registry proxy containers; idempotent.
function registry-proxy::up {
  pushd $LAB_HOME
  target::step "Take all registry proxies up"
  $docker_compose up -d
  popd
}
# Stop and remove the registry proxy containers.
function registry-proxy::down {
  pushd $LAB_HOME
  target::step "Take all registry proxies down"
  $docker_compose down
  popd
}
# Dispatch: run the subcommand named by the CLI arguments.
# NOTE(review): unquoted $@ word-splits arguments containing spaces —
# "$@" is presumably intended; confirm against target::command's contract.
target::command $@
|
//! Map particle mass, momentum and inertia to nodes
template <unsigned Tdim>
void mpm::Particle<Tdim>::map_mass_momentum_inertia_to_nodes() noexcept {
  // Map mass and momentum to nodes (delegated to the explicit-scheme mapper)
  this->map_mass_momentum_to_nodes();
  // Map inertia to nodes: node i accumulates m * N_i * a, i.e. the particle
  // inertia weighted by the shape function evaluated at that node.
  for (unsigned i = 0; i < nodes_.size(); ++i) {
    nodes_[i]->update_inertia(true, mpm::ParticlePhase::Solid,
                              mass_ * shapefn_[i] * acceleration_);
  }
}
//! Map inertial force
template <unsigned Tdim>
void mpm::Particle<Tdim>::map_inertial_force() noexcept {
  // Check if particle has a valid cell ptr
  assert(cell_ != nullptr);
  // Compute nodal inertial forces: each node i accumulates -m * a_i * N_i
  // (negated so it enters the balance as an external force contribution).
  for (unsigned i = 0; i < nodes_.size(); ++i)
    nodes_[i]->update_external_force(
        true, mpm::ParticlePhase::Solid,
        (-1. * nodes_[i]->acceleration(mpm::ParticlePhase::Solid) * mass_ *
         shapefn_(i)));
}
//! Map material stiffness matrix to cell (used in equilibrium equation LHS)
//! \retval true on success; false if the constitutive computation threw
template <unsigned Tdim>
inline bool mpm::Particle<Tdim>::map_material_stiffness_matrix_to_cell() {
  bool status = true;
  try {
    // Check if material ptr is valid
    assert(this->material() != nullptr);
    // Calculate constitutive relations matrix (tangent operator D) from the
    // current stress/strain-increment state
    Eigen::MatrixXd dmatrix;
    dmatrix =
        (this->material())
            ->compute_dmatrix(stress_, dstrain_, this,
                              &state_variables_[mpm::ParticlePhase::Solid]);
    // Reduce constitutive relations matrix depending on the dimension
    // (6x6 Voigt form is shrunk to the components relevant for Tdim)
    Eigen::MatrixXd reduced_dmatrix;
    reduced_dmatrix = this->reduce_dmatrix(dmatrix);
    // Calculate B matrix (strain-displacement operator) for this particle
    Eigen::MatrixXd bmatrix;
    bmatrix = this->compute_bmatrix();
    // Accumulate the particle contribution B^T D B * volume into the cell's
    // local material stiffness matrix
    cell_->compute_local_material_stiffness_matrix(bmatrix, reduced_dmatrix,
                                                   volume_);
  } catch (std::exception& exception) {
    console_->error("{} #{}: {}\n", __FILE__, __LINE__, exception.what());
    status = false;
  }
  return status;
}
// Compute B matrix (1D specialisation)
// A single row holds dN_i/dx for every node: axial strain only.
template <>
inline Eigen::MatrixXd mpm::Particle<1>::compute_bmatrix() noexcept {
  const unsigned nnodes = this->nodes_.size();
  Eigen::MatrixXd bmatrix = Eigen::MatrixXd::Zero(1, nnodes);
  for (unsigned i = 0; i < nnodes; ++i) bmatrix(0, i) = dn_dx_(i, 0);
  return bmatrix;
}
// Compute B matrix (2D specialisation)
// Rows follow the reduced Voigt ordering (eps_xx, eps_yy, gamma_xy);
// columns are interleaved per node as (x-dof, y-dof).
template <>
inline Eigen::MatrixXd mpm::Particle<2>::compute_bmatrix() noexcept {
  Eigen::MatrixXd bmatrix;
  bmatrix.resize(3, 2 * this->nodes_.size());
  bmatrix.setZero();
  for (unsigned i = 0; i < this->nodes_.size(); ++i) {
    bmatrix(0, 2 * i) = dn_dx_(i, 0);      // eps_xx <- dN/dx on x-dof
    bmatrix(2, 2 * i) = dn_dx_(i, 1);      // gamma_xy <- dN/dy on x-dof
    bmatrix(1, 2 * i + 1) = dn_dx_(i, 1);  // eps_yy <- dN/dy on y-dof
    bmatrix(2, 2 * i + 1) = dn_dx_(i, 0);  // gamma_xy <- dN/dx on y-dof
  }
  return bmatrix;
}
// Compute B matrix (3D specialisation)
// Rows follow the Voigt ordering (eps_xx, eps_yy, eps_zz, gamma_xy,
// gamma_yz, gamma_zx); columns are interleaved per node as (x, y, z) dofs.
template <>
inline Eigen::MatrixXd mpm::Particle<3>::compute_bmatrix() noexcept {
  Eigen::MatrixXd bmatrix;
  bmatrix.resize(6, 3 * this->nodes_.size());
  bmatrix.setZero();
  for (unsigned i = 0; i < this->nodes_.size(); ++i) {
    bmatrix(0, 3 * i) = dn_dx_(i, 0);      // eps_xx
    bmatrix(3, 3 * i) = dn_dx_(i, 1);      // gamma_xy
    bmatrix(5, 3 * i) = dn_dx_(i, 2);      // gamma_zx
    bmatrix(1, 3 * i + 1) = dn_dx_(i, 1);  // eps_yy
    bmatrix(3, 3 * i + 1) = dn_dx_(i, 0);  // gamma_xy
    bmatrix(4, 3 * i + 1) = dn_dx_(i, 2);  // gamma_yz
    bmatrix(2, 3 * i + 2) = dn_dx_(i, 2);  // eps_zz
    bmatrix(4, 3 * i + 2) = dn_dx_(i, 1);  // gamma_yz
    bmatrix(5, 3 * i + 2) = dn_dx_(i, 0);  // gamma_zx
  }
  return bmatrix;
}
//! Reduce constitutive relations matrix depending on the dimension
//! In 1D only the axial component survives: a 1x1 matrix holding D(0,0).
template <>
inline Eigen::MatrixXd mpm::Particle<1>::reduce_dmatrix(
    const Eigen::MatrixXd& dmatrix) noexcept {
  return Eigen::MatrixXd::Constant(1, 1, dmatrix(0, 0));
}
//! Reduce constitutive relations matrix depending on the dimension
//! 2D keeps the Voigt rows/columns {0, 1, 4} of the full 6x6 operator,
//! i.e. (xx, yy, xy), yielding a 3x3 matrix.
template <>
inline Eigen::MatrixXd mpm::Particle<2>::reduce_dmatrix(
    const Eigen::MatrixXd& dmatrix) noexcept {
  const unsigned voigt_index[3] = {0, 1, 4};
  Eigen::MatrixXd reduced(3, 3);
  for (unsigned row = 0; row < 3; ++row)
    for (unsigned col = 0; col < 3; ++col)
      reduced(row, col) = dmatrix(voigt_index[row], voigt_index[col]);
  return reduced;
}
//! Reduce constitutive relations matrix depending on the dimension
//! 3D already uses the full 6x6 Voigt operator, so this is the identity.
template <>
inline Eigen::MatrixXd mpm::Particle<3>::reduce_dmatrix(
    const Eigen::MatrixXd& dmatrix) noexcept {
  return dmatrix;
}
//! Map mass matrix to cell (used in poisson equation LHS)
//! \param[in] newmark_beta Newmark-beta integration parameter
//! \param[in] dt           Time step
//! \retval true on success; false if the computation threw
template <unsigned Tdim>
inline bool mpm::Particle<Tdim>::map_mass_matrix_to_cell(double newmark_beta,
                                                         double dt) {
  bool status = true;
  try {
    // Check if material ptr is valid
    assert(this->material() != nullptr);
    // Compute local mass matrix; the density is scaled by 1/(beta * dt^2),
    // the Newmark factor relating displacement increments to accelerations.
    cell_->compute_local_mass_matrix(shapefn_, volume_,
                                     mass_density_ / (newmark_beta * dt * dt));
  } catch (std::exception& exception) {
    console_->error("{} #{}: {}\n", __FILE__, __LINE__, exception.what());
    status = false;
  }
  return status;
}
// Compute strain increment of the particle (1D specialisation)
// Returns the 6-component Voigt vector; only eps_xx (index 0) is populated.
template <>
inline Eigen::Matrix<double, 6, 1> mpm::Particle<1>::compute_strain_increment(
    const Eigen::MatrixXd& dn_dx, unsigned phase) noexcept {
  // Define strain increment
  Eigen::Matrix<double, 6, 1> strain_increment =
      Eigen::Matrix<double, 6, 1>::Zero();
  for (unsigned i = 0; i < this->nodes_.size(); ++i) {
    Eigen::Matrix<double, 1, 1> displacement = nodes_[i]->displacement(phase);
    strain_increment[0] += dn_dx(i, 0) * displacement[0];
  }
  // Snap numerical noise below 1e-15 to exactly zero
  if (std::fabs(strain_increment(0)) < 1.E-15) strain_increment[0] = 0.;
  return strain_increment;
}
// Compute strain increment of the particle (2D specialisation)
// Populates eps_xx, eps_yy and gamma_xy (Voigt indices 0, 1, 3) from nodal
// displacements of the given phase.
template <>
inline Eigen::Matrix<double, 6, 1> mpm::Particle<2>::compute_strain_increment(
    const Eigen::MatrixXd& dn_dx, unsigned phase) noexcept {
  // Define strain increment
  Eigen::Matrix<double, 6, 1> strain_increment =
      Eigen::Matrix<double, 6, 1>::Zero();
  for (unsigned i = 0; i < this->nodes_.size(); ++i) {
    Eigen::Matrix<double, 2, 1> displacement = nodes_[i]->displacement(phase);
    strain_increment[0] += dn_dx(i, 0) * displacement[0];
    strain_increment[1] += dn_dx(i, 1) * displacement[1];
    // Engineering shear strain gamma_xy = du/dy + dv/dx
    strain_increment[3] +=
        dn_dx(i, 1) * displacement[0] + dn_dx(i, 0) * displacement[1];
  }
  // Snap numerical noise below 1e-15 to exactly zero
  if (std::fabs(strain_increment[0]) < 1.E-15) strain_increment[0] = 0.;
  if (std::fabs(strain_increment[1]) < 1.E-15) strain_increment[1] = 0.;
  if (std::fabs(strain_increment[3]) < 1.E-15) strain_increment[3] = 0.;
  return strain_increment;
}
// Compute strain increment of the particle (3D specialisation)
// Populates all six Voigt components (xx, yy, zz, xy, yz, zx) from nodal
// displacements of the given phase.
template <>
inline Eigen::Matrix<double, 6, 1> mpm::Particle<3>::compute_strain_increment(
    const Eigen::MatrixXd& dn_dx, unsigned phase) noexcept {
  // Define strain increment
  Eigen::Matrix<double, 6, 1> strain_increment =
      Eigen::Matrix<double, 6, 1>::Zero();
  for (unsigned i = 0; i < this->nodes_.size(); ++i) {
    Eigen::Matrix<double, 3, 1> displacement = nodes_[i]->displacement(phase);
    strain_increment[0] += dn_dx(i, 0) * displacement[0];
    strain_increment[1] += dn_dx(i, 1) * displacement[1];
    strain_increment[2] += dn_dx(i, 2) * displacement[2];
    // Engineering shear strains: gamma_xy, gamma_yz, gamma_zx
    strain_increment[3] +=
        dn_dx(i, 1) * displacement[0] + dn_dx(i, 0) * displacement[1];
    strain_increment[4] +=
        dn_dx(i, 2) * displacement[1] + dn_dx(i, 1) * displacement[2];
    strain_increment[5] +=
        dn_dx(i, 2) * displacement[0] + dn_dx(i, 0) * displacement[2];
  }
  // Snap numerical noise below 1e-15 to exactly zero
  for (unsigned i = 0; i < strain_increment.size(); ++i)
    if (std::fabs(strain_increment[i]) < 1.E-15) strain_increment[i] = 0.;
  return strain_increment;
}
// Compute strain of the particle using nodal displacement
// Stores the result in dstrain_ for the solid phase; the total strain is
// accumulated later in update_stress_strain().
template <unsigned Tdim>
void mpm::Particle<Tdim>::compute_strain_newmark() noexcept {
  // Compute strain increment from previous time step
  this->dstrain_ =
      this->compute_strain_increment(dn_dx_, mpm::ParticlePhase::Solid);
}
// Compute stress using implicit updating scheme
// The constitutive update starts from previous_stress_ (the converged stress
// of the last step) rather than the current iterate, applying dstrain_.
template <unsigned Tdim>
void mpm::Particle<Tdim>::compute_stress_newmark() noexcept {
  // Check if material ptr is valid
  assert(this->material() != nullptr);
  // Calculate stress
  this->stress_ =
      (this->material())
          ->compute_stress(previous_stress_, dstrain_, this,
                           &state_variables_[mpm::ParticlePhase::Solid]);
}
// Compute updated position of the particle by Newmark scheme
// \param[in] dt Time step
template <unsigned Tdim>
void mpm::Particle<Tdim>::compute_updated_position_newmark(double dt) noexcept {
  // Check if particle has a valid cell ptr
  assert(cell_ != nullptr);
  // Get interpolated nodal displacement and acceleration at the particle
  // location using the shape functions
  Eigen::Matrix<double, Tdim, 1> nodal_displacement =
      Eigen::Matrix<double, Tdim, 1>::Zero();
  Eigen::Matrix<double, Tdim, 1> nodal_acceleration =
      Eigen::Matrix<double, Tdim, 1>::Zero();
  for (unsigned i = 0; i < nodes_.size(); ++i) {
    nodal_displacement +=
        shapefn_[i] * nodes_[i]->displacement(mpm::ParticlePhase::Solid);
    nodal_acceleration +=
        shapefn_[i] * nodes_[i]->acceleration(mpm::ParticlePhase::Solid);
  }
  // Update particle velocity using the trapezoidal average of the old and
  // newly interpolated accelerations
  this->velocity_ += 0.5 * (this->acceleration_ + nodal_acceleration) * dt;
  // Update acceleration
  this->acceleration_ = nodal_acceleration;
  // New position current position + displacement increment
  this->coordinates_ += nodal_displacement;
  // Update displacement
  this->displacement_ += nodal_displacement;
}
// Update stress and strain after convergence of Newton-Raphson iteration
template <unsigned Tdim>
void mpm::Particle<Tdim>::update_stress_strain() noexcept {
  // Update initial stress of the time step
  this->previous_stress_ = this->stress_;
  // Update total strain
  this->strain_ += this->dstrain_;
  // Volumetric strain increment: trace of the strain increment (sum of the
  // first Tdim normal components)
  this->dvolumetric_strain_ = this->dstrain_.head(Tdim).sum();
  // Update volumetric strain at particle position (not at centroid)
  // NOTE(review): the variable is named ..._centroid_ while the comment says
  // "not at centroid" — confirm which location this actually tracks.
  this->volumetric_strain_centroid_ += this->dvolumetric_strain_;
  // Reset strain increment
  this->dstrain_.setZero();
}
// Assign acceleration to the particle
// \param[in] acceleration New acceleration vector
// \retval true Always succeeds (no validation is performed here)
template <unsigned Tdim>
bool mpm::Particle<Tdim>::assign_acceleration(
    const Eigen::Matrix<double, Tdim, 1>& acceleration) {
  // Assign acceleration
  acceleration_ = acceleration;
  return true;
}
#!/bin/sh
# Run the staged transform + load SQL scripts, pairwise and in order, for
# concepts, relations and maps.
# $1 — sqlplus connect string (e.g. user/password@tns).
# NOTE(review): sqlplus exits 0 even when a script fails unless the scripts
# declare WHENEVER SQLERROR EXIT — a failure will not stop this sequence.
sqlplus $1 @04_transform_row_concepts.sql
sqlplus $1 @04_load_concepts.sql
sqlplus $1 @04_transform_row_relations.sql
sqlplus $1 @04_load_relations.sql
sqlplus $1 @04_transform_row_maps.sql
sqlplus $1 @04_load_maps.sql
#ifndef TOKENIZER_H
#define TOKENIZER_H
#define MAX_TOK_LEN 4096
#define MAX_UNGETC 8
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>
struct tokenizer_getc_buf {
int buf[MAX_UNGETC];
size_t cnt, buffered;
};
enum markertype {
MT_SINGLELINE_COMMENT_START = 0,
MT_MULTILINE_COMMENT_START = 1,
MT_MULTILINE_COMMENT_END = 2,
MT_MAX = MT_MULTILINE_COMMENT_END
};
#define MAX_CUSTOM_TOKENS 32
enum tokentype {
TT_IDENTIFIER = 1,
TT_SQSTRING_LIT,
TT_DQSTRING_LIT,
TT_ELLIPSIS,
TT_HEX_INT_LIT,
TT_OCT_INT_LIT,
TT_DEC_INT_LIT,
TT_FLOAT_LIT,
TT_SEP,
/* errors and similar */
TT_UNKNOWN,
TT_OVERFLOW,
TT_WIDECHAR_LIT,
TT_WIDESTRING_LIT,
TT_EOF,
TT_CUSTOM = 1000 /* start user defined tokentype values */
};
const char* tokentype_to_str(enum tokentype tt);
struct token {
enum tokentype type;
int value;
int64_t line;
int64_t column;
};
enum tokenizer_flags {
TF_PARSE_STRINGS = 1 << 0,
TF_PARSE_WIDE_STRINGS = 1 << 1,
};
struct tokenizer {
FILE *input;
int64_t line;
int64_t column;
int flags;
int custom_count;
int peeking;
const char *custom_tokens[MAX_CUSTOM_TOKENS];
char buf[MAX_TOK_LEN];
size_t bufsize;
struct tokenizer_getc_buf getc_buf;
const char* marker[MT_MAX+1];
const char* filename;
struct token peek_token;
};
void tokenizer_init(struct tokenizer *t, FILE* in, int flags);
void tokenizer_set_filename(struct tokenizer *t, const char*);
void tokenizer_set_flags(struct tokenizer *t, int flags);
int tokenizer_get_flags(struct tokenizer *t);
int64_t tokenizer_ftello(struct tokenizer *t);
void tokenizer_register_marker(struct tokenizer*, enum markertype, const char*);
void tokenizer_register_custom_token(struct tokenizer*, int tokentype, const char*);
int tokenizer_next(struct tokenizer *t, struct token* out);
int tokenizer_peek_token(struct tokenizer *t, struct token* out);
int tokenizer_peek(struct tokenizer *t);
void tokenizer_skip_until(struct tokenizer *t, const char *marker);
int tokenizer_skip_chars(struct tokenizer *t, const char *chars, int *count);
int tokenizer_read_until(struct tokenizer *t, const char* marker, int stop_at_nl);
int tokenizer_rewind(struct tokenizer *t);
#endif
|
/**
 * Return the remainder of dividing `dividend` by `divisor`
 * (JavaScript `%` semantics: the result takes the sign of the dividend).
 */
function getRemainder(dividend, divisor) {
  return dividend % divisor;
}
const result = getRemainder(8, 4);
console.log(result);
/**
 * Append the value stored at `val` to `result`.
 *
 * Bug fix: the original read `*(TRet*)&val`, which reinterprets the bytes of
 * the pointer VARIABLE itself rather than the data it points to. The intended
 * read is of the pointed-to object, i.e. `*(TRet*)val`.
 *
 * @param val    pointer to the raw value (may be null; then nothing is added)
 * @param bytes  number of valid bytes at `val`; must cover sizeof(TRet)
 * @param result output vector receiving the decoded value
 */
template <typename TRet>
void processValue(void* val, size_t bytes, std::vector<TRet>& result) {
  // Guard: require a non-null source and enough bytes for a full TRet,
  // so we never read past the caller's buffer.
  if (val != nullptr && bytes >= sizeof(TRet)) {
    result.push_back(*static_cast<TRet*>(val));
  }
}
/** Minimal publish/subscribe event emitter with repeatable and one-shot
 *  subscriptions kept in separate queues. */
class EventEmitter {
  constructor() {
    this.queue = {} // events whose handlers may fire any number of times
    this.onceQueue = {} // events whose handlers fire at most once
  }
  on(event, fn) { // subscribe a handler that may fire multiple times
    if (!this.queue[event]) this.queue[event] = []
    this.queue[event].push(fn)
  }
  once(event, fn) { // subscribe a handler that fires at most once
    if (!this.onceQueue[event]) {
      this.onceQueue[event] = {
        fns: [],
        hasFired: false
      }
    }
    this.onceQueue[event].fns.push(fn)
  }
  fire() { // emit: first argument is the event name, the rest go to handlers
    const event = [].shift.call(arguments), // pop the event name
      fns = this.queue[event], // repeatable handlers for this event
      onceFns = this.onceQueue[event] // one-shot handlers for this event
    if (fns && fns.length != 0) {
      let i = 0,fn
      // NOTE: this loop stops at the first falsy entry in the array
      while (fn = fns[i++]) {
        fn.apply(this, arguments)
      }
    }
    if (onceFns && !onceFns.hasFired) {
      let i = 0,fn
      while (fn = onceFns.fns[i++]) {
        fn.apply(this, arguments)
      }
      // Mark fired so these handlers never run again (even handlers added
      // to this event via once() later will be skipped).
      this.onceQueue[event].hasFired = true
    }
  }
  off(event, fn = null) { // remove one handler, or all, for an event
    // NOTE(review): only `queue` is cleaned here; one-shot handlers added
    // via once() cannot be removed — confirm whether that is intended.
    const fns = this.queue[event]
    if (!fns || fns.length == 0) return
    if (fn) { // remove only the given handler for this event
      this.queue[event] = fns.filter(item => {
        return item !== fn
      })
    } else { // remove every handler for this event
      this.queue[event] = []
    }
  }
}
<gh_stars>10-100
from twitterbot.services.twitter_bot_names_service import TwitterBotNamesService
from twitterbot.services.twitter_bot_responses_service import TwitterBotResponsesService
from twitterbot.services.twitter_bot_tweets_service import TwitterBotTweetsService
from twitterbot.utils.log import bot_log
class TwitterBotService:
    """Coordinates collecting related tweets, extracting names and building
    the bot's responses for an incoming tweet."""

    def __init__(self, client):
        self.client = client
        self.user = self.client.get_current_user()
        self.tweets = []
        self.names = []

    def build_responses(self, tweet):
        """Build the response objects for *tweet*, annotating each with its
        recipients, the incoming tweet and the originating tweet."""
        self.cache_all_related_tweets(tweet)
        self.cache_names()
        responses = self.get_responses()
        recipients = self.get_recipients()
        originating_tweet = self.get_originating_tweet()
        bot_log('No. responses: {num}'.format(num=len(responses)))
        bot_log('No. user responses: {num}'.format(num=len(responses) * len(recipients)))
        for response in responses:
            response.recipients = recipients
            response.incoming_tweet = tweet
            response.originating_tweet = originating_tweet
        return responses

    def cache_all_related_tweets(self, tweet):
        """
        Cache list of all related tweets including original tweet
        :param tweet: incoming tweet
        :return: list of all tweets we want to process
        """
        self.tweets = TwitterBotTweetsService(self.client).get_all_related_tweets(tweet)

    def cache_names(self):
        """
        Cache list of names we will check in our database to find officer
        """
        self.names = TwitterBotNamesService(self.tweets).get_all_names()

    def get_recipients(self):
        """Get the de-duplicated list of screen names to reply to: every
        tweet author plus every mentioned user, excluding the bot itself."""
        screen_names = set()
        for tweet in self.tweets:
            screen_names.add(tweet.user.screen_name)
            screen_names.update(
                mention['screen_name'] for mention in tweet.entities['user_mentions'])
        return [name for name in screen_names if name != self.user.screen_name]

    def get_responses(self):
        """
        Get list of responses we will send to each user
        """
        return TwitterBotResponsesService(self.names).build_responses()

    def get_originating_tweet(self):
        """Return the earliest related tweet, or None when there is at most
        one cached tweet (matching the original behaviour)."""
        if len(self.tweets) > 1:
            return min(self.tweets, key=lambda tweet: tweet.created_at)
        return None
|
import { createStore, combineReducers, compose, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import reducers from './reducers';
// Combine the reducer map into a single root reducer.
const rootReducer = combineReducers(reducers);
// NOTE(review): composeEnhancers is plain `compose`; if Redux DevTools
// support was intended, window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ would
// presumably be wired in here — confirm.
let composeEnhancers = compose;
// Create and export the store with the thunk middleware applied.
export default createStore(
  rootReducer,
  composeEnhancers(applyMiddleware(thunk))
);
import json
import threading
from queue import Queue, Empty
try:
from queue import SimpleQueue
except ImportError: # Python 3.6 lacks SimpleQueue
SimpleQueue = Queue
import click
import dns.resolver
import dns.inet
from .dsscanner import do_cds_scan
def resolve_dns_and_cds(domain):
    """Resolve the A records of *domain* and run a CDS scan for each IP.

    Results and errors are reported via print(); nothing is returned.
    """
    try:
        ip_addresses = dns.resolver.resolve(domain, 'A')
        for ip in ip_addresses:
            # NOTE(review): do_cds_scan receives the resolved IP string;
            # whether it expects an IP or a domain is not visible here — confirm.
            cds_result = do_cds_scan(str(ip))
            print(f"Domain: {domain}, IP: {ip}, CDS Scan Result: {cds_result}")
    except dns.resolver.NXDOMAIN:
        print(f"Domain: {domain}, Error: No such domain")
    except dns.resolver.NoAnswer:
        print(f"Domain: {domain}, Error: No DNS answer")
    except dns.exception.DNSException as e:
        # Catch-all for other DNS failures (timeouts, SERVFAIL, ...);
        # must come after the more specific handlers above.
        print(f"Domain: {domain}, Error: DNS resolution failed - {e}")
def main(domain_list):
    """Resolve every domain concurrently and wait for all lookups to finish.

    Fix: the original started the threads but never joined them, so callers
    could proceed (and output could interleave with the caller's) before
    any lookup completed.
    """
    threads = [
        threading.Thread(target=resolve_dns_and_cds, args=(domain,))
        for domain in domain_list
    ]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
if __name__ == "__main__":
    # Demo invocation: two resolvable domains plus one expected to NXDOMAIN.
    domain_list = ["example.com", "google.com", "nonexistentdomain123.com"]
    main(domain_list)
<reponame>LarsBehrenberg/e-wallet
import React, { Component } from 'react';
import {
Container,
InputAdornment,
Button,
TextField
} from '@material-ui/core';
import { Formik } from 'formik';
import * as Yup from 'yup';
import NameIcon from '@material-ui/icons/SupervisorAccount';
import LockIcon from '@material-ui/icons/Lock';
import EmailIcon from '@material-ui/icons/Email';
// Form-wide validation rules: name/email/password are required, the email
// must be well-formed, the password needs >= 8 characters and the
// confirmation must match it exactly.
// NOTE(review): Yup.string() takes no message argument — the strings passed
// to it here ('Enter a name', etc.) are silently ignored; confirm whether
// they were meant as .required() messages.
const validationSchema = Yup.object({
  name: Yup.string('Enter a name').required('Name is required'),
  email: Yup.string('Enter your email')
    .email('Enter a valid email')
    .required('Email is required'),
  password: Yup.string('')
    .min(8, 'Password must contain atleast 8 characters')
    .required('Enter your password'),
  confirmPassword: Yup.string('Enter your password')
    .required('Confirm your password')
    .oneOf([Yup.ref('password')], 'Password does not match')
});
const Form = (props) => {
const {
values: { name, email, password, confirmPassword },
errors,
touched,
handleSubmit,
handleChange,
isValid
} = props;
console.table(props);
return (
<Container className="py-4">
<form onSubmit={handleSubmit}>
<TextField
variant="outlined"
className="mb-4"
name="name"
helperText={touched.name ? errors.name : ''}
error={Boolean(errors.name)}
label="Name"
value={name}
onChange={handleChange}
fullWidth
InputProps={{
startAdornment: (
<InputAdornment position="start">
<NameIcon />
</InputAdornment>
)
}}
/>
<TextField
variant="outlined"
className="mb-4"
name="email"
helperText={touched.email ? errors.email : ''}
error={Boolean(errors.email)}
label="Email"
fullWidth
value={email}
onChange={handleChange}
InputProps={{
startAdornment: (
<InputAdornment position="start">
<EmailIcon />
</InputAdornment>
)
}}
/>
<TextField
variant="outlined"
className="mb-4"
name="password"
helperText={touched.password ? errors.password : ''}
error={Boolean(errors.password)}
label="Password"
fullWidth
type="password"
value={password}
onChange={handleChange}
InputProps={{
startAdornment: (
<InputAdornment position="start">
<LockIcon />
</InputAdornment>
)
}}
/>
<TextField
variant="outlined"
className="mb-4"
name="confirmPassword"
helperText={touched.confirmPassword ? errors.confirmPassword : ''}
error={Boolean(errors.confirmPassword)}
label="Confirm Password"
fullWidth
type="password"
value={confirmPassword}
onChange={handleChange}
InputProps={{
startAdornment: (
<InputAdornment position="start">
<LockIcon />
</InputAdornment>
)
}}
/>
<div className="text-center">
<Button
type="submit"
size="large"
className="btn-success"
disabled={!isValid}>
Validate Form
</Button>
</div>
</form>
</Container>
);
};
/** Demo container: wires the Form component into Formik with empty initial
 *  values and the shared validation schema. */
class LivePreviewExample extends Component {
  constructor(props) {
    super(props);
    this.state = {};
  }
  // Called by Formik with the validated form values on submit.
  submit = (data) => {
    console.log(data);
  };
  render() {
    const values = { name: '', email: '', confirmPassword: '', password: '' };
    // NOTE(review): the `render` prop is deprecated in Formik v2 in favour
    // of `children`/`component` — confirm the installed Formik version.
    return (
      <>
        <Formik
          render={(props) => <Form {...props} />}
          initialValues={values}
          validationSchema={validationSchema}
          onSubmit={this.submit}
        />
      </>
    );
  }
}
|
module HandleEnrollmentEvent
  # Extract the changes to make to the members
  class ExtractMemberChanges
    include Interactor
    # Context Requires:
    # - enrollment_event_cv (Openhbx::Cv2::EnrollmentEvent)
    # Context Outputs:
    # - member_changes_collection (Array of HandleEnrollmentEvent::MemberChange)
    #
    # NOTE(review): this interactor is currently a stub — #call performs no
    # work and produces no context output yet.
    def call
    end
  end
end
|
#!/bin/bash
# Entrypoint: start a Shadowsocks server plus an obfs transport and a kcptun
# UDP transport layered in front of it. All settings are overridable via
# environment variables; the values below are the defaults.
SHADOWSOCKS_PORT=${SHADOWSOCKS_PORT:-"4433"}
SHADOWSOCKS_PASSWORD=${SHADOWSOCKS_PASSWORD:-"asdewq123"}
SHADOWSOCKS_CRYPTO=${SHADOWSOCKS_CRYPTO:-"chacha20"}
OBFS_PORT=${OBFS_PORT:-"993"}
OBFS_PROTOCOL=${OBFS_PROTOCOL:-"tls"}
KCPTUN_PORT=${KCPTUN_PORT:-"533"}
KCPTUN_MODE=${KCPTUN_MODE:-"fast"}
KCPTUN_KEY=${KCPTUN_KEY:-"asdewq123"}
KCPTUN_CRYPTO=${KCPTUN_CRYPTO:-"chacha20"}
# Choose what kcptun forwards to: shadowsocks directly, or through obfs
# when KCPTUN_OVER_OBFS is set (to any non-empty value).
# (fix) Quote the expansion: unquoted, an empty or whitespace value makes
# `[ -z $KCPTUN_OVER_OBFS ]` mis-parse.
if [ -z "$KCPTUN_OVER_OBFS" ]; then
  KCPTUN_TARGET_PORT=$SHADOWSOCKS_PORT
else
  KCPTUN_TARGET_PORT=$OBFS_PORT
fi
echo "Starting Shadowsocks Server on port $SHADOWSOCKS_PORT with crypto $SHADOWSOCKS_CRYPTO..."
ss-server -s 0.0.0.0 -p "$SHADOWSOCKS_PORT" -k "$SHADOWSOCKS_PASSWORD" -m "$SHADOWSOCKS_CRYPTO" --fast-open --reuse-port -u 1.1.1.1 &
echo "Starting Obfs Server on port $OBFS_PORT over $SHADOWSOCKS_PORT with protocol $OBFS_PROTOCOL..."
obfs-server -r "127.0.0.1:$SHADOWSOCKS_PORT" -p "$OBFS_PORT" --obfs "$OBFS_PROTOCOL" &
echo "Starting Kcptun Server on udp port $KCPTUN_PORT over $KCPTUN_TARGET_PORT with crypto $KCPTUN_CRYPTO..."
# kcptun runs in the foreground and keeps the container alive.
kcptun-server --target "127.0.0.1:$KCPTUN_TARGET_PORT" --listen ":$KCPTUN_PORT" --mode "$KCPTUN_MODE" --key "$KCPTUN_KEY" --crypt "$KCPTUN_CRYPTO" --mtu 1350 --sndwnd 1024 --rcvwnd 1024
|
goog.require('browser_instruments.AnalogGauge');
goog.require('browser_instruments.AssignFromScale');
goog.require('goog.array');
goog.require('goog.dom');
goog.require('goog.events');
goog.require('goog.net.WebSocket');
goog.require('goog.net.WebSocket.MessageEvent');
/**
 * Main entry point into the client application.
 * Creates an AnalogGauge for every element with class "AnalogGauge", wires
 * the three phase buttons, and streams instrument updates from a websocket.
 * @export
 */
function main() {
  console.debug("Connecting to websocket.");
  var analogGaugeElements = goog.dom.getElementsByClass("AnalogGauge");
  console.log("Num gauges: " + analogGaugeElements.length);
  /** @type {!Array<!browser_instruments.AnalogGauge>} **/
  var analogGauges = [];
  goog.array.forEach(analogGaugeElements,
      function (domElement) {
        analogGauges.push(
            new browser_instruments.AnalogGauge(domElement));
        console.log("Created analog gauge.");
      });
  /**
   * Wire a phase button's click handler, if the button exists on this page.
   * (fix) The original duplicated this lookup/assignment block three times.
   * @param {string} buttonId DOM id of the button element.
   * @param {string} scaleName Scale passed to AssignFromScale on click.
   */
  function wirePhaseButton(buttonId, scaleName) {
    var buttonElement = goog.dom.getElement(buttonId);
    if (buttonElement != null) {
      buttonElement.onclick = function() {
        browser_instruments.AssignFromScale(scaleName);
      };
    }
  }
  wirePhaseButton("phase_1_button", "phase_1");
  wirePhaseButton("phase_2_button", "phase_2");
  wirePhaseButton("phase_3_button", "phase_3");
  var ws = new goog.net.WebSocket(true);
  goog.events.listen(ws, goog.net.WebSocket.EventType.MESSAGE,
      /** @param {!goog.net.WebSocket.MessageEvent} e **/
      function(e) {
        var msg = JSON.parse(e.message);
        // Apply every update in the message to every gauge; gauges ignore
        // updates that are not addressed to them.
        goog.array.forEach(analogGauges, function (gauge) {
          if (msg != null) {
            /** @type {?Array<?Object>} **/
            var updates = msg["Updates"];
            if (updates != null) {
              updates.forEach(function(item, index, array) {
                if (item != null) {
                  gauge.updateInstrument(item);
                }
              });
            }
          }
        });
        // Acknowledge the message so the server can send the next one.
        ws.send("ACK");
      });
  var webSocket = "ws://" + location.host + "/ws";
  console.log("Opening web socket at " + webSocket);
  ws.open(webSocket);
}
|
def celsius_to_fahrenheit(celsius):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit.

    Args:
        celsius: Temperature in degrees Celsius (int or float).

    Returns:
        The equivalent temperature in degrees Fahrenheit.
    """
    fahrenheit = celsius * 9 / 5 + 32
    return fahrenheit
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable no-console */
import gulp from 'gulp';
import del from 'del';
import eslint from 'gulp-eslint';
import flow from 'gulp-flowtype';
import webpack from 'webpack-stream';
import webpackConfig from './webpack.config.babel';
// Source/output path glossary used by every task below.
const paths = {
  allSrcJs: 'src/**/*.js?(x)',
  serverSrcJs: 'src/server/**/*.js?(x)',
  sharedSrcJs: 'src/shared/**/*.js?(x)',
  clientEntryPoint: 'src/client/app.js',
  clientBundle: 'dist/client-bundle.js?(.map)',
  gulpFile: 'gulpfile.babel.js',
  webpackFile: 'webpack.config.babel.js',
  libDir: 'lib',
  distDir: 'dist',
};
// Lint + clean, then bundle the client entry point with webpack.
gulp.task('main', ['lint', 'clean'], () =>
  gulp.src(paths.clientEntryPoint)
    .pipe(webpack(webpackConfig))
    .pipe(gulp.dest(paths.distDir))
);
// Re-run 'main' whenever any source file changes.
gulp.task('watch', () => {
  gulp.watch(paths.allSrcJs, ['main']);
});
gulp.task('default', ['watch', 'main']);
// (fix) 'lint' was registered twice; the second registration silently
// overwrote the first, so only this fuller version (which also lints the
// webpack config and runs Flow) ever ran. The dead first copy is removed.
gulp.task('lint', () =>
  gulp.src([
    paths.allSrcJs,
    paths.gulpFile,
    paths.webpackFile,
  ])
    .pipe(eslint())
    .pipe(eslint.format())
    .pipe(eslint.failAfterError())
    .pipe(flow({ abort: true }))
);
// Remove build outputs.
gulp.task('clean', () => del([
  paths.libDir,
  paths.clientBundle,
]));
|
#!/bin/bash
# (fix) This script uses bash-only features — arrays (vals=(...)),
# [[ ]] tests and (( )) arithmetic — so it must run under bash, not
# the plain POSIX sh the old shebang requested.
base='/Users/shahargino/Documents/ImageProcessing'
default_args='--batch --imgEnhancementMode=2 --mode="no_police" --ROI="(10,250,1700,500)" --confidence_thr=0.1 --PreprocessZoomIn=1.3 --PlateWidthPaddingFactor=1.2 --PreprocessMorphKernel="(1,1)"'
# Run one LPR recognition and compare against the expected plate number.
#   $1 — image path
#   $2 — expected plate string
#   $3 — extra lpr arguments (optional)
#   $4 — waiver / note printed on failure (optional)
# Side effects: increments the global counters cnt/imgs/pcnt and appends
# the actual result to the global `vals` array for EndCase's histogram.
LPR_test() {
  res=`build/lpr -i $1 $default_args $3 | grep "LPR Result: "`
  pass=`echo $res | grep -w $2`
  act=`echo $res | cut -d" " -f4`
  info=`echo $res | cut -d" " -f5-`
  ((cnt++))
  ((imgs++))
  if [[ -n "$pass" ]]; then
    ((pcnt++))
    echo "$1 PASSED!\t(pass=$pcnt/$cnt)\t$info\t$3"
  else
    printf "$1 FAILED!\t(pass=$pcnt/$cnt)\t$info\t(ACT=$act EXP=$2)\t$4\n"
  fi
  lpr=`echo $res | cut -d" " -f4`
  vals=("${vals[@]}" "$lpr")
}
# Reset the per-case counters and the result histogram before a test case.
StartCase() {
  cnt=0;
  pcnt=0;
  vals=()
}
# Close a test case: compute its pass rate, print a histogram of the
# distinct recognized values, and decide case pass/fail. A case passes when
# no wrong value ("N/A" excluded) outnumbers the passing runs, unless no
# run passed at all.
EndCase() {
  ((cases++))
  passRate=`echo "scale=2; 100*$pcnt/$cnt" | bc`
  # Unique recognized values collected by LPR_test during this case.
  uniqVals=$(echo "${vals[@]}" | tr ' ' '\n' | sort -u)
  uniqVals=(${uniqVals//\n/ })
  printf "Case #$cases: PassRate=$passRate%% ($pcnt/$cnt), Hist: "
  casepass=1
  for k in "${uniqVals[@]}"; do
    hist=0
    for v in "${vals[@]}"; do
      if [ "$k" == "$v" ]; then ((hist++)); fi
    done
    printf "$k=$hist,"
    if ([ "$hist" -gt "$pcnt" ] && [ "$k" != "N/A" ]) || [ "$pcnt" -eq 0 ]; then casepass=0; fi
  done
  if [ "$casepass" == 1 ]; then
    printf " --> PASSED!"
    ((casespass++))
  else
    printf " --> FAILED!"
  fi
  printf "\n\n"
  unset vals
}
# Global counters for the whole run. SECONDS is the bash builtin that
# counts wall-clock seconds since it was last assigned.
SECONDS=0
cases=0
casespass=0
imgs=0
#--------------- I M A G E ----------------------------------------|-- Expected --|------- Arguments ------|---- Waivers ----
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3747.jpeg" "6785631"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3748.jpeg" "6785631"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3749.jpeg" "6785631"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3762.jpeg" "5248771"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3763.jpeg" "5248771"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3764.jpeg" "5248771" "--PreprocessMorphKernel='(3,3)'"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3779.jpeg" "6265633" "--NoOcrKnnFixes" "KNN OCR Fix disabled"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3781.jpeg" "6265633" "" "Angled image"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3793.jpeg" "7304132"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3825.jpeg" "3402439" "--NoOcrKnnFixes --PreprocessMorphKernel='(3,3)'" "Low quality"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3826.jpeg" "3402439" "--NoOcrKnnFixes" "KNN OCR Fix disabled"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3827.jpeg" "3402439" "" "Angled image, low quality"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3828.jpeg" "3402439" "" "Angled image"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3877.jpeg" "8732737"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3878.jpeg" "8732737"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3879.jpeg" "8732737" "" "Angled image"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3898.jpeg" "18992401" "" "Low quality"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3919.jpeg" "2464679" "" "Angled image, low quality"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3930.jpeg" "4629411"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3931.jpeg" "4629411"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3940.jpeg" "4136230"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3941.jpeg" "4136230"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3970.jpeg" "7144379"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3971.jpeg" "7144379"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3974.jpeg" "7144379" "--PreprocessZoomIn=1.4" "ZoomIn x4"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture3992.jpeg" "14628501" "--PreprocessZoomIn=1.4" "ZoomIn x4"
LPR_test "$base/LPR/Database/Real_Images_150818/capture3993.jpeg" "14628501" "--PreprocessZoomIn=1.4" "ZoomIn x4"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4029.jpeg" "6320874" "--PreprocessZoomIn=1.4 --PlateWidthPaddingFactor=1.2 --NoOcrKnnFixes --PreprocessGaussKernel='(1,1)'" "Angled image"
LPR_test "$base/LPR/Database/Real_Images_150818/capture4030.jpeg" "6320874" "" "Angled image, low quality"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4120.jpeg" "28214201" "--PreprocessZoomIn=1.4 --NoOcrKnnFixes" "Angled image"
LPR_test "$base/LPR/Database/Real_Images_150818/capture4121.jpeg" "28214201" "--NoOcrKnnFixes --PreprocessMorphKernel='(3,3)'" "KNN OCR Fix disabled"
LPR_test "$base/LPR/Database/Real_Images_150818/capture4122.jpeg" "28214201" "" "Angled image"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4355.jpeg" "4883330"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4380.jpeg" "7774250" "--NoOcrKnnFixes --PreprocessMorphKernel='(3,3)'" "KNN OCR Fix disabled"
LPR_test "$base/LPR/Database/Real_Images_150818/capture4381.jpeg" "7774250" "" "Blurred image"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4513.jpeg" "2777885"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4586.jpeg" "6356554"
EndCase
StartCase
LPR_test "$base/LPR/Database/Real_Images_150818/capture4683.jpeg" "6807312"
EndCase
# Average wall time per image.
# (fix) The original computed `100*$SECONDS/$imgs`, copying the 100x factor
# used for the percentage computations above into a seconds-per-image value,
# inflating it a hundredfold.
secPerImg=`echo "scale=2; $SECONDS/$imgs" | bc`
casespassRate=`echo "scale=2; 100*$casespass/$cases" | bc`
printf "Summary: Cases PassRate=$casespassRate%% ($casespass/$cases)\n"
printf "Elapsed time: ${SECONDS}sec (${secPerImg}sec per image )\n\n"
|
<gh_stars>1-10
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: lorawan-stack/api/devicerepository.proto
package ttnpb
import (
context "context"
fmt "fmt"
_ "github.com/TheThingsIndustries/protoc-gen-go-json/annotations"
_ "github.com/envoyproxy/protoc-gen-validate/validate"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
types "github.com/gogo/protobuf/types"
golang_proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
math "math"
)
// NOTE(review): generated by protoc-gen-gogo — do not hand-edit; the
// comments below were added for review only.

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = golang_proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// KeyProvisioning is the generated Go enum for the proto enum of the same
// name: the method used to provision an end device's root keys.
type KeyProvisioning int32

const (
	// Unknown Key Provisioning.
	KeyProvisioning_KEY_PROVISIONING_UNKNOWN KeyProvisioning = 0
	// Custom Key Provisioning.
	KeyProvisioning_KEY_PROVISIONING_CUSTOM KeyProvisioning = 1
	// Key Provisioning from the Global Join Server.
	KeyProvisioning_KEY_PROVISIONING_JOIN_SERVER KeyProvisioning = 2
	// Key Provisioning from Manifest.
	KeyProvisioning_KEY_PROVISIONING_MANIFEST KeyProvisioning = 3
)

// KeyProvisioning_name maps enum values to their proto names.
var KeyProvisioning_name = map[int32]string{
	0: "KEY_PROVISIONING_UNKNOWN",
	1: "KEY_PROVISIONING_CUSTOM",
	2: "KEY_PROVISIONING_JOIN_SERVER",
	3: "KEY_PROVISIONING_MANIFEST",
}

// KeyProvisioning_value maps proto names back to enum values.
var KeyProvisioning_value = map[string]int32{
	"KEY_PROVISIONING_UNKNOWN":     0,
	"KEY_PROVISIONING_CUSTOM":      1,
	"KEY_PROVISIONING_JOIN_SERVER": 2,
	"KEY_PROVISIONING_MANIFEST":    3,
}

// String returns the proto name of x (e.g. "KEY_PROVISIONING_CUSTOM").
func (x KeyProvisioning) String() string {
	return proto.EnumName(KeyProvisioning_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and this enum's
// path within it ([]int{0}: first top-level enum of the .proto file).
func (KeyProvisioning) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{0}
}
// KeySecurity is the generated Go enum for the proto enum of the same
// name: how the end device's root keys are protected.
type KeySecurity int32

const (
	// Unknown key security.
	KeySecurity_KEY_SECURITY_UNKNOWN KeySecurity = 0
	// No key security.
	KeySecurity_KEY_SECURITY_NONE KeySecurity = 1
	// Read Protected key security.
	KeySecurity_KEY_SECURITY_READ_PROTECTED KeySecurity = 2
	// Key security using the Security Element.
	KeySecurity_KEY_SECURITY_SECURE_ELEMENT KeySecurity = 3
)

// KeySecurity_name maps enum values to their proto names.
var KeySecurity_name = map[int32]string{
	0: "KEY_SECURITY_UNKNOWN",
	1: "KEY_SECURITY_NONE",
	2: "KEY_SECURITY_READ_PROTECTED",
	3: "KEY_SECURITY_SECURE_ELEMENT",
}

// KeySecurity_value maps proto names back to enum values.
var KeySecurity_value = map[string]int32{
	"KEY_SECURITY_UNKNOWN":        0,
	"KEY_SECURITY_NONE":           1,
	"KEY_SECURITY_READ_PROTECTED": 2,
	"KEY_SECURITY_SECURE_ELEMENT": 3,
}

// String returns the proto name of x (e.g. "KEY_SECURITY_NONE").
func (x KeySecurity) String() string {
	return proto.EnumName(KeySecurity_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor and this enum's
// path within it ([]int{1}: second top-level enum of the .proto file).
func (KeySecurity) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1}
}
// EndDeviceBrand is the generated message describing an end-device brand
// listed in the Device Repository.
type EndDeviceBrand struct {
	// Brand identifier, as specified in the Device Repository.
	BrandId string `protobuf:"bytes,1,opt,name=brand_id,json=brandId,proto3" json:"brand_id,omitempty"`
	// Brand name.
	Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	// Private Enterprise Number (PEN) assigned by IANA.
	PrivateEnterpriseNumber uint32 `protobuf:"varint,3,opt,name=private_enterprise_number,json=privateEnterpriseNumber,proto3" json:"private_enterprise_number,omitempty"`
	// Organization Unique Identifiers (OUI) assigned by IEEE.
	OrganizationUniqueIdentifiers []string `protobuf:"bytes,4,rep,name=organization_unique_identifiers,json=organizationUniqueIdentifiers,proto3" json:"organization_unique_identifiers,omitempty"`
	// VendorID managed by the LoRa Alliance, as defined in TR005.
	LoraAllianceVendorId uint32 `protobuf:"varint,5,opt,name=lora_alliance_vendor_id,json=loraAllianceVendorId,proto3" json:"lora_alliance_vendor_id,omitempty"`
	// Brand website URL.
	Website string `protobuf:"bytes,6,opt,name=website,proto3" json:"website,omitempty"`
	// Contact email address.
	Email string `protobuf:"bytes,7,opt,name=email,proto3" json:"email,omitempty"`
	// Path to brand logo.
	Logo string `protobuf:"bytes,8,opt,name=logo,proto3" json:"logo,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset/String/ProtoMessage implement proto.Message; Descriptor returns
// the compressed file descriptor and this message's index within it.
func (m *EndDeviceBrand) Reset()         { *m = EndDeviceBrand{} }
func (m *EndDeviceBrand) String() string { return proto.CompactTextString(m) }
func (*EndDeviceBrand) ProtoMessage()    {}
func (*EndDeviceBrand) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{0}
}

// XXX_* methods delegate wire (un)marshaling to the generated message
// info table; they are called by the protobuf runtime, not user code.
func (m *EndDeviceBrand) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceBrand.Unmarshal(m, b)
}
func (m *EndDeviceBrand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceBrand.Marshal(b, m, deterministic)
}
func (m *EndDeviceBrand) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceBrand.Merge(m, src)
}
func (m *EndDeviceBrand) XXX_Size() int {
	return xxx_messageInfo_EndDeviceBrand.Size(m)
}
func (m *EndDeviceBrand) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceBrand.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceBrand proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceBrand) GetBrandId() string {
	if m != nil {
		return m.BrandId
	}
	return ""
}

func (m *EndDeviceBrand) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *EndDeviceBrand) GetPrivateEnterpriseNumber() uint32 {
	if m != nil {
		return m.PrivateEnterpriseNumber
	}
	return 0
}

func (m *EndDeviceBrand) GetOrganizationUniqueIdentifiers() []string {
	if m != nil {
		return m.OrganizationUniqueIdentifiers
	}
	return nil
}

func (m *EndDeviceBrand) GetLoraAllianceVendorId() uint32 {
	if m != nil {
		return m.LoraAllianceVendorId
	}
	return 0
}

func (m *EndDeviceBrand) GetWebsite() string {
	if m != nil {
		return m.Website
	}
	return ""
}

func (m *EndDeviceBrand) GetEmail() string {
	if m != nil {
		return m.Email
	}
	return ""
}

func (m *EndDeviceBrand) GetLogo() string {
	if m != nil {
		return m.Logo
	}
	return ""
}
// EndDeviceModel is the generated message describing an end-device model
// (one product of a brand) in the Device Repository.
type EndDeviceModel struct {
	// Brand identifier, as defined in the Device Repository.
	BrandId string `protobuf:"bytes,1,opt,name=brand_id,json=brandId,proto3" json:"brand_id,omitempty"`
	// Model identifier, as defined in the Device Repository.
	ModelId string `protobuf:"bytes,2,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"`
	// Model name, as defined in the Device Repository.
	Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
	// Model description.
	Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
	// Available hardware versions.
	HardwareVersions []*EndDeviceModel_HardwareVersion `protobuf:"bytes,5,rep,name=hardware_versions,json=hardwareVersions,proto3" json:"hardware_versions,omitempty"`
	// Available firmware versions.
	FirmwareVersions []*EndDeviceModel_FirmwareVersion `protobuf:"bytes,6,rep,name=firmware_versions,json=firmwareVersions,proto3" json:"firmware_versions,omitempty"`
	// List of sensors included in the device.
	Sensors []string `protobuf:"bytes,7,rep,name=sensors,proto3" json:"sensors,omitempty"`
	// Device dimensions.
	Dimensions *EndDeviceModel_Dimensions `protobuf:"bytes,8,opt,name=dimensions,proto3" json:"dimensions,omitempty"`
	// Device weight (gram).
	Weight *types.FloatValue `protobuf:"bytes,9,opt,name=weight,proto3" json:"weight,omitempty"`
	// Device battery information.
	Battery *EndDeviceModel_Battery `protobuf:"bytes,10,opt,name=battery,proto3" json:"battery,omitempty"`
	// Device operating conditions.
	OperatingConditions *EndDeviceModel_OperatingConditions `protobuf:"bytes,11,opt,name=operating_conditions,json=operatingConditions,proto3" json:"operating_conditions,omitempty"`
	// Device IP rating code.
	IpCode string `protobuf:"bytes,12,opt,name=ip_code,json=ipCode,proto3" json:"ip_code,omitempty"`
	// Supported key provisioning methods.
	KeyProvisioning []KeyProvisioning `protobuf:"varint,13,rep,packed,name=key_provisioning,json=keyProvisioning,proto3,enum=ttn.lorawan.v3.KeyProvisioning" json:"key_provisioning,omitempty"`
	// Device key security.
	KeySecurity KeySecurity `protobuf:"varint,14,opt,name=key_security,json=keySecurity,proto3,enum=ttn.lorawan.v3.KeySecurity" json:"key_security,omitempty"`
	// Device photos.
	Photos *EndDeviceModel_Photos `protobuf:"bytes,15,opt,name=photos,proto3" json:"photos,omitempty"`
	// Device videos.
	Videos *EndDeviceModel_Videos `protobuf:"bytes,16,opt,name=videos,proto3" json:"videos,omitempty"`
	// Device information page URL.
	ProductUrl string `protobuf:"bytes,17,opt,name=product_url,json=productUrl,proto3" json:"product_url,omitempty"`
	// Device datasheet URL.
	DatasheetUrl string `protobuf:"bytes,18,opt,name=datasheet_url,json=datasheetUrl,proto3" json:"datasheet_url,omitempty"`
	// Reseller URLs.
	Resellers []*EndDeviceModel_Reseller `protobuf:"bytes,19,rep,name=resellers,proto3" json:"resellers,omitempty"`
	// List of standards the device is compliant with.
	Compliances *EndDeviceModel_Compliances `protobuf:"bytes,20,opt,name=compliances,proto3" json:"compliances,omitempty"`
	// List of any additional radios included in the device.
	AdditionalRadios []string `protobuf:"bytes,21,rep,name=additional_radios,json=additionalRadios,proto3" json:"additional_radios,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset/String/ProtoMessage implement proto.Message; Descriptor returns
// the compressed file descriptor and this message's index within it.
func (m *EndDeviceModel) Reset()         { *m = EndDeviceModel{} }
func (m *EndDeviceModel) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel) ProtoMessage()    {}
func (*EndDeviceModel) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1}
}

// XXX_* methods delegate wire (un)marshaling to the generated message
// info table; they are called by the protobuf runtime, not user code.
func (m *EndDeviceModel) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel.Unmarshal(m, b)
}
func (m *EndDeviceModel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel.Merge(m, src)
}
func (m *EndDeviceModel) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel.Size(m)
}
func (m *EndDeviceModel) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil (enum getters return the UNKNOWN value).
func (m *EndDeviceModel) GetBrandId() string {
	if m != nil {
		return m.BrandId
	}
	return ""
}

func (m *EndDeviceModel) GetModelId() string {
	if m != nil {
		return m.ModelId
	}
	return ""
}

func (m *EndDeviceModel) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *EndDeviceModel) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}

func (m *EndDeviceModel) GetHardwareVersions() []*EndDeviceModel_HardwareVersion {
	if m != nil {
		return m.HardwareVersions
	}
	return nil
}

func (m *EndDeviceModel) GetFirmwareVersions() []*EndDeviceModel_FirmwareVersion {
	if m != nil {
		return m.FirmwareVersions
	}
	return nil
}

func (m *EndDeviceModel) GetSensors() []string {
	if m != nil {
		return m.Sensors
	}
	return nil
}

func (m *EndDeviceModel) GetDimensions() *EndDeviceModel_Dimensions {
	if m != nil {
		return m.Dimensions
	}
	return nil
}

func (m *EndDeviceModel) GetWeight() *types.FloatValue {
	if m != nil {
		return m.Weight
	}
	return nil
}

func (m *EndDeviceModel) GetBattery() *EndDeviceModel_Battery {
	if m != nil {
		return m.Battery
	}
	return nil
}

func (m *EndDeviceModel) GetOperatingConditions() *EndDeviceModel_OperatingConditions {
	if m != nil {
		return m.OperatingConditions
	}
	return nil
}

func (m *EndDeviceModel) GetIpCode() string {
	if m != nil {
		return m.IpCode
	}
	return ""
}

func (m *EndDeviceModel) GetKeyProvisioning() []KeyProvisioning {
	if m != nil {
		return m.KeyProvisioning
	}
	return nil
}

func (m *EndDeviceModel) GetKeySecurity() KeySecurity {
	if m != nil {
		return m.KeySecurity
	}
	return KeySecurity_KEY_SECURITY_UNKNOWN
}

func (m *EndDeviceModel) GetPhotos() *EndDeviceModel_Photos {
	if m != nil {
		return m.Photos
	}
	return nil
}

func (m *EndDeviceModel) GetVideos() *EndDeviceModel_Videos {
	if m != nil {
		return m.Videos
	}
	return nil
}

func (m *EndDeviceModel) GetProductUrl() string {
	if m != nil {
		return m.ProductUrl
	}
	return ""
}

func (m *EndDeviceModel) GetDatasheetUrl() string {
	if m != nil {
		return m.DatasheetUrl
	}
	return ""
}

func (m *EndDeviceModel) GetResellers() []*EndDeviceModel_Reseller {
	if m != nil {
		return m.Resellers
	}
	return nil
}

func (m *EndDeviceModel) GetCompliances() *EndDeviceModel_Compliances {
	if m != nil {
		return m.Compliances
	}
	return nil
}

func (m *EndDeviceModel) GetAdditionalRadios() []string {
	if m != nil {
		return m.AdditionalRadios
	}
	return nil
}
// EndDeviceModel_HardwareVersion is the generated nested message for one
// hardware version of an end-device model.
type EndDeviceModel_HardwareVersion struct {
	// Hardware version string.
	Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"`
	// Numberic hardware revision number.
	Numeric uint32 `protobuf:"varint,2,opt,name=numeric,proto3" json:"numeric,omitempty"`
	// Hardware part number.
	PartNumber string `protobuf:"bytes,3,opt,name=part_number,json=partNumber,proto3" json:"part_number,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 0} is the
// first nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_HardwareVersion) Reset()         { *m = EndDeviceModel_HardwareVersion{} }
func (m *EndDeviceModel_HardwareVersion) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_HardwareVersion) ProtoMessage()    {}
func (*EndDeviceModel_HardwareVersion) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 0}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_HardwareVersion) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_HardwareVersion.Unmarshal(m, b)
}
func (m *EndDeviceModel_HardwareVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_HardwareVersion.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_HardwareVersion) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_HardwareVersion.Merge(m, src)
}
func (m *EndDeviceModel_HardwareVersion) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_HardwareVersion.Size(m)
}
func (m *EndDeviceModel_HardwareVersion) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_HardwareVersion.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_HardwareVersion proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_HardwareVersion) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}

func (m *EndDeviceModel_HardwareVersion) GetNumeric() uint32 {
	if m != nil {
		return m.Numeric
	}
	return 0
}

func (m *EndDeviceModel_HardwareVersion) GetPartNumber() string {
	if m != nil {
		return m.PartNumber
	}
	return ""
}
// EndDeviceModel_FirmwareVersion is the generated nested message for one
// firmware version of an end-device model, including its per-region
// device profiles.
type EndDeviceModel_FirmwareVersion struct {
	// Firmware version string.
	Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"`
	// Numeric firmware revision number.
	Numeric uint32 `protobuf:"varint,2,opt,name=numeric,proto3" json:"numeric,omitempty"`
	// Hardware versions supported by this firmware version.
	SupportedHardwareVersions []string `protobuf:"bytes,3,rep,name=supported_hardware_versions,json=supportedHardwareVersions,proto3" json:"supported_hardware_versions,omitempty"`
	// Device profiles for each supported region (band).
	Profiles map[string]*EndDeviceModel_FirmwareVersion_Profile `protobuf:"bytes,4,rep,name=profiles,proto3" json:"profiles,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 1} is the
// second nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_FirmwareVersion) Reset()         { *m = EndDeviceModel_FirmwareVersion{} }
func (m *EndDeviceModel_FirmwareVersion) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_FirmwareVersion) ProtoMessage()    {}
func (*EndDeviceModel_FirmwareVersion) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 1}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_FirmwareVersion) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion.Unmarshal(m, b)
}
func (m *EndDeviceModel_FirmwareVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_FirmwareVersion) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_FirmwareVersion.Merge(m, src)
}
func (m *EndDeviceModel_FirmwareVersion) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion.Size(m)
}
func (m *EndDeviceModel_FirmwareVersion) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_FirmwareVersion.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_FirmwareVersion proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_FirmwareVersion) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}

func (m *EndDeviceModel_FirmwareVersion) GetNumeric() uint32 {
	if m != nil {
		return m.Numeric
	}
	return 0
}

func (m *EndDeviceModel_FirmwareVersion) GetSupportedHardwareVersions() []string {
	if m != nil {
		return m.SupportedHardwareVersions
	}
	return nil
}

func (m *EndDeviceModel_FirmwareVersion) GetProfiles() map[string]*EndDeviceModel_FirmwareVersion_Profile {
	if m != nil {
		return m.Profiles
	}
	return nil
}
// EndDeviceModel_FirmwareVersion_Profile is the generated nested message
// for a device profile of a firmware version in a particular region.
type EndDeviceModel_FirmwareVersion_Profile struct {
	// Vendor ID of the profile, as defined in the Device Repository.
	// If this value is set, the profile is loaded from this vendor's folder.
	// If this value is not set, the profile is loaded from the current (end device's) vendor.
	VendorId string `protobuf:"bytes,4,opt,name=vendor_id,json=vendorId,proto3" json:"vendor_id,omitempty"`
	// Profile identifier, as defined in the Device Repository.
	ProfileId string `protobuf:"bytes,1,opt,name=profile_id,json=profileId,proto3" json:"profile_id,omitempty"`
	// Whether the device is LoRaWAN certified.
	LorawanCertified bool `protobuf:"varint,2,opt,name=lorawan_certified,json=lorawanCertified,proto3" json:"lorawan_certified,omitempty"`
	// Payload formatter codec identifier, as defined in the Device Repository.
	CodecId string `protobuf:"bytes,3,opt,name=codec_id,json=codecId,proto3" json:"codec_id,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 1, 0} is the
// first message nested inside EndDeviceModel.FirmwareVersion.
func (m *EndDeviceModel_FirmwareVersion_Profile) Reset() {
	*m = EndDeviceModel_FirmwareVersion_Profile{}
}
func (m *EndDeviceModel_FirmwareVersion_Profile) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_FirmwareVersion_Profile) ProtoMessage()    {}
func (*EndDeviceModel_FirmwareVersion_Profile) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 1, 0}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_FirmwareVersion_Profile) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile.Unmarshal(m, b)
}
func (m *EndDeviceModel_FirmwareVersion_Profile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_FirmwareVersion_Profile) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile.Merge(m, src)
}
func (m *EndDeviceModel_FirmwareVersion_Profile) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile.Size(m)
}
func (m *EndDeviceModel_FirmwareVersion_Profile) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_FirmwareVersion_Profile proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_FirmwareVersion_Profile) GetVendorId() string {
	if m != nil {
		return m.VendorId
	}
	return ""
}

func (m *EndDeviceModel_FirmwareVersion_Profile) GetProfileId() string {
	if m != nil {
		return m.ProfileId
	}
	return ""
}

func (m *EndDeviceModel_FirmwareVersion_Profile) GetLorawanCertified() bool {
	if m != nil {
		return m.LorawanCertified
	}
	return false
}

func (m *EndDeviceModel_FirmwareVersion_Profile) GetCodecId() string {
	if m != nil {
		return m.CodecId
	}
	return ""
}
// EndDeviceModel_Dimensions is the generated nested message for the
// physical dimensions of a device; all values are optional wrappers.
type EndDeviceModel_Dimensions struct {
	// Device width (mm).
	Width *types.FloatValue `protobuf:"bytes,1,opt,name=width,proto3" json:"width,omitempty"`
	// Device height (mm).
	Height *types.FloatValue `protobuf:"bytes,2,opt,name=height,proto3" json:"height,omitempty"`
	// Device diameter (mm).
	Diameter *types.FloatValue `protobuf:"bytes,3,opt,name=diameter,proto3" json:"diameter,omitempty"`
	// Device length (mm).
	Length *types.FloatValue `protobuf:"bytes,4,opt,name=length,proto3" json:"length,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 2} is the
// third nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_Dimensions) Reset()         { *m = EndDeviceModel_Dimensions{} }
func (m *EndDeviceModel_Dimensions) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_Dimensions) ProtoMessage()    {}
func (*EndDeviceModel_Dimensions) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 2}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_Dimensions) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Dimensions.Unmarshal(m, b)
}
func (m *EndDeviceModel_Dimensions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Dimensions.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Dimensions) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Dimensions.Merge(m, src)
}
func (m *EndDeviceModel_Dimensions) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Dimensions.Size(m)
}
func (m *EndDeviceModel_Dimensions) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Dimensions.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_Dimensions proto.InternalMessageInfo

// Getters are nil-safe: each returns nil when the receiver is nil.
func (m *EndDeviceModel_Dimensions) GetWidth() *types.FloatValue {
	if m != nil {
		return m.Width
	}
	return nil
}

func (m *EndDeviceModel_Dimensions) GetHeight() *types.FloatValue {
	if m != nil {
		return m.Height
	}
	return nil
}

func (m *EndDeviceModel_Dimensions) GetDiameter() *types.FloatValue {
	if m != nil {
		return m.Diameter
	}
	return nil
}

func (m *EndDeviceModel_Dimensions) GetLength() *types.FloatValue {
	if m != nil {
		return m.Length
	}
	return nil
}
// EndDeviceModel_Battery is the generated nested message describing a
// device's battery.
type EndDeviceModel_Battery struct {
	// Whether the device battery can be replaced.
	Replaceable *types.BoolValue `protobuf:"bytes,1,opt,name=replaceable,proto3" json:"replaceable,omitempty"`
	// Battery type.
	Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 3} is the
// fourth nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_Battery) Reset()         { *m = EndDeviceModel_Battery{} }
func (m *EndDeviceModel_Battery) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_Battery) ProtoMessage()    {}
func (*EndDeviceModel_Battery) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 3}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_Battery) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Battery.Unmarshal(m, b)
}
func (m *EndDeviceModel_Battery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Battery.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Battery) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Battery.Merge(m, src)
}
func (m *EndDeviceModel_Battery) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Battery.Size(m)
}
func (m *EndDeviceModel_Battery) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Battery.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_Battery proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_Battery) GetReplaceable() *types.BoolValue {
	if m != nil {
		return m.Replaceable
	}
	return nil
}

func (m *EndDeviceModel_Battery) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}
// EndDeviceModel_OperatingConditions is the generated nested message for
// the environmental operating conditions of a device.
type EndDeviceModel_OperatingConditions struct {
	// Temperature operating conditions (Celsius).
	Temperature *EndDeviceModel_OperatingConditions_Limits `protobuf:"bytes,1,opt,name=temperature,proto3" json:"temperature,omitempty"`
	// Relative humidity operating conditions (Fraction, in range [0, 1]).
	RelativeHumidity *EndDeviceModel_OperatingConditions_Limits `protobuf:"bytes,2,opt,name=relative_humidity,json=relativeHumidity,proto3" json:"relative_humidity,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 4} is the
// fifth nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_OperatingConditions) Reset()         { *m = EndDeviceModel_OperatingConditions{} }
func (m *EndDeviceModel_OperatingConditions) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_OperatingConditions) ProtoMessage()    {}
func (*EndDeviceModel_OperatingConditions) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 4}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_OperatingConditions) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions.Unmarshal(m, b)
}
func (m *EndDeviceModel_OperatingConditions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_OperatingConditions) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_OperatingConditions.Merge(m, src)
}
func (m *EndDeviceModel_OperatingConditions) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions.Size(m)
}
func (m *EndDeviceModel_OperatingConditions) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_OperatingConditions.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_OperatingConditions proto.InternalMessageInfo

// Getters are nil-safe: each returns nil when the receiver is nil.
func (m *EndDeviceModel_OperatingConditions) GetTemperature() *EndDeviceModel_OperatingConditions_Limits {
	if m != nil {
		return m.Temperature
	}
	return nil
}

func (m *EndDeviceModel_OperatingConditions) GetRelativeHumidity() *EndDeviceModel_OperatingConditions_Limits {
	if m != nil {
		return m.RelativeHumidity
	}
	return nil
}
// EndDeviceModel_OperatingConditions_Limits is the generated nested
// message for a min/max range of an operating condition.
type EndDeviceModel_OperatingConditions_Limits struct {
	// Min value of operating conditions range.
	Min *types.FloatValue `protobuf:"bytes,1,opt,name=min,proto3" json:"min,omitempty"`
	// Max value of operating conditions range.
	Max *types.FloatValue `protobuf:"bytes,2,opt,name=max,proto3" json:"max,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 4, 0} is the
// first message nested inside EndDeviceModel.OperatingConditions.
func (m *EndDeviceModel_OperatingConditions_Limits) Reset() {
	*m = EndDeviceModel_OperatingConditions_Limits{}
}
func (m *EndDeviceModel_OperatingConditions_Limits) String() string {
	return proto.CompactTextString(m)
}
func (*EndDeviceModel_OperatingConditions_Limits) ProtoMessage() {}
func (*EndDeviceModel_OperatingConditions_Limits) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 4, 0}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_OperatingConditions_Limits) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits.Unmarshal(m, b)
}
func (m *EndDeviceModel_OperatingConditions_Limits) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_OperatingConditions_Limits) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits.Merge(m, src)
}
func (m *EndDeviceModel_OperatingConditions_Limits) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits.Size(m)
}
func (m *EndDeviceModel_OperatingConditions_Limits) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_OperatingConditions_Limits proto.InternalMessageInfo

// Getters are nil-safe: each returns nil when the receiver is nil.
func (m *EndDeviceModel_OperatingConditions_Limits) GetMin() *types.FloatValue {
	if m != nil {
		return m.Min
	}
	return nil
}

func (m *EndDeviceModel_OperatingConditions_Limits) GetMax() *types.FloatValue {
	if m != nil {
		return m.Max
	}
	return nil
}
// EndDeviceModel_Photos is the generated nested message listing device
// photo paths.
type EndDeviceModel_Photos struct {
	// Main device photo.
	Main string `protobuf:"bytes,1,opt,name=main,proto3" json:"main,omitempty"`
	// List of other device photos.
	Other []string `protobuf:"bytes,2,rep,name=other,proto3" json:"other,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 5} is the
// sixth nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_Photos) Reset()         { *m = EndDeviceModel_Photos{} }
func (m *EndDeviceModel_Photos) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_Photos) ProtoMessage()    {}
func (*EndDeviceModel_Photos) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 5}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_Photos) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Photos.Unmarshal(m, b)
}
func (m *EndDeviceModel_Photos) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Photos.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Photos) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Photos.Merge(m, src)
}
func (m *EndDeviceModel_Photos) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Photos.Size(m)
}
func (m *EndDeviceModel_Photos) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Photos.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_Photos proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_Photos) GetMain() string {
	if m != nil {
		return m.Main
	}
	return ""
}

func (m *EndDeviceModel_Photos) GetOther() []string {
	if m != nil {
		return m.Other
	}
	return nil
}
// EndDeviceModel_Videos is the generated nested message listing device
// video links; it mirrors EndDeviceModel_Photos.
type EndDeviceModel_Videos struct {
	// Link to main device video.
	Main string `protobuf:"bytes,1,opt,name=main,proto3" json:"main,omitempty"`
	// Links to other device videos.
	Other []string `protobuf:"bytes,2,rep,name=other,proto3" json:"other,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 6} is the
// seventh nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_Videos) Reset()         { *m = EndDeviceModel_Videos{} }
func (m *EndDeviceModel_Videos) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_Videos) ProtoMessage()    {}
func (*EndDeviceModel_Videos) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 6}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_Videos) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Videos.Unmarshal(m, b)
}
func (m *EndDeviceModel_Videos) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Videos.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Videos) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Videos.Merge(m, src)
}
func (m *EndDeviceModel_Videos) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Videos.Size(m)
}
func (m *EndDeviceModel_Videos) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Videos.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_Videos proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_Videos) GetMain() string {
	if m != nil {
		return m.Main
	}
	return ""
}

func (m *EndDeviceModel_Videos) GetOther() []string {
	if m != nil {
		return m.Other
	}
	return nil
}
// EndDeviceModel_Reseller is the generated nested message describing a
// reseller of the device.
type EndDeviceModel_Reseller struct {
	// Reseller name.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Reseller regions.
	Region []string `protobuf:"bytes,2,rep,name=region,proto3" json:"region,omitempty"`
	// Reseller URL.
	Url string `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"`
	// Internal bookkeeping fields used by the protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing; Descriptor path []int{1, 7} is the
// eighth nested message inside message index 1 (EndDeviceModel).
func (m *EndDeviceModel_Reseller) Reset()         { *m = EndDeviceModel_Reseller{} }
func (m *EndDeviceModel_Reseller) String() string { return proto.CompactTextString(m) }
func (*EndDeviceModel_Reseller) ProtoMessage()    {}
func (*EndDeviceModel_Reseller) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 7}
}

// XXX_* methods delegate wire (un)marshaling to the message info table.
func (m *EndDeviceModel_Reseller) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Reseller.Unmarshal(m, b)
}
func (m *EndDeviceModel_Reseller) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Reseller.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Reseller) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Reseller.Merge(m, src)
}
func (m *EndDeviceModel_Reseller) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Reseller.Size(m)
}
func (m *EndDeviceModel_Reseller) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Reseller.DiscardUnknown(m)
}

var xxx_messageInfo_EndDeviceModel_Reseller proto.InternalMessageInfo

// Getters are nil-safe: each returns the field's zero value when the
// receiver is nil.
func (m *EndDeviceModel_Reseller) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *EndDeviceModel_Reseller) GetRegion() []string {
	if m != nil {
		return m.Region
	}
	return nil
}

func (m *EndDeviceModel_Reseller) GetUrl() string {
	if m != nil {
		return m.Url
	}
	return ""
}
// EndDeviceModel_Compliances is a protoc-generated message listing the safety
// and radio-equipment standards an end device model complies with.
type EndDeviceModel_Compliances struct {
	// List of safety standards the device is compliant with.
	Safety []*EndDeviceModel_Compliances_Compliance `protobuf:"bytes,1,rep,name=safety,proto3" json:"safety,omitempty"`
	// List of radio equipment standards the device is compliant with.
	RadioEquipment []*EndDeviceModel_Compliances_Compliance `protobuf:"bytes,2,rep,name=radio_equipment,json=radioEquipment,proto3" json:"radio_equipment,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *EndDeviceModel_Compliances) Reset() { *m = EndDeviceModel_Compliances{} }

// String renders the message in the proto compact text format.
func (m *EndDeviceModel_Compliances) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*EndDeviceModel_Compliances) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// index path within it.
func (*EndDeviceModel_Compliances) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 8}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *EndDeviceModel_Compliances) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Compliances.Unmarshal(m, b)
}
func (m *EndDeviceModel_Compliances) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Compliances.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Compliances) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Compliances.Merge(m, src)
}
func (m *EndDeviceModel_Compliances) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Compliances.Size(m)
}
func (m *EndDeviceModel_Compliances) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Compliances.DiscardUnknown(m)
}

// xxx_messageInfo_EndDeviceModel_Compliances caches reflection-derived codec state.
var xxx_messageInfo_EndDeviceModel_Compliances proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns nil when the message is nil.
func (m *EndDeviceModel_Compliances) GetSafety() []*EndDeviceModel_Compliances_Compliance {
	if m != nil {
		return m.Safety
	}
	return nil
}
func (m *EndDeviceModel_Compliances) GetRadioEquipment() []*EndDeviceModel_Compliances_Compliance {
	if m != nil {
		return m.RadioEquipment
	}
	return nil
}
// EndDeviceModel_Compliances_Compliance is a protoc-generated message holding
// a single compliance entry: issuing body, norm, standard, and version.
type EndDeviceModel_Compliances_Compliance struct {
	Body     string `protobuf:"bytes,1,opt,name=body,proto3" json:"body,omitempty"`
	Norm     string `protobuf:"bytes,2,opt,name=norm,proto3" json:"norm,omitempty"`
	Standard string `protobuf:"bytes,3,opt,name=standard,proto3" json:"standard,omitempty"`
	Version  string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *EndDeviceModel_Compliances_Compliance) Reset() { *m = EndDeviceModel_Compliances_Compliance{} }

// String renders the message in the proto compact text format.
func (m *EndDeviceModel_Compliances_Compliance) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*EndDeviceModel_Compliances_Compliance) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// index path within it (doubly nested: [1, 8, 0]).
func (*EndDeviceModel_Compliances_Compliance) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{1, 8, 0}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *EndDeviceModel_Compliances_Compliance) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EndDeviceModel_Compliances_Compliance.Unmarshal(m, b)
}
func (m *EndDeviceModel_Compliances_Compliance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EndDeviceModel_Compliances_Compliance.Marshal(b, m, deterministic)
}
func (m *EndDeviceModel_Compliances_Compliance) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EndDeviceModel_Compliances_Compliance.Merge(m, src)
}
func (m *EndDeviceModel_Compliances_Compliance) XXX_Size() int {
	return xxx_messageInfo_EndDeviceModel_Compliances_Compliance.Size(m)
}
func (m *EndDeviceModel_Compliances_Compliance) XXX_DiscardUnknown() {
	xxx_messageInfo_EndDeviceModel_Compliances_Compliance.DiscardUnknown(m)
}

// xxx_messageInfo_EndDeviceModel_Compliances_Compliance caches reflection-derived codec state.
var xxx_messageInfo_EndDeviceModel_Compliances_Compliance proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns "" when the message is nil.
func (m *EndDeviceModel_Compliances_Compliance) GetBody() string {
	if m != nil {
		return m.Body
	}
	return ""
}
func (m *EndDeviceModel_Compliances_Compliance) GetNorm() string {
	if m != nil {
		return m.Norm
	}
	return ""
}
func (m *EndDeviceModel_Compliances_Compliance) GetStandard() string {
	if m != nil {
		return m.Standard
	}
	return ""
}
func (m *EndDeviceModel_Compliances_Compliance) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}
// GetEndDeviceBrandRequest is a protoc-generated request message for fetching
// a single end device brand from the Device Repository, selected by brand ID,
// with an optional field mask restricting the returned fields.
type GetEndDeviceBrandRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// Brand identifier, as defined in the Device Repository.
	BrandId string `protobuf:"bytes,2,opt,name=brand_id,json=brandId,proto3" json:"brand_id,omitempty"`
	// Field mask paths.
	FieldMask *types.FieldMask `protobuf:"bytes,3,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *GetEndDeviceBrandRequest) Reset() { *m = GetEndDeviceBrandRequest{} }

// String renders the message in the proto compact text format.
func (m *GetEndDeviceBrandRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*GetEndDeviceBrandRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*GetEndDeviceBrandRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{2}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *GetEndDeviceBrandRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetEndDeviceBrandRequest.Unmarshal(m, b)
}
func (m *GetEndDeviceBrandRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetEndDeviceBrandRequest.Marshal(b, m, deterministic)
}
func (m *GetEndDeviceBrandRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetEndDeviceBrandRequest.Merge(m, src)
}
func (m *GetEndDeviceBrandRequest) XXX_Size() int {
	return xxx_messageInfo_GetEndDeviceBrandRequest.Size(m)
}
func (m *GetEndDeviceBrandRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetEndDeviceBrandRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetEndDeviceBrandRequest caches reflection-derived codec state.
var xxx_messageInfo_GetEndDeviceBrandRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *GetEndDeviceBrandRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *GetEndDeviceBrandRequest) GetBrandId() string {
	if m != nil {
		return m.BrandId
	}
	return ""
}
func (m *GetEndDeviceBrandRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ListEndDeviceBrandsRequest is a protoc-generated request message for listing
// end device brands, with pagination (limit/page), ordering, a free-text
// search query, and a field mask restricting the returned fields.
type ListEndDeviceBrandsRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// Limit the number of results per page.
	Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
	// Page number for pagination. 0 is interpreted as 1.
	Page uint32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
	// Order (for pagination)
	OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"`
	// Search for brands matching a query string.
	Search string `protobuf:"bytes,5,opt,name=search,proto3" json:"search,omitempty"`
	// Field mask paths.
	FieldMask *types.FieldMask `protobuf:"bytes,6,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *ListEndDeviceBrandsRequest) Reset() { *m = ListEndDeviceBrandsRequest{} }

// String renders the message in the proto compact text format.
func (m *ListEndDeviceBrandsRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*ListEndDeviceBrandsRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*ListEndDeviceBrandsRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{3}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *ListEndDeviceBrandsRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListEndDeviceBrandsRequest.Unmarshal(m, b)
}
func (m *ListEndDeviceBrandsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListEndDeviceBrandsRequest.Marshal(b, m, deterministic)
}
func (m *ListEndDeviceBrandsRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListEndDeviceBrandsRequest.Merge(m, src)
}
func (m *ListEndDeviceBrandsRequest) XXX_Size() int {
	return xxx_messageInfo_ListEndDeviceBrandsRequest.Size(m)
}
func (m *ListEndDeviceBrandsRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListEndDeviceBrandsRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListEndDeviceBrandsRequest caches reflection-derived codec state.
var xxx_messageInfo_ListEndDeviceBrandsRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *ListEndDeviceBrandsRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *ListEndDeviceBrandsRequest) GetLimit() uint32 {
	if m != nil {
		return m.Limit
	}
	return 0
}
func (m *ListEndDeviceBrandsRequest) GetPage() uint32 {
	if m != nil {
		return m.Page
	}
	return 0
}
func (m *ListEndDeviceBrandsRequest) GetOrderBy() string {
	if m != nil {
		return m.OrderBy
	}
	return ""
}
func (m *ListEndDeviceBrandsRequest) GetSearch() string {
	if m != nil {
		return m.Search
	}
	return ""
}
func (m *ListEndDeviceBrandsRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// GetEndDeviceModelRequest is a protoc-generated request message for fetching
// a single end device model from the Device Repository, selected by brand and
// model ID, with an optional field mask restricting the returned fields.
type GetEndDeviceModelRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// Brand identifier, as defined in the Device Repository.
	BrandId string `protobuf:"bytes,2,opt,name=brand_id,json=brandId,proto3" json:"brand_id,omitempty"`
	// Model identifier, as defined in the Device Repository.
	ModelId string `protobuf:"bytes,3,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"`
	// Field mask paths.
	FieldMask *types.FieldMask `protobuf:"bytes,4,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *GetEndDeviceModelRequest) Reset() { *m = GetEndDeviceModelRequest{} }

// String renders the message in the proto compact text format.
func (m *GetEndDeviceModelRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*GetEndDeviceModelRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*GetEndDeviceModelRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{4}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *GetEndDeviceModelRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetEndDeviceModelRequest.Unmarshal(m, b)
}
func (m *GetEndDeviceModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetEndDeviceModelRequest.Marshal(b, m, deterministic)
}
func (m *GetEndDeviceModelRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetEndDeviceModelRequest.Merge(m, src)
}
func (m *GetEndDeviceModelRequest) XXX_Size() int {
	return xxx_messageInfo_GetEndDeviceModelRequest.Size(m)
}
func (m *GetEndDeviceModelRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetEndDeviceModelRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetEndDeviceModelRequest caches reflection-derived codec state.
var xxx_messageInfo_GetEndDeviceModelRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *GetEndDeviceModelRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *GetEndDeviceModelRequest) GetBrandId() string {
	if m != nil {
		return m.BrandId
	}
	return ""
}
func (m *GetEndDeviceModelRequest) GetModelId() string {
	if m != nil {
		return m.ModelId
	}
	return ""
}
func (m *GetEndDeviceModelRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ListEndDeviceModelsRequest is a protoc-generated request message for listing
// end device models, optionally filtered by brand, with pagination
// (limit/page), ordering, a free-text search query, and a field mask.
type ListEndDeviceModelsRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// List end devices from a specific brand.
	BrandId string `protobuf:"bytes,2,opt,name=brand_id,json=brandId,proto3" json:"brand_id,omitempty"`
	// Limit the number of results per page.
	Limit uint32 `protobuf:"varint,3,opt,name=limit,proto3" json:"limit,omitempty"`
	// Page number for pagination. 0 is interpreted as 1.
	Page uint32 `protobuf:"varint,4,opt,name=page,proto3" json:"page,omitempty"`
	// Order end devices
	OrderBy string `protobuf:"bytes,5,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"`
	// List end devices matching a query string.
	Search string `protobuf:"bytes,6,opt,name=search,proto3" json:"search,omitempty"`
	// Field mask paths.
	FieldMask *types.FieldMask `protobuf:"bytes,7,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *ListEndDeviceModelsRequest) Reset() { *m = ListEndDeviceModelsRequest{} }

// String renders the message in the proto compact text format.
func (m *ListEndDeviceModelsRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*ListEndDeviceModelsRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*ListEndDeviceModelsRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{5}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *ListEndDeviceModelsRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListEndDeviceModelsRequest.Unmarshal(m, b)
}
func (m *ListEndDeviceModelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListEndDeviceModelsRequest.Marshal(b, m, deterministic)
}
func (m *ListEndDeviceModelsRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListEndDeviceModelsRequest.Merge(m, src)
}
func (m *ListEndDeviceModelsRequest) XXX_Size() int {
	return xxx_messageInfo_ListEndDeviceModelsRequest.Size(m)
}
func (m *ListEndDeviceModelsRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListEndDeviceModelsRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListEndDeviceModelsRequest caches reflection-derived codec state.
var xxx_messageInfo_ListEndDeviceModelsRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *ListEndDeviceModelsRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *ListEndDeviceModelsRequest) GetBrandId() string {
	if m != nil {
		return m.BrandId
	}
	return ""
}
func (m *ListEndDeviceModelsRequest) GetLimit() uint32 {
	if m != nil {
		return m.Limit
	}
	return 0
}
func (m *ListEndDeviceModelsRequest) GetPage() uint32 {
	if m != nil {
		return m.Page
	}
	return 0
}
func (m *ListEndDeviceModelsRequest) GetOrderBy() string {
	if m != nil {
		return m.OrderBy
	}
	return ""
}
func (m *ListEndDeviceModelsRequest) GetSearch() string {
	if m != nil {
		return m.Search
	}
	return ""
}
func (m *ListEndDeviceModelsRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// GetTemplateRequest is a protoc-generated request message for fetching an end
// device template, keyed by application and end device version identifiers.
type GetTemplateRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// End device version information.
	VersionIds *EndDeviceVersionIdentifiers `protobuf:"bytes,2,opt,name=version_ids,json=versionIds,proto3" json:"version_ids,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *GetTemplateRequest) Reset() { *m = GetTemplateRequest{} }

// String renders the message in the proto compact text format.
func (m *GetTemplateRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*GetTemplateRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*GetTemplateRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{6}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *GetTemplateRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetTemplateRequest.Unmarshal(m, b)
}
func (m *GetTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetTemplateRequest.Marshal(b, m, deterministic)
}
func (m *GetTemplateRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetTemplateRequest.Merge(m, src)
}
func (m *GetTemplateRequest) XXX_Size() int {
	return xxx_messageInfo_GetTemplateRequest.Size(m)
}
func (m *GetTemplateRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetTemplateRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetTemplateRequest caches reflection-derived codec state.
var xxx_messageInfo_GetTemplateRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns nil when the message is nil.
func (m *GetTemplateRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *GetTemplateRequest) GetVersionIds() *EndDeviceVersionIdentifiers {
	if m != nil {
		return m.VersionIds
	}
	return nil
}
// GetPayloadFormatterRequest is a protoc-generated request message for
// fetching a payload formatter for a given end device version, with an
// optional field mask restricting the returned fields.
type GetPayloadFormatterRequest struct {
	// Application identifiers.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// End device version information.
	VersionIds *EndDeviceVersionIdentifiers `protobuf:"bytes,2,opt,name=version_ids,json=versionIds,proto3" json:"version_ids,omitempty"`
	// Field mask paths.
	FieldMask *types.FieldMask `protobuf:"bytes,3,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *GetPayloadFormatterRequest) Reset() { *m = GetPayloadFormatterRequest{} }

// String renders the message in the proto compact text format.
func (m *GetPayloadFormatterRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*GetPayloadFormatterRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*GetPayloadFormatterRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{7}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *GetPayloadFormatterRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetPayloadFormatterRequest.Unmarshal(m, b)
}
func (m *GetPayloadFormatterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetPayloadFormatterRequest.Marshal(b, m, deterministic)
}
func (m *GetPayloadFormatterRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetPayloadFormatterRequest.Merge(m, src)
}
func (m *GetPayloadFormatterRequest) XXX_Size() int {
	return xxx_messageInfo_GetPayloadFormatterRequest.Size(m)
}
func (m *GetPayloadFormatterRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetPayloadFormatterRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetPayloadFormatterRequest caches reflection-derived codec state.
var xxx_messageInfo_GetPayloadFormatterRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns nil when the message is nil.
func (m *GetPayloadFormatterRequest) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}
func (m *GetPayloadFormatterRequest) GetVersionIds() *EndDeviceVersionIdentifiers {
	if m != nil {
		return m.VersionIds
	}
	return nil
}
func (m *GetPayloadFormatterRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ListEndDeviceBrandsResponse is a protoc-generated response message carrying
// the list of end device brands matching a ListEndDeviceBrandsRequest.
type ListEndDeviceBrandsResponse struct {
	Brands []*EndDeviceBrand `protobuf:"bytes,1,rep,name=brands,proto3" json:"brands,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *ListEndDeviceBrandsResponse) Reset() { *m = ListEndDeviceBrandsResponse{} }

// String renders the message in the proto compact text format.
func (m *ListEndDeviceBrandsResponse) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*ListEndDeviceBrandsResponse) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*ListEndDeviceBrandsResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{8}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *ListEndDeviceBrandsResponse) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListEndDeviceBrandsResponse.Unmarshal(m, b)
}
func (m *ListEndDeviceBrandsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListEndDeviceBrandsResponse.Marshal(b, m, deterministic)
}
func (m *ListEndDeviceBrandsResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListEndDeviceBrandsResponse.Merge(m, src)
}
func (m *ListEndDeviceBrandsResponse) XXX_Size() int {
	return xxx_messageInfo_ListEndDeviceBrandsResponse.Size(m)
}
func (m *ListEndDeviceBrandsResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_ListEndDeviceBrandsResponse.DiscardUnknown(m)
}

// xxx_messageInfo_ListEndDeviceBrandsResponse caches reflection-derived codec state.
var xxx_messageInfo_ListEndDeviceBrandsResponse proto.InternalMessageInfo

// GetBrands returns the Brands field; nil-receiver safe (returns nil).
func (m *ListEndDeviceBrandsResponse) GetBrands() []*EndDeviceBrand {
	if m != nil {
		return m.Brands
	}
	return nil
}
// ListEndDeviceModelsResponse is a protoc-generated response message carrying
// the list of end device models matching a ListEndDeviceModelsRequest.
type ListEndDeviceModelsResponse struct {
	Models []*EndDeviceModel `protobuf:"bytes,1,rep,name=models,proto3" json:"models,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *ListEndDeviceModelsResponse) Reset() { *m = ListEndDeviceModelsResponse{} }

// String renders the message in the proto compact text format.
func (m *ListEndDeviceModelsResponse) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*ListEndDeviceModelsResponse) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*ListEndDeviceModelsResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{9}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *ListEndDeviceModelsResponse) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListEndDeviceModelsResponse.Unmarshal(m, b)
}
func (m *ListEndDeviceModelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListEndDeviceModelsResponse.Marshal(b, m, deterministic)
}
func (m *ListEndDeviceModelsResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListEndDeviceModelsResponse.Merge(m, src)
}
func (m *ListEndDeviceModelsResponse) XXX_Size() int {
	return xxx_messageInfo_ListEndDeviceModelsResponse.Size(m)
}
func (m *ListEndDeviceModelsResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_ListEndDeviceModelsResponse.DiscardUnknown(m)
}

// xxx_messageInfo_ListEndDeviceModelsResponse caches reflection-derived codec state.
var xxx_messageInfo_ListEndDeviceModelsResponse proto.InternalMessageInfo

// GetModels returns the Models field; nil-receiver safe (returns nil).
func (m *ListEndDeviceModelsResponse) GetModels() []*EndDeviceModel {
	if m != nil {
		return m.Models
	}
	return nil
}
// EncodedMessagePayload is a protoc-generated message holding a wire-encoded
// LoRaWAN payload (FPort + raw FRMPayload bytes) together with any warnings
// and errors produced while handling it.
type EncodedMessagePayload struct {
	FPort      uint32   `protobuf:"varint,1,opt,name=f_port,json=fPort,proto3" json:"f_port,omitempty"`
	FrmPayload []byte   `protobuf:"bytes,2,opt,name=frm_payload,json=frmPayload,proto3" json:"frm_payload,omitempty"`
	Warnings   []string `protobuf:"bytes,3,rep,name=warnings,proto3" json:"warnings,omitempty"`
	Errors     []string `protobuf:"bytes,4,rep,name=errors,proto3" json:"errors,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *EncodedMessagePayload) Reset() { *m = EncodedMessagePayload{} }

// String renders the message in the proto compact text format.
func (m *EncodedMessagePayload) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*EncodedMessagePayload) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*EncodedMessagePayload) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{10}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *EncodedMessagePayload) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EncodedMessagePayload.Unmarshal(m, b)
}
func (m *EncodedMessagePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EncodedMessagePayload.Marshal(b, m, deterministic)
}
func (m *EncodedMessagePayload) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EncodedMessagePayload.Merge(m, src)
}
func (m *EncodedMessagePayload) XXX_Size() int {
	return xxx_messageInfo_EncodedMessagePayload.Size(m)
}
func (m *EncodedMessagePayload) XXX_DiscardUnknown() {
	xxx_messageInfo_EncodedMessagePayload.DiscardUnknown(m)
}

// xxx_messageInfo_EncodedMessagePayload caches reflection-derived codec state.
var xxx_messageInfo_EncodedMessagePayload proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *EncodedMessagePayload) GetFPort() uint32 {
	if m != nil {
		return m.FPort
	}
	return 0
}
func (m *EncodedMessagePayload) GetFrmPayload() []byte {
	if m != nil {
		return m.FrmPayload
	}
	return nil
}
func (m *EncodedMessagePayload) GetWarnings() []string {
	if m != nil {
		return m.Warnings
	}
	return nil
}
func (m *EncodedMessagePayload) GetErrors() []string {
	if m != nil {
		return m.Errors
	}
	return nil
}
// DecodedMessagePayload is a protoc-generated message holding a decoded
// payload as a free-form Struct together with any warnings and errors
// produced while decoding it.
type DecodedMessagePayload struct {
	Data     *types.Struct `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
	Warnings []string      `protobuf:"bytes,2,rep,name=warnings,proto3" json:"warnings,omitempty"`
	Errors   []string      `protobuf:"bytes,3,rep,name=errors,proto3" json:"errors,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *DecodedMessagePayload) Reset() { *m = DecodedMessagePayload{} }

// String renders the message in the proto compact text format.
func (m *DecodedMessagePayload) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*DecodedMessagePayload) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*DecodedMessagePayload) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{11}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *DecodedMessagePayload) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DecodedMessagePayload.Unmarshal(m, b)
}
func (m *DecodedMessagePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DecodedMessagePayload.Marshal(b, m, deterministic)
}
func (m *DecodedMessagePayload) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DecodedMessagePayload.Merge(m, src)
}
func (m *DecodedMessagePayload) XXX_Size() int {
	return xxx_messageInfo_DecodedMessagePayload.Size(m)
}
func (m *DecodedMessagePayload) XXX_DiscardUnknown() {
	xxx_messageInfo_DecodedMessagePayload.DiscardUnknown(m)
}

// xxx_messageInfo_DecodedMessagePayload caches reflection-derived codec state.
var xxx_messageInfo_DecodedMessagePayload proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns nil when the message is nil.
func (m *DecodedMessagePayload) GetData() *types.Struct {
	if m != nil {
		return m.Data
	}
	return nil
}
func (m *DecodedMessagePayload) GetWarnings() []string {
	if m != nil {
		return m.Warnings
	}
	return nil
}
func (m *DecodedMessagePayload) GetErrors() []string {
	if m != nil {
		return m.Errors
	}
	return nil
}
// MessagePayloadDecoder is a protoc-generated message describing a payload
// decoder: the formatter type and parameter, a codec identifier, and example
// encoded-input/decoded-output pairs.
type MessagePayloadDecoder struct {
	// Payload formatter type.
	Formatter PayloadFormatter `protobuf:"varint,1,opt,name=formatter,proto3,enum=ttn.lorawan.v3.PayloadFormatter" json:"formatter,omitempty"`
	// Parameter for the formatter, must be set together.
	FormatterParameter string `protobuf:"bytes,2,opt,name=formatter_parameter,json=formatterParameter,proto3" json:"formatter_parameter,omitempty"`
	CodecId string `protobuf:"bytes,3,opt,name=codec_id,json=codecId,proto3" json:"codec_id,omitempty"`
	Examples []*MessagePayloadDecoder_Example `protobuf:"bytes,4,rep,name=examples,proto3" json:"examples,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *MessagePayloadDecoder) Reset() { *m = MessagePayloadDecoder{} }

// String renders the message in the proto compact text format.
func (m *MessagePayloadDecoder) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*MessagePayloadDecoder) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*MessagePayloadDecoder) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{12}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *MessagePayloadDecoder) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_MessagePayloadDecoder.Unmarshal(m, b)
}
func (m *MessagePayloadDecoder) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_MessagePayloadDecoder.Marshal(b, m, deterministic)
}
func (m *MessagePayloadDecoder) XXX_Merge(src proto.Message) {
	xxx_messageInfo_MessagePayloadDecoder.Merge(m, src)
}
func (m *MessagePayloadDecoder) XXX_Size() int {
	return xxx_messageInfo_MessagePayloadDecoder.Size(m)
}
func (m *MessagePayloadDecoder) XXX_DiscardUnknown() {
	xxx_messageInfo_MessagePayloadDecoder.DiscardUnknown(m)
}

// xxx_messageInfo_MessagePayloadDecoder caches reflection-derived codec state.
var xxx_messageInfo_MessagePayloadDecoder proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value (FORMATTER_NONE / "" / nil) when the message is nil.
func (m *MessagePayloadDecoder) GetFormatter() PayloadFormatter {
	if m != nil {
		return m.Formatter
	}
	return PayloadFormatter_FORMATTER_NONE
}
func (m *MessagePayloadDecoder) GetFormatterParameter() string {
	if m != nil {
		return m.FormatterParameter
	}
	return ""
}
func (m *MessagePayloadDecoder) GetCodecId() string {
	if m != nil {
		return m.CodecId
	}
	return ""
}
func (m *MessagePayloadDecoder) GetExamples() []*MessagePayloadDecoder_Example {
	if m != nil {
		return m.Examples
	}
	return nil
}
// MessagePayloadDecoder_Example is a protoc-generated message pairing an
// encoded input with its decoded output to illustrate a decoder's behavior.
type MessagePayloadDecoder_Example struct {
	Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"`
	Input *EncodedMessagePayload `protobuf:"bytes,2,opt,name=input,proto3" json:"input,omitempty"`
	Output *DecodedMessagePayload `protobuf:"bytes,3,opt,name=output,proto3" json:"output,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *MessagePayloadDecoder_Example) Reset() { *m = MessagePayloadDecoder_Example{} }

// String renders the message in the proto compact text format.
func (m *MessagePayloadDecoder_Example) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*MessagePayloadDecoder_Example) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// index path within it ([12, 0] = first nested message of index 12).
func (*MessagePayloadDecoder_Example) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{12, 0}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *MessagePayloadDecoder_Example) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_MessagePayloadDecoder_Example.Unmarshal(m, b)
}
func (m *MessagePayloadDecoder_Example) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_MessagePayloadDecoder_Example.Marshal(b, m, deterministic)
}
func (m *MessagePayloadDecoder_Example) XXX_Merge(src proto.Message) {
	xxx_messageInfo_MessagePayloadDecoder_Example.Merge(m, src)
}
func (m *MessagePayloadDecoder_Example) XXX_Size() int {
	return xxx_messageInfo_MessagePayloadDecoder_Example.Size(m)
}
func (m *MessagePayloadDecoder_Example) XXX_DiscardUnknown() {
	xxx_messageInfo_MessagePayloadDecoder_Example.DiscardUnknown(m)
}

// xxx_messageInfo_MessagePayloadDecoder_Example caches reflection-derived codec state.
var xxx_messageInfo_MessagePayloadDecoder_Example proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value when the message is nil.
func (m *MessagePayloadDecoder_Example) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}
func (m *MessagePayloadDecoder_Example) GetInput() *EncodedMessagePayload {
	if m != nil {
		return m.Input
	}
	return nil
}
func (m *MessagePayloadDecoder_Example) GetOutput() *DecodedMessagePayload {
	if m != nil {
		return m.Output
	}
	return nil
}
// MessagePayloadEncoder is a protoc-generated message describing a payload
// encoder: the formatter type and parameter, a codec identifier, and example
// decoded-input/encoded-output pairs (the mirror of MessagePayloadDecoder).
type MessagePayloadEncoder struct {
	// Payload formatter type.
	Formatter PayloadFormatter `protobuf:"varint,1,opt,name=formatter,proto3,enum=ttn.lorawan.v3.PayloadFormatter" json:"formatter,omitempty"`
	// Parameter for the formatter, must be set together.
	FormatterParameter string `protobuf:"bytes,2,opt,name=formatter_parameter,json=formatterParameter,proto3" json:"formatter_parameter,omitempty"`
	CodecId string `protobuf:"bytes,3,opt,name=codec_id,json=codecId,proto3" json:"codec_id,omitempty"`
	Examples []*MessagePayloadEncoder_Example `protobuf:"bytes,4,rep,name=examples,proto3" json:"examples,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message back to its zero value.
func (m *MessagePayloadEncoder) Reset() { *m = MessagePayloadEncoder{} }

// String renders the message in the proto compact text format.
func (m *MessagePayloadEncoder) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags the type as a protobuf message.
func (*MessagePayloadEncoder) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and this message's
// top-level index within it.
func (*MessagePayloadEncoder) Descriptor() ([]byte, []int) {
	return fileDescriptor_c0145ad4e3f42c22, []int{13}
}

// The XXX_* methods delegate wire (un)marshalling, merging, sizing, and
// unknown-field handling to the shared xxx_messageInfo below.
func (m *MessagePayloadEncoder) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_MessagePayloadEncoder.Unmarshal(m, b)
}
func (m *MessagePayloadEncoder) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_MessagePayloadEncoder.Marshal(b, m, deterministic)
}
func (m *MessagePayloadEncoder) XXX_Merge(src proto.Message) {
	xxx_messageInfo_MessagePayloadEncoder.Merge(m, src)
}
func (m *MessagePayloadEncoder) XXX_Size() int {
	return xxx_messageInfo_MessagePayloadEncoder.Size(m)
}
func (m *MessagePayloadEncoder) XXX_DiscardUnknown() {
	xxx_messageInfo_MessagePayloadEncoder.DiscardUnknown(m)
}

// xxx_messageInfo_MessagePayloadEncoder caches reflection-derived codec state.
var xxx_messageInfo_MessagePayloadEncoder proto.InternalMessageInfo

// The getters below are nil-receiver safe: each returns the field's zero
// value (FORMATTER_NONE / "" / nil) when the message is nil.
func (m *MessagePayloadEncoder) GetFormatter() PayloadFormatter {
	if m != nil {
		return m.Formatter
	}
	return PayloadFormatter_FORMATTER_NONE
}
func (m *MessagePayloadEncoder) GetFormatterParameter() string {
	if m != nil {
		return m.FormatterParameter
	}
	return ""
}
func (m *MessagePayloadEncoder) GetCodecId() string {
	if m != nil {
		return m.CodecId
	}
	return ""
}
func (m *MessagePayloadEncoder) GetExamples() []*MessagePayloadEncoder_Example {
	if m != nil {
		return m.Examples
	}
	return nil
}
type MessagePayloadEncoder_Example struct {
Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"`
Input *DecodedMessagePayload `protobuf:"bytes,2,opt,name=input,proto3" json:"input,omitempty"`
Output *EncodedMessagePayload `protobuf:"bytes,3,opt,name=output,proto3" json:"output,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Reset clears m to the zero value.
func (m *MessagePayloadEncoder_Example) Reset() { *m = MessagePayloadEncoder_Example{} }

// String renders m in the compact protobuf text format.
func (m *MessagePayloadEncoder_Example) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags MessagePayloadEncoder_Example as a protobuf message type.
func (*MessagePayloadEncoder_Example) ProtoMessage() {}

// Descriptor returns the gzipped FileDescriptorProto bytes and the index path
// ([]int{13, 0}: nested message 0 of message 13) of this message within it.
func (*MessagePayloadEncoder_Example) Descriptor() ([]byte, []int) {
return fileDescriptor_c0145ad4e3f42c22, []int{13, 0}
}

// XXX_Unmarshal deserializes the wire bytes b into m.
func (m *MessagePayloadEncoder_Example) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_MessagePayloadEncoder_Example.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b; when deterministic is
// true, output ordering is made stable.
func (m *MessagePayloadEncoder_Example) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_MessagePayloadEncoder_Example.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *MessagePayloadEncoder_Example) XXX_Merge(src proto.Message) {
xxx_messageInfo_MessagePayloadEncoder_Example.Merge(m, src)
}

// XXX_Size returns the encoded size of m in bytes.
func (m *MessagePayloadEncoder_Example) XXX_Size() int {
return xxx_messageInfo_MessagePayloadEncoder_Example.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields stored on m.
func (m *MessagePayloadEncoder_Example) XXX_DiscardUnknown() {
xxx_messageInfo_MessagePayloadEncoder_Example.DiscardUnknown(m)
}

// xxx_messageInfo_MessagePayloadEncoder_Example caches reflection-derived
// marshalling state shared by the XXX_* methods above.
var xxx_messageInfo_MessagePayloadEncoder_Example proto.InternalMessageInfo
// GetDescription returns the example's description, or "" if m is nil.
func (m *MessagePayloadEncoder_Example) GetDescription() string {
if m != nil {
return m.Description
}
return ""
}

// GetInput returns the decoded input payload, or nil if m is nil.
func (m *MessagePayloadEncoder_Example) GetInput() *DecodedMessagePayload {
if m != nil {
return m.Input
}
return nil
}

// GetOutput returns the expected encoded output payload, or nil if m is nil.
func (m *MessagePayloadEncoder_Example) GetOutput() *EncodedMessagePayload {
if m != nil {
return m.Output
}
return nil
}
// init registers every enum, message, nested message, and map type declared in
// this file under its fully-qualified "ttn.lorawan.v3.*" name. Each type is
// registered twice: once in the gogo `proto` registry and once in the
// `golang_proto` registry, so both protobuf runtimes can resolve the types.
func init() {
proto.RegisterEnum("ttn.lorawan.v3.KeyProvisioning", KeyProvisioning_name, KeyProvisioning_value)
golang_proto.RegisterEnum("ttn.lorawan.v3.KeyProvisioning", KeyProvisioning_name, KeyProvisioning_value)
proto.RegisterEnum("ttn.lorawan.v3.KeySecurity", KeySecurity_name, KeySecurity_value)
golang_proto.RegisterEnum("ttn.lorawan.v3.KeySecurity", KeySecurity_name, KeySecurity_value)
proto.RegisterType((*EndDeviceBrand)(nil), "ttn.lorawan.v3.EndDeviceBrand")
golang_proto.RegisterType((*EndDeviceBrand)(nil), "ttn.lorawan.v3.EndDeviceBrand")
proto.RegisterType((*EndDeviceModel)(nil), "ttn.lorawan.v3.EndDeviceModel")
golang_proto.RegisterType((*EndDeviceModel)(nil), "ttn.lorawan.v3.EndDeviceModel")
proto.RegisterType((*EndDeviceModel_HardwareVersion)(nil), "ttn.lorawan.v3.EndDeviceModel.HardwareVersion")
golang_proto.RegisterType((*EndDeviceModel_HardwareVersion)(nil), "ttn.lorawan.v3.EndDeviceModel.HardwareVersion")
proto.RegisterType((*EndDeviceModel_FirmwareVersion)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion")
golang_proto.RegisterType((*EndDeviceModel_FirmwareVersion)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion")
proto.RegisterMapType((map[string]*EndDeviceModel_FirmwareVersion_Profile)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion.ProfilesEntry")
golang_proto.RegisterMapType((map[string]*EndDeviceModel_FirmwareVersion_Profile)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion.ProfilesEntry")
proto.RegisterType((*EndDeviceModel_FirmwareVersion_Profile)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion.Profile")
golang_proto.RegisterType((*EndDeviceModel_FirmwareVersion_Profile)(nil), "ttn.lorawan.v3.EndDeviceModel.FirmwareVersion.Profile")
proto.RegisterType((*EndDeviceModel_Dimensions)(nil), "ttn.lorawan.v3.EndDeviceModel.Dimensions")
golang_proto.RegisterType((*EndDeviceModel_Dimensions)(nil), "ttn.lorawan.v3.EndDeviceModel.Dimensions")
proto.RegisterType((*EndDeviceModel_Battery)(nil), "ttn.lorawan.v3.EndDeviceModel.Battery")
golang_proto.RegisterType((*EndDeviceModel_Battery)(nil), "ttn.lorawan.v3.EndDeviceModel.Battery")
proto.RegisterType((*EndDeviceModel_OperatingConditions)(nil), "ttn.lorawan.v3.EndDeviceModel.OperatingConditions")
golang_proto.RegisterType((*EndDeviceModel_OperatingConditions)(nil), "ttn.lorawan.v3.EndDeviceModel.OperatingConditions")
proto.RegisterType((*EndDeviceModel_OperatingConditions_Limits)(nil), "ttn.lorawan.v3.EndDeviceModel.OperatingConditions.Limits")
golang_proto.RegisterType((*EndDeviceModel_OperatingConditions_Limits)(nil), "ttn.lorawan.v3.EndDeviceModel.OperatingConditions.Limits")
proto.RegisterType((*EndDeviceModel_Photos)(nil), "ttn.lorawan.v3.EndDeviceModel.Photos")
golang_proto.RegisterType((*EndDeviceModel_Photos)(nil), "ttn.lorawan.v3.EndDeviceModel.Photos")
proto.RegisterType((*EndDeviceModel_Videos)(nil), "ttn.lorawan.v3.EndDeviceModel.Videos")
golang_proto.RegisterType((*EndDeviceModel_Videos)(nil), "ttn.lorawan.v3.EndDeviceModel.Videos")
proto.RegisterType((*EndDeviceModel_Reseller)(nil), "ttn.lorawan.v3.EndDeviceModel.Reseller")
golang_proto.RegisterType((*EndDeviceModel_Reseller)(nil), "ttn.lorawan.v3.EndDeviceModel.Reseller")
proto.RegisterType((*EndDeviceModel_Compliances)(nil), "ttn.lorawan.v3.EndDeviceModel.Compliances")
golang_proto.RegisterType((*EndDeviceModel_Compliances)(nil), "ttn.lorawan.v3.EndDeviceModel.Compliances")
proto.RegisterType((*EndDeviceModel_Compliances_Compliance)(nil), "ttn.lorawan.v3.EndDeviceModel.Compliances.Compliance")
golang_proto.RegisterType((*EndDeviceModel_Compliances_Compliance)(nil), "ttn.lorawan.v3.EndDeviceModel.Compliances.Compliance")
proto.RegisterType((*GetEndDeviceBrandRequest)(nil), "ttn.lorawan.v3.GetEndDeviceBrandRequest")
golang_proto.RegisterType((*GetEndDeviceBrandRequest)(nil), "ttn.lorawan.v3.GetEndDeviceBrandRequest")
proto.RegisterType((*ListEndDeviceBrandsRequest)(nil), "ttn.lorawan.v3.ListEndDeviceBrandsRequest")
golang_proto.RegisterType((*ListEndDeviceBrandsRequest)(nil), "ttn.lorawan.v3.ListEndDeviceBrandsRequest")
proto.RegisterType((*GetEndDeviceModelRequest)(nil), "ttn.lorawan.v3.GetEndDeviceModelRequest")
golang_proto.RegisterType((*GetEndDeviceModelRequest)(nil), "ttn.lorawan.v3.GetEndDeviceModelRequest")
proto.RegisterType((*ListEndDeviceModelsRequest)(nil), "ttn.lorawan.v3.ListEndDeviceModelsRequest")
golang_proto.RegisterType((*ListEndDeviceModelsRequest)(nil), "ttn.lorawan.v3.ListEndDeviceModelsRequest")
proto.RegisterType((*GetTemplateRequest)(nil), "ttn.lorawan.v3.GetTemplateRequest")
golang_proto.RegisterType((*GetTemplateRequest)(nil), "ttn.lorawan.v3.GetTemplateRequest")
proto.RegisterType((*GetPayloadFormatterRequest)(nil), "ttn.lorawan.v3.GetPayloadFormatterRequest")
golang_proto.RegisterType((*GetPayloadFormatterRequest)(nil), "ttn.lorawan.v3.GetPayloadFormatterRequest")
proto.RegisterType((*ListEndDeviceBrandsResponse)(nil), "ttn.lorawan.v3.ListEndDeviceBrandsResponse")
golang_proto.RegisterType((*ListEndDeviceBrandsResponse)(nil), "ttn.lorawan.v3.ListEndDeviceBrandsResponse")
proto.RegisterType((*ListEndDeviceModelsResponse)(nil), "ttn.lorawan.v3.ListEndDeviceModelsResponse")
golang_proto.RegisterType((*ListEndDeviceModelsResponse)(nil), "ttn.lorawan.v3.ListEndDeviceModelsResponse")
proto.RegisterType((*EncodedMessagePayload)(nil), "ttn.lorawan.v3.EncodedMessagePayload")
golang_proto.RegisterType((*EncodedMessagePayload)(nil), "ttn.lorawan.v3.EncodedMessagePayload")
proto.RegisterType((*DecodedMessagePayload)(nil), "ttn.lorawan.v3.DecodedMessagePayload")
golang_proto.RegisterType((*DecodedMessagePayload)(nil), "ttn.lorawan.v3.DecodedMessagePayload")
proto.RegisterType((*MessagePayloadDecoder)(nil), "ttn.lorawan.v3.MessagePayloadDecoder")
golang_proto.RegisterType((*MessagePayloadDecoder)(nil), "ttn.lorawan.v3.MessagePayloadDecoder")
proto.RegisterType((*MessagePayloadDecoder_Example)(nil), "ttn.lorawan.v3.MessagePayloadDecoder.Example")
golang_proto.RegisterType((*MessagePayloadDecoder_Example)(nil), "ttn.lorawan.v3.MessagePayloadDecoder.Example")
proto.RegisterType((*MessagePayloadEncoder)(nil), "ttn.lorawan.v3.MessagePayloadEncoder")
golang_proto.RegisterType((*MessagePayloadEncoder)(nil), "ttn.lorawan.v3.MessagePayloadEncoder")
proto.RegisterType((*MessagePayloadEncoder_Example)(nil), "ttn.lorawan.v3.MessagePayloadEncoder.Example")
golang_proto.RegisterType((*MessagePayloadEncoder_Example)(nil), "ttn.lorawan.v3.MessagePayloadEncoder.Example")
}
// init registers the compressed file descriptor under the proto file path in
// the gogo `proto` registry.
func init() {
proto.RegisterFile("lorawan-stack/api/devicerepository.proto", fileDescriptor_c0145ad4e3f42c22)
}

// init registers the same descriptor in the `golang_proto` registry.
func init() {
golang_proto.RegisterFile("lorawan-stack/api/devicerepository.proto", fileDescriptor_c0145ad4e3f42c22)
}
// fileDescriptor_c0145ad4e3f42c22 is the gzip-compressed FileDescriptorProto
// for lorawan-stack/api/devicerepository.proto, returned by the Descriptor()
// methods above and registered with both runtimes in init(). Generated data —
// do not edit by hand.
var fileDescriptor_c0145ad4e3f42c22 = []byte{
// 3164 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x5a, 0xdd, 0x6f, 0x1b, 0xd7,
0x95, 0xcf, 0x90, 0x12, 0x3f, 0x0e, 0x2d, 0x89, 0xba, 0x96, 0xd7, 0xa3, 0x71, 0x62, 0x2b, 0x5c,
0xd9, 0x26, 0xa5, 0x90, 0xf4, 0xd2, 0xeb, 0x24, 0x56, 0xe2, 0x65, 0x4c, 0x99, 0xb6, 0x15, 0xdb,
0xb2, 0x30, 0x92, 0x9c, 0x75, 0x44, 0x99, 0x3b, 0xe2, 0x5c, 0x92, 0x63, 0x91, 0x33, 0x93, 0x3b,
0x97, 0x64, 0x14, 0xdb, 0xc0, 0x22, 0x4f, 0xc1, 0x62, 0x81, 0xc5, 0x7a, 0xff, 0x82, 0x05, 0x16,
0xd8, 0x42, 0x05, 0x02, 0xb4, 0x45, 0x80, 0xb6, 0xe8, 0xf7, 0x43, 0x11, 0xb4, 0x08, 0x10, 0xf4,
0xe3, 0xa1, 0x0f, 0x79, 0x69, 0x0a, 0xb4, 0x75, 0xdf, 0x8a, 0x00, 0x05, 0xf8, 0xd2, 0xe2, 0xde,
0x99, 0xa1, 0x86, 0xa4, 0x64, 0x7d, 0xc4, 0x4d, 0xe1, 0xf4, 0x41, 0xd0, 0x9d, 0x7b, 0x7e, 0xe7,
0xdc, 0x7b, 0x3e, 0xe6, 0x9c, 0x73, 0xef, 0x10, 0xe2, 0x35, 0x83, 0x28, 0x2d, 0x45, 0x4f, 0x5a,
0x54, 0x29, 0xad, 0xa7, 0x15, 0x53, 0x4b, 0xab, 0xb8, 0xa9, 0x95, 0x30, 0xc1, 0xa6, 0x61, 0x69,
0xd4, 0x20, 0x1b, 0x29, 0x93, 0x18, 0xd4, 0x40, 0xc3, 0x94, 0xea, 0x29, 0x07, 0x9d, 0x6a, 0x9e,
0x95, 0x2e, 0x56, 0x34, 0x5a, 0x6d, 0xac, 0xa5, 0x4a, 0x46, 0x3d, 0x8d, 0xf5, 0xa6, 0xb1, 0x61,
0x12, 0xe3, 0xed, 0x8d, 0x34, 0x07, 0x97, 0x92, 0x15, 0xac, 0x27, 0x9b, 0x4a, 0x4d, 0x53, 0x15,
0x8a, 0xd3, 0x7d, 0x03, 0x5b, 0xa4, 0x94, 0xf4, 0x88, 0xa8, 0x18, 0x15, 0xc3, 0x66, 0x5e, 0x6b,
0x94, 0xf9, 0x13, 0x7f, 0xe0, 0x23, 0x07, 0x3e, 0xeb, 0x81, 0x2f, 0x55, 0xf1, 0x52, 0x55, 0xd3,
0x2b, 0xd6, 0x9c, 0xae, 0x36, 0x2c, 0x4a, 0x34, 0x6c, 0x79, 0x97, 0xae, 0x18, 0xc9, 0xbb, 0x96,
0xa1, 0xa7, 0x15, 0x5d, 0x37, 0xa8, 0x42, 0x35, 0x43, 0xb7, 0x1c, 0x21, 0xcf, 0x56, 0x0c, 0xa3,
0x52, 0xc3, 0x5c, 0xd3, 0x7e, 0xea, 0x84, 0x43, 0xed, 0x6c, 0xa4, 0xac, 0xe1, 0x9a, 0x5a, 0xac,
0x2b, 0xd6, 0x7a, 0x0f, 0x7f, 0x07, 0x61, 0x51, 0xd2, 0x28, 0x51, 0x87, 0x7a, 0xbc, 0x97, 0xda,
0x22, 0x8a, 0x69, 0x62, 0xe2, 0xca, 0x8f, 0xf5, 0x9b, 0x1b, 0xeb, 0x6a, 0xd1, 0x36, 0xb9, 0xbb,
0x87, 0x7e, 0x4c, 0x1d, 0x5b, 0x96, 0x52, 0xc1, 0xae, 0x94, 0x7f, 0xec, 0x47, 0x68, 0x2a, 0xd6,
0xa9, 0x56, 0xd6, 0x3a, 0x4b, 0xc5, 0x7e, 0xe6, 0x87, 0xe1, 0xbc, 0xae, 0x5e, 0xe2, 0xa2, 0x73,
0x44, 0xd1, 0x55, 0x94, 0x83, 0xd0, 0x1a, 0x1b, 0x14, 0x35, 0x55, 0x14, 0x26, 0x84, 0x78, 0x38,
0x77, 0xba, 0x9d, 0x9b, 0x24, 0x31, 0x71, 0x32, 0x73, 0xfc, 0xce, 0x8a, 0x92, 0x7c, 0xe7, 0x4c,
0xf2, 0xfc, 0x6a, 0x3c, 0x3b, 0xb3, 0x92, 0x5c, 0xcd, 0xba, 0x8f, 0x89, 0x7b, 0x99, 0x17, 0x1e,
0x4c, 0xca, 0x41, 0xce, 0x38, 0xa7, 0x22, 0x04, 0x03, 0xba, 0x52, 0xc7, 0xa2, 0x8f, 0xf1, 0xcb,
0x7c, 0x8c, 0x66, 0x60, 0xdc, 0x24, 0x5a, 0x53, 0xa1, 0xb8, 0x88, 0x75, 0x8a, 0x89, 0x49, 0x34,
0x0b, 0x17, 0xf5, 0x46, 0x7d, 0x0d, 0x13, 0xd1, 0x3f, 0x21, 0xc4, 0x87, 0xe4, 0xa3, 0x0e, 0x20,
0xdf, 0xa1, 0xcf, 0x73, 0x32, 0x2a, 0xc2, 0x09, 0x83, 0x54, 0x14, 0x5d, 0x7b, 0x87, 0x3b, 0xa2,
0xd8, 0xd0, 0xb5, 0xb7, 0x1a, 0xb8, 0xe8, 0xd1, 0x47, 0x1c, 0x98, 0xf0, 0xc7, 0xc3, 0xb9, 0xa3,
0xed, 0xdc, 0xd8, 0x43, 0x61, 0x34, 0x36, 0x42, 0x86, 0x32, 0x91, 0x95, 0x33, 0xc9, 0xf3, 0x17,
0x93, 0x97, 0x57, 0xef, 0xbd, 0xf8, 0x40, 0x7e, 0xce, 0xcb, 0xbf, 0xcc, 0xd9, 0xe7, 0xb6, 0xb8,
0xd1, 0x39, 0x38, 0xca, 0xcc, 0x55, 0x54, 0x6a, 0x35, 0x4d, 0xd1, 0x4b, 0xb8, 0xd8, 0xc4, 0xba,
0x6a, 0x10, 0x66, 0x83, 0x41, 0xbe, 0xb5, 0x31, 0x46, 0xbe, 0xe8, 0x50, 0x6f, 0x71, 0xe2, 0x9c,
0x8a, 0x4e, 0x42, 0xb0, 0x85, 0xd7, 0x2c, 0x8d, 0x62, 0x31, 0xc0, 0x4d, 0x15, 0x69, 0xe7, 0x42,
0x24, 0xf0, 0x9e, 0x20, 0x7c, 0x2c, 0x08, 0xb2, 0x4b, 0x43, 0x13, 0x30, 0x88, 0xeb, 0x8a, 0x56,
0x13, 0x83, 0x1c, 0x04, 0xed, 0x5c, 0x90, 0x0c, 0xfe, 0x1b, 0xc7, 0xd8, 0x04, 0x74, 0x0d, 0x06,
0x6a, 0x46, 0xc5, 0x10, 0x43, 0x1c, 0xf0, 0x52, 0x3b, 0xf7, 0xcf, 0x24, 0x93, 0x39, 0x73, 0x67,
0xf2, 0xfe, 0x9d, 0x78, 0xdc, 0x31, 0x72, 0x72, 0x75, 0xba, 0x90, 0x4e, 0x4c, 0x27, 0xb2, 0xee,
0x44, 0x91, 0xcd, 0xa4, 0x12, 0xd3, 0x71, 0x53, 0xaf, 0xdc, 0xb7, 0x9a, 0x95, 0xc4, 0xa4, 0xcc,
0x85, 0xc4, 0x7e, 0xfa, 0x9c, 0xc7, 0xa9, 0x37, 0x0c, 0x15, 0xd7, 0x9e, 0x88, 0x53, 0x73, 0x10,
0xaa, 0x33, 0x61, 0x4c, 0x86, 0x6f, 0x9f, 0x32, 0x38, 0xa3, 0x27, 0x30, 0xfc, 0x9e, 0xc0, 0x98,
0x80, 0x88, 0x8a, 0xad, 0x12, 0xd1, 0x4c, 0xe6, 0x1b, 0x71, 0x80, 0x93, 0xbc, 0x53, 0x68, 0x05,
0x46, 0xab, 0x0a, 0x51, 0x5b, 0x0a, 0x61, 0x8e, 0x21, 0x16, 0x7b, 0x17, 0xc5, 0xc1, 0x09, 0x7f,
0x3c, 0x92, 0x49, 0xa5, 0xba, 0x33, 0x4e, 0xaa, 0x5b, 0xf1, 0xd4, 0x55, 0x87, 0xef, 0x96, 0xcd,
0x26, 0x47, 0xab, 0xdd, 0x13, 0x16, 0x13, 0x5e, 0xd6, 0x48, 0xbd, 0x5b, 0x78, 0x60, 0x4f, 0xc2,
0x2f, 0x3b, 0x7c, 0x1d, 0xe1, 0xe5, 0xee, 0x09, 0x0b, 0xc5, 0x20, 0x68, 0x61, 0xdd, 0x32, 0x88,
0x25, 0x06, 0x79, 0x80, 0x86, 0xda, 0xb9, 0xc1, 0x87, 0x82, 0x4f, 0x14, 0x64, 0x97, 0x80, 0xe6,
0x00, 0x54, 0xad, 0x8e, 0x75, 0x7b, 0x65, 0x16, 0x01, 0x91, 0x4c, 0x62, 0x97, 0x95, 0x2f, 0x75,
0x18, 0x64, 0x0f, 0x33, 0x3a, 0x0b, 0x81, 0x16, 0xd6, 0x2a, 0x55, 0x2a, 0x86, 0xb9, 0x98, 0x63,
0x29, 0x3b, 0xd5, 0xa4, 0xdc, 0x54, 0x93, 0xba, 0x5c, 0x33, 0x14, 0x7a, 0x4b, 0xa9, 0x35, 0xb0,
0xec, 0x40, 0xd1, 0x6b, 0x10, 0x5c, 0x53, 0x28, 0xc5, 0x64, 0x43, 0x04, 0xce, 0x75, 0x6a, 0x97,
0xc5, 0x73, 0x36, 0x5a, 0x76, 0xd9, 0x10, 0x86, 0x31, 0xc3, 0xc4, 0x44, 0xa1, 0x9a, 0x5e, 0x29,
0x96, 0x0c, 0x5d, 0xd5, 0x78, 0xba, 0x14, 0x23, 0x5c, 0x5c, 0x66, 0x17, 0x71, 0x37, 0x5d, 0xd6,
0xd9, 0x0e, 0xa7, 0x7c, 0xd8, 0xe8, 0x9f, 0x44, 0x47, 0x21, 0xa8, 0x99, 0xc5, 0x92, 0xa1, 0x62,
0xf1, 0x10, 0x0f, 0x92, 0x80, 0x66, 0xce, 0x1a, 0x2a, 0x46, 0x2b, 0x10, 0x5d, 0xc7, 0x1b, 0x45,
0x93, 0x18, 0x4d, 0x8d, 0x19, 0x42, 0xd3, 0x2b, 0xe2, 0xd0, 0x84, 0x3f, 0x3e, 0x9c, 0x39, 0xd1,
0xbb, 0xf6, 0x35, 0xbc, 0xb1, 0xe0, 0x81, 0xe5, 0x46, 0xda, 0xb9, 0x43, 0x0f, 0x85, 0xb0, 0x28,
0xc4, 0x06, 0xdf, 0x15, 0x7c, 0x51, 0x41, 0x1e, 0x59, 0xef, 0x46, 0xa0, 0xab, 0x70, 0x88, 0x09,
0xb7, 0x70, 0xa9, 0x41, 0x34, 0xba, 0x21, 0x0e, 0x4f, 0x08, 0xf1, 0xe1, 0xcc, 0xb1, 0x6d, 0x04,
0x2f, 0x3a, 0x10, 0xee, 0x64, 0x5b, 0x5a, 0x64, 0x7d, 0x6b, 0x1a, 0x5d, 0x80, 0x80, 0x59, 0x35,
0xa8, 0x61, 0x89, 0x23, 0xdc, 0x30, 0x27, 0x77, 0x31, 0xcc, 0x02, 0x07, 0xcb, 0x0e, 0x13, 0x63,
0x6f, 0x6a, 0x2a, 0x36, 0x2c, 0x31, 0xba, 0x27, 0xf6, 0x5b, 0x1c, 0x2c, 0x3b, 0x4c, 0xe8, 0x05,
0x88, 0x98, 0xc4, 0x50, 0x1b, 0x25, 0x5a, 0x6c, 0x90, 0x9a, 0x38, 0xda, 0x9f, 0xaf, 0xc0, 0xa1,
0x2f, 0x93, 0x1a, 0x3a, 0x03, 0x43, 0xaa, 0x42, 0x15, 0xab, 0x8a, 0xb1, 0x8d, 0x47, 0xfd, 0xf8,
0x43, 0x1d, 0x04, 0xe3, 0xc8, 0x43, 0x98, 0x60, 0x0b, 0xd7, 0x6a, 0x2c, 0x1b, 0x1f, 0xe6, 0xef,
0xcf, 0xe9, 0x5d, 0x76, 0x28, 0x3b, 0x78, 0x79, 0x8b, 0x13, 0x5d, 0x87, 0x48, 0xc9, 0xa8, 0x9b,
0x76, 0xa2, 0xb5, 0xc4, 0x31, 0xae, 0xea, 0xd4, 0x2e, 0x82, 0x66, 0xb7, 0x38, 0x64, 0x2f, 0x3b,
0x3a, 0x07, 0xa3, 0x8a, 0x6a, 0xc7, 0x8f, 0x52, 0x2b, 0x12, 0x45, 0xd5, 0x0c, 0x4b, 0x3c, 0xd2,
0xf3, 0x26, 0x46, 0xb7, 0x20, 0x32, 0x47, 0x48, 0x65, 0x18, 0xe9, 0x49, 0x1c, 0x48, 0x84, 0xa0,
0x93, 0x1d, 0xec, 0x04, 0x2a, 0xbb, 0x8f, 0x8c, 0xa2, 0x37, 0xea, 0x98, 0x68, 0x25, 0x9e, 0x16,
0x87, 0x64, 0xf7, 0x11, 0x9d, 0x80, 0x88, 0xa9, 0x10, 0xea, 0x2d, 0x72, 0x61, 0x19, 0xd8, 0x94,
0x5d, 0xd7, 0xa4, 0x3f, 0x0e, 0xc0, 0x48, 0x4f, 0x12, 0x39, 0xd0, 0x42, 0x57, 0xe1, 0x98, 0xd5,
0x30, 0x4d, 0x83, 0x50, 0xac, 0x16, 0xfb, 0x53, 0xa5, 0xbf, 0x47, 0xe1, 0xf1, 0x0e, 0xf8, 0x6a,
0x6f, 0x36, 0xfc, 0x57, 0x08, 0x99, 0xc4, 0x28, 0x6b, 0x35, 0x6c, 0x97, 0xd4, 0x48, 0xe6, 0xd5,
0xfd, 0x25, 0xc1, 0xd4, 0x82, 0xc3, 0x9e, 0xd7, 0x29, 0xd9, 0x90, 0x3b, 0xd2, 0xa4, 0xff, 0xf6,
0x41, 0xd0, 0xa1, 0xa1, 0x2b, 0x10, 0xde, 0x2a, 0xb0, 0x3c, 0xe1, 0xe7, 0xa6, 0xda, 0xb9, 0xd3,
0xe4, 0xa4, 0x38, 0x99, 0x79, 0x9e, 0x55, 0xbd, 0xc7, 0x97, 0x93, 0x50, 0xd3, 0x2d, 0xc0, 0x73,
0x00, 0xce, 0x02, 0x5b, 0x95, 0x6d, 0x3f, 0x92, 0xc2, 0x0e, 0xf7, 0x9c, 0x8a, 0xa6, 0x61, 0xd4,
0x51, 0xb2, 0x58, 0xc2, 0x84, 0x37, 0x06, 0x76, 0x9d, 0x0b, 0xc9, 0x51, 0x87, 0x30, 0xeb, 0xce,
0xa3, 0x3c, 0x84, 0x58, 0x1e, 0x2a, 0xb1, 0x55, 0xfd, 0xfb, 0x5e, 0x35, 0xc8, 0x79, 0xe7, 0x54,
0xc9, 0x82, 0xa1, 0x2e, 0x73, 0xa1, 0x28, 0xf8, 0xd7, 0xf1, 0x86, 0xe3, 0x78, 0x36, 0x44, 0xd7,
0x61, 0xb0, 0xc9, 0xd2, 0x35, 0xdf, 0x4a, 0x24, 0xf3, 0xe2, 0xc1, 0xbc, 0x21, 0xdb, 0x42, 0x66,
0x7c, 0x2f, 0x0b, 0xd2, 0xaf, 0x05, 0x80, 0xad, 0xfa, 0x81, 0xfe, 0x09, 0x06, 0x5b, 0x9a, 0x4a,
0xab, 0x7c, 0xd1, 0x5d, 0x4a, 0x86, 0x8d, 0x64, 0x65, 0xa6, 0x6a, 0x97, 0x19, 0xdf, 0x1e, 0xca,
0x8c, 0x0d, 0x45, 0x2f, 0x41, 0x48, 0xd5, 0x94, 0x3a, 0xa6, 0xce, 0x9b, 0xb0, 0x0b, 0x5b, 0x07,
0xcc, 0x56, 0xab, 0x61, 0xbd, 0x42, 0xab, 0x3c, 0x52, 0x76, 0x5b, 0xcd, 0x86, 0x4a, 0x2b, 0x10,
0x74, 0xca, 0x14, 0x7a, 0x15, 0x22, 0x04, 0x9b, 0x35, 0xa5, 0x84, 0x95, 0xb5, 0x1a, 0x76, 0xd4,
0x94, 0xfa, 0x84, 0xe4, 0x0c, 0xa3, 0x66, 0xcb, 0xf0, 0xc2, 0x59, 0xc7, 0x42, 0x37, 0xcc, 0x4e,
0x2b, 0xcb, 0xc6, 0xd2, 0x8f, 0x7c, 0x70, 0x78, 0x9b, 0xaa, 0x85, 0x56, 0x20, 0x42, 0x71, 0x9d,
0x13, 0x1a, 0xc4, 0x5d, 0xe9, 0xfc, 0xfe, 0xcb, 0x5f, 0xea, 0xba, 0x56, 0xd7, 0xa8, 0x25, 0x7b,
0xa5, 0xa1, 0x32, 0x8c, 0x12, 0x5c, 0x53, 0xa8, 0xd6, 0xc4, 0xc5, 0x6a, 0xa3, 0xae, 0xa9, 0xac,
0x18, 0xf9, 0x3e, 0xef, 0x12, 0x51, 0x57, 0xe6, 0x55, 0x47, 0xa4, 0x54, 0x86, 0x80, 0x4d, 0x43,
0x49, 0xf0, 0xd7, 0x35, 0x7d, 0x2f, 0x71, 0xc1, 0x70, 0x1c, 0xae, 0xbc, 0xbd, 0x97, 0x90, 0x60,
0x38, 0xe9, 0x03, 0x01, 0x02, 0x76, 0x85, 0x43, 0x0b, 0x30, 0x50, 0x57, 0x9c, 0x95, 0xc2, 0xb9,
0x57, 0xdb, 0xb9, 0xf3, 0xe4, 0xa5, 0xcc, 0xb9, 0x7d, 0x74, 0xbf, 0x77, 0x4d, 0xf6, 0x87, 0x79,
0x0b, 0xcc, 0x24, 0xa1, 0xdb, 0x30, 0x68, 0xd0, 0x2a, 0x26, 0xa2, 0x8f, 0xa7, 0xbe, 0xd9, 0x76,
0xee, 0xb5, 0x87, 0xc2, 0x05, 0x51, 0x88, 0x7d, 0x0e, 0xc9, 0xb6, 0x44, 0xe9, 0x0f, 0x7e, 0x08,
0xd8, 0xa5, 0x15, 0xfd, 0x5e, 0xe8, 0xda, 0xf8, 0x2f, 0x84, 0x76, 0xee, 0x63, 0x81, 0x7c, 0x24,
0x64, 0x7e, 0x22, 0xdc, 0x89, 0x67, 0x67, 0xaa, 0x94, 0x9a, 0x56, 0x76, 0xa6, 0x90, 0x2e, 0xa4,
0xe3, 0xd9, 0x99, 0x56, 0xab, 0x55, 0x48, 0x25, 0xb2, 0x1b, 0x46, 0x83, 0x36, 0xe2, 0xd9, 0x99,
0x35, 0x5c, 0x60, 0x67, 0xd4, 0x42, 0xba, 0xa5, 0xd0, 0x52, 0xb5, 0x90, 0x6d, 0x5e, 0xb8, 0x5f,
0x48, 0xad, 0xe1, 0x42, 0x3a, 0xc1, 0x72, 0x46, 0xa1, 0x55, 0x48, 0x16, 0x57, 0xa7, 0xd8, 0xf8,
0x54, 0x5c, 0xa9, 0x9b, 0xaf, 0x24, 0xb2, 0x6c, 0x2e, 0x7b, 0x61, 0x75, 0x2a, 0x91, 0x4d, 0xb0,
0xed, 0xef, 0x20, 0xbe, 0xa9, 0xd5, 0xb1, 0xe1, 0x48, 0x8e, 0x67, 0x67, 0x4a, 0x55, 0x45, 0xd7,
0x71, 0xcd, 0xe2, 0x0f, 0x85, 0x16, 0xd3, 0x34, 0x7b, 0xbf, 0x42, 0x8c, 0x86, 0xc9, 0xa6, 0x56,
0xee, 0x14, 0xd2, 0xab, 0x53, 0x89, 0x42, 0xda, 0x6e, 0x11, 0x0a, 0xe9, 0xfb, 0x6c, 0xc1, 0x82,
0x3a, 0xcd, 0xfe, 0xdd, 0x2f, 0xa4, 0x0b, 0xd9, 0x44, 0xc7, 0xa2, 0x9f, 0x09, 0xdd, 0x26, 0xfd,
0x44, 0x68, 0xe7, 0x7e, 0x25, 0x3c, 0x14, 0x7e, 0x29, 0x88, 0x42, 0xec, 0x4b, 0xa8, 0xb5, 0xe3,
0xed, 0x65, 0x08, 0xb9, 0x5d, 0x4a, 0xe7, 0xf0, 0x22, 0x78, 0x0e, 0x2f, 0xff, 0x00, 0x01, 0x82,
0x2b, 0xac, 0x58, 0x73, 0xb3, 0xc8, 0xce, 0x13, 0x7a, 0x0e, 0xfc, 0xac, 0x6b, 0xf2, 0xf7, 0x77,
0x4d, 0x6c, 0x5e, 0xfa, 0x8a, 0x0f, 0x22, 0x9e, 0xa6, 0x05, 0xdd, 0x80, 0x80, 0xa5, 0x94, 0x31,
0x65, 0xa9, 0x9f, 0x15, 0xdd, 0x73, 0x7b, 0x6f, 0x78, 0x3c, 0x63, 0xd9, 0x11, 0x82, 0xee, 0xc0,
0x08, 0xef, 0x75, 0x8a, 0xf8, 0xad, 0x86, 0x66, 0xd6, 0xb1, 0x4e, 0xf9, 0xf6, 0x0e, 0x2c, 0x77,
0x98, 0x4b, 0xcb, 0xbb, 0xc2, 0xa4, 0xbb, 0x00, 0x5b, 0x54, 0x66, 0x97, 0x35, 0x43, 0x75, 0xab,
0x16, 0x1f, 0x73, 0x5b, 0x19, 0xa4, 0xde, 0xb9, 0x01, 0x30, 0x48, 0x1d, 0x49, 0x10, 0xb2, 0xa8,
0xa2, 0xab, 0x0a, 0x71, 0x8a, 0xa6, 0xdc, 0x79, 0xf6, 0x76, 0x3d, 0x03, 0x5d, 0x5d, 0x4f, 0xec,
0x33, 0x01, 0xc4, 0x2b, 0x98, 0x76, 0xdf, 0x52, 0xc8, 0xf8, 0xad, 0x06, 0xb6, 0x28, 0xba, 0x0d,
0x23, 0x8a, 0x69, 0xd6, 0xb4, 0x92, 0x7d, 0x2f, 0xa0, 0xa9, 0x96, 0x93, 0xae, 0xfa, 0xce, 0x30,
0x17, 0xb7, 0x60, 0x9e, 0x83, 0x3f, 0x6f, 0x8a, 0xfe, 0x83, 0xb7, 0xea, 0xc3, 0x8a, 0x17, 0x61,
0x75, 0x1d, 0x99, 0x7d, 0x07, 0x3c, 0x32, 0x9f, 0x07, 0xd8, 0xba, 0x1b, 0x72, 0xaa, 0x5e, 0x7f,
0xe5, 0xb9, 0xcc, 0x20, 0x37, 0x14, 0x6b, 0x5d, 0x0e, 0x97, 0xdd, 0x61, 0xec, 0x23, 0x1f, 0x48,
0xd7, 0x35, 0xab, 0x47, 0x6f, 0xeb, 0x0b, 0x50, 0xfc, 0x38, 0x0c, 0xd6, 0x58, 0x01, 0xb0, 0x9b,
0x4c, 0x0e, 0x9c, 0xf2, 0x8b, 0xbf, 0x0b, 0xca, 0xf6, 0x34, 0x73, 0xad, 0xa9, 0x54, 0xb0, 0x73,
0x67, 0xc3, 0xc7, 0xe8, 0x12, 0x84, 0x0c, 0xa2, 0x62, 0x52, 0x5c, 0xdb, 0x70, 0xfa, 0xb9, 0x44,
0x3b, 0x77, 0x8a, 0x4c, 0xca, 0xcf, 0xc8, 0x1d, 0x2b, 0xca, 0xe1, 0x64, 0x67, 0xc8, 0x5f, 0x1e,
0x79, 0x30, 0xc9, 0xff, 0x05, 0x39, 0x6b, 0x6e, 0x03, 0x9d, 0x80, 0x80, 0x85, 0x15, 0x52, 0xaa,
0xf2, 0x4b, 0x97, 0x70, 0x2e, 0xd8, 0xce, 0x0d, 0x10, 0x9f, 0xa8, 0xca, 0xce, 0x74, 0x8f, 0x3d,
0x03, 0xfb, 0xb1, 0xe7, 0x37, 0x7d, 0xdd, 0x61, 0xc4, 0xe3, 0xfd, 0x29, 0x09, 0x23, 0xef, 0xcd,
0x8b, 0xff, 0x80, 0x37, 0x2f, 0xdd, 0xa6, 0x1b, 0xd8, 0x8f, 0xe9, 0x36, 0xfd, 0x3d, 0xa1, 0xc8,
0x6d, 0xf7, 0x45, 0x84, 0xe2, 0xe5, 0x3e, 0xe3, 0x4d, 0xb7, 0x73, 0x71, 0x72, 0x4a, 0x9c, 0xcc,
0xc4, 0xee, 0xc4, 0x1f, 0xa7, 0xf9, 0xfd, 0x44, 0xd6, 0x63, 0xc0, 0x4e, 0x48, 0xfb, 0x1f, 0x1f,
0xd2, 0x03, 0x9e, 0x90, 0xbe, 0xed, 0x09, 0x69, 0x3b, 0x1c, 0xff, 0xa5, 0x9d, 0x7b, 0x85, 0x9c,
0xdf, 0x29, 0xa4, 0x3b, 0x2e, 0x92, 0xc3, 0xc9, 0xce, 0x70, 0xb7, 0x38, 0x0f, 0xec, 0x25, 0xce,
0x83, 0xfb, 0x71, 0xd6, 0x77, 0x05, 0x40, 0x57, 0x30, 0x5d, 0xc2, 0x75, 0xb3, 0xa6, 0x50, 0xfc,
0x05, 0x38, 0xe9, 0x3a, 0x44, 0x9c, 0x5c, 0xcd, 0xc5, 0xda, 0xfd, 0xdf, 0xf4, 0x8e, 0x85, 0xc6,
0x39, 0x99, 0x78, 0x64, 0xcb, 0xd0, 0x74, 0xe7, 0xac, 0xd8, 0xbf, 0xfb, 0x40, 0xba, 0x82, 0xe9,
0x82, 0xb2, 0x51, 0x33, 0x14, 0xf5, 0xb2, 0x41, 0xea, 0xbc, 0x8d, 0x7f, 0xda, 0xf4, 0xf8, 0x3c,
0xa9, 0x7f, 0x19, 0x8e, 0x6d, 0x9b, 0xf9, 0x2d, 0xd3, 0xd0, 0x2d, 0x8c, 0x5e, 0x84, 0x00, 0x0f,
0x3a, 0xcb, 0xe9, 0x15, 0x8e, 0xef, 0xb8, 0x45, 0xbb, 0x54, 0x3a, 0xe8, 0x3e, 0xb1, 0xee, 0x5b,
0xbc, 0x25, 0x96, 0x87, 0xed, 0xee, 0x62, 0xed, 0xd4, 0xe9, 0xa0, 0x63, 0xdf, 0x12, 0xe0, 0x48,
0x5e, 0x67, 0x27, 0x5a, 0xf5, 0x86, 0xfd, 0x05, 0xc2, 0xf1, 0x1d, 0x0b, 0xf3, 0x72, 0xd1, 0x34,
0x08, 0xe5, 0x2e, 0xea, 0xbc, 0x76, 0x7f, 0x16, 0xe4, 0xc1, 0xf2, 0x82, 0x41, 0x28, 0x3a, 0x01,
0x91, 0x32, 0xa9, 0x17, 0x4d, 0x1b, 0xcf, 0x2d, 0x7e, 0x48, 0x86, 0x32, 0xa9, 0xbb, 0x12, 0xce,
0x40, 0xa8, 0xa5, 0x10, 0x5d, 0xd3, 0x2b, 0xee, 0x25, 0xc6, 0x98, 0x7d, 0x89, 0x11, 0x85, 0x76,
0x2e, 0xfc, 0x50, 0x08, 0xc4, 0xec, 0xf7, 0xa6, 0x83, 0x42, 0x2f, 0x40, 0x00, 0x13, 0x62, 0x74,
0x3e, 0x08, 0x6c, 0x8f, 0x77, 0x30, 0xb1, 0xff, 0x13, 0xe0, 0xc8, 0x25, 0xbc, 0xdd, 0xde, 0xa7,
0x61, 0x40, 0x55, 0xa8, 0xe2, 0x04, 0xd7, 0xd1, 0x3e, 0xc7, 0x2d, 0xf2, 0x0f, 0x3a, 0x32, 0x07,
0x75, 0x6d, 0xd3, 0xb7, 0xcf, 0x6d, 0xfa, 0xf7, 0xb0, 0xcd, 0x4f, 0xfd, 0x70, 0xa4, 0x7b, 0x7f,
0xf6, 0xa6, 0x09, 0xba, 0x0a, 0xe1, 0xb2, 0xfb, 0x8a, 0xf0, 0xbd, 0x0e, 0x67, 0x26, 0x7a, 0xfd,
0xd6, 0xfb, 0x2a, 0x79, 0xae, 0x27, 0xb7, 0x98, 0x51, 0x1a, 0x0e, 0x77, 0x1e, 0x8a, 0xa6, 0x42,
0x9c, 0x93, 0xba, 0xdd, 0xbf, 0xa1, 0x0e, 0x69, 0xc1, 0xa5, 0xb0, 0xdc, 0xdc, 0x73, 0x05, 0xb2,
0xbf, 0xdc, 0xec, 0xdc, 0x81, 0xa0, 0x45, 0x08, 0xe1, 0xb7, 0x95, 0xba, 0xb9, 0x75, 0xe3, 0x94,
0xec, 0xd5, 0x60, 0x5b, 0xdd, 0x53, 0x79, 0x9b, 0xcb, 0xbd, 0xd7, 0x8a, 0x8e, 0xc9, 0x1d, 0x41,
0xd2, 0xd7, 0x04, 0x08, 0x3a, 0x74, 0x34, 0xd5, 0xfd, 0x7d, 0xc1, 0x3e, 0xab, 0x31, 0x26, 0xe2,
0x17, 0x3f, 0x14, 0xba, 0xbf, 0x34, 0xbc, 0x02, 0x83, 0x9a, 0x6e, 0x36, 0xdc, 0x8b, 0x8d, 0x6d,
0xae, 0x58, 0xb7, 0x09, 0x16, 0xd9, 0xe6, 0x41, 0x17, 0x20, 0x60, 0x34, 0x28, 0xe3, 0xf6, 0x6f,
0xcf, 0xbd, 0x6d, 0xa8, 0xc9, 0x0e, 0xd3, 0x36, 0x5e, 0xb6, 0x57, 0xfb, 0xfb, 0xf4, 0xb2, 0xa3,
0xfb, 0xdf, 0xc4, 0xcb, 0xdb, 0xfb, 0x69, 0xaf, 0x5e, 0xde, 0x3e, 0x46, 0x1c, 0xa6, 0xa9, 0xdf,
0x0a, 0x30, 0xd2, 0xf3, 0x11, 0x02, 0x4d, 0x83, 0x78, 0x2d, 0x7f, 0xbb, 0xb8, 0x20, 0xdf, 0xbc,
0x35, 0xb7, 0x38, 0x77, 0x73, 0x7e, 0x6e, 0xfe, 0x4a, 0x71, 0x79, 0xfe, 0xda, 0xfc, 0xcd, 0x37,
0xe6, 0xa3, 0xcf, 0x48, 0x43, 0x8f, 0x36, 0xc7, 0xc3, 0x10, 0x6c, 0xe8, 0xeb, 0xba, 0xd1, 0xd2,
0x51, 0x02, 0x8e, 0xf6, 0x81, 0x67, 0x97, 0x17, 0x97, 0x6e, 0xde, 0x88, 0x0a, 0xd2, 0xa1, 0x47,
0x9b, 0xe3, 0x21, 0x08, 0x94, 0x1a, 0x16, 0x35, 0xea, 0xe8, 0x2c, 0x3c, 0xdb, 0x07, 0x7d, 0xfd,
0xe6, 0xdc, 0x7c, 0x71, 0x31, 0x2f, 0xdf, 0xca, 0xcb, 0x51, 0x9f, 0x34, 0xfa, 0x68, 0x73, 0x7c,
0x08, 0x22, 0x77, 0x0d, 0x4d, 0x9f, 0xb0, 0x30, 0x69, 0x62, 0x82, 0x92, 0x30, 0xde, 0xc7, 0x74,
0xe3, 0xe2, 0xfc, 0xdc, 0xe5, 0xfc, 0xe2, 0x52, 0xd4, 0x2f, 0x0d, 0x3f, 0xda, 0x1c, 0x07, 0x08,
0xd5, 0x15, 0x5d, 0x2b, 0x63, 0x8b, 0x4a, 0xe2, 0xa3, 0xcd, 0xf1, 0x31, 0x51, 0x98, 0x8a, 0xf6,
0x32, 0x4d, 0x7d, 0x22, 0x40, 0xc4, 0xf3, 0x55, 0x04, 0x9d, 0x86, 0x31, 0x86, 0x59, 0xcc, 0xcf,
0x2e, 0xcb, 0x73, 0x4b, 0xb7, 0x77, 0xd6, 0xf0, 0x79, 0x18, 0xed, 0x02, 0xce, 0xdf, 0x9c, 0xcf,
0x47, 0x05, 0x09, 0x1e, 0x6d, 0x8e, 0x07, 0xd8, 0xb1, 0x52, 0xc7, 0xe8, 0x1c, 0x1c, 0xeb, 0x82,
0xc8, 0xf9, 0x8b, 0x97, 0xd8, 0xea, 0x4b, 0xf9, 0xd9, 0xa5, 0xfc, 0xa5, 0xa8, 0x4f, 0x1a, 0x7b,
0xb4, 0x39, 0x1e, 0x85, 0x61, 0x82, 0x15, 0x75, 0x82, 0x25, 0x6d, 0x5c, 0xa2, 0x58, 0xed, 0x63,
0xe3, 0x83, 0x7c, 0x31, 0x7f, 0x3d, 0x7f, 0x23, 0x3f, 0xcf, 0xb4, 0x73, 0xd8, 0xf8, 0xc7, 0x1e,
0x3c, 0x81, 0x6b, 0x98, 0x1f, 0x77, 0xf9, 0xac, 0x28, 0x4c, 0x1d, 0xf2, 0x32, 0x67, 0xbe, 0x3d,
0x02, 0x51, 0xbb, 0x20, 0xca, 0x9d, 0x9f, 0x41, 0xa0, 0xff, 0x17, 0x00, 0x58, 0x95, 0xb5, 0x6b,
0x36, 0xea, 0xfb, 0x70, 0xb1, 0xf3, 0x91, 0x4e, 0x9a, 0xde, 0x13, 0xd6, 0xae, 0xd6, 0xb1, 0xd7,
0xde, 0xfd, 0xf9, 0x6f, 0xfe, 0xc7, 0x37, 0x83, 0x5e, 0x4e, 0xab, 0x24, 0xed, 0x69, 0x64, 0xac,
0xf4, 0xbd, 0x9e, 0xfe, 0x28, 0xd5, 0xfd, 0xfc, 0x20, 0x6d, 0xb7, 0x03, 0xe8, 0x7f, 0x05, 0x08,
0x5d, 0xc1, 0xf6, 0x46, 0x51, 0xbc, 0x77, 0xed, 0x9d, 0x4e, 0xdc, 0xd2, 0x2e, 0xdd, 0x46, 0xec,
0x1a, 0xdf, 0x58, 0x1e, 0xcd, 0x1e, 0x74, 0x63, 0xe9, 0x7b, 0x6e, 0x4b, 0xfd, 0x00, 0xfd, 0xc9,
0xb1, 0xa6, 0xdd, 0xaa, 0xec, 0x62, 0xcd, 0xae, 0x53, 0xc9, 0x2e, 0xd6, 0xec, 0xee, 0x7d, 0x62,
0xff, 0x25, 0xf0, 0x5d, 0xbf, 0x27, 0xa0, 0xd7, 0x9f, 0xc0, 0xb6, 0xd3, 0x76, 0x67, 0xf4, 0xe6,
0x81, 0xbc, 0x63, 0xf3, 0xa2, 0x6f, 0xd8, 0xde, 0xb1, 0xbf, 0xde, 0x3f, 0xd6, 0x3b, 0xde, 0x83,
0xac, 0xb4, 0x4b, 0xd3, 0x16, 0x5b, 0xe5, 0x7a, 0xbe, 0x81, 0x96, 0x9f, 0x9c, 0x9a, 0xe9, 0x7b,
0xee, 0x69, 0xe7, 0x01, 0xfa, 0x4f, 0x1f, 0x44, 0x3c, 0x87, 0x0f, 0x14, 0xdb, 0x66, 0xe3, 0x3d,
0x27, 0x13, 0xe9, 0xf9, 0x1d, 0xb7, 0xec, 0x22, 0x63, 0x1f, 0xd8, 0xee, 0x79, 0x5f, 0x40, 0x5f,
0x15, 0x0e, 0xbe, 0x71, 0x4f, 0x73, 0x9f, 0xea, 0x57, 0xc2, 0x4b, 0xed, 0x28, 0xd4, 0x3d, 0xdd,
0xfb, 0x33, 0x81, 0x1e, 0xf2, 0x9a, 0x2b, 0x92, 0xba, 0xea, 0xbf, 0xef, 0x83, 0xe8, 0x15, 0x4c,
0x97, 0xcd, 0x9a, 0xa6, 0xaf, 0xbb, 0x2d, 0xdb, 0xd4, 0x36, 0x36, 0xd9, 0xe1, 0xb4, 0x23, 0x9d,
0xdc, 0x53, 0x27, 0x14, 0xfb, 0xb1, 0x6d, 0x9f, 0xef, 0x0b, 0xe8, 0x3b, 0x4f, 0x83, 0x7d, 0x3a,
0x6d, 0x86, 0x95, 0x6e, 0x70, 0xd3, 0xa4, 0x55, 0xc7, 0x36, 0x5f, 0xf7, 0xf1, 0xc3, 0xeb, 0x25,
0xa3, 0xa5, 0xff, 0x95, 0x4d, 0xf6, 0xa1, 0x6d, 0xb2, 0x1f, 0x0a, 0xe8, 0x7b, 0x4f, 0x99, 0xc9,
0x54, 0xc7, 0x38, 0x3b, 0x19, 0xcd, 0x6d, 0x1a, 0x9f, 0xa0, 0xd1, 0x1c, 0x91, 0x5f, 0x06, 0xa3,
0x61, 0x5b, 0x95, 0xdc, 0xb9, 0x1f, 0x7c, 0x7a, 0x5c, 0x78, 0x33, 0x5d, 0x31, 0x52, 0xb4, 0x8a,
0x29, 0xff, 0x91, 0x60, 0x4a, 0xc7, 0xb4, 0x65, 0x90, 0xf5, 0x74, 0xf7, 0x0f, 0xe7, 0x9a, 0x67,
0xd3, 0xe6, 0x7a, 0x25, 0x4d, 0xa9, 0x6e, 0xae, 0xad, 0x05, 0xf8, 0x01, 0xf0, 0xec, 0x5f, 0x02,
0x00, 0x00, 0xff, 0xff, 0x76, 0x3a, 0xc0, 0x52, 0x12, 0x29, 0x00, 0x00,
}
// Reference imports to suppress "imported and not used" errors when the
// generated service code below is the only consumer of these packages.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// DeviceRepositoryClient is the client API for DeviceRepository service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DeviceRepositoryClient interface {
// ListBrands lists available end-device brands.
ListBrands(ctx context.Context, in *ListEndDeviceBrandsRequest, opts ...grpc.CallOption) (*ListEndDeviceBrandsResponse, error)
// GetBrand retrieves a single end-device brand.
GetBrand(ctx context.Context, in *GetEndDeviceBrandRequest, opts ...grpc.CallOption) (*EndDeviceBrand, error)
// ListModels lists available end-device models.
ListModels(ctx context.Context, in *ListEndDeviceModelsRequest, opts ...grpc.CallOption) (*ListEndDeviceModelsResponse, error)
// GetModel retrieves a single end-device model.
GetModel(ctx context.Context, in *GetEndDeviceModelRequest, opts ...grpc.CallOption) (*EndDeviceModel, error)
// GetTemplate retrieves an end-device template.
GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*EndDeviceTemplate, error)
// GetUplinkDecoder retrieves the uplink payload decoder.
GetUplinkDecoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadDecoder, error)
// GetDownlinkDecoder retrieves the downlink payload decoder.
GetDownlinkDecoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadDecoder, error)
// GetDownlinkEncoder retrieves the downlink payload encoder.
GetDownlinkEncoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadEncoder, error)
}
// deviceRepositoryClient implements DeviceRepositoryClient over a gRPC
// client connection.
type deviceRepositoryClient struct {
cc *grpc.ClientConn
}

// NewDeviceRepositoryClient returns a DeviceRepositoryClient that issues RPCs
// over cc. The caller retains ownership of cc and is responsible for closing it.
func NewDeviceRepositoryClient(cc *grpc.ClientConn) DeviceRepositoryClient {
return &deviceRepositoryClient{cc}
}
// Each method below performs a unary RPC on the
// /ttn.lorawan.v3.DeviceRepository service: it allocates the response
// message, invokes the full method path on the underlying connection, and
// returns either the populated response or the RPC error.

// ListBrands calls DeviceRepository.ListBrands.
func (c *deviceRepositoryClient) ListBrands(ctx context.Context, in *ListEndDeviceBrandsRequest, opts ...grpc.CallOption) (*ListEndDeviceBrandsResponse, error) {
out := new(ListEndDeviceBrandsResponse)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/ListBrands", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetBrand calls DeviceRepository.GetBrand.
func (c *deviceRepositoryClient) GetBrand(ctx context.Context, in *GetEndDeviceBrandRequest, opts ...grpc.CallOption) (*EndDeviceBrand, error) {
out := new(EndDeviceBrand)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetBrand", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// ListModels calls DeviceRepository.ListModels.
func (c *deviceRepositoryClient) ListModels(ctx context.Context, in *ListEndDeviceModelsRequest, opts ...grpc.CallOption) (*ListEndDeviceModelsResponse, error) {
out := new(ListEndDeviceModelsResponse)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/ListModels", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetModel calls DeviceRepository.GetModel.
func (c *deviceRepositoryClient) GetModel(ctx context.Context, in *GetEndDeviceModelRequest, opts ...grpc.CallOption) (*EndDeviceModel, error) {
out := new(EndDeviceModel)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetModel", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetTemplate calls DeviceRepository.GetTemplate.
func (c *deviceRepositoryClient) GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*EndDeviceTemplate, error) {
out := new(EndDeviceTemplate)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetTemplate", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetUplinkDecoder calls DeviceRepository.GetUplinkDecoder.
func (c *deviceRepositoryClient) GetUplinkDecoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadDecoder, error) {
out := new(MessagePayloadDecoder)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetUplinkDecoder", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetDownlinkDecoder calls DeviceRepository.GetDownlinkDecoder.
func (c *deviceRepositoryClient) GetDownlinkDecoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadDecoder, error) {
out := new(MessagePayloadDecoder)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetDownlinkDecoder", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}

// GetDownlinkEncoder calls DeviceRepository.GetDownlinkEncoder.
func (c *deviceRepositoryClient) GetDownlinkEncoder(ctx context.Context, in *GetPayloadFormatterRequest, opts ...grpc.CallOption) (*MessagePayloadEncoder, error) {
out := new(MessagePayloadEncoder)
err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.DeviceRepository/GetDownlinkEncoder", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// DeviceRepositoryServer is the server API for DeviceRepository service.
// Implementations mirror DeviceRepositoryClient, one handler per RPC.
type DeviceRepositoryServer interface {
// ListBrands lists available end-device brands.
ListBrands(context.Context, *ListEndDeviceBrandsRequest) (*ListEndDeviceBrandsResponse, error)
// GetBrand retrieves a single end-device brand.
GetBrand(context.Context, *GetEndDeviceBrandRequest) (*EndDeviceBrand, error)
// ListModels lists available end-device models.
ListModels(context.Context, *ListEndDeviceModelsRequest) (*ListEndDeviceModelsResponse, error)
// GetModel retrieves a single end-device model.
GetModel(context.Context, *GetEndDeviceModelRequest) (*EndDeviceModel, error)
// GetTemplate retrieves an end-device template.
GetTemplate(context.Context, *GetTemplateRequest) (*EndDeviceTemplate, error)
// GetUplinkDecoder retrieves the uplink payload decoder.
GetUplinkDecoder(context.Context, *GetPayloadFormatterRequest) (*MessagePayloadDecoder, error)
// GetDownlinkDecoder retrieves the downlink payload decoder.
GetDownlinkDecoder(context.Context, *GetPayloadFormatterRequest) (*MessagePayloadDecoder, error)
// GetDownlinkEncoder retrieves the downlink payload encoder.
GetDownlinkEncoder(context.Context, *GetPayloadFormatterRequest) (*MessagePayloadEncoder, error)
}
// UnimplementedDeviceRepositoryServer can be embedded to have forward compatible implementations.
// Every method returns a codes.Unimplemented gRPC status.
type UnimplementedDeviceRepositoryServer struct {
}

func (*UnimplementedDeviceRepositoryServer) ListBrands(ctx context.Context, req *ListEndDeviceBrandsRequest) (*ListEndDeviceBrandsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListBrands not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetBrand(ctx context.Context, req *GetEndDeviceBrandRequest) (*EndDeviceBrand, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetBrand not implemented")
}
func (*UnimplementedDeviceRepositoryServer) ListModels(ctx context.Context, req *ListEndDeviceModelsRequest) (*ListEndDeviceModelsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListModels not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetModel(ctx context.Context, req *GetEndDeviceModelRequest) (*EndDeviceModel, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetModel not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetTemplate(ctx context.Context, req *GetTemplateRequest) (*EndDeviceTemplate, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetTemplate not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetUplinkDecoder(ctx context.Context, req *GetPayloadFormatterRequest) (*MessagePayloadDecoder, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetUplinkDecoder not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetDownlinkDecoder(ctx context.Context, req *GetPayloadFormatterRequest) (*MessagePayloadDecoder, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetDownlinkDecoder not implemented")
}
func (*UnimplementedDeviceRepositoryServer) GetDownlinkEncoder(ctx context.Context, req *GetPayloadFormatterRequest) (*MessagePayloadEncoder, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetDownlinkEncoder not implemented")
}
// RegisterDeviceRepositoryServer registers srv's method handlers with the gRPC server s.
func RegisterDeviceRepositoryServer(s *grpc.Server, srv DeviceRepositoryServer) {
	s.RegisterService(&_DeviceRepository_serviceDesc, srv)
}
// The _DeviceRepository_*_Handler functions below all follow the same
// generated pattern: decode the wire request into the typed message, then
// either call the server method directly (no interceptor) or hand the call
// to the configured unary interceptor with a typed closure.
func _DeviceRepository_ListBrands_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListEndDeviceBrandsRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).ListBrands(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/ListBrands",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).ListBrands(ctx, req.(*ListEndDeviceBrandsRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetBrand_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetEndDeviceBrandRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetBrand(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetBrand",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetBrand(ctx, req.(*GetEndDeviceBrandRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_ListModels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListEndDeviceModelsRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).ListModels(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/ListModels",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).ListModels(ctx, req.(*ListEndDeviceModelsRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetEndDeviceModelRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetModel(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetModel",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetModel(ctx, req.(*GetEndDeviceModelRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetTemplateRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetTemplate(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetTemplate",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetTemplate(ctx, req.(*GetTemplateRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetUplinkDecoder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetPayloadFormatterRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetUplinkDecoder(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetUplinkDecoder",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetUplinkDecoder(ctx, req.(*GetPayloadFormatterRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetDownlinkDecoder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetPayloadFormatterRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetDownlinkDecoder(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetDownlinkDecoder",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetDownlinkDecoder(ctx, req.(*GetPayloadFormatterRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _DeviceRepository_GetDownlinkEncoder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetPayloadFormatterRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DeviceRepositoryServer).GetDownlinkEncoder(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.DeviceRepository/GetDownlinkEncoder",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DeviceRepositoryServer).GetDownlinkEncoder(ctx, req.(*GetPayloadFormatterRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// _DeviceRepository_serviceDesc maps each proto method name to its handler.
// The service has unary methods only (Streams is empty).
var _DeviceRepository_serviceDesc = grpc.ServiceDesc{
	ServiceName: "ttn.lorawan.v3.DeviceRepository",
	HandlerType: (*DeviceRepositoryServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "ListBrands",
			Handler:    _DeviceRepository_ListBrands_Handler,
		},
		{
			MethodName: "GetBrand",
			Handler:    _DeviceRepository_GetBrand_Handler,
		},
		{
			MethodName: "ListModels",
			Handler:    _DeviceRepository_ListModels_Handler,
		},
		{
			MethodName: "GetModel",
			Handler:    _DeviceRepository_GetModel_Handler,
		},
		{
			MethodName: "GetTemplate",
			Handler:    _DeviceRepository_GetTemplate_Handler,
		},
		{
			MethodName: "GetUplinkDecoder",
			Handler:    _DeviceRepository_GetUplinkDecoder_Handler,
		},
		{
			MethodName: "GetDownlinkDecoder",
			Handler:    _DeviceRepository_GetDownlinkDecoder_Handler,
		},
		{
			MethodName: "GetDownlinkEncoder",
			Handler:    _DeviceRepository_GetDownlinkEncoder_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "lorawan-stack/api/devicerepository.proto",
}
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Defaults for all recognised arguments.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# FIX: the original used [[ $# > 0 ]]; inside [[ ]] the '>' operator is a
# *lexicographic string* comparison, not numeric. -gt is the intended test.
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        -h|--help)
            echo "Usage: run_dispnet_training_philly.sh [run_options]"
            echo "Options:"
            echo " -d|--data-dir <path> - directory path to input data (default NONE)"
            echo " -l|--log-dir <path> - directory path to save the log files (default NONE)"
            echo " -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
            echo " -m|--model-dir <path> - directory path to output model file (default NONE)"
            exit 1
            ;;
        -d|--data-dir)
            DATA_DIR="$2"
            shift # past value
            ;;
        -p|--config-file-dir)
            CONFIG_DIR="$2"
            shift # past value
            ;;
        -m|--model-dir)
            MODEL_DIR="$2"
            shift # past value
            ;;
        -l|--log-dir)
            LOG_DIR="$2"
            shift # past value (FIX: this shift was missing, so the log-dir
                  # value was re-parsed on the next iteration as an option)
            ;;
        *)
            echo "Unknown option: $key" >&2
            ;;
    esac
    shift # past argument or value
done

# Prints out the arguments that were passed into the script.
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly.
# Add the root folder of the code to the PYTHONPATH.
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job.
python $CONFIG_DIR/examples/ResNet/cifar-logdense.py \
    --data_dir=$DATA_DIR \
    --log_dir=$LOG_DIR \
    --model_dir=$MODEL_DIR \
    -f=5 \
    --opt_at=-1 \
    --num_classes=10 \
    --growth_rate=32 \
    --num_units=32 \
    --batch_size=32
<gh_stars>1-10
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <syslog.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/un.h>

#include "logger.h"
/* Forward declare functions not in header */
void logger_cleanup(void);
/* Global variables */
char* logger_identifier = NULL;
char* logger_socket = NULL;
int logger_socket_fd = 0;
/* Initialize listener and syslog */
/*
 * Create and bind the logger's UNIX-domain listening socket and open syslog.
 *
 * new_identifier: syslog identity string (copied into logger_identifier).
 * new_socket:     socket path (copied into logger_socket); a leading '\0'
 *                 selects the Linux abstract socket namespace, so no
 *                 filesystem entry is created and nothing needs unlinking.
 *
 * Exits the whole process on socket() or bind() failure.
 */
void logger_server_setup(char* new_identifier, char* new_socket) {
    struct sockaddr_un addr;
    logger_set_identifier(new_identifier);
    logger_set_socket(new_socket);
    if ((logger_socket_fd = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) {
        perror("Socket error");
        exit(EXIT_FAILURE);
    }
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    if (*logger_socket == '\0') {
        /* Abstract namespace: keep the leading NUL, copy the rest after it.
         * The -2 leaves room for both the NUL prefix and the terminator. */
        *addr.sun_path = '\0';
        strncpy(addr.sun_path + 1, logger_socket + 1, sizeof(addr.sun_path) - 2);
    } else {
        /* Filesystem socket: remove any stale file so bind() can succeed. */
        strncpy(addr.sun_path, logger_socket, sizeof(addr.sun_path) - 1);
        unlink(logger_socket);
    }
    if (bind(logger_socket_fd, (struct sockaddr*) &addr, sizeof(addr)) == -1) {
        perror("Bind error");
        exit(EXIT_FAILURE);
    }
    /* LOG_PERROR also echoes messages to stderr; LOG_NDELAY connects now. */
    openlog(logger_identifier, LOG_PERROR | LOG_NDELAY, LOG_USER);
}
/*
 * Accept loop: forks one child per client connection and relays everything
 * the client writes into syslog at LOG_INFO.  Never returns.
 *
 * Fixes over the original:
 *  - the parent now closes its copy of the accepted fd (one descriptor per
 *    connection used to leak);
 *  - fork() failure is handled explicitly (it used to fall through as if
 *    it were the parent, leaking the fd as well).
 */
void logger_listen() {
    int c1 = 0;
    int rc = 0;
    int pid = 0;
    char buf[1024];

    if (listen(logger_socket_fd, 5) == -1) {
        perror("Listen error");
        exit(EXIT_FAILURE);
    }

    while (1) {
        if ((c1 = accept(logger_socket_fd, NULL, NULL)) == -1) {
            perror("Accept error");
            continue;
        }
        pid = fork();
        if (pid == -1) {
            perror("Fork error");
            close(c1);  /* drop the client we cannot serve */
            continue;
        }
        if (!pid) {
            /* Child: stream the client's data into syslog until EOF. */
            while ((rc = read(c1, buf, sizeof(buf))) > 0) {
                syslog(LOG_INFO, "%.*s", rc, buf);
            }
            if (rc == -1) {
                perror("Read error");
                exit(EXIT_FAILURE);
            }
            close(c1);
            exit(EXIT_SUCCESS);
        }
        /* Parent: the child owns the connection now; close our copy.
         * NOTE(review): children are never reaped here, so they remain as
         * zombies — consider a SIGCHLD handler or waitpid(). */
        close(c1);
    }
}
/* Write message to syslog */
// void logger_send(int p);
/* Logger Identifier */
/* Store a private copy of new_identifier as the syslog identity string,
 * releasing any previously stored copy first. */
void logger_set_identifier(char* new_identifier) {
    if (logger_identifier != NULL) {
        free(logger_identifier);
    }
    slutil_strcpy(&logger_identifier, new_identifier);
}
/* Return a heap-allocated copy of the identifier; the caller must free() it. */
char* logger_get_identifier() {
    char* copy = NULL;
    slutil_strcpy(&copy, logger_identifier);
    return copy;
}
/* Logger Socket */
/* Store a private copy of new_socket as the socket path.
 * FIX: the previous value is now freed before being replaced — the original
 * leaked it, inconsistently with logger_set_identifier, which frees first. */
void logger_set_socket(char* new_socket) {
    if (logger_socket != NULL) free(logger_socket);
    slutil_strcpy(&logger_socket, new_socket);
}
/* Return a heap-allocated copy of the socket path; the caller must free() it. */
char* logger_get_socket() {
    char* copy = NULL;
    slutil_strcpy(&copy, logger_socket);
    return copy;
}
/* Shutdown function */
/* Remove the socket file from the filesystem (if any), then release all
 * logger resources via logger_cleanup(). */
void logger_shutdown() {
    char* path = logger_get_socket();
    if (path != NULL) {
        unlink(path);
        free(path);
    }
    logger_cleanup();
}
/* Cleanup function */
/*
 * Free the identifier/socket strings and close the syslog connection.
 * FIX: pointers are reset to NULL after freeing so a second call cannot
 * double-free (the original left them dangling).
 * NOTE(review): logger_socket_fd is not closed here — confirm whether the
 * listening socket should also be closed on cleanup.
 */
void logger_cleanup() {
    free(logger_identifier);   /* free(NULL) is a no-op */
    logger_identifier = NULL;
    free(logger_socket);
    logger_socket = NULL;
    closelog();
}
|
<filename>src/hotplug/AbstractHotplugMonitor.h
#pragma once
#include <list>
#include <string>
#include "hotplug/HotplugListener.h"
#include "util/Loggable.h"
namespace BeeeOn {

class HotplugEvent;

/**
 * Common base class for hotplug monitors. It owns the list of registered
 * HotplugListener instances; the fire* helpers presumably dispatch the
 * given HotplugEvent to those listeners (implementation not in this header).
 */
class AbstractHotplugMonitor : protected Loggable {
public:
	/** Register a listener to be notified about hotplug events. */
	void registerListener(HotplugListener::Ptr listener);
protected:
	AbstractHotplugMonitor();
	/** Log the given event together with the action label (add/remove/...). */
	void logEvent(const HotplugEvent &event, const std::string &action) const;
	void fireAddEvent(const HotplugEvent &event);
	void fireRemoveEvent(const HotplugEvent &event);
	void fireChangeEvent(const HotplugEvent &event);
	void fireMoveEvent(const HotplugEvent &event);
private:
	std::list<HotplugListener::Ptr> m_listeners;  // registered event sinks
};
}
|
/*************************************************************************
*
* CAUTION: DO NOT EDIT THIS FILE -- YOUR CHANGES WILL BE LOST!
*
* This file is generated by config.sh
*
*************************************************************************/
#define REALM_VERSION "0.92.2"

/* Installation layout chosen by config.sh at configure time. */
#define REALM_INSTALL_PREFIX "/usr/local"
#define REALM_INSTALL_EXEC_PREFIX "/usr/local"
#define REALM_INSTALL_INCLUDEDIR "/usr/local/include"
#define REALM_INSTALL_BINDIR "/usr/local/bin"
#define REALM_INSTALL_LIBDIR "/usr/local/lib"
#define REALM_INSTALL_LIBEXECDIR "/usr/local/libexec"

/* Node size limit; debug and release currently use the same value, but the
 * branches are kept separate so they can be tuned independently. */
#ifdef REALM_DEBUG
#  define REALM_MAX_BPNODE_SIZE 1000
#else
#  define REALM_MAX_BPNODE_SIZE 1000
#endif

/* Feature toggles emitted by config.sh: 0/1 literals gate each define. */
#if 0
#  define REALM_ENABLE_ALLOC_SET_ZERO 1
#endif
#if 1
#  define REALM_ENABLE_ENCRYPTION 1
#endif
#if 1
#  define REALM_ENABLE_ASSERTIONS 1
#endif
#define REALM_NULL_STRINGS 0
|
// Default export is TopicFeedHeader wrapped by its connector module.
import TopicFeedHeader from './TopicFeedHeader'
import connector from './TopicFeedHeader.connector'
export default connector(TopicFeedHeader)
|
#!/bin/sh -e
# CI entry point: wipe build artifacts, then run lint checks and tests.
# -e aborts on the first failing command.
rm -rf build
script/check.sh --ci-mode
script/test.sh --ci-mode
# TODO: Finish implementation, then uncomment.
#script/docker/build.sh
|
package com.joyue.tech.gankio.ui;
import android.content.Context;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONTokener;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
public class Utils {

    private static final String FILENAME = "userinfo.json"; // file used to persist user login info
    private static final String TAG = "Utils";

    /**
     * Persists the user login list as a JSON array in the app's private
     * storage, overwriting any previous contents (MODE_PRIVATE).
     *
     * @param context Android context used to open the private file
     * @param users   users to serialize; each entry is written via {@code User.toJSON()}
     * @throws Exception if the file cannot be opened or written
     */
    public static void saveUserList(Context context, ArrayList<User> users) throws Exception {
        Log.i(TAG, "正在保存");
        JSONArray array = new JSONArray();
        for (User user : users) {
            array.put(user.toJSON());
        }
        // try-with-resources guarantees the writer (and the wrapped stream)
        // is closed even if write() throws.
        // NOTE(review): the charset is the platform default on both the read
        // and write side; if changed, switch both sides to UTF-8 together.
        try (Writer writer = new OutputStreamWriter(
                context.openFileOutput(FILENAME, Context.MODE_PRIVATE))) {
            Log.i(TAG, "json的值:" + array.toString());
            writer.write(array.toString());
        }
    }

    /**
     * Loads the persisted user login list from private storage.
     *
     * @param context Android context used to open the private file
     * @return the stored users; empty if the file is missing or unreadable
     */
    public static ArrayList<User> getUserList(Context context) {
        ArrayList<User> users = new ArrayList<User>();
        // try-with-resources fixes the original leak: the input stream and
        // reader were never closed.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(context.openFileInput(FILENAME)))) {
            StringBuilder jsonString = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                jsonString.append(line);
            }
            Log.i(TAG, jsonString.toString());
            // Parse the whole file as a JSON array of user objects.
            JSONArray jsonArray = (JSONArray) new JSONTokener(jsonString.toString()).nextValue();
            for (int i = 0; i < jsonArray.length(); i++) {
                users.add(new User(jsonArray.getJSONObject(i)));
            }
        } catch (Exception e) {
            // Matches the original behavior: every failure (missing file,
            // I/O error, malformed JSON) is logged and an empty/partial
            // list is returned.
            e.printStackTrace();
        }
        return users;
    }
}
|
#include<iostream>
#include<fstream>
#include<vector>
using namespace std;
// The following function prints the lattice to file "output.ppm"
// Writes the lattice as a plain-text PPM (P3) image to vfilename.
// Each lattice site becomes a (vwidth/vlx) x (vheight/vly) block of pixels,
// colored by the site's value, which must be in 0..7 (index into the palette).
// Rows are emitted top-down starting from the highest lattice row.
// FIX: the lattice is now passed by const reference instead of by value,
// avoiding a full copy on every call (call sites are unchanged).
// NOTE(review): integer division means the image is vw*vlx x vh*vly pixels,
// which is smaller than vwidth x vheight when the sizes do not divide evenly.
void Print_lattice (const std::vector<int> &vlat, const int &vlx, const int &vly, const int &vwidth, const int &vheight, const char* vfilename="output.ppm")
{
    const int vw = vwidth / vlx;   // pixel columns per lattice site
    const int vh = vheight / vly;  // pixel rows per lattice site
    // Palette indexed by lattice value.
    int r[8], g[8], b[8];
    r[0]= 255; g[0]= 255; b[0]= 255; // white: use 0 in your lattice for white
    r[1]= 255; g[1]= 0;   b[1]= 0;   // red
    r[2]= 255; g[2]= 128; b[2]= 0;   // orange
    r[3]= 255; g[3]= 255; b[3]= 0;   // yellow
    r[4]= 0;   g[4]= 255; b[4]= 0;   // green
    r[5]= 0;   g[5]= 255; b[5]= 255; // cyan (original comment said "magenta")
    r[6]= 0;   g[6]= 128; b[6]= 255; // light blue
    r[7]= 0;   g[7]= 0;   b[7]= 255; // blue
    std::ofstream out(vfilename);
    out << "P3" << std::endl;
    out << vw*vlx << " " << vh*vly << std::endl;
    out << "255" << std::endl;
    for (int i = vly-1; i >= 0; i--)          // lattice rows, top of image first
        for (int j = 0; j < vh; j++)          // repeat each row vh pixel-rows
            for (int k = 0; k < vlx; k++)     // lattice columns
                for (int l = 0; l < vw; l++)  // repeat each site vw times across
                    out << r[vlat[k+i*vlx]] << " " << g[vlat[k+i*vlx]] << " " << b[vlat[k+i*vlx]] << " ";
    out << std::endl;
    out.close();
}
|
// +build !ignore_autogenerated
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by deepcopy-gen. DO NOT EDIT.
package v1
import (
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// Status is copied by value (shallow); ObjectMeta and Spec are deep-copied.
func (in *InferenceEndpoint) DeepCopyInto(out *InferenceEndpoint) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	out.Status = in.Status
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InferenceEndpoint.
func (in *InferenceEndpoint) DeepCopy() *InferenceEndpoint {
	if in == nil {
		return nil
	}
	out := new(InferenceEndpoint)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *InferenceEndpoint) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// Items, when non-nil, gets a freshly allocated slice with each element deep-copied.
func (in *InferenceEndpointList) DeepCopyInto(out *InferenceEndpointList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	out.ListMeta = in.ListMeta
	if in.Items != nil {
		in, out := &in.Items, &out.Items
		*out = make([]InferenceEndpoint, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InferenceEndpointList.
func (in *InferenceEndpointList) DeepCopy() *InferenceEndpointList {
	if in == nil {
		return nil
	}
	out := new(InferenceEndpointList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *InferenceEndpointList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *InferenceEndpointSpec) DeepCopyInto(out *InferenceEndpointSpec) {
	*out = *in
	in.Resources.DeepCopyInto(&out.Resources)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InferenceEndpointSpec.
func (in *InferenceEndpointSpec) DeepCopy() *InferenceEndpointSpec {
	if in == nil {
		return nil
	}
	out := new(InferenceEndpointSpec)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// The struct copy is sufficient here: the status has no reference-typed fields to deep-copy.
func (in *InferenceEndpointStatus) DeepCopyInto(out *InferenceEndpointStatus) {
	*out = *in
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InferenceEndpointStatus.
func (in *InferenceEndpointStatus) DeepCopy() *InferenceEndpointStatus {
	if in == nil {
		return nil
	}
	out := new(InferenceEndpointStatus)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// Both maps, when non-nil, are reallocated and copied entry by entry.
func (in *ResourceSpec) DeepCopyInto(out *ResourceSpec) {
	*out = *in
	if in.Requests != nil {
		in, out := &in.Requests, &out.Requests
		*out = make(map[string]string, len(*in))
		for key, val := range *in {
			(*out)[key] = val
		}
	}
	if in.Limits != nil {
		in, out := &in.Limits, &out.Limits
		*out = make(map[string]string, len(*in))
		for key, val := range *in {
			(*out)[key] = val
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceSpec.
func (in *ResourceSpec) DeepCopy() *ResourceSpec {
	if in == nil {
		return nil
	}
	out := new(ResourceSpec)
	in.DeepCopyInto(out)
	return out
}
|
<reponame>sebamomann/dein-li-backend<gh_stars>0
// Claims describing an authenticated user (field names follow OIDC standard
// claims: sub, preferred_username, given_name, family_name, ...).
export class User {
    sub: string;               // subject — unique identifier of the user
    email_verified: boolean;   // whether the email address has been verified
    name: string;              // full display name
    preferred_username: string;
    given_name: string;
    family_name: string;
    email: string;
}
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# Registers this module's message types with proto-plus under the
# google.cloud.resourcemanager.v3 proto package; __all__ at the bottom of the
# file is derived from this manifest.
__protobuf__ = proto.module(
    package="google.cloud.resourcemanager.v3",
    manifest={
        "Folder",
        "GetFolderRequest",
        "ListFoldersRequest",
        "ListFoldersResponse",
        "SearchFoldersRequest",
        "SearchFoldersResponse",
        "CreateFolderRequest",
        "CreateFolderMetadata",
        "UpdateFolderRequest",
        "UpdateFolderMetadata",
        "MoveFolderRequest",
        "MoveFolderMetadata",
        "DeleteFolderRequest",
        "DeleteFolderMetadata",
        "UndeleteFolderRequest",
        "UndeleteFolderMetadata",
    },
)
class Folder(proto.Message):
    r"""A folder in an organization's resource hierarchy, used to
    organize that organization's resources.

    Attributes:
        name (str):
            Output only. The resource name of the folder. Its format is
            ``folders/{folder_id}``, for example: "folders/1234".
        parent (str):
            Required. The folder's parent's resource name. Updates to
            the folder's parent must be performed using
            [MoveFolder][google.cloud.resourcemanager.v3.Folders.MoveFolder].
        display_name (str):
            The folder's display name. A folder's display name must be
            unique amongst its siblings. For example, no two folders
            with the same parent can share the same display name. The
            display name must start and end with a letter or digit, may
            contain letters, digits, spaces, hyphens and underscores and
            can be no longer than 30 characters. This is captured by the
            regular expression:
            ``[\p{L}\p{N}]([\p{L}\p{N}_- ]{0,28}[\p{L}\p{N}])?``.
        state (google.cloud.resourcemanager_v3.types.Folder.State):
            Output only. The lifecycle state of the folder. Updates to
            the state must be performed using
            [DeleteFolder][google.cloud.resourcemanager.v3.Folders.DeleteFolder]
            and
            [UndeleteFolder][google.cloud.resourcemanager.v3.Folders.UndeleteFolder].
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when the folder was
            created.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when the folder was
            last modified.
        delete_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when the folder was
            requested to be deleted.
        etag (str):
            Output only. A checksum computed by the
            server based on the current value of the folder
            resource. This may be sent on update and delete
            requests to ensure the client has an up-to-date
            value before proceeding.
    """

    class State(proto.Enum):
        r"""Folder lifecycle states."""
        STATE_UNSPECIFIED = 0
        ACTIVE = 1
        DELETE_REQUESTED = 2

    # Field numbers match the service's proto definition; do not renumber.
    name = proto.Field(proto.STRING, number=1,)
    parent = proto.Field(proto.STRING, number=2,)
    display_name = proto.Field(proto.STRING, number=3,)
    state = proto.Field(proto.ENUM, number=4, enum=State,)
    create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
    update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
    delete_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,)
    etag = proto.Field(proto.STRING, number=8,)
class GetFolderRequest(proto.Message):
    r"""The GetFolder request message.

    Attributes:
        name (str):
            Required. The resource name of the folder to retrieve. Must
            be of the form ``folders/{folder_id}``.
    """

    name = proto.Field(proto.STRING, number=1,)


class ListFoldersRequest(proto.Message):
    r"""The ListFolders request message.

    Attributes:
        parent (str):
            Required. The resource name of the organization or folder
            whose folders are being listed. Must be of the form
            ``folders/{folder_id}`` or ``organizations/{org_id}``.
            Access to this method is controlled by checking the
            ``resourcemanager.folders.list`` permission on the
            ``parent``.
        page_size (int):
            Optional. The maximum number of folders to
            return in the response. If unspecified, server
            picks an appropriate default.
        page_token (str):
            Optional. A pagination token returned from a previous call
            to ``ListFolders`` that indicates where this listing should
            continue from.
        show_deleted (bool):
            Optional. Controls whether folders in the
            [DELETE_REQUESTED][google.cloud.resourcemanager.v3.Folder.State.DELETE_REQUESTED]
            state should be returned. Defaults to false.
    """

    parent = proto.Field(proto.STRING, number=1,)
    page_size = proto.Field(proto.INT32, number=2,)
    page_token = proto.Field(proto.STRING, number=3,)
    show_deleted = proto.Field(proto.BOOL, number=4,)


class ListFoldersResponse(proto.Message):
    r"""The ListFolders response message.

    Attributes:
        folders (Sequence[google.cloud.resourcemanager_v3.types.Folder]):
            A possibly paginated list of folders that are
            direct descendants of the specified parent
            resource.
        next_page_token (str):
            A pagination token returned from a previous call to
            ``ListFolders`` that indicates from where listing should
            continue.
    """

    @property
    def raw_page(self):
        # Used by the client library's paging machinery.
        return self

    folders = proto.RepeatedField(proto.MESSAGE, number=1, message="Folder",)
    next_page_token = proto.Field(proto.STRING, number=2,)
class SearchFoldersRequest(proto.Message):
    r"""The request message for searching folders.

    Attributes:
        page_size (int):
            Optional. The maximum number of folders to
            return in the response. If unspecified, server
            picks an appropriate default.
        page_token (str):
            Optional. A pagination token returned from a previous call
            to ``SearchFolders`` that indicates from where search should
            continue.
        query (str):
            Optional. Search criteria used to select the folders to
            return. If no search criteria is specified then all
            accessible folders will be returned.

            Query expressions can be used to restrict results based upon
            displayName, state and parent, where the operators ``=``
            (``:``) ``NOT``, ``AND`` and ``OR`` can be used along with
            the suffix wildcard symbol ``*``.

            The ``displayName`` field in a query expression should use
            escaped quotes for values that include whitespace to prevent
            unexpected behavior.

            \| Field \| Description \|
            \|-------------------------\|----------------------------------------\|
            \| displayName \| Filters by displayName. \| \| parent \|
            Filters by parent (for example: folders/123). \| \| state,
            lifecycleState \| Filters by state. \|

            Some example queries are:

            -  Query ``displayName=Test*`` returns Folder resources
               whose display name starts with "Test".
            -  Query ``state=ACTIVE`` returns Folder resources with
               ``state`` set to ``ACTIVE``.
            -  Query ``parent=folders/123`` returns Folder resources
               that have ``folders/123`` as a parent resource.
            -  Query ``parent=folders/123 AND state=ACTIVE`` returns
               active Folder resources that have ``folders/123`` as a
               parent resource.
            -  Query ``displayName=\\"Test String\\"`` returns Folder
               resources with display names that include both "Test" and
               "String".
    """

    page_size = proto.Field(proto.INT32, number=1,)
    page_token = proto.Field(proto.STRING, number=2,)
    query = proto.Field(proto.STRING, number=3,)


class SearchFoldersResponse(proto.Message):
    r"""The response message for searching folders.

    Attributes:
        folders (Sequence[google.cloud.resourcemanager_v3.types.Folder]):
            A possibly paginated list of folder search
            results.
        next_page_token (str):
            A pagination token returned from a previous call to
            ``SearchFolders`` that indicates from where searching should
            continue.
    """

    @property
    def raw_page(self):
        # Used by the client library's paging machinery.
        return self

    folders = proto.RepeatedField(proto.MESSAGE, number=1, message="Folder",)
    next_page_token = proto.Field(proto.STRING, number=2,)
class CreateFolderRequest(proto.Message):
    r"""The CreateFolder request message.

    Attributes:
        folder (google.cloud.resourcemanager_v3.types.Folder):
            Required. The folder being created, only the
            display name and parent will be consulted. All
            other fields will be ignored.
    """

    # NOTE: field number is 2, per the service's proto definition.
    folder = proto.Field(proto.MESSAGE, number=2, message="Folder",)


class CreateFolderMetadata(proto.Message):
    r"""Metadata pertaining to the Folder creation process.

    Attributes:
        display_name (str):
            The display name of the folder.
        parent (str):
            The resource name of the folder or
            organization we are creating the folder under.
    """

    display_name = proto.Field(proto.STRING, number=1,)
    parent = proto.Field(proto.STRING, number=2,)


class UpdateFolderRequest(proto.Message):
    r"""The request sent to the
    [UpdateFolder][google.cloud.resourcemanager.v3.Folder.UpdateFolder]
    method.

    Only the ``display_name`` field can be changed. All other fields
    will be ignored. Use the
    [MoveFolder][google.cloud.resourcemanager.v3.Folders.MoveFolder]
    method to change the ``parent`` field.

    Attributes:
        folder (google.cloud.resourcemanager_v3.types.Folder):
            Required. The new definition of the Folder. It must include
            the ``name`` field, which cannot be changed.
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. Fields to be updated. Only the ``display_name``
            can be updated.
    """

    folder = proto.Field(proto.MESSAGE, number=1, message="Folder",)
    update_mask = proto.Field(
        proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
    )


class UpdateFolderMetadata(proto.Message):
    r"""A status object which is used as the ``metadata`` field for the
    Operation returned by UpdateFolder.
    """
class MoveFolderRequest(proto.Message):
r"""The MoveFolder request message.
Attributes:
name (str):
Required. The resource name of the Folder to move. Must be
of the form folders/{folder_id}
destination_parent (str):
Required. The resource name of the folder or organization
which should be the folder's new parent. Must be of the form
``folders/{folder_id}`` or ``organizations/{org_id}``.
"""
name = proto.Field(proto.STRING, number=1,)
destination_parent = proto.Field(proto.STRING, number=2,)
class MoveFolderMetadata(proto.Message):
    r"""Metadata pertaining to the folder move process.

    Attributes:
        display_name (str):
            The display name of the folder.
        source_parent (str):
            The resource name of the folder's parent.
        destination_parent (str):
            The resource name of the folder or
            organization to move the folder to.
    """

    display_name = proto.Field(proto.STRING, number=1,)
    source_parent = proto.Field(proto.STRING, number=2,)
    destination_parent = proto.Field(proto.STRING, number=3,)
class DeleteFolderRequest(proto.Message):
    r"""The DeleteFolder request message.

    Attributes:
        name (str):
            Required. The resource name of the folder to be deleted.
            Must be of the form ``folders/{folder_id}``.
    """

    name = proto.Field(proto.STRING, number=1,)
class DeleteFolderMetadata(proto.Message):
    r"""A status object which is used as the ``metadata`` field for the
    ``Operation`` returned by ``DeleteFolder``.

    This message intentionally carries no fields.
    """
class UndeleteFolderRequest(proto.Message):
    r"""The UndeleteFolder request message.

    Attributes:
        name (str):
            Required. The resource name of the folder to undelete. Must
            be of the form ``folders/{folder_id}``.
    """

    name = proto.Field(proto.STRING, number=1,)
class UndeleteFolderMetadata(proto.Message):
    r"""A status object which is used as the ``metadata`` field for the
    ``Operation`` returned by ``UndeleteFolder``.

    This message intentionally carries no fields.
    """
__all__ = tuple(sorted(__protobuf__.manifest))
|
//#####################################################################
// Copyright 2004, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license
// contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Rendering/PhysBAM_OpenGL/OpenGL/OPENGL_COLOR_RAMP.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
template<class T> OPENGL_COLOR_RAMP<T>::
OPENGL_COLOR_RAMP()
{
}
//#####################################################################
// Destructor
//#####################################################################
template<class T> OPENGL_COLOR_RAMP<T>::
~OPENGL_COLOR_RAMP()
{
}
//#####################################################################
// Function Lookup
//#####################################################################
// Maps scalar x to a color. Control points in color_x are sorted ascending
// (enforced by Add_Color's assert). Each point carries three colors: the one
// used when approaching from below (less), the exact-match color (equal), and
// the one used when approaching from above (greater). Between two points the
// result linearly blends the right point's "less" color with the left point's
// "greater" color; outside the covered range the nearest endpoint color is
// used. An empty ramp falls through to opaque red.
template<class T> OPENGL_COLOR OPENGL_COLOR_RAMP<T>::
Lookup(T x) const
{
    // PhysBAM arrays are 1-based, so index 0 means "no bracketing point found".
    int left_index=0,right_index=0;
    for(int i=1;i<=color_x.m;i++){
        if(x>color_x(i))left_index=i; // x lies above control point i; keep scanning
        else if(x<color_x(i)){right_index=i;break;} // first control point above x
        else return equal_colors(i);} // exact hit on a control point
    if(left_index&&right_index){
        // Blend weight for x within [color_x(left_index), color_x(right_index)].
        T alpha=(x-color_x(left_index))/(color_x(right_index)-color_x(left_index));
        return T(alpha)*less_colors(right_index)+T(1.0-alpha)*greater_colors(left_index);}
    else if(left_index)return greater_colors(left_index); // x above every control point
    else if(right_index)return less_colors(right_index); // x below every control point
    return OPENGL_COLOR(1,0,0); // empty ramp: signal with red
}
//#####################################################################
// Function Add_Color
//#####################################################################
// Appends a control point at x with its below/exact/above colors. Points must
// be added in strictly increasing x order (asserted), which keeps the four
// parallel arrays sorted for Lookup's linear scan.
template<class T> void OPENGL_COLOR_RAMP<T>::
Add_Color(T x,const OPENGL_COLOR& less_color,const OPENGL_COLOR& exact_color,const OPENGL_COLOR& greater_color)
{
    assert(color_x.m==0||x>color_x(color_x.m)); // strictly increasing x required
    color_x.Append(x);
    less_colors.Append(less_color);
    equal_colors.Append(exact_color);
    greater_colors.Append(greater_color);
}
//#####################################################################
// Function Matlab_Jet
//#####################################################################
// Builds a ramp matching MATLAB's "jet" colormap over [value_min,value_max]:
// dark blue -> blue -> cyan -> yellow -> red -> dark red. Caller owns the
// returned ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Matlab_Jet(T value_min,T value_max)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    const T width=value_max-value_min;
    ramp->Add_Color(width*0+value_min,OPENGL_COLOR(0,0,0.5608f));
    ramp->Add_Color(width*(T)0.1406+value_min,OPENGL_COLOR(0,0,1));
    ramp->Add_Color(width*(T)0.3594+value_min,OPENGL_COLOR(0,1,1));
    ramp->Add_Color(width*(T)0.6094+value_min,OPENGL_COLOR(1,1,0));
    ramp->Add_Color(width*(T)0.8594+value_min,OPENGL_COLOR(1,0,0));
    ramp->Add_Color(width*1+value_min,OPENGL_COLOR(0.5f,0,0));
    return ramp;
}
//#####################################################################
// Function Matlab_Hot
//#####################################################################
// Builds a ramp matching MATLAB's "hot" colormap over [value_min,value_max]:
// transparent black -> red -> yellow -> white. Caller owns the returned ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Matlab_Hot(T value_min,T value_max)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    const T width=value_max-value_min;
    ramp->Add_Color(width*0+value_min,OPENGL_COLOR(0,0,0,0));
    ramp->Add_Color(width*(T)0.3750+value_min,OPENGL_COLOR(1,0,0));
    ramp->Add_Color(width*(T)0.7656+value_min,OPENGL_COLOR(1,1,0));
    ramp->Add_Color(width*1+value_min,OPENGL_COLOR(1,1,1));
    return ramp;
}
//#####################################################################
// Function Two_Color_Ramp
//#####################################################################
// Linear blend between two endpoint colors, with separate colors returned for
// exact hits on the endpoints. Caller owns the returned ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Two_Color_Ramp(T value_min,T value_max,const OPENGL_COLOR& color_min,const OPENGL_COLOR& color_min_exact,const OPENGL_COLOR& color_max,const OPENGL_COLOR& color_max_exact)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    ramp->Add_Color(value_min,color_min,color_min_exact);
    ramp->Add_Color(value_max,color_max,color_max_exact);
    return ramp;
}
//#####################################################################
// Function Two_Color_Ramp
//#####################################################################
// Convenience overload: the exact-hit colors are the endpoint colors
// themselves. Caller owns the returned ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Two_Color_Ramp(T value_min,T value_max,const OPENGL_COLOR& color_min,const OPENGL_COLOR& color_max)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    ramp->Add_Color(value_min,color_min,color_min);
    ramp->Add_Color(value_max,color_max,color_max);
    return ramp;
}
//#####################################################################
// Function Levelset_Color_Constant_Ramp
//#####################################################################
// Single control point at zero: values below (and exactly at) zero use
// negative_color, values above use positive_color. Caller owns the ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Levelset_Color_Constant_Ramp(const OPENGL_COLOR& negative_color,const OPENGL_COLOR& positive_color)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    ramp->Add_Color(0,negative_color,negative_color,positive_color);
    return ramp;
}
//#####################################################################
// Function Levelset_Color_Linear_Ramp
//#####################################################################
// Like Levelset_Color_Constant_Ramp but, when abs_value_max>0, fades toward
// transparent black at +/-abs_value_max on either side of zero. Caller owns
// the returned ramp.
template<class T> OPENGL_COLOR_RAMP<T>* OPENGL_COLOR_RAMP<T>::
Levelset_Color_Linear_Ramp(const OPENGL_COLOR& negative_color,const OPENGL_COLOR& positive_color,T abs_value_max)
{
    OPENGL_COLOR_RAMP<T>* ramp=new OPENGL_COLOR_RAMP<T>;
    const bool fade_at_extremes=abs_value_max>0;
    if(fade_at_extremes) ramp->Add_Color(-abs_value_max,OPENGL_COLOR::Gray(0,0));
    ramp->Add_Color(0,negative_color,negative_color,positive_color);
    if(fade_at_extremes) ramp->Add_Color(abs_value_max,OPENGL_COLOR::Gray(0,0));
    return ramp;
}
//#####################################################################
// Explicit template instantiations
//#####################################################################
template class OPENGL_COLOR_RAMP<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class OPENGL_COLOR_RAMP<double>;
#endif
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Integration tests for `flow find-refs --global`. Each case queries a
# (file, line, column) position in the fixture files and asserts the command
# exits successfully; `assert_ok` and `$FLOW` are provided by the surrounding
# test harness, so positions below are load-bearing — keep them in sync with
# the fixture files.

# --- ES6 named exports/imports -----------------------------------------------
echo "ES6 named export:"
# export function foo() {
#                 ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 3 17
echo "Local use of an ES6 named export:"
# foo();
# ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 9 1
echo "ES6 named import:"
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#              ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 14
echo "Use of ES6 named import:"
# foo();
# ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 6 1
echo "Use of ES6 named import through import *:"
# all.foo();
#     ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 22 5
echo "Local name of an aliased ES6 named import:"
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#                                  ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 34
echo "Remote name of an aliased ES6 named import:"
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#                        ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 24
echo "Local use of an aliased ES6 named import:"
# localBaz;
#      ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 24 6
echo "Second local name of an aliased ES6 named import:"
# For some reason this is allowed
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#                                                  ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 50
echo "Second remote name of an aliased ES6 named import:"
# For some reason this is allowed
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#                                          ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 42
echo "Local use of the second name of an aliased ES6 named import:"
# otherBaz;
#      ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 25 6
echo "Named ES6 export created as part of a single declaration with multiple names:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 27 24

# --- ES6 default exports/imports ---------------------------------------------
echo "ES6 default export:"
# export default class Bar {}
#           ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 7 11
echo "ES6 default export from the identifier:"
# export default class Bar {}
#                      ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 7 22
echo "ES6 default export from a use in the file where it is defined:"
# new Bar();
#     ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 25 5
echo "ES6 default import:"
# import Bar, {foo, Foo, baz as localBaz, baz as otherBaz} from './es6-1';
#        ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 3 8
echo "Use of ES6 default import:"
# new Bar();
#      ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 19 6
echo "Use of ES6 default import imported through import *"
# new all.default();
#          ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 20 10
echo "ES6 default export of an async function:"
# export default async function foo() { foo(); }
#                                ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root exportDefaultAsync.js 3 32
echo "ES6 import of default async function:"
# import fooAsync from './exportDefaultAsync';
#          ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root importDefault.js 3 10
echo "ES6 default export of an arbitrary expression:"
# export default (1, function foo() { foo(); });
#         ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root exportDefaultExpr.js 5 9
echo "Function expression exported via export default:"
# export default (1, function foo() { foo(); });
#                              ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root exportDefaultExpr.js 5 30
echo "Function declaration is not shadowed by export default expression:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root exportDefaultExpr.js 9 2
echo
echo "ES6 import of default expression:"
# import foo from './exportDefaultExpr';
#          ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root importDefault.js 4 10
echo "shadowing an export:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 12 15
echo
echo "class method:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-1.js 17 3
echo "class method, queried from a use instead of the definition:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root es6-2.js 17 4
echo "class method that overrides a superclass implementation:"
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root extendsFoo.js 7 3

# --- CommonJS exports/imports ------------------------------------------------
echo "local exported as CJS property:"
# const a = 5;
#       ^
# module.exports.foo = a;
#
# We expect this to return only references to the local. Users can find-refs on `foo` for global
# results.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-1.js 2 7
echo "CJS property exporting local:"
# module.exports.foo = a;
#                ^
#
# This should return downstream references to `foo`, plus related locals in the special case where
# the result of a `require` is immediately destructured.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-1.js 3 16
echo "CJS property exporting literal:"
# module.exports.foo2 = 0;
#                ^
#
# This should behave the same as the previous case since we no longer associate locals with their
# exports.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-1.js 4 16
echo "local exported in CJS object:"
# const b = 4;
#       ^
# module.exports = {bar: b, bar2: 42, bar3};
#
# This should return only the local uses of `b`.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-2.js 2 7
echo "CJS object exporting local:"
# module.exports = {bar: b, bar2: 42, bar3};
#                   ^
#
# This should return downstream references to the exported value `bar` as well as their associated
# locals in some special cases.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-2.js 4 19
echo "CJS object exporting literal:"
# Same as above
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-2.js 4 27
echo "CJS object exporting shorthand:"
# Same as above
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-2.js 4 37
echo "CJS ident exports/imports:"
# const baz = {c: 0};
#       ^
# module.exports = baz;
#
# Should include references only to the local, since we no longer want to associate local variables
# with their exports.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-3.js 2 7
echo "CJS default exports:"
# module.exports = baz;
#           ^
# Should include downstream `require` calls which evaluate to this module, as well as the locals
# that they are bound to in some specific cases.
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-3.js 3 11
echo "CJS default imports:"
# const baz = require('./cjs-3');
#             ^
# Should have the same results as above
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-4.js 4 13
echo "CJS default imports bound to a local:"
# const baz = require('./cjs-3');
#       ^
# Should have the same results as above
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-4.js 4 7

# --- declare statements ------------------------------------------------------
echo "declare var:"
# declare var foo: number;
#             ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 3 14
echo "declare export var:"
# declare export var bar;
#                    ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 6 21
echo "declare function:"
# declare function baz(): void;
#                  ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 9 19
echo "declare class:"
# declare class Foo {};
#               ^
assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 12 16
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-pad/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-pad/7-512+0+512-N-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_first_half_full --eval_function last_element_eval |
//>>excludeStart("jqmBuildExclude", pragmas.jqmBuildExclude);
//>>description: Utility methods for enabling and disabling user scaling (pinch zoom)
//>>label: Zoom Handling
//>>group: Utilities
define( [ "jquery", "./core" ], function( jQuery ) {
//>>excludeEnd("jqmBuildExclude");

// Controls pinch-zoom by rewriting the viewport <meta> tag's content attribute.
(function( $ ) {
	var meta = $( "meta[name=viewport]" ),
		initialContent = meta.attr( "content" ),
		// Appended directives win over earlier ones, so these override the page's own settings.
		disabledZoom = initialContent + ",maximum-scale=1, user-scalable=no",
		enabledZoom = initialContent + ",maximum-scale=10, user-scalable=yes",
		// True when the page itself already forbids zooming; in that case this
		// module never touches the viewport (see the guards below).
		disabledInitially = /(user-scalable[\s]*=[\s]*no)|(maximum-scale[\s]*=[\s]*1)[$,\s]/.test( initialContent );

	$.mobile.zoom = $.extend( {}, {
		enabled: !disabledInitially,
		locked: false,
		// Disable zoom; pass lock=true to keep it disabled until enable(true).
		disable: function( lock ) {
			if ( !disabledInitially && !$.mobile.zoom.locked ) {
				meta.attr( "content", disabledZoom );
				$.mobile.zoom.enabled = false;
				$.mobile.zoom.locked = lock || false;
			}
		},
		// Re-enable zoom; a locked state is only released when unlock === true.
		enable: function( unlock ) {
			if ( !disabledInitially && ( !$.mobile.zoom.locked || unlock === true ) ) {
				meta.attr( "content", enabledZoom );
				$.mobile.zoom.enabled = true;
				$.mobile.zoom.locked = false;
			}
		},
		// Restore the page's original viewport content (also clears enabled flag).
		restore: function() {
			if ( !disabledInitially ) {
				meta.attr( "content", initialContent );
				$.mobile.zoom.enabled = true;
			}
		}
	});
}( jQuery ));

//>>excludeStart("jqmBuildExclude", pragmas.jqmBuildExclude);
});
//>>excludeEnd("jqmBuildExclude");
|
<reponame>dynamicbalaji/hack-for-pink-2020<filename>src/app/make-a-wish/make-a-wish.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { MakeAWishComponent } from './make-a-wish.component';
// Smoke test: the component compiles and can be instantiated in a bare TestBed.
describe('MakeAWishComponent', () => {
  let component: MakeAWishComponent;
  let fixture: ComponentFixture<MakeAWishComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ MakeAWishComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(MakeAWishComponent);
    component = fixture.componentInstance;
    // Trigger initial data binding / ngOnInit before assertions run.
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
<gh_stars>1-10
/**
*
*/
package gui;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import javax.swing.*;
import logic.*;
/**
* @author krist
*Wall to be drawn to the viewers eyes
*/
public class GraphicWall extends AbstractGraphic {

    /** Serialization UID. */
    private static final long serialVersionUID = 5452874926648909735L;

    private static final Logger LOGGER = Logger.getLogger(GraphicWall.class.getName());

    /*
     * Configure the shared logger exactly once. The previous code attached a
     * fresh ConsoleHandler inside every constructor, so each new GraphicWall
     * duplicated all log output and leaked handlers on the static Logger.
     */
    static {
        LOGGER.setLevel(Level.ALL);
        ConsoleHandler handler = new ConsoleHandler();
        handler.setFormatter(new SimpleFormatter());
        handler.setLevel(Level.ALL);
        LOGGER.addHandler(handler);
    }

    /** The wall model element rendered by this graphic; may be null. */
    private Wall wall;

    /**
     * Creates a graphic wall with the shared wall icon and no backing model.
     */
    public GraphicWall() {
        super(IconCollection.getInstance().getWall());
        LOGGER.log(Level.FINE, "GraphicWall created");
    }

    /**
     * Creates a graphic wall bound to the given model wall.
     *
     * @param owall the wall model element to render
     */
    public GraphicWall(Wall owall) {
        super(IconCollection.getInstance().getWall());
        setWall(owall);
        LOGGER.log(Level.FINE, "GraphicWall created");
    }

    /**
     * Drawing the wall.
     *
     * @return the shared wall icon (walls have a single, static appearance)
     */
    @Override
    public ImageIcon draw() {
        return IconCollection.getInstance().getWall(); // It's a wall. What can I say?
    }

    /**
     * @return the wall
     */
    public Wall getWall() {
        return wall;
    }

    /**
     * @param wall the wall to set
     */
    public void setWall(Wall wall) {
        this.wall = wall;
    }
}
|
<reponame>mili-confluent/redux-query
// @flow
import React from 'react';
// Value supplied by the redux-query provider: consumers need a selector that
// extracts the redux-query state branch from the host app's redux store.
type ReduxQueryContextValue = ?{
  queriesSelector: (state: any, ...any) => any,
};

// Defaults to null until a provider supplies a value; consumers should treat
// null as "redux-query not configured" — NOTE(review): confirm against hook usage.
const ReduxQueryContext = React.createContext<ReduxQueryContextValue>(null);

export default ReduxQueryContext;
|
import numpy as np

# Build a 3x3 matrix of uniform random samples drawn from [0, 1) and show it.
matrix = np.random.random((3, 3))
print(matrix)
#!/bin/sh
alias uglify-js=$PWD/bin/uglifyjs
UGLIFY_OPTIONS=$@
minify_in_situ() {
ARGS="$UGLIFY_OPTIONS --validate --in-situ"
DIRS="$1"
echo '> uglify-js' $DIRS $UGLIFY_OPTIONS
for i in `find $DIRS -type f -name '*.js'`
do
ARGS="$ARGS $i"
done
uglify-js $ARGS
}
rm -rf tmp/butternut \
&& git clone https://github.com/Rich-Harris/butternut.git tmp/butternut \
&& cd tmp/butternut \
&& rm -rf .git/hooks \
&& patch -l -p1 <<EOF
--- a/package.json
+++ b/package.json
@@ -25 +24,0 @@
- "prepublish": "npm run test:min",
--- a/rollup.config.js
+++ b/rollup.config.js
@@ -1 +0,0 @@
-import buble from 'rollup-plugin-buble';
@@ -28,6 +26,0 @@ const config = {
- buble({
- include: ['src/**', 'node_modules/acorn/**'],
- transforms: {
- dangerousForOf: true
- }
- }),
EOF
ERR=$?; if [ "$ERR" != "0" ]; then echo "Error: $ERR"; exit $ERR; fi
minify_in_situ "src" \
&& rm -rf node_modules \
&& npm install \
&& rm -rf dist \
&& npm run build \
&& minify_in_situ "dist" \
&& node_modules/.bin/mocha test/test.js
|
const Odoo = require('../lib/index');

// Connection settings — replace the placeholders before running this example.
const odoo = new Odoo({
  url: '<insert server URL>',
  port: '<insert server port default 80>',
  db: '<insert database name>',
  username: '<insert username>',
  password: '<insert password>'
});

// Callback-style example: authenticate, then list metadata for every field on
// res.partner, restricted to the string/help/type attributes.
odoo.connect(function (err) {
  if (err) { return console.log(err); }
  console.log('Connected to Odoo server.');
  // fields_get positional args: fields (empty = all), two unused placeholders,
  // then the list of attributes to return — NOTE(review): confirm arg order
  // against the odoo XML-RPC docs.
  var inParams = [];
  inParams.push([]);
  inParams.push([]);
  inParams.push([]);
  inParams.push(['string', 'help', 'type']); //attributes
  var params = [];
  params.push(inParams);
  odoo.execute_kw('res.partner', 'fields_get', params, function (err, value) {
    if (err) { return console.log(err); }
    console.log('Result: ', value);
  });
});
// Promise-style equivalent of the callback example above: connect, then fetch
// string/help/type metadata for all res.partner fields.
async function listing_record_fields() {
  try {
    const uid = await odoo.promise_connect();
    console.log(`Connected to odoo with UID ${uid}`);
    const fields = await odoo.promise_execute_kw(
      'res.partner',
      'fields_get',
      [],
      { attributes: ['string', 'help', 'type'] }
    );
    console.log(`result: ${fields}`);
  } catch (e) {
    console.error(e);
  }
}
|
import React from 'react';
import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
import HelloWorld from 'app/pages/HelloWorld';
const Routes = () => (
<Router>
<Switch>
<Route path="/" exact component={HelloWorld} />
</Switch>
</Router>
);
export default Routes; |
/**
 * Returns the index of the first element of `arr` strictly equal to `val`,
 * or -1 if no element matches (same contract as Array.prototype.indexOf).
 *
 * @param {Array} arr - array to scan
 * @param {*} val - value to look for
 * @returns {number} index of the first match, or -1
 */
function search(arr, val) {
  for (var i = 0; i < arr.length; i++) {
    // Strict equality avoids type-coercion surprises (e.g. 4 == "4" is true).
    if (arr[i] === val) {
      return i;
    }
  }
  return -1;
}
// Example: find the index of 4 in the array (prints 2).
var result = search([2, 3, 4, 5], 4);
console.log(result);
<reponame>weltam/idylfin
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.maths.lowlevelapi.datatypes.primitive;
/**
 * Essentially just a wrapper for the many sparse matrix format types; concrete
 * storage formats extend this class.
 */
public abstract class SparseMatrixType implements MatrixPrimitive {

  /**
   * Gets the maximum number of non zero entries that occurs across the (row/column) space depending
   * on the most useful direction. The purpose of this variable is to allow the BLAS2/3 level routines
   * to index using 16bit int if possible.
   *
   * @return Returns the maximum number of non zero entries on any (row/column) (takes all rows/columns into account).
   */
  public abstract int getMaxNonZerosInSignificantDirection();
}
|
<filename>tests/test_gmail_service.py
#https://github.com/X/smithy-core/blob/master/smithy/core/tests/test_db.py
#https://github.com/X/calc/blob/master/calc/tests/test_notifications.py
#https://docs.python.org/2/library/unittest.html#unittest.TestCase.assertIsInstance
"""
Tests for `mail_service.gmail_service`.
"""
import unittest
from mail_service.gmail_service import GmailService
from mock import Mock
class CheckAccountV1TestCase(unittest.TestCase):
    """
    Tests for `gmail_service.check_account_v1`.
    """

    def setUp(self):
        # Fresh service instance per test so state cannot leak between cases.
        self.gmail = GmailService()

    def test_check_account_v1(self):
        """
        Tests the account check under 'normal' conditions.

        NOTE(review): every assertion below is commented out, so this test
        currently passes vacuously; restore or replace them before relying
        on this suite.
        """
        #self.assertIsInstance(credentials, SignedJwtAssertionCredentials)
        #self.assertIsInstance(service, Resource)
        #self.assertEqual(credentials.access_token, None)
        #self.assertEqual(http, service._http)
|
#!/usr/bin/env bash
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e
set -o pipefail
set -x

# Repository to update; passed by the calling job.
REPO="$1"

SCRIPT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"

# Pushing to the upstream "aws" org goes through the bot fork; for any other
# owner, origin and upstream are the same. All expansions are quoted so `[`
# does not break under `set -e` when a variable is unset or contains spaces.
if [ "$REPO_OWNER" = "aws" ]; then
    ORIGIN_ORG="eks-distro-pr-bot"
    UPSTREAM_ORG="aws"
else
    ORIGIN_ORG="$REPO_OWNER"
    UPSTREAM_ORG="$REPO_OWNER"
fi

PR_BRANCH="image-tag-update"
if [ "$JOB_TYPE" = "presubmit" ]; then
    PR_BRANCH="image-update-branch"
fi

cd "${SCRIPT_ROOT}/../../../${ORIGIN_ORG}/${REPO}"

# Only set up remotes/branch once; skip when the PR branch is already checked out.
if [ "$(git branch --show-current)" != "$PR_BRANCH" ]; then
    git config --global push.default current
    git config user.name "EKS Distro PR Bot"
    git config user.email "aws-model-rocket-bots+eksdistroprbot@amazon.com"
    git remote add origin "git@github.com:${ORIGIN_ORG}/${REPO}.git"
    git remote add upstream "https://github.com/${UPSTREAM_ORG}/${REPO}.git"
    if [ "$REPO" = "eks-distro-build-tooling" ] && [ "$JOB_TYPE" = "presubmit" ]; then
        # Presubmits on the tooling repo test the PR's own commits.
        git fetch upstream "pull/$PULL_NUMBER/head:image-update-branch"
        git checkout "$PR_BRANCH"
    else
        git fetch upstream
        git checkout upstream/main -b "$PR_BRANCH"
    fi
fi
package com.java.study.algorithm.zuo.dadvanced.advanced_class_06;
/**
* 回文最少分割数
* 【题目】 给定一个字符串str,返回把str全部切成回文子串的最小分割数。 【举例】
* str="ABA"。
* 不需要切割,str本身就是回文串,所以返回0。
* str="ACDCDCDAD"。 最少需要切2次变成3个回文子串,比如"A"、"CDCDC"和"DAD",所以返回2。
*/
public class Code_05_PalindromeMinCut {

    /**
     * Returns the minimum number of cuts needed to partition {@code s} into
     * palindromic substrings (e.g. "ABA" -> 0, "ACDCDCDAD" -> 2).
     *
     * Standard O(n^2) DP: {@code isPal[i][j]} marks palindromic substrings,
     * {@code dp[i]} is the minimum cut count for the suffix starting at i,
     * with the sentinel {@code dp[n] = -1} absorbing the final "cut".
     *
     * @param s input string; null or length &lt; 2 needs no cut
     * @return minimum number of cuts
     */
    public static int minCut(String s) {
        if (s == null || s.length() < 2) {
            return 0;
        }
        char[] str = s.toCharArray();
        int n = str.length;
        int[] dp = new int[n + 1];
        dp[n] = -1; // sentinel: cutting after the last char costs nothing net
        boolean[][] isPal = new boolean[n][n];
        for (int i = n - 1; i >= 0; i--) {
            dp[i] = Integer.MAX_VALUE;
            for (int j = i; j < n; j++) {
                // str[i..j] is a palindrome iff the ends match and the interior
                // (if any) is a palindrome; interiors of length <= 1 are trivial.
                if (str[i] == str[j] && (j - i < 2 || isPal[i + 1][j - 1])) {
                    isPal[i][j] = true;
                    dp[i] = Math.min(dp[i], dp[j + 1] + 1);
                }
            }
        }
        return dp[0];
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.