text
stringlengths 1
1.05M
|
|---|
-- Report the age of the oldest person in each city.
SELECT city,
MAX(age) AS oldest_person_age
FROM people
GROUP BY city;
|
#!/bin/bash
# Install vim/neovim plus language providers, backup dirs, and the dein
# plugin manager. Relies on an `is_osx` helper sourced by the caller.

echo "Installing vim..."
echo

if is_osx; then
    # FIX: removed the dangling line-continuation that ran into the comment
    # below ("brew install \" / "neovim \" / "# ..."), which joined the
    # comment text onto the brew command line.
    brew install neovim
    # Override system vim with macvim
    brew install macvim
else
    echo "Can't automatically install vim and neovim for the current OS."
fi

# Set up backups directories (quoted in case $HOME contains spaces)
mkdir -p "$HOME/.vim/.temp/"
mkdir -p "$HOME/.vim/.undo/"

# Install neovim language providers
echo "Installing neovim providers..."
echo
npm install -g neovim
pip2 install --user neovim  # NOTE(review): pip2/Python 2 is EOL — confirm still needed
pip3 install --user neovim
# gem install neovim

echo "Installing dein..."
echo
# FIX: -f fails on HTTP errors instead of saving an error page as installer.sh
curl -fsSL https://raw.githubusercontent.com/Shougo/dein.vim/master/bin/installer.sh > installer.sh
sh ./installer.sh ~/.vim/dein
rm installer.sh
|
<reponame>ch1huizong/learning
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Show how a heap grows as items are pushed one at a time.

Prints the input data, then the heap's tree layout after each
``heappush`` so the sift-up behaviour is visible.
"""
__version__ = "$Id$"
# end_pymotw_header
import heapq

from heapq_showtree import show_tree
from heapq_heapdata import data

heap = []
# FIX: converted Python-2-only print statements to the print() function so
# the example runs under Python 3 (behaviour under Python 2 was identical
# output, which print() also produces here).
print('random :', data)
print()

for n in data:
    print('add %3d:' % n)
    heapq.heappush(heap, n)
    show_tree(heap)
|
"""trello URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from projects.views import ProjectList, ProjectDetail, ProjectMemberList, ProjectMemberDetail, SendProjectInvite, AcceptProjectInvite

# Routes for the projects app: project CRUD, membership, and invite flow.
urlpatterns = [
path('', ProjectList.as_view()),  # list projects
path('<int:pk>/', ProjectDetail.as_view()),  # single project by primary key
path('<int:pk>/members/', ProjectMemberList.as_view()),  # members of one project
path('members/<int:pk>/', ProjectMemberDetail.as_view()),  # one membership record
path('<int:pk>/invite/', SendProjectInvite.as_view()),  # send an invite for a project
path('join/<str:token>/', AcceptProjectInvite.as_view())  # redeem an invite token
]
|
<reponame>Catsuko/Westward
class OccupiedSpace:
    """A tile space currently held by a single occupant actor.

    Every method returns a (possibly new) root world state instead of
    mutating anything in place.
    """

    def __init__(self, occupant):
        self.occupant = occupant

    def enter(self, actor, origin, tile, root):
        """Entering your own space is a no-op; anyone else interacts."""
        if actor == self.occupant:
            return root
        return self.__interaction(actor, origin, tile, root)

    def leave(self, actor, tile, root):
        """Free this tile when the occupant leaves; other actors are ignored."""
        from .open_space import OpenSpace
        if actor != self.occupant:
            return root
        return root.with_area(tile.with_space(OpenSpace()))

    def update(self, tile, root):
        """Let the occupant take its turn."""
        return root.update_actor(self.occupant, self.__update_occupant)

    def update_actor(self, actor, update_delegate, tile, root):
        """Apply update_delegate only when `actor` is the occupant here."""
        if self.occupant != actor:
            return root
        return update_delegate(self.occupant, tile, root)

    def attempt(self, action, root, *args):
        """Forward an attempted action to the occupant."""
        return self.occupant.attempt(action, root, *args)

    def print_to(self, x, y, media):
        """Render the occupant at (x, y) on the given media."""
        return self.occupant.print_to(x, y, media)

    def __update_occupant(self, actor, tile, root):
        return actor.act(tile, root)

    def __replace(self, occupant, tile, root):
        # NOTE(review): not referenced inside this class — possibly dead code
        # or kept for symmetry; retained for compatibility.
        return root.with_area(tile.with_space(OccupiedSpace(occupant)))

    def __interaction(self, other, origin, tile, root):
        # The occupant first receives the visitor, then the visitor reacts.
        received = self.occupant.receive(other, origin, tile, root)
        return other.interact_with(self.occupant, origin, tile, received)
|
#!/bin/bash
# PhyloPhlAn tutorial: download S. aureus isolate genomes, build a
# species database and configs, and compute a strain-level phylogeny.
# The 15-genome run and GraPhlAn visualization remain commented out.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$SCRIPT_DIR" || exit 1
mkdir -p logs

echo "# Downloading 10 S. aureus isolate genomes"
rm -rf input_isolates
mkdir -p input_isolates
# Column 4 of the TSV holds the download URL; 'sed 1d' drops the header row.
# NOTE(review): 'head -n 11' selects 11 URLs although the message says 10 —
# confirm the intended count before changing it.
for i in $(cut -f4 135_saureus_isolates.tsv | sed 1d | head -n 11); do
    o=$(basename "$i" | cut -f1 -d'.')   # genome id from the URL's filename
    wget "$i" -O "input_isolates/${o}.fna.gz" > /dev/null 2>&1
done

echo "# Generating S. aureus database based on UniRef90"
phylophlan_setup_database \
    -g s__Staphylococcus_aureus \
    --max_proteins 100 \
    --overwrite \
    > /dev/null || exit 1

echo "# Writing default config"
CONFIG_DIR='configs'
phylophlan_write_default_configs.sh "$CONFIG_DIR"

echo "# Writing isolates config file"
phylophlan_write_config_file -o isolates_config.cfg \
    --overwrite \
    -d a \
    --force_nucleotides \
    --db_aa diamond \
    --map_aa diamond \
    --map_dna diamond \
    --msa mafft \
    --trim trimal \
    --tree1 fasttree \
    --tree2 raxml \
    > /dev/null || exit 1

echo "# Building the phylogeny of the 10 S. aureus strains"
phylophlan \
    -i input_isolates \
    -o output_isolates \
    -d s__Staphylococcus_aureus \
    --trim greedy \
    --not_variant_threshold 0.99 \
    --remove_fragmentary_entries \
    --fragmentary_threshold 0.67 \
    --min_num_proteins 1 \
    --min_num_entries 4 \
    --min_num_markers 1 \
    -t a \
    --configs_folder "$CONFIG_DIR" \
    -f isolates_config.cfg \
    --diversity low \
    --force_nucleotides \
    --nproc 2 \
    > /dev/null || exit 1

echo "# Adding 5 S. aureus reference genomes"
phylophlan_get_reference \
    -g s__Staphylococcus_aureus \
    -o input_references \
    -n 5 \
    > /dev/null || exit 1
# Merge the isolates into the references folder for the combined run below.
cp -a input_isolates/* input_references/ || exit 1

echo "# Writing references config file"
phylophlan_write_config_file -o references_config.cfg \
    --overwrite \
    -d a \
    --force_nucleotides \
    --db_aa diamond \
    --map_aa diamond \
    --map_dna diamond \
    --msa mafft \
    --trim trimal \
    --tree1 fasttree \
    --tree2 raxml \
    > /dev/null || exit 1

# echo "# Building the phylogeny of the 15 S. aureus genomes"
# phylophlan.py \
#     -i input_references \
#     -o output_references \
#     -d s__Staphylococcus_aureus \
#     -t a \
#     --configs_folder $CONFIG_DIR \
#     -f references_config.cfg \
#     --nproc 4 \
#     --subsample twentyfivepercent \
#     --diversity low \
#     --fast \
#     > /dev/null || exit 1
# # Visualize the phylogenetic tree with GraPhlAn
# # GraPhlAn is Python2-based and have different requirements than PhyloPhlAn
# echo "GraPhlAn annotate"
# graphlan_annotate.py \
#     --annot graphlan/isolates_annotation.txt \
#     output_isolates/RAxML_bestTree.input_isolates_refined.tre \
#     graphlan/isolates_annotated.xml
# echo "GraPhlAn draw"
# graphlan.py \
#     graphlan/isolates_annotated.xml \
#     graphlan/saureus_isolates.png \
#     --dpi 300
|
#!/bin/bash
# Minimal "build server": clone a GitHub repo, docker build it, push the image.

usage() {
    echo "A simple 'build server' using shell script."
    echo "Run it passing as parameter the Github username and repo to build."
    echo "The repo must contain a Dockerfile for the build to be successfull."
    echo ""
    echo "Sample usage:"
    echo ""
    echo -e "\t$(basename "$0") robsondepaula/shell-guessing-game"
    echo ""
    exit
}

if [ -z "$1" ] ; then
    usage
fi

# FIX: abort on clone/cd/build failure instead of pushing a stale image.
git clone "https://github.com/${1}.git" git/ || exit 1
cd git || exit 1
docker build -t "${1}" . || exit 1
docker push "${1}"
# FIX: the cleanup previously ran from *inside* git/ ("rm -rf git/" right
# after "cd git"), so the checkout was never removed and the next run's
# clone would fail. Step back out before deleting it.
cd ..
rm -rf git/
|
#!/bin/bash
# FIX: shebang was "#!/bin/sh" although the script uses ${BASH_SOURCE[0]},
# a bash-only array that is a "Bad substitution" error under dash/POSIX sh.
# Regenerate the jazzy documentation, then restore the original CWD.

CWD="$(pwd)"
MY_SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
cd "${MY_SCRIPT_PATH}" || exit 1

rm -drf docs
jazzy --github_url https://github.com/bmlt-enabled/Quick-NA-Meeting-Finder \
      --readme ./README.md \
      --theme fullwidth \
      --author BMLT-Enabled \
      --author_url https://bmlt.app \
      --min-acl private \
      --exclude "*/Carthage"
cp icon.png docs/icon.png
cd "${CWD}"
|
#!/bin/bash
#Companion code for the blog https://cloudywindows.com
#call this code direction from the web with:
#bash <(wget -O - https://raw.githubusercontent.com/PowerShell/PowerShell/master/tools/installpsh-debian.sh) ARGUMENTS
#bash <(curl -s https://raw.githubusercontent.com/PowerShell/PowerShell/master/tools/installpsh-debian.sh) <ARGUMENTS>
#Usage - if you do not have the ability to run scripts directly from the web,
#        pull all files in this repo folder and execute, this script
#        automatically prefers local copies of sub-scripts
#Completely automated install requires a root account or sudo with a password requirement
#Switches
# -includeide         - installs VSCode and VSCode PowerShell extension (only relevant to machines with desktop environment)
# -interactivetesting - do a quick launch test of VSCode (only relevant when used with -includeide)
# -skip-sudo-check    - use sudo without verifying its availability (hard to accurately do on some distros)
# -preview            - installs the latest preview release of PowerShell side-by-side with any existing production releases
#gitrepo paths are overrideable to run from your own fork or branch for testing or private distribution
VERSION="1.2.0"
gitreposubpath="PowerShell/PowerShell/master"
gitreposcriptroot="https://raw.githubusercontent.com/$gitreposubpath/tools"
thisinstallerdistro=debian
repobased=true
# FIX: the script's real name is installpsh-debian.sh (see the usage URLs
# above); it was misspelled "installpsh-debian.psh" in the banner variable.
gitscriptname="installpsh-debian.sh"
powershellpackageid=powershell

echo
echo "*** PowerShell Development Environment Installer $VERSION for $thisinstallerdistro"
echo "*** Original script is at: $gitreposcriptroot/$gitscriptname"
echo
echo "*** Arguments used: $*"

# Let's quit on interrupt of subcommands
trap '
trap - INT # restore default INT handler
echo "Interrupted"
kill -s INT "$$"
' INT
#Verify The Installer Choice (for direct runs of this script)
# lowercase STR — print STR converted to lower case.
lowercase(){
    printf '%s\n' "$1" | tr "[:upper:]" "[:lower:]"
}
# Detect the OS and distro family so we can verify this installer applies.
OS=$(lowercase "$(uname)")
if [ "${OS}" == "windowsnt" ]; then
OS=windows
DistroBasedOn=windows
elif [ "${OS}" == "darwin" ]; then
OS=osx
DistroBasedOn=osx
else
OS=$(uname)
if [ "${OS}" == "SunOS" ] ; then
OS=solaris
DistroBasedOn=sunos
elif [ "${OS}" == "AIX" ] ; then
DistroBasedOn=aix
elif [ "${OS}" == "Linux" ] ; then
# Classify the Linux distro family from well-known release files;
# order matters (redhat-release is checked before system-release).
if [ -f /etc/redhat-release ] ; then
DistroBasedOn='redhat'
elif [ -f /etc/system-release ] ; then
DIST=$(sed s/\ release.*// < /etc/system-release)
if [[ $DIST == *"Amazon Linux"* ]] ; then
DistroBasedOn='amazonlinux'
else
DistroBasedOn='redhat'
fi
elif [ -f /etc/SuSE-release ] ; then
DistroBasedOn='suse'
elif [ -f /etc/mandrake-release ] ; then
DistroBasedOn='mandrake'
elif [ -f /etc/debian_version ] ; then
DistroBasedOn='debian'
fi
# UnitedLinux overrides any detection above when its release file exists.
if [ -f /etc/UnitedLinux-release ] ; then
DIST="${DIST}[$( (tr "\n" ' ' | sed s/VERSION.*//) < /etc/UnitedLinux-release )]"
DistroBasedOn=unitedlinux
fi
OS=$(lowercase "$OS")
DistroBasedOn=$(lowercase "$DistroBasedOn")
fi
fi
# Abort when this installer does not match the detected distro family and
# point the user at the generic dispatcher script instead.
if [ "$DistroBasedOn" != "$thisinstallerdistro" ]; then
    # FIX: the URL in the message used a backslash as path separator
    # ("...tools\install-powershell.sh"); URLs use forward slashes.
    echo "*** This installer is only for $thisinstallerdistro and you are running $DistroBasedOn, please run \"$gitreposcriptroot/install-powershell.sh\" to see if your distro is supported AND to auto-select the appropriate installer if it is."
    exit 1
fi
## Check requirements and prerequisites
#Check for sudo if not root
# CI runners are assumed to already have the needed privileges.
if [[ "${CI}" == "true" ]]; then
echo "Running on CI (as determined by env var CI set to true), skipping SUDO check."
set -- "$@" '-skip-sudo-check'
fi
SUDO=''
if (( EUID != 0 )); then
#Check that sudo is available
if [[ ("'$*'" =~ skip-sudo-check) || ("$(whereis sudo)" == *'/'* && "$(sudo -nv 2>&1)" != 'Sorry, user'*) ]]; then
SUDO='sudo'
else
echo "ERROR: You must either be root or be able to use sudo" >&2
# NOTE(review): the exit below is commented out, so the script keeps going
# without sudo after printing the error — confirm whether that is intended.
#exit 5
fi
fi
#Collect any variation details if required for this distro
# shellcheck disable=SC1091
if [[ -f /etc/lsb-release ]]; then
. /etc/lsb-release
DISTRIB_ID=$(lowercase "$DISTRIB_ID")
elif [[ -f /etc/debian_version ]]; then
# Plain Debian has no lsb-release; synthesize the two fields we need.
DISTRIB_ID="debian"
DISTRIB_RELEASE=$(cat /etc/debian_version)
fi
#END Collect any variation details if required for this distro
#If there are known incompatible versions of this distro, put the test, message and script exit here:
#END Verify The Installer Choice
##END Check requirements and prerequisites
echo
echo "*** Installing PowerShell for $DistroBasedOn..."
# curl is needed below for the GitHub API call and the repo key/list setup.
if ! hash curl 2>/dev/null; then
echo "curl not found, installing..."
$SUDO apt-get install -y curl
fi
# The executable to test.
PWSH=pwsh
# -preview switches both the apt package and the binary name to test.
if [[ "'$*'" =~ preview ]] ; then
echo
echo "-preview was used, the latest preview release will be installed (side-by-side with your production release)"
powershellpackageid=powershell-preview
PWSH=pwsh-preview
fi
# Extract the bare version number (strip quotes, "v" prefix and commas)
# from the GitHub "latest release" API response.
currentversion=$(curl https://api.github.com/repos/powershell/powershell/releases/latest | sed '/tag_name/!d' | sed s/\"tag_name\"://g | sed s/\"//g | sed s/v// | sed s/,//g | sed s/\ //g)
echo "*** Current version on git is: $currentversion, repo version may differ slightly..."
echo "*** Setting up PowerShell repo..."
# Import the public repository GPG keys
curl https://packages.microsoft.com/keys/microsoft.asc | $SUDO apt-key add -
#Add the Repo
# Linux Mint has no Microsoft repo of its own; map it to the Ubuntu base.
if [[ "${DISTRIB_ID}" = "linuxmint" ]]; then
    echo "Attempting to remap linuxmint to an appropriate ubuntu version" >&2
    LINUXMINT_VERSION=${DISTRIB_RELEASE}
    #https://en.wikipedia.org/wiki/Linux_Mint_version_history
    case ${LINUXMINT_VERSION} in
        19*)
            DISTRIB_RELEASE=18.04
            ;;
        18*)
            DISTRIB_RELEASE=16.04
            ;;
        17*)
            DISTRIB_RELEASE=14.04
            ;;
        *)
            echo "ERROR: unsupported linuxmint version (${LINUXMINT_VERSION})." >&2
            # FIX: the case above handles 17.x, 18.x and 19.x, but the message
            # previously claimed only version 19 was supported.
            echo "Supported versions: 17, 18, 19" >&2
            echo "For additional versions open an issue or pull request at: https://github.com/powershell/powershell" >&2
            exit 1
            ;;
    esac
    echo "Remapping linuxmint version ${LINUXMINT_VERSION} to ubuntu version ${DISTRIB_RELEASE}" >&2
fi
# Register the Microsoft package repository for the detected distro/release.
case $DISTRIB_ID in
    ubuntu|linuxmint)
        case $DISTRIB_RELEASE in
            18.04|16.10|16.04|15.10|14.04)
                curl https://packages.microsoft.com/config/ubuntu/$DISTRIB_RELEASE/prod.list | $SUDO tee /etc/apt/sources.list.d/microsoft.list
                ;;
            *)
                echo "ERROR: unsupported Ubuntu version ($DISTRIB_RELEASE)." >&2
                echo "Supported versions: 14.04, 15.10, 16.04, 16.10, 18.04." >&2
                echo "For additional versions open an issue or pull request at: https://github.com/powershell/powershell" >&2
                exit 1
                ;;
        esac
        ;;
    debian)
        # Keep only the major version (e.g. "10" from "10.3").
        DISTRIB_RELEASE=${DISTRIB_RELEASE%%.*}
        case $DISTRIB_RELEASE in
            8|9|10|11)
                curl https://packages.microsoft.com/config/debian/$DISTRIB_RELEASE/prod.list | $SUDO tee /etc/apt/sources.list.d/microsoft.list
                ;;
            *)
                echo "ERROR: unsupported Debian version ($DISTRIB_RELEASE)." >&2
                # FIX: the case above accepts 8-11, but the message previously
                # listed only "8, 9".
                echo "Supported versions: 8, 9, 10, 11." >&2
                echo "For additional versions open an issue or pull request at: https://github.com/powershell/powershell" >&2
                exit 1
                ;;
        esac
        ;;
    *)
        echo "ERROR: unsupported Debian-based distribution ($DISTRIB_ID)." >&2
        echo "Supported distributions: Debian, Ubuntu." >&2
        exit 1
        ;;
esac
# Update apt-get
$SUDO apt-get update
# Install PowerShell
$SUDO apt-get install -y ${powershellpackageid}
# Smoke-test the installed binary with a greeting message.
# shellcheck disable=SC2016
$PWSH -noprofile -c '"Congratulations! PowerShell is installed at $PSHOME.
Run `"'"$PWSH"'`" to start a PowerShell session."'
success=$?
if [[ "$success" != 0 ]]; then
echo "ERROR: PowerShell failed to install!" >&2
exit "$success"
fi
# Optionally set up VS Code and its PowerShell extension (-includeide).
if [[ "'$*'" =~ includeide ]] ; then
echo
echo "*** Installing VS Code PowerShell IDE..."
echo "*** Setting up VS Code repo..."
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg
$SUDO mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg
$SUDO sh -c 'echo "deb [arch=amd64] http://packages.microsoft.com/repos/vscode stable main" > /etc/apt/sources.list.d/vscode.list'
$SUDO apt-get update
$SUDO apt-get install -y code
echo
echo "*** Installing VS Code PowerShell Extension"
code --install-extension ms-vscode.PowerShell
# With -interactivetesting, fetch a sample script and open it in VS Code.
if [[ "'$*'" =~ -interactivetesting ]] ; then
echo "*** Loading test code in VS Code"
# FIX: curl's -O takes no argument, so "-O ./testpowershell.ps1 URL" treated
# the local path as a second URL. Use -o <file> to save under that name.
curl -o ./testpowershell.ps1 https://raw.githubusercontent.com/DarwinJS/CloudyWindowsAutomationCode/master/pshcoredevenv/testpowershell.ps1
code ./testpowershell.ps1
fi
fi
# When run from the repo (the default), remind the user that future updates
# come from the package manager, not this script.
if [[ "$repobased" == true ]] ; then
echo
echo "*** NOTE: Run your regular package manager update cycle to update PowerShell"
fi
echo "*** Install Complete"
|
<reponame>prinsmike/go-start
// Package i18n provides internationalization support - not ready yet.
package i18n
// This can be used as independent library

// iso3166_1_alpha2 lazily caches the ISO 3166-1 alpha-2 code -> English
// country name map; it is populated on first call to Countries.
var iso3166_1_alpha2 map[string]string
// EnglishCountryName returns the English name for the given ISO 3166-1
// alpha-2 country code. Unknown codes are returned unchanged.
func EnglishCountryName(code string) string {
	if name, ok := Countries()[code]; ok {
		return name
	}
	return code
}
// Countries returns a map of ISO 3166-1 alpha-2 country codes
// to the corresponding english country name
// NOTE(review): the map is initialized lazily without synchronization;
// concurrent first calls could race — confirm callers are single-threaded.
func Countries() map[string]string {
if iso3166_1_alpha2 == nil {
iso3166_1_alpha2 = map[string]string{
"AD": "Andorra",
"AE": "United Arab Emirates",
"AF": "Afghanistan",
"AG": "Antigua and Barbuda",
"AI": "Anguilla",
"AL": "Albania",
"AM": "Armenia",
"AO": "Angola",
"AQ": "Antarctica",
"AR": "Argentina",
"AS": "American Samoa",
"AT": "Austria",
"AU": "Australia",
"AW": "Aruba",
"AX": "Åland Islands",
"AZ": "Azerbaijan",
"BA": "Bosnia and Herzegovina",
"BB": "Barbados",
"BD": "Bangladesh",
"BE": "Belgium",
"BF": "Burkina Faso",
"BG": "Bulgaria",
"BH": "Bahrain",
"BI": "Burundi",
"BJ": "Benin",
"BL": "Saint Barthélemy",
"BM": "Bermuda",
"BN": "Brunei Darussalam",
"BO": "Bolivia",
"BQ": "Bonaire, Sint Eustatius and Saba",
"BR": "Brazil",
"BS": "Bahamas",
"BT": "Bhutan",
"BV": "Bouvet Island",
"BW": "Botswana",
"BY": "Belarus",
"BZ": "Belize",
"CA": "Canada",
"CC": "Cocos (Keeling) Islands",
"CD": "Congo, the Democratic Republic of the",
"CF": "Central African Republic",
"CG": "Congo",
"CH": "Switzerland",
"CI": "Côte d'Ivoire",
"CK": "Cook Islands",
"CL": "Chile",
"CM": "Cameroon",
"CN": "China",
"CO": "Colombia",
"CR": "Costa Rica",
"CU": "Cuba",
"CV": "Cape Verde",
"CW": "Curaçao",
"CX": "Christmas Island",
"CY": "Cyprus",
"CZ": "Czech Republic",
"DE": "Germany",
"DJ": "Djibouti",
"DK": "Denmark",
"DM": "Dominica",
"DO": "Dominican Republic",
// NOTE(review): the map is incomplete — all entries from DZ onward are
// commented out in the original source (raw Wikipedia-style rows, not
// valid Go map entries) and are kept below unchanged for reference.
/*
"DZ": "Algeria 1974 .dz ISO 3166-2:DZ Code taken from name in Kabyle: Dzayer
"EC": "Ecuador 1974 .ec ISO 3166-2:EC
"EE": "Estonia 1992 .ee ISO 3166-2:EE Code taken from name in Estonian: Eesti
"EG": "Egypt 1974 .eg ISO 3166-2:EG
"EH": "Western Sahara 1974 .eh ISO 3166-2:EH Previous ISO country name: Spanish Sahara (code taken from name in Spanish: Sahara español)
"ER": "Eritrea 1993 .er ISO 3166-2:ER
"ES": "Spain 1974 .es ISO 3166-2:ES Code taken from name in Spanish: España
"ET": "Ethiopia 1974 .et ISO 3166-2:ET
"FI": "Finland 1974 .fi ISO 3166-2:FI
"FJ": "Fiji 1974 .fj ISO 3166-2:FJ
"FK": "Falkland Islands (Malvinas) 1974 .fk ISO 3166-2:FK
"FM": "Micronesia, Federated States of 1986 .fm ISO 3166-2:FM Previous ISO country name: Micronesia
"FO": "Faroe Islands 1974 .fo ISO 3166-2:FO
"FR": "France 1974 .fr ISO 3166-2:FR Includes Clipperton Island
"GA": "Gabon 1974 .ga ISO 3166-2:GA
"GB": "United Kingdom 1974 .gb
"GD": "Grenada 1974 .gd ISO 3166-2:GD
"GE": "Georgia 1992 .ge ISO 3166-2:GE GE previously represented Gilbert and Ellice Islands
"GF": "French Guiana 1974 .gf ISO 3166-2:GF Code taken from name in French: Guyane française
"GG": "Guernsey 2006 .gg ISO 3166-2:GG
"GH": "Ghana 1974 .gh ISO 3166-2:GH
"GI": "Gibraltar 1974 .gi ISO 3166-2:GI
"GL": "Greenland 1974 .gl ISO 3166-2:GL
"GM": "Gambia 1974 .gm ISO 3166-2:GM
"GN": "Guinea 1974 .gn ISO 3166-2:GN
"GP": "Guadeloupe 1974 .gp ISO 3166-2:GP
"GQ": "Equatorial Guinea 1974 .gq ISO 3166-2:GQ Code taken from name in French: Guinée équatoriale
"GR": "Greece 1974 .gr ISO 3166-2:GR
"GS": "South Georgia and the South Sandwich Islands 1993 .gs ISO 3166-2:GS
"GT": "Guatemala 1974 .gt ISO 3166-2:GT
"GU": "Guam 1974 .gu ISO 3166-2:GU
"GW": "Guinea-Bissau 1974 .gw ISO 3166-2:GW
"GY": "Guyana 1974 .gy ISO 3166-2:GY
"HK": "Hong Kong 1974 .hk ISO 3166-2:HK
"HM": "Heard Island and McDonald Islands 1974 .hm ISO 3166-2:HM
"HN": "Honduras 1974 .hn ISO 3166-2:HN
"HR": "Croatia 1992 .hr ISO 3166-2:HR Code taken from name in Croatian: Hrvatska
"HT": "Haiti 1974 .ht ISO 3166-2:HT
"HU": "Hungary 1974 .hu ISO 3166-2:HU
"ID": "Indonesia 1974 .id ISO 3166-2:ID
"IE": "Ireland 1974 .ie ISO 3166-2:IE
"IL": "Israel 1974 .il ISO 3166-2:IL
"IM": "Isle of Man 2006 .im ISO 3166-2:IM
"IN": "India 1974 .in ISO 3166-2:IN
"IO": "British Indian Ocean Territory 1974 .io ISO 3166-2:IO
"IQ": "Iraq 1974 .iq ISO 3166-2:IQ
"IR": "Iran, Islamic Republic of 1974 .ir ISO 3166-2:IR ISO country name follows UN designation (common name: Iran)
"IS": "Iceland 1974 .is ISO 3166-2:IS Code taken from name in Icelandic: Ísland
"IT": "Italy 1974 .it ISO 3166-2:IT
"JE": "Jersey 2006 .je ISO 3166-2:JE
"JM": "Jamaica 1974 .jm ISO 3166-2:JM
"JO": "Jordan 1974 .jo ISO 3166-2:JO
"JP": "Japan 1974 .jp ISO 3166-2:JP
"KE": "Kenya 1974 .ke ISO 3166-2:KE
"KG": "Kyrgyzstan 1992 .kg ISO 3166-2:KG
"KH": "Cambodia 1974 .kh ISO 3166-2:KH Code taken from former name: Khmer Republic
"KI": "Kiribati 1979 .ki ISO 3166-2:KI
"KM": "Comoros 1974 .km ISO 3166-2:KM Code taken from name in Comorian: Komori
"KN": "Saint Kitts and Nevis 1974 .kn ISO 3166-2:KN Previous ISO country name: Saint Kitts-Nevis-Anguilla
"KP": "Korea, Democratic People's Republic of 1974 .kp ISO 3166-2:KP ISO country name follows UN designation (common name: North Korea)
"KR": "Korea, Republic of 1974 .kr ISO 3166-2:KR ISO country name follows UN designation (common name: South Korea)
"KW": "Kuwait 1974 .kw ISO 3166-2:KW
"KY": "Cayman Islands 1974 .ky ISO 3166-2:KY
"KZ": "Kazakhstan 1992 .kz ISO 3166-2:KZ Previous ISO country name: Kazakstan
"LA": "Lao People's Democratic Republic 1974 .la ISO 3166-2:LA ISO country name follows UN designation (common name: Laos)
"LB": "Lebanon 1974 .lb ISO 3166-2:LB
"LC": "Saint Lucia 1974 .lc ISO 3166-2:LC
"LI": "Liechtenstein 1974 .li ISO 3166-2:LI
"LK": "Sri Lanka 1974 .lk ISO 3166-2:LK
"LR": "Liberia 1974 .lr ISO 3166-2:LR
"LS": "Lesotho 1974 .ls ISO 3166-2:LS
"LT": "Lithuania 1992 .lt ISO 3166-2:LT
"LU": "Luxembourg 1974 .lu ISO 3166-2:LU
"LV": "Latvia 1992 .lv ISO 3166-2:LV
"LY": "Libyan Arab Jamahiriya 1974 .ly ISO 3166-2:LY ISO country name follows UN designation (common name: Libya)
"MA": "Morocco 1974 .ma ISO 3166-2:MA Code taken from name in French: Maroc
"MC": "Monaco 1974 .mc ISO 3166-2:MC
"MD": "Moldova, Republic of 1992 .md ISO 3166-2:MD ISO country name follows UN designation (common name and previous ISO country name: Moldova)
"ME": "Montenegro 2006 .me ISO 3166-2:ME
"MF": "Saint Martin (French part) 2007 .mf ISO 3166-2:MF The Dutch part of Saint Martin island is assigned code SX
"MG": "Madagascar 1974 .mg ISO 3166-2:MG
"MH": "Marshall Islands 1986 .mh ISO 3166-2:MH
"MK": "Macedonia, the former Yugoslav Republic of 1993 .mk ISO 3166-2:MK ISO country name follows UN designation (due to Macedonia naming dispute; official name used by country itself: Republic of Macedonia)
"ML": "Mali 1974 .ml ISO 3166-2:ML
"MM": "Myanmar 1989 .mm ISO 3166-2:MM Name changed from Burma (BU)
"MN": "Mongolia 1974 .mn ISO 3166-2:MN
"MO": "Macao 1974 .mo ISO 3166-2:MO Previous ISO country name: Macau
"MP": "Northern Mariana Islands 1986 .mp ISO 3166-2:MP
"MQ": "Martinique 1974 .mq ISO 3166-2:MQ
"MR": "Mauritania 1974 .mr ISO 3166-2:MR
"MS": "Montserrat 1974 .ms ISO 3166-2:MS
"MT": "Malta 1974 .mt ISO 3166-2:MT
"MU": "Mauritius 1974 .mu ISO 3166-2:MU
"MV": "Maldives 1974 .mv ISO 3166-2:MV
"MW": "Malawi 1974 .mw ISO 3166-2:MW
"MX": "Mexico 1974 .mx ISO 3166-2:MX
"MY": "Malaysia 1974 .my ISO 3166-2:MY
"MZ": "Mozambique 1974 .mz ISO 3166-2:MZ
"NA": "Namibia 1974 .na ISO 3166-2:NA
"NC": "New Caledonia 1974 .nc ISO 3166-2:NC
"NE": "Niger 1974 .ne ISO 3166-2:NE
"NF": "Norfolk Island 1974 .nf ISO 3166-2:NF
"NG": "Nigeria 1974 .ng ISO 3166-2:NG
"NI": "Nicaragua 1974 .ni ISO 3166-2:NI
"NL": "Netherlands 1974 .nl ISO 3166-2:NL
"NO": "Norway 1974 .no ISO 3166-2:NO
"NP": "Nepal 1974 .np ISO 3166-2:NP
"NR": "Nauru 1974 .nr ISO 3166-2:NR
"NU": "Niue 1974 .nu ISO 3166-2:NU
"NZ": "New Zealand 1974 .nz ISO 3166-2:NZ
"OM": "Oman 1974 .om ISO 3166-2:OM
"PA": "Panama 1974 .pa ISO 3166-2:PA
"PE": "Peru 1974 .pe ISO 3166-2:PE
"PF": "French Polynesia 1974 .pf ISO 3166-2:PF Code taken from name in French: Polynésie française
"PG": "Papua New Guinea 1974 .pg ISO 3166-2:PG
"PH": "Philippines 1974 .ph ISO 3166-2:PH
"PK": "Pakistan 1974 .pk ISO 3166-2:PK
"PL": "Poland 1974 .pl ISO 3166-2:PL
"PM": "S<NAME> 1974 .pm ISO 3166-2:PM
"PN": "Pitcairn 1974 .pn ISO 3166-2:PN
"PR": "Puerto Rico 1974 .pr ISO 3166-2:PR
"PS": "Palestinian Territory, Occupied 1999 .ps ISO 3166-2:PS Consists of the West Bank and the Gaza Strip
"PT": "Portugal 1974 .pt ISO 3166-2:PT
"PW": "Palau 1986 .pw ISO 3166-2:PW
"PY": "Paraguay 1974 .py ISO 3166-2:PY
"QA": "Qatar 1974 .qa ISO 3166-2:QA
"RE": "Réunion 1974 .re ISO 3166-2:RE
"RO": "Romania 1974 .ro ISO 3166-2:RO
"RS": "Serbia 2006 .rs ISO 3166-2:RS Code taken from official name: Republic of Serbia (see Serbian country codes)
"RU": "Russian Federation 1992 .ru ISO 3166-2:RU ISO country name follows UN designation (common name: Russia)
"RW": "Rwanda 1974 .rw ISO 3166-2:RW
"SA": "Saudi Arabia 1974 .sa ISO 3166-2:SA
"SB": "Solomon Islands 1974 .sb ISO 3166-2:SB Code taken from former name: British Solomon Islands
"SC": "Seychelles 1974 .sc ISO 3166-2:SC
"SD": "Sudan 1974 .sd ISO 3166-2:SD
"SE": "Sweden 1974 .se ISO 3166-2:SE
"SG": "Singapore 1974 .sg ISO 3166-2:SG
"SH": "Saint Helena, Ascension and Tristan da Cunha 1974 .sh ISO 3166-2:SH Previous ISO country name: Saint Helena
"SI": "Slovenia 1992 .si ISO 3166-2:SI
"SJ": "Svalbard and <NAME> 1974 .sj ISO 3166-2:SJ Consists of two arctic territories of Norway: Svalbard and <NAME>en
"SK": "Slovakia 1993 .sk ISO 3166-2:SK SK previously represented Sikkim
"SL": "Sierra Leone 1974 .sl ISO 3166-2:SL
"SM": "San Marino 1974 .sm ISO 3166-2:SM
"SN": "Senegal 1974 .sn ISO 3166-2:SN
"SO": "Somalia 1974 .so ISO 3166-2:SO
"SR": "Suriname 1974 .sr ISO 3166-2:SR
"SS": "South Sudan 2011 .ss ISO 3166-2:SS
"ST": "Sao Tome and Principe 1974 .st ISO 3166-2:ST
"SV": "El Salvador 1974 .sv ISO 3166-2:SV
"SX": "<NAME> (Dutch part) 2010 .sx ISO 3166-2:SX The French part of Saint Martin island is assigned code MF
"SY": "Syrian Arab Republic 1974 .sy ISO 3166-2:SY ISO country name follows UN designation (common name: Syria)
"SZ": "Swaziland 1974 .sz ISO 3166-2:SZ
"TC": "Turks and Caicos Islands 1974 .tc ISO 3166-2:TC
"TD": "Chad 1974 .td ISO 3166-2:TD Code taken from name in French: Tchad
"TF": "French Southern Territories 1979 .tf ISO 3166-2:TF Covers the French Southern and Antarctic Lands except Adélie Land
"TG": "Togo 1974 .tg ISO 3166-2:TG
"TH": "Thailand 1974 .th ISO 3166-2:TH
"TJ": "Tajikistan 1992 .tj ISO 3166-2:TJ
"TK": "Tokelau 1974 .tk ISO 3166-2:TK
"TL": "Timor-Leste 2002 .tl ISO 3166-2:TL Name changed from East Timor (TP)
"TM": "Turkmenistan 1992 .tm ISO 3166-2:TM
"TN": "Tunisia 1974 .tn ISO 3166-2:TN
"TO": "Tonga 1974 .to ISO 3166-2:TO
"TR": "Turkey 1974 .tr ISO 3166-2:TR
"TT": "Trinidad and Tobago 1974 .tt ISO 3166-2:TT
"TV": "Tuvalu 1979 .tv ISO 3166-2:TV
"TW": "Taiwan, Province of China 1974 .tw ISO 3166-2:TW Covers the current jurisdiction of the Republic of China except Kinmen and Lienchiang
"TZ": "Tanzania, United Republic of 1974 .tz ISO 3166-2:TZ ISO country name follows UN designation (common name: Tanzania)
"UA": "Ukraine 1974 .ua ISO 3166-2:UA Previous ISO country name: Ukrainian SSR
"UG": "Uganda 1974 .ug ISO 3166-2:UG
"UM": "United States Minor Outlying Islands 1986 .um ISO 3166-2:UM Consists of nine minor insular areas of the United States: Baker Island, Howland Island, Jarvis Island, Johnston Atoll, Kingman Reef, Midway Islands, Navassa Island, Palmyra Atoll, and Wake Island
"US": "United States 1974 .us ISO 3166-2:US
"UY": "Uruguay 1974 .uy ISO 3166-2:UY
"UZ": "Uzbekistan 1992 .uz ISO 3166-2:UZ
"VA": "Holy See (Vatican City State) 1974 .va ISO 3166-2:VA Covers Vatican City, territory of the Holy See
"VC": "Saint Vincent and the Grenadines 1974 .vc ISO 3166-2:VC
"VE": "Venezuela, Bolivarian Republic of 1974 .ve ISO 3166-2:VE ISO country name follows UN designation (common name and previous ISO country name: Venezuela)
"VG": "Virgin Islands, British 1974 .vg ISO 3166-2:VG
"VI": "Virgin Islands, U.S. 1974 .vi ISO 3166-2:VI
"VN": "Viet Nam 1974 .vn ISO 3166-2:VN ISO country name follows UN spelling (common spelling: Vietnam)
"VU": "Vanuatu 1980 .vu ISO 3166-2:VU Name changed from New Hebrides (NH)
"WF": "Wallis and Futuna 1974 .wf ISO 3166-2:WF
"WS": "Samoa 1974 .ws ISO 3166-2:WS Code taken from former name: Western Samoa
"YE": "Yemen 1974 .ye ISO 3166-2:YE Previous ISO country name: Yemen, Republic of
"YT": "Mayotte 1993 .yt ISO 3166-2:YT
"ZA": "South Africa 1974 .za ISO 3166-2:ZA Code taken from name in Dutch: Zuid-Afrika
"ZM": "Zambia 1974 .zm ISO 3166-2:ZM
"ZW": "Zimbabwe 1980 .zw ISO 3166-2:ZW Name changed from Southern Rhodesia (RH)
*/
}
}
return iso3166_1_alpha2
}
|
<reponame>3dcitydb/web-feature-service<gh_stars>10-100
package vcs.citydb.wfs.config.filter;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlType;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
@XmlType(name="ScalarCapabilitiesType", propOrder={
        "logicalOperators",
        "comparisonOperators"
})
public class ScalarCapabilities {
    // Whether logical operators are advertised; null means "not configured".
    private Boolean logicalOperators;
    @XmlElementWrapper(name="comparisonOperators")
    @XmlElement(name="operator")
    private List<ComparisonOperatorName> comparisonOperators;

    public ScalarCapabilities() {
        // Diamond operator instead of repeating the type argument.
        comparisonOperators = new ArrayList<>();
    }

    /** Returns true when logical operators are explicitly enabled. */
    public boolean isSetLogicalOperators() {
        return logicalOperators != null && logicalOperators;
    }

    public void enableLogicalOperators(boolean enable) {
        logicalOperators = enable;
    }

    /** Returns true when at least one comparison operator is configured. */
    public boolean isSetComparisonOperators() {
        return comparisonOperators != null && !comparisonOperators.isEmpty();
    }

    /** Adds the operator unless it is already present (no duplicates). */
    public void addComparisonOperator(ComparisonOperatorName comparisonOperator) {
        if (!comparisonOperators.contains(comparisonOperator))
            comparisonOperators.add(comparisonOperator);
    }

    public List<ComparisonOperatorName> getComparisonOperators() {
        return comparisonOperators;
    }

    public boolean containsComparisonOperator(ComparisonOperatorName comparisonOperator) {
        return comparisonOperators.contains(comparisonOperator);
    }

    public void setComparisonOperators(List<ComparisonOperatorName> comparisonOperators) {
        this.comparisonOperators = comparisonOperators;
    }

    /** Returns true when every operator in the given set is configured. */
    public boolean containsAll(EnumSet<ComparisonOperatorName> comparisonOperators) {
        return this.comparisonOperators.containsAll(comparisonOperators);
    }
}
|
<gh_stars>0
package sword.android.graphqlnotes;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import java.util.List;
final class NoteListAdapter extends BaseAdapter {

    /** Notes backing this adapter; mutated in place by {@link #appendNote}. */
    private final List<NoteEntry> mEntries;

    /** Inflater created lazily from the first parent's context. */
    private LayoutInflater mInflater;

    NoteListAdapter(List<NoteEntry> entries) {
        mEntries = entries;
    }

    @Override
    public int getCount() {
        return mEntries.size();
    }

    @Override
    public NoteEntry getItem(int position) {
        return mEntries.get(position);
    }

    @Override
    public long getItemId(int position) {
        // List positions double as ids for this simple adapter.
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View row = convertView;
        if (row == null) {
            if (mInflater == null) {
                mInflater = LayoutInflater.from(parent.getContext());
            }
            row = mInflater.inflate(R.layout.note_list_entry, parent, false);
        }
        final TextView title = row.findViewById(R.id.text);
        title.setText(mEntries.get(position).title);
        return row;
    }

    /** Appends a note and refreshes the attached list view. */
    public void appendNote(NoteEntry entry) {
        mEntries.add(entry);
        notifyDataSetChanged();
    }
}
|
-- Seed data for the first store ("Store 1"): manager, addresses and tax area.
-- Uses PostgreSQL data-modifying CTEs: each CTE inserts a row and RETURNs its
-- generated ID so later CTEs can reference it.
WITH STORE_MANAGER_ADDRESS AS(
    -- Home address of the store manager.
    INSERT INTO MAIN.ADDRESS (
        ADDRESS_LINE1,
        CITY,
        STATE,
        ZIP
    )
    VALUES (
        '4321 Hubbard Ave',
        'Columbus',
        'OH',
        '43217'
    )
    RETURNING ID AS STORE_MANAGER_ADDRESS_ID
),
STORE_MANAGER AS(
    -- The manager as a PERSON, linked to the address inserted above.
    INSERT INTO MAIN.PERSON (
        NAME,
        PHONE,
        EMAIL,
        ADDRESS_ID
    )
    SELECT '<NAME>',
        '15555555555',
        '<EMAIL>',
        STORE_MANAGER_ADDRESS_ID
    FROM STORE_MANAGER_ADDRESS
    RETURNING ID AS STORE_MANAGER_PERSON_ID
),
STORE_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (ADDRESS_LINE1, CITY, STATE, ZIP)
    VALUES ('1234 Main St', 'Columbus', 'OH', '43201')
    RETURNING ID AS STORE_ADDRESS_ID
),
STORE_TAX_AREA AS(
    INSERT INTO MAIN.TAX_AREA (ZIP_CODE, TAX_RATE)
    VALUES ('43201', 0.075)
    RETURNING ID AS TAX_AREA_ID
),
-- Top-level statement: the store row itself, tying the three CTEs together.
-- The CROSS JOINs are safe because each CTE returns exactly one row.
-- NOTE(review): unlike the second seed script in this file, this INSERT omits
-- STORE_NUMBER -- confirm that column is nullable or has a default.
INSERT INTO MAIN.STORE (
    NAME,
    PHONE,
    EMAIL,
    MANAGER_ID,
    ADDRESS_ID,
    TAX_AREA_ID
)
SELECT 'Store 1',
    '1231231234',
    '<EMAIL>',
    STORE_MANAGER.STORE_MANAGER_PERSON_ID,
    STORE_ADDRESS.STORE_ADDRESS_ID,
    STORE_TAX_AREA.TAX_AREA_ID
FROM STORE_MANAGER
CROSS JOIN STORE_ADDRESS
CROSS JOIN STORE_TAX_AREA;
-- TODO: Should we have a store to tax area mapping? I think this would be a good change for the student to make
-- TODO Add Store and Manager 2
-- From the Scripts
-- Seed data for the second store ("Love you Latte"): manager, one employee,
-- two customers, three products and five transactions.  All rows are created
-- in one statement via chained data-modifying CTEs; in PostgreSQL every
-- data-modifying CTE executes exactly once even if not referenced by the
-- final SELECT.
WITH STORE_MANAGER_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (ADDRESS_LINE1, CITY, STATE, ZIP)
    VALUES ('201 N. High St', 'Columbus', 'OH', '43201')
    RETURNING ID AS STORE_MANAGER_ADDRESS_ID
),
STORE_MANAGER AS(
    INSERT INTO MAIN.PERSON (NAME, PHONE, EMAIL, ADDRESS_ID)
    SELECT '<NAME>',
        '15555555550',
        '<EMAIL>',
        STORE_MANAGER_ADDRESS_ID
    FROM STORE_MANAGER_ADDRESS
    RETURNING ID AS STORE_MANAGER_PERSON_ID
),
STORE_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (ADDRESS_LINE1, CITY, STATE, ZIP)
    VALUES (
        '335 Coffee Way',
        'Nowhereville',
        'OH',
        '35465'
    )
    RETURNING ID AS STORE_ADDRESS_ID
),
STORE_TAX_AREA AS(
    -- NOTE(review): same ZIP/rate as the first script; this creates a second
    -- TAX_AREA row for '43201' -- confirm duplicates are acceptable.
    INSERT INTO MAIN.TAX_AREA (ZIP_CODE, TAX_RATE)
    VALUES ('43201', 0.075)
    RETURNING ID AS TAX_AREA_ID
),
-- The store itself; later CTEs reference LOVE_YOU_LATTE.ID.
LOVE_YOU_LATTE as(
    INSERT INTO MAIN.STORE (
        STORE_NUMBER,
        NAME,
        PHONE,
        EMAIL,
        MANAGER_ID,
        ADDRESS_ID,
        TAX_AREA_ID
    )
    SELECT 121,
        'Love you Latte',
        '13334636678',
        '<EMAIL>',
        STORE_MANAGER.STORE_MANAGER_PERSON_ID,
        STORE_ADDRESS.STORE_ADDRESS_ID,
        STORE_TAX_AREA.TAX_AREA_ID
    FROM STORE_MANAGER
    CROSS JOIN STORE_ADDRESS
    CROSS JOIN STORE_TAX_AREA
    RETURNING ID as ID
),
-- Employee: Jim Wilson, a barista at the new store.
JIM_WILSON_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (ADDRESS_LINE1, CITY, STATE, ZIP)
    VALUES ('1212 Test Way', 'Nowhereville', 'OH', '35645')
    RETURNING ID AS ADDRESS_ID
),
JIM_WILSON AS(
    INSERT INTO MAIN.PERSON (
        NAME,
        PHONE,
        EMAIL,
        ADDRESS_ID
    )
    SELECT 'JIM_WILSON',
        '15555555555',
        '<EMAIL>',
        ADDRESS_ID
    FROM JIM_WILSON_ADDRESS
    RETURNING ID AS ID
),
BARISTA_1 AS(
    INSERT INTO MAIN.EMPLOYEE(
        PAYRATE,
        IS_ACTIVE,
        STORE_ID,
        PERSON_ID
    )
    SELECT 10.50,
        TRUE,
        LOVE_YOU_LATTE.ID,
        JIM_WILSON.ID
    FROM LOVE_YOU_LATTE
    CROSS JOIN JIM_WILSON
    RETURNING ID AS ID
),
-- CUSTOMER
WILL_SMITH_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (
        ADDRESS_LINE1,
        CITY,
        STATE,
        ZIP
    )
    VALUES (
        '12 Main St',
        'Nowhereville',
        'OH',
        '35465'
    )
    RETURNING ID AS ID
),
-- NOTE(review): Will Smith gets a PERSON row but no CUSTOMER row below --
-- confirm whether that is intentional.
WILL_SMITH AS(
    INSERT INTO MAIN.PERSON (NAME, PHONE, EMAIL, ADDRESS_ID)
    SELECT '<NAME>',
        '13334345545',
        '<EMAIL>',
        ID
    FROM WILL_SMITH_ADDRESS
    RETURNING ID AS ID
),
BILL_JONES_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (
        ADDRESS_LINE1,
        CITY,
        STATE,
        ZIP
    )
    VALUES (
        '13 Main St',
        'Nowhereville',
        'OH',
        '35465'
    )
    RETURNING ID AS ID
),
BILL_JONES AS(
    INSERT INTO MAIN.PERSON (NAME, PHONE, EMAIL, ADDRESS_ID)
    SELECT '<NAME>',
        '13334345545',
        '<EMAIL>',
        ID
    FROM BILL_JONES_ADDRESS
    RETURNING ID AS ID
),
BILL_JONES_CUSTOMER AS(
    INSERT INTO MAIN.CUSTOMER (STORE_ID, PERSON_ID)
    SELECT LOVE_YOU_LATTE.ID,
        BILL_JONES.ID
    FROM LOVE_YOU_LATTE
    CROSS JOIN BILL_JONES
    RETURNING ID AS ID
),
-- NOTE(review): CTE name has a typo ("ADMAS" for "ADAMS"); harmless since it
-- is used consistently, but worth fixing in a follow-up.
JESSIE_ADMAS_ADDRESS AS(
    INSERT INTO MAIN.ADDRESS (
        ADDRESS_LINE1,
        CITY,
        STATE,
        ZIP
    )
    VALUES (
        '21 Right About way',
        'Westernville',
        'OH',
        '20210'
    )
    RETURNING ID AS ID
),
JESSIE_ADAMS AS(
    INSERT INTO MAIN.PERSON (NAME, PHONE, EMAIL, ADDRESS_ID)
    SELECT '<NAME>',
        '13334345546',
        '<EMAIL>',
        ID
    FROM JESSIE_ADMAS_ADDRESS
    RETURNING ID AS ID
),
JESSIE_ADAMS_CUSTOMER AS(
    INSERT INTO MAIN.CUSTOMER (STORE_ID, PERSON_ID)
    SELECT LOVE_YOU_LATTE.ID,
        JESSIE_ADAMS.ID
    FROM LOVE_YOU_LATTE
    CROSS JOIN JESSIE_ADAMS
    RETURNING ID AS ID
),
-- Product catalogue.
JAMAICAN_LIGHT_ROAST AS(
    INSERT INTO MAIN.PRODUCT (DESCRIPTION, CURRENT_ITEM_PRICE)
    VALUES ('Jamaican Light Roast', 3.99)
    RETURNING ID AS ID
),
BLACK_COFFEE AS(
    INSERT INTO MAIN.PRODUCT (DESCRIPTION, CURRENT_ITEM_PRICE)
    VALUES ('Black Coffee', 2.99)
    RETURNING ID AS ID
),
CHAI_TEA AS(
    INSERT INTO MAIN.PRODUCT (DESCRIPTION, CURRENT_ITEM_PRICE)
    VALUES ('Chai Tea', 5.99)
    RETURNING ID AS ID
),
-- Five sample transactions with their line items.
TRANSACTION_1 AS(
    INSERT INTO MAIN.TRANSACTION (CUSTOMER_ID)
    SELECT ID FROM BILL_JONES_CUSTOMER
    RETURNING ID AS ID
),
TRANSACTION_PRODUCT_1 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_1.ID,
        JAMAICAN_LIGHT_ROAST.ID,
        1
    FROM TRANSACTION_1
    CROSS JOIN JAMAICAN_LIGHT_ROAST
    RETURNING ID AS ID
),
TRANSACTION_PRODUCT_2 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_1.ID,
        BLACK_COFFEE.ID,
        2
    FROM TRANSACTION_1
    CROSS JOIN BLACK_COFFEE
    RETURNING ID AS ID
),
TRANSACTION_2 AS(
    INSERT INTO MAIN.TRANSACTION (CUSTOMER_ID)
    SELECT ID FROM JESSIE_ADAMS_CUSTOMER
    RETURNING ID AS ID
),
TRANSACTION_2_PRODUCT_1 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_2.ID,
        CHAI_TEA.ID,
        1
    FROM TRANSACTION_2
    CROSS JOIN CHAI_TEA
    RETURNING ID AS ID
),
TRANSACTION_3 AS(
    INSERT INTO MAIN.TRANSACTION (CUSTOMER_ID)
    SELECT ID FROM JESSIE_ADAMS_CUSTOMER
    RETURNING ID AS ID
),
TRANSACTION_3_PRODUCT_1 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_3.ID,
        CHAI_TEA.ID,
        1
    FROM TRANSACTION_3
    CROSS JOIN CHAI_TEA
    RETURNING ID AS ID
),
TRANSACTION_4 AS(
    INSERT INTO MAIN.TRANSACTION (CUSTOMER_ID)
    SELECT ID FROM BILL_JONES_CUSTOMER
    RETURNING ID AS ID
),
TRANSACTION_4_PRODUCT_1 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_4.ID,
        BLACK_COFFEE.ID,
        1
    FROM TRANSACTION_4
    CROSS JOIN BLACK_COFFEE
    RETURNING ID AS ID
),
TRANSACTION_5 AS(
    INSERT INTO MAIN.TRANSACTION (CUSTOMER_ID)
    SELECT ID FROM BILL_JONES_CUSTOMER
    RETURNING ID AS ID
),
TRANSACTION_5_PRODUCT_1 AS(
    INSERT INTO MAIN.TRANSACTION_PRODUCT (TRANSACTION_ID, PRODUCT_ID, QUANTITY)
    SELECT TRANSACTION_5.ID,
        BLACK_COFFEE.ID,
        3
    FROM TRANSACTION_5
    CROSS JOIN BLACK_COFFEE
    RETURNING ID AS ID
)
-- Final statement only surfaces the new store address; all CTEs above still
-- execute (data-modifying CTEs run whether or not they are referenced).
SELECT * FROM STORE_ADDRESS;
|
#!/bin/bash
# Run the shared Yosys script from the parent directory, logging to yosys.log.
yosys -L yosys.log ../run_script.ys
|
<gh_stars>0
-- Anonymous PL/SQL block: initialise the session for branch '010' and run the
-- GIDPRSIF generation service, logging failures through Debug.Pr_Debug.
Declare
    -- 'T' presumably flags end-of-input -- confirm against Sttm_Branch usage.
    Stage Sttm_Branch.End_Of_Input%Type := 'T';
Begin
    Global.Pr_Init('010', 'FATEKWANA');
    -- Fn_Gidprsif returns a boolean success flag rather than raising.
    If Not Gipks_Gi_Service.Fn_Gidprsif(Global.Current_Branch, Stage)
    Then
        Debug.Pr_Debug('CL', 'Failed in generation')
    End If;
Exception
    When Others Then
        -- Catch-all: unexpected errors are only logged, not re-raised.
        Debug.Pr_Debug('CL', 'Failed in in exception WOT');
End;
/
|
import { LocalizationLanguage } from './enums';
/** A URL string. */
export type Link = string;

/** Pairs a language with the string translated into that language. */
export type LocalizedKeyValuePair = [LocalizationLanguage, LocalizedString];

/** A string in one specific localization language. */
export type LocalizedString = string;

/** Localization keys for the supported language proficiency levels. */
export type LanguageSkill = 'language-skill-Elementary' | 'language-skill-Limited' | 'language-skill-Professional' | 'language-skill-Native';
|
#!/bin/bash
# Bug fix: this script relies on bash-only features ('function' keyword,
# 'set -o pipefail', an ERR trap and the RSYNC_PROTECT_TMP_FILES array below),
# so the shebang must request bash, not POSIX sh.
set -e
set -u
set -o pipefail

# Report the script path and failing line number when any command errors.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the source: build-products path, basename within build products,
  # or a literal path, in that order.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Fall back for non-.framework payloads; follow a symlinked binary.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # Bug fix: the echoed command previously said "${basename}.framework.dSYM"
      # while the executed rsync below copies "${basename}.dSYM"; the log now
      # matches what actually runs.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    # Signal to the caller (install_dsym) that nothing was stripped.
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Bug fix: the inner double quotes of this echo were unescaped, so the
    # quoting silently ended mid-string and the message printed without its
    # quotes; escape them the same way every other echoed rsync line does.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is expanded and signing is both required and allowed.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # In parallel mode the command is backgrounded; the trailer waits for it.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Embed the vendored frameworks for each build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/WJLSDK/WJLSDK.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/WJLSDK/WJLSDK.framework"
fi
# When signing was backgrounded (parallel mode), wait for all jobs to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import React from "react";
class App extends React.Component {
state = {
data: [],
isLoading: true
};
componentDidMount() {
fetch("URL_TO_YOUR_API_HERE")
.then(res => res.json())
.then(data => {
this.setState({
data: data,
isLoading: false
});
});
}
render() {
if (this.state.isLoading) {
return <h2>Loading ...</h2>;
}
return (
<>
{this.state.data.map(item => (
<h2 key={item.id}>{item.title}</h2>
))}
</>
);
}
}
export default App;
|
<gh_stars>0
#include <iostream>
using namespace std;
// Demonstrates how bool values stream and increment.
int main()
{
    bool b;
    b = 0; // the int 0 converts to false
    cout << b << "\n"; // prints 0 (bools stream as 0/1 unless boolalpha is set)
    b = false;
    cout << b << "\n"; // prints 0
    b++; // increment sets b to true (deprecated since C++11, removed in C++17)
    cout << b << "\n"; // prints 1
    b++; // incrementing a true bool keeps it true
    cout << b << "\n"; // prints 1
    // b--; // error
    // cout << b << "\n";
}
|
<filename>spring-aop-1/src/main/java/annotation/InvokeLogMethod.java
package annotation;
import java.lang.annotation.*;
/**
 * Marks a method whose invocations should be logged: before and after the
 * call, on exception, and when the result is returned.
 *
 * @author hlc
 */
@Target({ElementType.METHOD}) // may only be applied to methods
@Retention(RetentionPolicy.RUNTIME) // retained at runtime so it can be read reflectively
@Documented // include this annotation in generated javadoc
public @interface InvokeLogMethod {
}
|
import { createNavigationContainer } from 'react-navigation';
/**
 * Wraps a react-navigation container so that `dispatch` applies the router
 * reducer synchronously, keeping `this._nav` up to date even before React's
 * async setState completes.
 */
export default function(Component) {
  const NavigationContainer = createNavigationContainer(Component);
  return class extends NavigationContainer {
    _nav = null;

    // dispatch synchronously
    dispatch = (action) => {
      if (!this._isStateful()) {
        return false;
      }
      if (!this._nav) {
        this._nav = this.state.nav;
      }
      // Bug fix: the previous state must be captured before this._nav is
      // overwritten.  The setState callback runs asynchronously, after the
      // `this._nav = nav` assignment, so passing `this._nav` to
      // _onNavigationStateChange reported the NEW state as the old one.
      const prevNav = this._nav;
      const nav = Component.router.getStateForAction(action, prevNav);
      if (nav && nav !== prevNav) {
        this._nav = nav;
        this.setState({ nav }, () =>
          this._onNavigationStateChange(prevNav, nav, action)
        );
        return true;
      }
      return false;
    };
  };
}
|
# Run inference with the COCO Mask R-CNN R50-FPN 1x config on a single GPU.
config="configs/inference/coco_mask_rcnn_R_50_FPN_1x.yaml"
script="tools/inference.py"
gpus=1

python "$script" \
    --num-gpus "$gpus" \
    --config-file "$config"
|
import React from 'react';
import AllIn from './image_page_allin';
import SaveTheDate from './image_page_savethedate';
function displayView(totalprops) {
switch (totalprops.caption) {
case 'All in':
return (
<AllIn allin={totalprops.allin} />
);
case 'Save the Date':
return (
<SaveTheDate savethedate={totalprops} />
);
default:
return 'Coming Soon...';
}
}
const ImagePage = (props) => {
// for style, the first set of brackets means variable referenced, second means it is object
return (
<span>{displayView(props)}</span>
);
}
export default ImagePage;
|
#!/bin/sh

# This script was created to reduce the complexity of the RUN command
# that installs all combinations of PostgreSQL and TimescaleDB Toolkit
if [ -z "$2" ]; then
  echo "Usage: $0 PGVERSION [TOOLKIT_TAG..]"
  exit 1
fi

PGVERSION="$1"
shift

# Toolkit is only built for PostgreSQL 12 and up.
if [ "${PGVERSION}" -lt 12 ]; then
  exit 0
fi

set -e
export PATH="/usr/lib/postgresql/${PGVERSION}/bin:${PATH}"
mkdir -p /home/postgres/.pgx

for TOOLKIT_VERSION in "$@"; do
  # Start each tag from a pristine checkout (keep the cargo target cache).
  git clean -e target -f -x
  git reset HEAD --hard
  git checkout "${TOOLKIT_VERSION}"

  # Read the toolkit's own version from its control file to pick a pgx.
  MAJOR_MINOR="$(awk '/^default_version/ {print $3}' ../timescaledb-toolkit/extension/timescaledb_toolkit.control | tr -d "'" | cut -d. -f1,2)"
  MAJOR="$(echo "${MAJOR_MINOR}" | cut -d. -f1)"
  MINOR="$(echo "${MAJOR_MINOR}" | cut -d. -f2)"

  # Bug fix: the original test was 'MAJOR >= 1 && MINOR >= 4', which wrongly
  # sent versions like 2.0-2.3 down the legacy-pgx branch.  Compare major
  # first, then minor, so every version >= 1.4 uses the released cargo-pgx.
  if [ "${MAJOR}" -gt 1 ] || { [ "${MAJOR}" -eq 1 ] && [ "${MINOR}" -ge 4 ]; }; then
    cargo install cargo-pgx --version '^0.2'
  else
    # Pre-1.4 toolkits need a patched pgx that does not support PG >= 14.
    if [ "${PGVERSION}" -ge 14 ]; then
      echo "TimescaleDB Toolkit ${TOOLKIT_VERSION} is not supported on PostgreSQL ${PGVERSION}"
      continue;
    fi
    cargo install --git https://github.com/JLockerman/pgx.git --branch timescale cargo-pgx
  fi

  # Point pgx at the pg_config for the requested PostgreSQL version.
  cat > /home/postgres/.pgx/config.toml <<__EOT__
[configs]
pg${PGVERSION} = "/usr/lib/postgresql/${PGVERSION}/bin/pg_config"
__EOT__

  cd extension
  cargo pgx install --release
  cargo run --manifest-path ../tools/post-install/Cargo.toml -- "/usr/lib/postgresql/${PGVERSION}/bin/pg_config"
  cd ..
done

# We want to enforce users that install toolkit 1.5+ when upgrading or reinstalling.
# NOTE: This does not affect versions that have already been installed, it only blocks
# users from installing/upgrading to these versions
for file in "/usr/share/postgresql/${PGVERSION}/extension/timescaledb_toolkit--"*.sql; do
  base="${file%.sql}"
  target_version="${base##*--}"
  case "${target_version}" in
  "1.4"|"1.3"|"1.3.1")
    # Replace the install script with one that refuses to run.
    cat > "${file}" << __SQL__
DO LANGUAGE plpgsql
\$\$
BEGIN
    RAISE EXCEPTION 'TimescaleDB Toolkit version ${target_version} can not be installed. You can install TimescaleDB Toolkit 1.5.1 or higher';
END;
\$\$
__SQL__
    ;;
  *)
    ;;
  esac
done
|
package nl.knokko.util.blocks;

/**
 * Callback interface for placing a single block at integer coordinates.
 */
public interface BlockPlacer {

    /**
     * Places {@code block} at position ({@code x}, {@code y}, {@code z}).
     */
    void place(BlockType block, int x, int y, int z);
}
|
import 'dart:async';
/// Counts down from 60 seconds, reporting the remaining time every 10 s.
void main() {
  int duration = 60; // duration in seconds
  Timer.periodic(Duration(seconds: 10), (Timer t) {
    // Print the remaining time.  Note the first tick fires only after 10 s
    // and still reports the initial value.
    print('Remaining time: ${duration}');
    // Update the remaining time
    duration -= 10;
    // Robustness fix: use <= so the timer also stops when the starting
    // duration is not an exact multiple of 10 (== 0 would never match and
    // the timer would run forever).
    if (duration <= 0) {
      t.cancel();
    }
  });
}
|
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for the intermediate instruction representation.

The intermediate representation is basically a series of functions that produce
machine code, and their arguments, except for a few differences: Instead of
registers and addresses pointing to stack space, values are stored in an
unlimited number of variables. Instead of numeric offsets, jump targets are
represented as unique objects (instances of 'Target'), acting like labels in
assembly.

Control-flow is represented via instances of 'IRJump', which must precede
instructions that may not be executed, either because the program counter may
be affected by the previous instruction or because the instructions are
executed conditionally.
"""

# Explicit public API; the module also defines many internal helpers that are
# deliberately not exported.
__all__ = ['SIZE_B','SIZE_W','SIZE_D','SIZE_Q','StackSection','LocationType',
    'PtrBinomial','SIZE_PTR','Target','Value','MutableValue','Var','Block',
    'VarPart','IndirectVar','Immediate','FixedRegister','ArgStackItem',
    'Symbol','PyConst','ensure_same_size','Instr','AddressType','ParamDir',
    'Overload','RegAllocatorOverloads','OpDescription','CommentDesc',
    'comment_desc','inline_comment_desc','InvalidateRegs','CreateVar','IRJump',
    'IndirectMod','LockRegs','UnlockRegs','IRAnnotation','annotate',
    'IRSymbolLocDescr','Instr2','CmpType','Cmp','BinCmp','AndCmp','OrCmp',
    'OpType','commutative_ops','UnaryOpType','ShiftDir','IROp','IRCode',
    'IROp2','IRCode2','ExtraState','CallConvType','PyFuncInfo','FinallyTarget',
    'OpGen','IROpGen','JumpCondOpGen','IRCompiler','reg_allocate','Param',
    'Function','CompilationUnit','address_of','Tuning','CallingConvention',
    'AbiRegister','Abi','BinaryAbi']
import enum
import collections
import weakref
import binascii
import operator
from functools import reduce
from typing import (Any,Callable,cast,Container,DefaultDict,Dict,Generic,
Iterable,List,Optional,NamedTuple,NewType,Sequence,Sized,Set,Tuple,
TYPE_CHECKING,Type,TypeVar,Union)
if __debug__:
import sys
from . import debug
from . import c_types
from . import pyinternals
from .sorted_list import SortedList
from .dinterval import *
from .compilation_unit import *
# When true, extra self-checking code is emitted into the IR (debug aid).
EMIT_IR_TEST_CODE = False

# Operand sizes in bytes: byte, word, double-word, quad-word.
SIZE_B = 1
SIZE_W = 2
SIZE_D = 4
SIZE_Q = 8

T = TypeVar('T')
U = TypeVar('U')

# Mask for 16-byte alignment (presumably the ABI's call-site stack alignment).
CALL_ALIGN_MASK = 0xf

def aligned_for_call(x):
    """Round ``x`` up to the next multiple of 16."""
    return (x + CALL_ALIGN_MASK) & ~CALL_ALIGN_MASK

# Exported alias: in CPython, id() is the object's address.
address_of = id
class StackSection(enum.Enum):
    """Which region of the stack frame a stack slot belongs to."""
    local = 0 # local stack space
    args = 1 # space for arguments passed by stack, for next function
    previous = 2 # space for arguments passed by stack, for current function

# A (section, index) pair identifying one stack slot.
StackLocation = NamedTuple('StackLocation',[('sect',StackSection),('index',int)])

class LocationType(enum.Enum):
    """Where a value currently lives: a register or a stack slot."""
    register = 1
    stack = 2
class Lifetime:
    """A set of (integer position) intervals during which a value is live."""
    def __init__(self) -> None:
        self.intervals = DInterval() # type: DInterval[int]
        if __debug__:
            # Debug-only metadata recording what this lifetime belongs to and
            # where it was created.
            self.name = None
            self.origin = None
    @property
    def global_start(self):
        """Start of the earliest interval."""
        return self.intervals.global_start
    @property
    def global_end(self):
        """End of the latest interval."""
        return self.intervals.global_end
    def __contains__(self,x):
        return x in self.intervals
    def itv_at(self,x : int) -> Interval[int]:
        """Return the interval containing position ``x``."""
        return self.intervals.interval_at(x)
    @staticmethod
    def _itv_shorthand(itv):
        # Render a half-open interval as "start" or "start-end" (inclusive end).
        end = itv.end - 1
        assert end >= itv.start
        if itv.start == end: return str(itv.start)
        return '{}-{}'.format(itv.start,end)
    def __repr__(self):
        if __debug__ and self.name:
            return '<{} - {} : [{}]>'.format(
                self.__class__.__name__,
                self.name,
                ','.join(map(self._itv_shorthand,self.intervals)))
        return '<{} : [{}]>'.format(self.__class__.__name__,','.join(map(self._itv_shorthand,self.intervals)))
class VarLifetime(Lifetime):
    """Lifetime of a variable, tracking its stack slot and any aliases."""
    def __init__(self,*,dbg_symbol : Optional[str]=None) -> None:
        super().__init__()
        # Every variable will have, at most, one stack location. This is to
        # allow efficient merging of variable locations from converging
        # branches and to allow instructions like x86's LEA to work correctly
        # with variables that don't have a value.
        self.preferred_stack_i = None # type: Optional[int]
        # Weak so aliases do not keep this lifetime alive (and vice versa).
        self.aliases = weakref.WeakSet()
        # if not None and not empty and debug.GDB_JIT_SUPPORT is true,
        # instances of IRAnnotation will be added to the produced intermediate
        # representation, that specifies the location of the associated value
        self.dbg_symbol = dbg_symbol
class AliasLifetime(Lifetime):
    """Lifetime of a value that aliases (part of) another variable."""
    def __init__(self,itv : VarLifetime) -> None:
        super().__init__()
        self.itv = itv
        # Register with the parent lifetime so it can enumerate its aliases.
        itv.aliases.add(self)
class Filter(Container[T]):
    """Membership test combining an optional whitelist with a blacklist.

    An item is contained when it is in 'include' (or 'include' is None,
    meaning "everything") and not in 'exclude'.
    """
    def __init__(self,include : Optional[Container[T]]=None,exclude : Container[T]=()) -> None:
        self.include = include
        self.exclude = exclude
    def __contains__(self,item):
        if self.include is not None and item not in self.include:
            return False
        return item not in self.exclude
class _RegisterMetaType(type):
    """Metaclass giving FixedRegister 'dependent' subscript types.

    ``FixedRegister[allowed]`` creates a subclass whose isinstance() check
    additionally requires the register index to be in ``allowed``.
    """
    def __new__(mcs,name,bases,namespace,*,allowed=None):
        cls = super().__new__(mcs,name,bases,namespace)
        # Normalize 'allowed' to a Filter so membership tests always work
        # (Filter(None) accepts everything).
        cls.allowed = allowed if isinstance(allowed,Filter) else Filter(allowed)
        return cls
    # noinspection PyUnusedLocal
    def __init__(cls,name,bases,namespace,**kwds):
        super().__init__(name,bases,namespace)
    def __getitem__(cls,allowed):
        # FixedRegister[None] is just FixedRegister itself.
        if allowed is None: return FixedRegister
        return _RegisterMetaType('DependentFixedRegister',(FixedRegister,),{},allowed=allowed)
    def __instancecheck__(cls,inst):
        # Instance of any FixedRegister subclass AND index permitted here.
        return issubclass(inst.__class__,FixedRegister) and inst.reg_index in cls.allowed
    @staticmethod
    def generic_type():
        return FixedRegister
class AddressType:
    """Mixin marking value types that denote a memory address."""
    @staticmethod
    def generic_type():
        return AddressType
class _ImmediateMetaType(type):
    """Metaclass giving Immediate 'dependent' range-restricted types.

    ``Immediate[abi, lo, hi]`` creates a subclass whose isinstance() check
    additionally requires the realized value to be within [lo, hi].
    """
    def __new__(mcs,name,bases,namespace,*,abi=None,allowed_range=None):
        # Either both or neither of abi/allowed_range must be given.
        assert (abi is None) == (allowed_range is None)
        cls = super().__new__(mcs,name,bases,namespace)
        cls.abi = abi
        cls.allowed = allowed_range
        return cls
    # noinspection PyUnusedLocal
    def __init__(cls,name,bases,namespace,**kwds):
        super().__init__(name,bases,namespace)
    def __getitem__(cls,args):
        # args is (abi, lo, hi).
        return _ImmediateMetaType('DependentImmediate',(Immediate,),{},abi=args[0],allowed_range=args[1:])
    def __instancecheck__(cls,inst):
        # The value is realized against the ABI so PtrBinomial terms resolve.
        return issubclass(inst.__class__,Immediate) and (cls.abi is None
            or cls.allowed[0] <= inst.val.realize(cls.abi) <= cls.allowed[1])
    @staticmethod
    def generic_type():
        return Immediate
def generic_type(x : object) -> type:
    """Return the 'generic' type of ``x``.

    ``x`` may be an instance or a type.  If the (resolved) type provides a
    ``generic_type`` hook (the dependent FixedRegister/Immediate/AddressType
    types do), its result is returned; otherwise the type itself is.
    """
    cls = x if isinstance(x,type) else type(x)
    getter = getattr(cls,'generic_type',None)
    return cls if getter is None else getter()
class PtrBinomial:
    """A numeric value dependent on the size of a pointer.

    This is a binomial equal to "ptr_factor * ptr_size + val" where ptr_size is
    the size of a pointer on the host machine.  Instances compare equal by
    value; a plain int compares equal to a PtrBinomial with ptr_factor == 0.
    """
    __slots__ = 'val','ptr_factor'
    @staticmethod
    def __new__(cls,val: Union['PtrBinomial',int],ptr_factor: int = 0) -> 'PtrBinomial':
        # An existing instance is passed through unchanged, preserving
        # identity (PtrBinomial(x) is x).
        if isinstance(val,PtrBinomial):
            if ptr_factor != 0:
                raise TypeError(
                    'ptr_factor cannot be non-zero if val is already an instance of PtrBinomial')
            return val
        r = super().__new__(cls)
        r.val = val
        r.ptr_factor = ptr_factor
        return r
    if TYPE_CHECKING:
        # noinspection PyUnusedLocal
        def __init__(self,val: Union['PtrBinomial',int],ptr_factor: int = 0) -> None:
            self.val = 0
            self.ptr_factor = ptr_factor
    def realize(self,abi) -> int:
        """Evaluate the binomial using the given ABI's pointer size."""
        return self.ptr_factor * abi.ptr_size + self.val
    def __eq__(self,b):
        # Bug fix: no __eq__ was defined, so comparisons (e.g. from
        # Immediate.__eq__ and IndirectVar.__eq__) fell back to object
        # identity and equal-valued instances never compared equal; an int
        # comparison was always False.
        if isinstance(b,PtrBinomial):
            return self.val == b.val and self.ptr_factor == b.ptr_factor
        if isinstance(b,int):
            return self.ptr_factor == 0 and self.val == b
        return NotImplemented
    def __hash__(self):
        # Consistent with __eq__: a ptr_factor-free binomial hashes like its
        # plain int value.
        if self.ptr_factor == 0:
            return hash(self.val)
        return hash((self.val,self.ptr_factor))
    def __add__(self,b):
        if isinstance(b,PtrBinomial):
            return PtrBinomial(self.val + b.val,self.ptr_factor + b.ptr_factor)
        if isinstance(b,int):
            return PtrBinomial(self.val + b,self.ptr_factor)
        return NotImplemented
    __radd__ = __add__
    def __neg__(self):
        return PtrBinomial(-self.val,-self.ptr_factor)
    def __sub__(self,b):
        if isinstance(b,PtrBinomial):
            return PtrBinomial(self.val - b.val,self.ptr_factor - b.ptr_factor)
        if isinstance(b,int):
            return PtrBinomial(self.val - b,self.ptr_factor)
        return NotImplemented
    def __rsub__(self,b):
        if isinstance(b,int):
            return PtrBinomial(b - self.val,-self.ptr_factor)
        return NotImplemented
    def __mul__(self,b):
        if isinstance(b,int):
            return PtrBinomial(self.val * b,self.ptr_factor * b)
        return NotImplemented
    __rmul__ = __mul__
    def __floordiv__(self,b):
        if isinstance(b,int):
            return PtrBinomial(self.val // b,self.ptr_factor // b)
        # Bug fix: the method previously fell off the end here, silently
        # producing None for unsupported operands instead of a TypeError.
        return NotImplemented
    def __repr__(self):
        return 'PtrBinomial({},{})'.format(self.val,self.ptr_factor)
    def __str__(self):
        return str(self.val) if self.ptr_factor==0 else self.__repr__()

# The size of one pointer, expressed symbolically.
SIZE_PTR = PtrBinomial(0,1)
class Target:
    """A jump target, acting like a label in assembly.

    'displacement' is None until code generation assigns the concrete offset.
    Targets compare by identity.
    """
    if not __debug__:
        __slots__ = 'displacement',
    def __init__(self):
        self.displacement = None # type: Optional[int]
        if __debug__:
            # Record where the target was created, for compiler debugging.
            self.origin = _get_origin()
    def __repr__(self):
        return '<Target {:#x}>'.format(id(self))
class Value:
    """Base class of all IR operand values; carries a C data type."""
    __slots__ = 'data_type',
    def __init__(self,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        self.data_type = data_type
    def size(self,abi):
        """Size of this value in bytes under the given ABI."""
        return self.data_type.size(abi)

class MutableValue(Value):
    """A value that can be assigned to (a writable operand)."""
    __slots__ = ()
if __debug__:
    def _get_origin():
        # Walk up the call stack and return the (filename, lineno) of the
        # first frame outside this module; used to record where IR objects
        # were created, which helps when debugging the compiler itself.
        f = sys._getframe(2)
        r = None
        while f:
            r = f.f_code.co_filename,f.f_lineno
            if f.f_code.co_filename != __file__:
                break
            f = f.f_back
        return r
# Var must compare by identity
# The "name" and "origin" attributes only act as helpers for debugging our
# python code
class Var(MutableValue):
    """A virtual variable; register allocation assigns its real location."""
    if not __debug__:
        __slots__ = 'lifetime','dbg_symbol'
    def __init__(self,name : Optional[str]=None,data_type : c_types.CType=c_types.t_void_ptr,lifetime : Optional[Lifetime]=None,dbg_symbol : Optional[str]=None) -> None:
        super().__init__(data_type)
        if __debug__:
            self.name = name
            self.origin = _get_origin()
        self.lifetime = lifetime
        # Debug-symbol name (see VarLifetime.dbg_symbol for how it is used).
        self.dbg_symbol = dbg_symbol
    def __repr__(self):
        if __debug__ and self.name is not None:
            return '<{} "{}">'.format(self.__class__.__name__,self.name)
        return '<{} {:#x}>'.format(self.__class__.__name__,id(self))
class Block(MutableValue):
    """A fixed-size group of 'parts' variables, typed as an array."""
    __slots__ = 'parts','lifetime','__weakref__'
    # Blocks themselves never carry a debug symbol.
    dbg_symbol = None
    def __init__(self,parts : int,base_type : c_types.CType=c_types.t_void_ptr,*,lifetime : Optional[VarLifetime]=None) -> None:
        assert parts > 0
        super().__init__(c_types.TArray(base_type,parts))
        # Each element is a VarPart that knows its index within this block.
        self.parts = [VarPart(self,i,base_type) for i in range(parts)]
        self.lifetime = lifetime
    def __getitem__(self,i):
        return self.parts[i]
    def __len__(self):
        return len(self.parts)
class VarPart(Var):
    """One element of a Block; 'offset' is its index within the block."""
    __slots__ = 'block','offset'
    def __init__(self,block : Block,offset : int,data_type : c_types.CType=c_types.t_void_ptr,*,lifetime : Optional[AliasLifetime]=None) -> None:
        super().__init__(None,data_type,lifetime)
        # NOTE(review): a weak back-reference was tried and left commented
        # out; 'block' currently keeps the parent Block alive.
        #self._block = weakref.ref(block)
        self.block = block
        self.offset = offset
    #@property
    #def block(self) -> Block:
    #    r = self._block()
    #    assert isinstance(r,Block)
    #    return r
class Immediate(Value,metaclass=_ImmediateMetaType):
    """A constant operand; 'val' is normalized to a PtrBinomial."""
    __slots__ = 'val',
    def __init__(self,val : Union[int,PtrBinomial],data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.val = PtrBinomial(val)
    def __eq__(self,b):
        # NOTE(review): this relies on PtrBinomial comparing by value (both
        # against another PtrBinomial and against a plain int); verify
        # PtrBinomial defines __eq__, otherwise these tests degrade to
        # identity/False.  Defining __eq__ also makes Immediate unhashable
        # (no __hash__) -- confirm that is intended.
        if isinstance(b,Immediate):
            return self.val == b.val
        if isinstance(b,(int,PtrBinomial)):
            return self.val == b
        return False
    def __ne__(self,b):
        return not self.__eq__(b)
    def __repr__(self):
        # Omit the data type when it is the default, keeping reprs short.
        type_str = ''
        if self.data_type != c_types.t_void_ptr:
            type_str = ',' + repr(self.data_type)
        return 'Immediate({}{})'.format(self.val,type_str)
class IndirectVar(MutableValue,AddressType):
    """A memory operand of the form offset + base + index * scale.

    'offset' and 'scale' may be pointer-size dependent (PtrBinomial); 'base'
    and 'index' are optional variables and compare by identity (Vars compare
    by identity by design).
    """
    __slots__ = 'offset','base','index','scale'
    def __init__(self,offset : Union[int,PtrBinomial]=0,base : Optional[Var]=None,index : Optional[Var]=None,scale : Union[int,PtrBinomial]=1,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.offset = PtrBinomial(offset)
        self.base = base
        self.index = index
        self.scale = PtrBinomial(scale)
    def __eq__(self,b):
        # NOTE(review): offset/scale equality relies on PtrBinomial comparing
        # by value -- verify PtrBinomial defines __eq__.
        if isinstance(b,IndirectVar):
            return (self.offset == b.offset
                and self.base == b.base
                and self.index == b.index
                and self.scale == b.scale
                and self.data_type == b.data_type)
        return False
    def __ne__(self,b):
        return not self.__eq__(b)
    def __repr__(self):
        # Omit the data type when it is the default, keeping reprs short.
        type_str = ''
        if self.data_type != c_types.t_void_ptr:
            type_str = ',' + repr(self.data_type)
        return 'IndirectVar({},{!r},{!r},{}{})'.format(self.offset,self.base,self.index,self.scale,type_str)
class FixedRegister(MutableValue,metaclass=_RegisterMetaType):
    """A specific machine register, identified by its index."""
    __slots__ = 'reg_index',
    def __init__(self,reg_index : int,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.reg_index = reg_index
    def __repr__(self):
        suffix = '' if self.data_type == c_types.t_void_ptr else ',' + repr(self.data_type)
        return 'FixedRegister({}{})'.format(self.reg_index,suffix)
class StackItem(MutableValue,AddressType):
    """A value stored in the current function's local stack area."""
    __slots__ = 'index',
    def __init__(self,index : int,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.index = index
    def __repr__(self):
        suffix = '' if self.data_type == c_types.t_void_ptr else ',' + repr(self.data_type)
        return 'StackItem({}{})'.format(self.index,suffix)
class StackItemPart(MutableValue,AddressType):
    """Part of a StackItem block, at byte offset 'offset' within it."""
    __slots__ = 'block','offset'
    def __init__(self,block : StackItem,offset : int,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.block = block
        self.offset = offset
    def __repr__(self):
        suffix = '' if self.data_type == c_types.t_void_ptr else ',' + repr(self.data_type)
        return 'StackItemPart({!r},{}{})'.format(self.block,self.offset,suffix)
class ArgStackItem(MutableValue,AddressType):
    """A function argument passed via the stack.

    'index' is the number of pointer-size increments between the argument
    and the top of the stack.

    When 'prev_frame' is true, the argument lives in the previous stack
    frame: it was passed to the current function rather than to a function
    about to be called.
    """
    __slots__ = 'index','prev_frame'
    def __init__(self,index : int,prev_frame : bool=False,data_type : c_types.CType=c_types.t_void_ptr) -> None:
        super().__init__(data_type)
        self.index = index
        self.prev_frame = prev_frame
    def __eq__(self,b):
        if not isinstance(b,ArgStackItem):
            return False
        return self.index == b.index and self.prev_frame == b.prev_frame and self.data_type == b.data_type
    def __ne__(self,b):
        return not self.__eq__(b)
    def __repr__(self):
        suffix = '' if self.data_type == c_types.t_void_ptr else ',' + repr(self.data_type)
        return 'ArgStackItem({},{!r}{})'.format(self.index,self.prev_frame,suffix)
class Symbol(Value):
    """A named entity to be linked.

    Equality is based on the name alone; 'address' and 'data_type' are not
    considered when comparing.
    """
    __slots__ = 'name','address'
    def __init__(self,name : str,data_type : c_types.CType=c_types.t_void_ptr,address : Optional[int]=None) -> None:
        super().__init__(data_type)
        self.name = name
        self.address = address
    def __eq__(self,b):
        return isinstance(b,Symbol) and b.name == self.name
    def __ne__(self,b):
        return not self.__eq__(b)
    def __repr__(self):
        suffix = '' if self.data_type == c_types.t_void_ptr else ',' + repr(self.data_type)
        return 'Symbol({!r}{})'.format(self.name,suffix)
class PyConst(Value):
    """A name or constant stored in one of the tuples in the function body
    object.

    Equality is based on 'address' alone.
    """
    __slots__ = 'tuple_name','index','address'
    def __init__(self,tuple_name : str,index : int,address : int) -> None:
        super().__init__(c_types.PyObject_ptr)
        self.tuple_name = tuple_name
        self.index = index
        self.address = address
    def __eq__(self,b):
        return isinstance(b,PyConst) and b.address == self.address
    def __ne__(self,b):
        return not self.__eq__(b)
    def __repr__(self):
        return 'PyConst({!r},{},{})'.format(self.tuple_name,self.index,self.address)
def to_stack_item(location : StackLocation,data_type : c_types.CType=c_types.t_void_ptr) -> Union[StackItem,ArgStackItem]:
    """Convert a StackLocation into the equivalent StackItem/ArgStackItem."""
    sect = location.sect
    if sect == StackSection.local:
        return StackItem(location.index,data_type=data_type)
    assert sect in (StackSection.args,StackSection.previous)
    # arguments in the previous frame get prev_frame=True
    return ArgStackItem(location.index,sect == StackSection.previous,data_type=data_type)
def ensure_same_size(abi,a,*args):
    """Raise ValueError unless every argument has the same data size under
    'abi' as the first argument 'a'."""
    expected = a.size(abi)
    if any(other.size(abi) != expected for other in args):
        raise ValueError('arguments must have the same data size')
class Instr:
    """First-level IR op-code: an operation plus its (unallocated) arguments."""
    __slots__ = 'op','args'
    def __init__(self,op : 'RegAllocatorOverloads',*args) -> None:
        self.op = op
        self.args = args
    def __repr__(self):
        arg_str = ','.join(map(repr,self.args))
        return 'Instr({!r},{})'.format(self.op,arg_str)
class ParamDir:
    """Specifies whether a parameter is read-from, written-to, both or neither.

    An example of an instruction that has a parameter that is neither read-from
    nor written-to is x86's 'lea'.
    """
    def __init__(self,reads : bool=False,writes : bool=False) -> None:
        self.reads = reads
        self.writes = writes
    def __or__(self,b):
        if not isinstance(b,ParamDir):
            return NotImplemented
        return ParamDir(self.reads or b.reads,self.writes or b.writes)
    def __ior__(self,b):
        if not isinstance(b,ParamDir):
            return NotImplemented
        self.reads = self.reads or b.reads
        self.writes = self.writes or b.writes
        return self
    def __repr__(self):
        return 'ParamDir({0.reads!r},{0.writes!r})'.format(self)
class Overload:
    """One concrete parameter-type signature of an op, plus the function that
    emits its machine code.

    'min_len'/'max_len' bound the encoded size in bytes, when known.
    """
    def __init__(self,
            params : Sequence[Union[type,Tuple[type,...]]],
            func : Callable,
            *,
            min_len : Optional[int]=None,
            max_len : Optional[int]=None) -> None:
        self.params = params
        self.func = func
        self.min_len = min_len
        self.max_len = max_len
    def variant(self,params,func):
        """Return a copy with different params/func but the same size limits."""
        return Overload(params,func,min_len=self.min_len,max_len=self.max_len)
    def matches_args(self,args : Sequence) -> bool:
        """Test whether every argument is an instance of its parameter type."""
        if len(args) != len(self.params):
            return False
        return all(isinstance(a,p) for a,p in zip(args,self.params))
    def __repr__(self):
        def type_print(x):
            if isinstance(x,tuple):
                return '({})'.format(','.join(map(type_print,x)))
            return x.__name__
        return '<Overload - {}>'.format(','.join(map(type_print,self.params)))
def type_match_score(params,args):
    """Score how well 'args' fits the parameter types 'params'.

    Returns 0 when a match is impossible, otherwise 1 plus the number of
    arguments that already have the exact required type (a higher score
    means fewer moves would be needed to use this overload).
    """
    if len(params) != len(args): return 0
    score = 1
    for p,a in zip(params,args):
        if isinstance(a,(VarPart,Block)):
            # currently, VarPart and Block instances cannot be moved to
            # registers
            if not hassubclass(p,AddressType):
                return 0
        elif hassubclass(p,(FixedRegister,AddressType)):
            # register/address parameters can always be satisfied via a
            # move; add a point only if no move is needed
            score += isinstance(a,p)
        elif isinstance(a,p):
            score += 1
        else:
            # only registers and addresses can be moved between each other
            return 0
    return score
def split_tuples(r,tail,head=()):
    """Append to 'r' every tuple obtained by expanding each tuple element of
    'tail' into one of its members (a cartesian expansion), prefixed by
    'head'."""
    if not tail:
        r.append(head)
        return
    first,rest = tail[0],tail[1:]
    # a non-tuple element offers exactly one choice
    choices = first if isinstance(first,tuple) else (first,)
    for val in choices:
        split_tuples(r,rest,head + (val,))
class RegAllocatorOverloads:
    """Base class for op descriptions used by the register allocator.

    Stores the read/write direction of each parameter and maps concrete
    argument lists to a specific Overload.
    """
    def __init__(self,param_dirs : Sequence[ParamDir]) -> None:
        self.param_dirs = param_dirs
    def best_match(self,args : Sequence) -> Overload:
        """Return the overload that fits 'args' best (by default, exactly)."""
        return self.exact_match(args)
    def exact_match(self,args : Sequence) -> Overload:
        """Return the overload whose parameters accept 'args' exactly."""
        raise NotImplementedError()
    def to_ir2(self,args : Sequence) -> 'Instr2':
        """Wrap this op and 'args' in a second-level IR instruction."""
        return Instr2(self,self.exact_match(args),args)
    def __call__(self,*args):
        return self.to_ir2(args)
    def assembly(self,args : Sequence,addr : int,binary : bytes,annot : Optional[str]=None) -> str:
        """Return an assembly-listing line for this op."""
        raise NotImplementedError()
class OpDescription(RegAllocatorOverloads):
    """Describes a machine code instruction for a given set of parameters.

    The parameter types may vary, but the number of parameters and whether a
    parameter is read, write or read-write, must be the same. If an instruction
    can, for example, take 2 and 3 parameters, it must be represented with two
    instances of OpDescription.

    For performance reasons, don't create overloads that differ only by which
    registers or which immediate values are accepted; instead, create one
    function that makes the check manually (otherwise, 'exact_match' and
    consequently 'to_ir2' wont work).
    """
    def __init__(self,name : str,overloads : Sequence[Overload],param_dirs : Sequence[ParamDir]) -> None:
        assert all(len(param_dirs) == len(o.params) for o in overloads)
        super().__init__(param_dirs)
        self.name = name
        self.overloads = overloads
        # cache mapping argument-type tuples to the overload that matched
        self._type_lookup = {} # type: Dict[Tuple[type,...],Overload]
    def best_match(self,args : Sequence) -> Overload:
        """Return the overload with the highest type_match_score for 'args'.

        Raises TypeError when no overload can accept the arguments at all.
        """
        best_o = self.overloads[0]
        best_score = type_match_score(best_o.params,args)
        for o in self.overloads[1:]:
            s = type_match_score(o.params,args)
            if s > best_score:
                best_score = s
                best_o = o
        if best_score == 0:
            raise TypeError('One or more arguments has an incorrect type ' +
                'and the parameter is not a register or address, for "{}"'
                .format(self.name))
        return best_o
    def exact_match(self,args : Sequence) -> Overload:
        """Return the overload whose parameter types match 'args' exactly,
        consulting (and filling) the type-tuple cache."""
        types = tuple(a.__class__ for a in args)
        try:
            r = self._type_lookup[types]
        except KeyError as exc:
            for o in self.overloads:
                if o.matches_args(args):
                    self._type_lookup[types] = o
                    return o
            raise TypeError('no overload takes ({})'.format(','.join(t.__name__ for t in types))) from exc
        # this can still fail because r.params may contain dependent types
        if r.matches_args(args): return r
        raise ValueError('one or more arguments contains a disallowed value')
    def __repr__(self):
        return '<{} "{}">'.format(self.__class__.__name__,self.name)
    def assembly_name(self,args):
        """Get the name of the op-code based on the arguments.

        'args' may be modified, to change the arguments printed."""
        return self.name
    def assembly_arg(self,nextaddr,x):
        """Format a single argument for the assembly listing."""
        return str(x)
    def assembly(self,args : Sequence,addr : int,binary : bytes,annot : Optional[str]=None):
        """Format an assembly-listing line: address, hex encoding, mnemonic,
        arguments, and an optional trailing comment."""
        nextaddr = addr + len(binary)
        args = list(args)
        name = self.assembly_name(args)
        return '{:8x}: {:22}{:8} {}{}'.format(
            addr,
            binascii.hexlify(binary).decode(),
            name,
            ', '.join(self.assembly_arg(nextaddr,arg) for arg in args),
            (' ; '+annot) if annot else '')
class CommentDesc(RegAllocatorOverloads):
    """Pseudo-op that produces no machine code, only an assembly comment."""
    # the single overload takes one string and emits zero bytes
    _sole_overload = Overload([str],lambda x: b'')
    def __init__(self,inline):
        super().__init__([ParamDir(True,False)])
        # if true, the comment is meant to share a line with an instruction
        self.inline = inline
    def exact_match(self,args : Sequence) -> Overload:
        if len(args) != 1:
            raise TypeError('there must be exactly one argument')
        return self._sole_overload
    def assembly(self,args : Sequence,addr : int,binary: bytes,annot : Optional[str] = None):
        return '; {} {}'.format(args[0],annot or '')
# shared instances: a stand-alone comment line and an inline (same-line) one
comment_desc = CommentDesc(False)
inline_comment_desc = CommentDesc(True)
class InvalidateRegs:
    """Force variables stored in scratch registers to be moved to the stack or
    preserved registers"""
    __slots__ = 'to_free','pres'
    def __init__(self,to_free,pres):
        self.to_free = to_free
        self.pres = pres
    def __repr__(self):
        return 'InvalidateRegs({0.to_free!r},{0.pres!r})'.format(self)
class CreateVar:
    """Indicates that 'var' has obtained a value from 'val'.

    'val' must be an instance of FixedRegister or ArgStackItem. If 'val' is a
    register, it must not be used by any variable, by this point in the
    instruction sequence.
    """
    __slots__ = 'var','val'
    def __init__(self,var : Var,val : Union[FixedRegister,ArgStackItem]) -> None:
        self.var = var
        self.val = val
    def __repr__(self):
        return 'CreateVar({0.var!r},{0.val!r})'.format(self)
class IRJump:
    """Indicates that the next instruction executed may be the one after
    'dest'.

    If 'conditional' is false, the next instruction executed will always be the
    one after 'dest'.

    Sometimes instructions need to be inserted before an instance of IRJump to
    move variables, 'jump_ops' specifies how many elements back, those
    instructions should be inserted (a value of 0 would mean the instructions
    should be inserted directly behind).

    This is used by the register allocator to determine value life-times.
    """
    __slots__ = 'dests','conditional','jump_ops'
    def __init__(self,dests : Union[Target,Iterable[Target]],conditional : bool,jump_ops : int) -> None:
        # a single Target is normalized to a one-element tuple
        self.dests = (dests,) if isinstance(dests,Target) else tuple(dests) # type: Tuple[Target,...]
        self.conditional = conditional
        self.jump_ops = jump_ops
    def __repr__(self):
        return 'IRJump({!r},{!r},{})'.format(self.dests,self.conditional,self.jump_ops)
class IndirectMod:
    """Indicates that a variable was read or updated externally, somehow.

    When written, if the value exists in both a register and the stack, one is
    made invalid depending on which was updated.
    """
    __slots__ = 'var','read','write','loc_type'
    def __init__(self,var: Var,read: object,write: object,loc_type: LocationType) -> None:
        self.var = var
        # coerce truthy values to real booleans
        self.read = bool(read)
        self.write = bool(write)
        self.loc_type = loc_type
    def __repr__(self):
        return 'IndirectMod({0.var!r},{0.read!r},{0.write!r},{0.loc_type!r})'.format(self)
class LockRegs:
    """"Lock" one or more registers.

    When a register is "locked" it will not be available for use by variables.
    However, locking a register will cause an existing variable to be unloaded
    from that register.
    """
    __slots__ = 'regs',
    def __init__(self,regs : Iterable[int]) -> None:
        # snapshot the iterable into an immutable tuple
        self.regs = tuple(regs)
    def __repr__(self):
        return 'LockRegs({0.regs!r})'.format(self)
class UnlockRegs:
    """Undo a LockRegs, making the registers available to variables again."""
    __slots__ = 'regs',
    def __init__(self,regs : Iterable[int]) -> None:
        self.regs = tuple(regs)
    def __repr__(self):
        # use {!r} for consistency with LockRegs.__repr__
        return 'UnlockRegs({!r})'.format(self.regs)
# wraps a debug-annotation payload so it can be interleaved with IR ops
IRAnnotation = collections.namedtuple('IRAnnotation','descr')
def annotate(descr=None):
    """Return a one-element annotation list, or an empty list when GDB JIT
    support is disabled."""
    return [IRAnnotation(descr)] if debug.GDB_JIT_SUPPORT else []
class IRSymbolLocDescr:
    """Debug descriptor recording where the value of 'symbol' lives."""
    def __init__(self,symbol : str,loc : 'ItvLocation') -> None:
        self.symbol = symbol
        # copy, since the location object may be mutated later
        self.loc = loc.copy()
    def __repr__(self):
        # fixed: this previously printed the wrong class name ("IRSymbolDescr")
        return 'IRSymbolLocDescr({!r},{!r})'.format(self.symbol,self.loc)
def annotate_symbol_loc(itv : VarLifetime,loc : 'ItvLocation'):
    """Return an annotation recording where 'itv' currently lives, but only
    when the lifetime has a debug symbol and GDB JIT support is enabled."""
    if itv.dbg_symbol and debug.GDB_JIT_SUPPORT:
        return [IRAnnotation(IRSymbolLocDescr(itv.dbg_symbol,loc))]
    return []
class Instr2:
    """Second level intermediate representation op-code.

    This is like Instr, but 'variables' have been replaced with registers
    and stack positions.
    """
    __slots__ = 'op','overload','args'
    def __init__(self,op : RegAllocatorOverloads,overload : Overload,args : Sequence) -> None:
        # by this stage no argument may still be an unallocated Var/Block
        assert not any(isinstance(a,(Var,Block)) for a in args)
        self.op = op
        self.overload = overload
        self.args = args
class CmpType(enum.Enum):
    """Comparison operators.

    The numeric values are significant: cmp_complement indexes its table by
    value - 1, so do not renumber.
    """
    eq = 1
    ne = 2
    lt = 3
    le = 4
    gt = 5
    ge = 6
def cmp_complement(t : CmpType):
    """Return the logical negation of a comparison type (eq<->ne, lt<->ge,
    le<->gt)."""
    complements = (
        CmpType.ne, # eq
        CmpType.eq, # ne
        CmpType.ge, # lt
        CmpType.gt, # le
        CmpType.le, # gt
        CmpType.lt) # ge
    return complements[t.value-1]
class Cmp:
    """Abstract base for comparison expressions used by jump/if constructs."""
    def complement(self) -> 'Cmp':
        """Return the logical negation of this comparison."""
        raise NotImplementedError()
class BinCmp(Cmp):
    """Comparison of two values using one of the CmpType operators."""
    def __init__(self,a : Value,b : Value,op : CmpType,signed : bool=True) -> None:
        self.a,self.b = a,b
        self.op = op
        self.signed = signed
    def complement(self) -> 'BinCmp':
        # negate by complementing the operator; operands keep their order
        return BinCmp(self.a,self.b,cmp_complement(self.op),self.signed)
class AndCmp(Cmp):
    """Logical conjunction of two comparisons."""
    def __init__(self,a : Cmp,b : Cmp) -> None:
        self.a,self.b = a,b
    def complement(self) -> 'OrCmp':
        # De Morgan: not (a and b) == (not a) or (not b)
        return OrCmp(self.a.complement(),self.b.complement())
class OrCmp(Cmp):
    """Logical disjunction of two comparisons."""
    def __init__(self,a: Cmp,b: Cmp) -> None:
        self.a,self.b = a,b
    def complement(self) -> AndCmp:
        # De Morgan: not (a or b) == (not a) and (not b)
        return AndCmp(self.a.complement(),self.b.complement())
class OpType(enum.Enum):
    """Binary arithmetic/bitwise operations."""
    add = 1
    sub = 2
    mul = 3
    div = 4
    and_ = 5
    or_ = 6
    xor = 7
# operations whose operands may be swapped without changing the result
commutative_ops = {OpType.add,OpType.mul,OpType.and_,OpType.or_,OpType.xor}
class UnaryOpType(enum.Enum):
    """Unary operations."""
    neg = 1
class ShiftDir(enum.Enum):
    """Bit-shift direction."""
    left = -1
    right = 1
# first-level IR: instructions may still reference Var/Block instances
IROp = Union[Instr,InvalidateRegs,CreateVar,Target,IRJump,IndirectMod,LockRegs,UnlockRegs,IRAnnotation]
IRCode = List[IROp]
# second-level IR: variables have been replaced with registers/stack slots
IROp2 = Union[Instr2,Target,IRAnnotation]
IRCode2 = List[IROp2]
# machine code: any sized chunk of encoded instructions
MCode = NewType('MCode',Sized)
AnnotatedOp = Union[MCode,IRAnnotation]
AnnotatedCode = List[AnnotatedOp]
class DelayedCompileLate:
    """Placeholder for an instruction whose encoding must wait until all jump
    targets have known displacements; it reserves 'overload.max_len' bytes in
    the meantime."""
    def __init__(self,op,overload,args,displacement):
        self.op = op
        self.overload = overload
        self.args = args
        self.displacement = displacement
    def __len__(self):
        # delayed instructions must have a known maximum encoded size
        assert self.overload.max_len is not None
        return self.overload.max_len
class IRCompiler:
    """Translates second-level IR (Instr2) into machine code.

    Subclasses supply the target-specific encodings; this class handles
    argument conversion and the two-phase (early/late) compilation needed to
    resolve jump displacements.
    """
    def __init__(self,abi):
        self.abi = abi
    def prolog(self) -> IRCode2:
        """Return the function-entry instruction sequence."""
        raise NotImplementedError()
    def epilog(self) -> IRCode2:
        """Return the function-exit instruction sequence."""
        raise NotImplementedError()
    def get_reg(self,index: int,size : int) -> Any:
        """Return the machine representation of register 'index'."""
        raise NotImplementedError()
    def get_stack_addr(self,index : int,offset : int,size : int,block_size : int,sect : StackSection) -> Any:
        """Return the machine representation of a stack slot."""
        raise NotImplementedError()
    def get_displacement(self,amount : int,force_wide : bool) -> Any:
        """Return the machine representation of a jump displacement."""
        raise NotImplementedError()
    def get_immediate(self,val : int,size : int) -> Any:
        """Return the machine representation of an immediate value."""
        raise NotImplementedError()
    def get_machine_arg(self,arg,displacement):
        """Convert one IR argument into its machine equivalent.

        Arguments that need no conversion are returned unchanged.
        """
        if isinstance(arg,FixedRegister):
            return self.get_reg(arg.reg_index,arg.size(self.abi))
        if isinstance(arg,StackItem):
            size = arg.size(self.abi)
            return self.get_stack_addr(arg.index,0,size,size,StackSection.local)
        if isinstance(arg,StackItemPart):
            return self.get_stack_addr(arg.block.index,arg.offset,arg.size(self.abi),arg.block.size(self.abi),StackSection.local)
        if isinstance(arg,ArgStackItem):
            size = arg.size(self.abi)
            return self.get_stack_addr(
                arg.index,
                0,
                size,
                size,
                StackSection.previous if arg.prev_frame else StackSection.args)
        if isinstance(arg,Target):
            assert arg.displacement is not None
            return self.get_displacement(arg.displacement - displacement,False)
        if isinstance(arg,Immediate):
            return self.get_immediate(arg.val.realize(self.abi),arg.size(self.abi))
        return arg
    def compile_early(self,item : Instr2,displacement : int) -> Sized:
        """Encode 'item' now if every jump target is already resolved,
        otherwise return a DelayedCompileLate placeholder for compile_late."""
        ready = True
        new_args = [None] * len(item.args) # type: List[Any]
        for i,arg in enumerate(item.args):
            if isinstance(arg,Target) and arg.displacement is None:
                ready = False
                new_args[i] = arg
            else:
                new_args[i] = self.get_machine_arg(arg,displacement)
        if ready:
            # noinspection PyCallingNonCallable
            return item.overload.func(*new_args)
        return DelayedCompileLate(item.op,item.overload,new_args,displacement)
    def compile_late(self,item : Sized) -> MCode:
        """Finish encoding a DelayedCompileLate now that every target is
        resolved; anything else is returned as already-compiled code."""
        if isinstance(item,DelayedCompileLate):
            new_args = [None] * len(item.args)
            for i,arg in enumerate(item.args):
                new_args[i] = arg
                if isinstance(arg,Target):
                    assert arg.displacement is not None
                    new_args[i] = self.get_displacement(arg.displacement-item.displacement,True)
            return item.overload.func(*new_args)
        else:
            assert hasattr(item,'__len__')
            return cast(MCode,item)
    def nops(self,size : int) -> List[Instr2]:
        """Return no-op instructions filling exactly 'size' bytes."""
        raise NotImplementedError()
# type variable bound to ExtraState, for methods returning the same subtype
EST = TypeVar('EST',bound='ExtraState')
class ExtraState:
    """An abstract class for modifying instructions in the register allocation
    pass."""
    def copy(self : EST) -> EST:
        """Return an independent copy of this state, for a new branch."""
        raise NotImplementedError()
    def process_instr(self,i : int,instr : Instr) -> Optional[Instr]:
        """Return a (possibly substituted) instruction, or None to omit it."""
        raise NotImplementedError()
    def conform_to(self : EST,other : EST) -> None:
        """Merge this state with 'other' where two branches meet."""
        raise NotImplementedError()
class NullState(ExtraState):
    """A do-nothing implementation of ExtraState"""
    def copy(self):
        # stateless, so sharing one instance across branches is safe
        return self
    def process_instr(self,i,instr):
        return instr
    def conform_to(self,other):
        pass
class PyFuncInfo:
    """Bundle of the variables holding a Python call's frame, function object,
    positional arguments and keyword arguments."""
    def __init__(self,frame_var,func_var,args_var,kwds_var):
        self.frame_var,self.func_var = frame_var,func_var
        self.args_var,self.kwds_var = args_var,kwds_var
class FinallyTarget:
    """Entry point and continuation bookkeeping for a "finally" body."""
    def __init__(self) -> None:
        self.start = Target()
        # variable holding where to resume after the finally body runs
        self.next_var = Var()
        # A list of targets that the code jumps to, after the end of the
        # finally block. This is needed for variable lifetime determination
        self.next_targets = [] # type: List[Target]
        if __debug__:
            self._used = False
    def add_next_target(self,t: Target):
        """Register a continuation target (only valid before the body is
        emitted; checked in debug builds only)."""
        assert not self._used
        self.next_targets.append(t)
class OpGen(Generic[T]):
    """Abstract code-generator interface.

    Methods build fragments of type T that the caller concatenates; 'compile'
    turns a complete fragment into a Function.
    """
    def __init__(self,abi: 'Abi',func_args: Iterable[Var]=(),callconv: CallConvType=CallConvType.default,pyinfo: Optional[PyFuncInfo]=None) -> None:
        self.abi = abi
        self.func_arg_vars = list(func_args)
        self.callconv = callconv
        self.pyinfo = pyinfo
    def callconv_for(self,callconv: Optional[CallConvType],prev_frame: bool) -> CallConvType:
        """Resolve an optional calling-convention override.

        An explicit 'callconv' wins; otherwise the current function's own
        convention applies to the previous frame, and the default to calls
        about to be made.
        """
        if callconv is not None: return callconv
        if prev_frame: return self.callconv
        return CallConvType.default
    def bin_op(self,a : Value,b : Value,dest : MutableValue,op_type : OpType) -> T:
        """Emit dest = a <op_type> b."""
        raise NotImplementedError()
    def unary_op(self,a : Value,dest : MutableValue,op_type : UnaryOpType) -> T:
        """Emit dest = <op_type> a."""
        raise NotImplementedError()
    def load_addr(self,addr : MutableValue,dest : MutableValue) -> T:
        """Emit dest = address-of(addr)."""
        raise NotImplementedError()
    def call(self,func,args : Sequence[Value]=(),store_ret : Optional[Var]=None,callconv: CallConvType=CallConvType.default) -> T:
        """Emit a call to 'func', loading 'args' into place first."""
        raise NotImplementedError()
    def call_preloaded(self,func,args : int,store_ret : Optional[Var]=None,callconv: CallConvType=CallConvType.default) -> T:
        """Emit a call whose 'args' arguments are already in place."""
        raise NotImplementedError()
    def get_func_arg(self,i : int,prev_frame : bool=False,callconv : Optional[CallConvType]=None) -> MutableValue:
        """Return the location of function argument 'i'."""
        raise NotImplementedError()
    def jump(self,dest : Union[Target,Value],targets : Union[Iterable[Target],Target,None]=None) -> T:
        """Emit an unconditional jump."""
        raise NotImplementedError()
    def jump_if(self,dest : Target,cond : Cmp) -> T:
        """Emit a jump to 'dest', taken when 'cond' holds."""
        raise NotImplementedError()
    def if_(self,cond : Cmp,on_true : T,on_false : Optional[T]) -> T:
        """Emit an if/else construct."""
        raise NotImplementedError()
    def do_while(self,action : T,cond : Cmp) -> T:
        """Emit a post-tested loop."""
        raise NotImplementedError()
    def jump_table(self,val : Value,targets : Sequence[Target]) -> T:
        """Emit an indexed jump through a table of targets."""
        raise NotImplementedError()
    def move(self,src : Value,dest : MutableValue) -> T:
        """Emit dest = src."""
        raise NotImplementedError()
    def shift(self,src : Value,shift_dir : ShiftDir,amount : Value,dest : MutableValue) -> T:
        """Emit dest = src shifted by 'amount' in direction 'shift_dir'."""
        raise NotImplementedError()
    def enter_finally(self,f: FinallyTarget,next_t: Optional[Target]=None) -> T:
        """Emit an entry into a finally body, resuming at 'next_t' after."""
        raise NotImplementedError()
    def finally_body(self,f: FinallyTarget,body: T) -> T:
        """Create code for a "finally" body.

        This works like a local function, except it can only be called from the
        immediate outer scope, and can be jumped out of, to anywhere in the
        outer scope.

        Finally body code can only be entered via "enter_finally".
        """
        raise NotImplementedError()
    def compile(self,func_name : str,code: T,ret_var: Optional[Var]=None,end_targets=()) -> Function:
        """Turn a complete code fragment into a Function object."""
        raise NotImplementedError()
    def new_func_body(self):
        """Return a fresh, empty function-body object."""
        raise NotImplementedError()
# noinspection PyAbstractClass
class IROpGen(OpGen[IRCode]):
    """OpGen implementation producing first-level IR (IRCode) fragments."""
    # the largest number of call arguments emitted so far; used to size the
    # outgoing-argument stack area
    max_args_used = 0
    if TYPE_CHECKING:
        def __init__(self,abi: 'BinaryAbi',func_args: Iterable[Var]=(),callconv: CallConvType=CallConvType.default,pyinfo: Optional[PyFuncInfo]=None) -> None:
            super().__init__(abi,func_args,callconv,pyinfo)
        abi = cast(BinaryAbi,object()) # type: BinaryAbi
    def get_func_arg(self,i : int,prev_frame : bool=False,callconv : Optional[CallConvType]=None) -> MutableValue:
        """Return the location of function argument 'i' per the resolved
        calling convention."""
        return self.abi.callconvs[
            self.callconv_for(callconv,prev_frame).value].get_arg(self.abi,i,prev_frame)
    def get_return(self,prev_frame : bool=False,callconv : Optional[CallConvType]=None) -> MutableValue:
        """Return the location of the return value per the resolved calling
        convention."""
        return self.abi.callconvs[
            self.callconv_for(callconv,prev_frame).value].get_return(self.abi,prev_frame)
    def call(self,func,args : Sequence[Value]=(),store_ret : Optional[Var]=None,callconv : CallConvType=CallConvType.default) -> IRCode:
        """Emit a call, tracking the maximum argument count used."""
        self.max_args_used = max(self.max_args_used,len(args))
        return self._call_impl(func,args,store_ret,callconv)
    def call_preloaded(self,func,args : int,store_ret : Optional[Var]=None,callconv : CallConvType=CallConvType.default) -> IRCode:
        """Emit a call with pre-placed arguments, tracking the maximum
        argument count used."""
        self.max_args_used = max(self.max_args_used,args)
        return self._call_preloaded_impl(func,args,store_ret,callconv)
    def _call_impl(self,func,args : Sequence[Value],store_ret : Optional[Var],callconv : CallConvType) -> IRCode:
        """Move each argument into place (locking the argument registers so
        the moves are not clobbered), then emit the call itself."""
        arg_dests = [self.get_func_arg(i,callconv=callconv) for i in range(len(args))]
        arg_r_indices = [arg.reg_index for arg in arg_dests if isinstance(arg,FixedRegister)]
        r = [] # type: IRCode
        if arg_r_indices: r.append(LockRegs(arg_r_indices))
        for arg,dest in zip(args,arg_dests):
            r += self.move(arg,dest)
        r += self._call_preloaded_impl(func,len(args),store_ret,callconv)
        if arg_r_indices: r.append(UnlockRegs(arg_r_indices))
        return r
    def _call_preloaded_impl(self,func,args : int,store_ret : Optional[Var],callconv : CallConvType) -> IRCode:
        raise NotImplementedError()
    def get_compiler(self,regs_used: Set[int],stack_used: int,args_used: int) -> IRCompiler:
        """Return an IRCompiler configured for the given resource usage."""
        raise NotImplementedError()
    def allocater_extra_state(self) -> ExtraState:
        """Return a new instance of ExtraState.

        During register allocation, implicit state is tacked in a way that
        takes into account branching, as the instructions are analyzed, one by
        one. This method returns an instance of ExtraState that stores nothing
        and does nothing, but this method can be overridden to store extra
        data per branch, and substitute or even omit certain instructions as
        each branch is analyzed.
        """
        return NullState()
    def process_indirection(self,instr: Instr,ov: Overload,inds: Sequence[int]) -> Tuple[Instr,Overload]:
        """Remove instances of IndirectVar.

        The return value is equivalent to the input parameters, except each
        argument indexed by "inds" (all of which will be instances of
        IndirectVar) is replaced with one or more of its component instances of
        Var, and "instr.op" and "ov" are replaced by equivalent instances of
        RegAllocatorOverloads and Overload that accept the new arguments. The
        intent is to free the register allocator from the burden of supporting
        different address formats.
        """
        raise NotImplementedError()
    def compile(self,func_name: str,code1: IRCode,ret_var: Optional[Var]=None,end_targets=()) -> Function:
        """Allocate registers for 'code1' and encode it into a Function.

        Steps: bind the incoming arguments to their variables, append the
        return-value move, run the register allocator, then encode the result
        back-to-front (so jump displacements can be resolved), add alignment
        padding, and finish any delayed encodings.
        """
        if EMIT_IR_TEST_CODE:
            print(func_name+':')
            print(debug_gen_code(code1))
            print()
        code2 = [] # type: IRCode
        for i,av in enumerate(self.func_arg_vars):
            loc = self.get_func_arg(i,True)
            if isinstance(loc,(FixedRegister,ArgStackItem)):
                code2.append(CreateVar(av,loc))
            else:
                # this case probably won't even come up
                code2.extend(self.move(loc,av))
        code2.extend(code1)
        if ret_var is not None:
            code2.extend(self.move(ret_var,self.get_return(True)))
        code,r_used,s_used = reg_allocate(self,code2,self.abi.gen_regs)
        irc = self.get_compiler(r_used,s_used,self.max_args_used)
        # displacements are measured backwards from the end of the code
        displacement = 0
        pad_size = 0
        annot_size = 0
        annots = [] # type: List[debug.Annotation]
        # this item will be replaced with padding if needed
        late_chunks = [None] # type: List[Optional[Sized]]
        code = irc.prolog() + code + irc.epilog()
        for instr in reversed(code):
            if isinstance(instr,IRAnnotation):
                descr = instr.descr
                if isinstance(descr,IRSymbolLocDescr):
                    descr = debug.VariableLoc(
                        descr.symbol,
                        irc.get_machine_arg(descr.loc.to_ir(),displacement) if descr.loc else None)
                annot = debug.Annotation(descr,annot_size)
                if self.abi.assembly:
                    late_chunks.append(AsmSequence([AsmOp(comment_desc,
                        ('annotation: {!r}'.format(annot.descr),),b'')]))
                # since appending to a list is O(1) while prepending is O(n), we
                # add the items backwards and reverse the list afterwards
                annots.append(annot)
                annot_size = 0
            elif isinstance(instr,Target):
                instr.displacement = displacement
            else:
                assert isinstance(instr,Instr2)
                chunk = irc.compile_early(make_asm_if_needed(instr,self.abi.assembly),displacement)
                # items are added backwards for the same reason as above
                late_chunks.append(chunk)
                displacement -= len(chunk)
                annot_size += len(chunk)
        assert annot_size == 0 or not annots,"if there are any annotations, there should be one at the start"
        annots.reverse()
        # add padding for alignment
        if CALL_ALIGN_MASK:
            unpadded = displacement
            displacement = aligned_for_call(displacement)
            pad_size = displacement - unpadded
            if pad_size:
                late_chunks[0] = code_join(
                    [irc.compile_early(make_asm_if_needed(c,self.abi.assembly),0) for c
                        in irc.nops(pad_size)])
        for et in end_targets:
            et.displacement -= displacement
        return Function(
            code_join([irc.compile_late(c) for c in reversed(late_chunks) if c is not None]),
            pad_size,
            name=func_name,
            annotation=annots,
            returns=c_types.t_void if ret_var is None else ret_var.data_type,
            params=[Param(av.dbg_symbol or '__a'+str(i),av.data_type) for i,av in enumerate(self.func_arg_vars)],
            callconv=self.callconv)
    def new_func_body(self):
        """Return a fresh, uninitialized FunctionBody object."""
        return pyinternals.FunctionBody.__new__(pyinternals.FunctionBody)
# noinspection PyAbstractClass
class JumpCondOpGen(IROpGen):
    """An implementation of IROpGen that uses jumps for control flow"""
    def jump_if(self,dest : Target,cond : Cmp) -> IRCode:
        """Emit a jump to 'dest' taken when 'cond' holds, decomposing
        compound conditions into basic comparisons."""
        if isinstance(cond,BinCmp):
            return self._basic_jump_if(dest,cond)
        if isinstance(cond,OrCmp):
            # either sub-condition may take the jump
            return self.jump_if(dest,cond.a) + self.jump_if(dest,cond.b)
        assert isinstance(cond,AndCmp)
        # only test the second condition when the first held
        return self.if_(cond.a,self.jump_if(dest,cond.b),None)
    def _basic_jump_if(self,dest : Target,cond : BinCmp) -> IRCode:
        raise NotImplementedError()
    def if_(self,cond : Cmp,on_true : IRCode,on_false : Optional[IRCode]) -> IRCode:
        """Emit an if/else by jumping over the taken branch on the
        complemented condition."""
        endif = Target()
        if not on_false:
            return self.jump_if(endif,cond.complement()) + on_true + [endif]
        else_ = Target()
        return (self.jump_if(else_,cond.complement()) + on_true
            + self.jump(endif) + [else_] + on_false + [endif])
    def do_while(self,action : IRCode,cond : Cmp) -> IRCode:
        """Emit a post-tested loop: run 'action', repeat while 'cond' holds."""
        top = Target()
        body = cast(IRCode,[top]) + action
        return body + self.jump_if(top,cond.complement())
class FollowNonlinear(Generic[T,U]):
"""Follow code non-linearly by splitting and merging state with the
branches."""
def __init__(self,state : T) -> None:
self.state = state # type: Optional[T]
self.prior_states = {} # type: Dict[Target,T]
self.pending_states = collections.defaultdict(list) # type: DefaultDict[Target,List[U]]
self.elided_targets = {} # type: Dict[Target,int]
self.farthest_i = -1
def backtracking(self,op_i):
return self.farthest_i is not None and op_i <= self.farthest_i
def handle_instr(self,op_i : int,op : Instr) -> None:
pass
def handle_invalidate_regs(self,op_i : int,op : InvalidateRegs) -> None:
pass
def handle_create_var(self,op_i : int,op : CreateVar) -> None:
pass
def handle_indirect_mod(self,op_i: int,op: IndirectMod) -> None:
pass
def handle_target(self,op_i: int,op: Target) -> None:
pass
def handle_elided_target(self,op_i: int,op: Target) -> None:
pass
def _redo_section(self,prev_i: int,code: IRCode):
old_s = self.state
self.state = self.make_prior_state(prev_i)
self.run(code,prev_i - 1)
self.state = old_s
def handle_irjump(self,op_i: int,op: IRJump,code: IRCode) -> None:
assert self.state is not None and op.dests
assert all((d in self.prior_states) == (op.dests[0] in self.prior_states) for d in op.dests[1:]),(
'a jump is not allowed to have both forward and backward targets')
unhandled_dests = [] # type: List[Target]
for d in op.dests:
prev_i = self.elided_targets.get(d)
if prev_i is not None:
# if there is a jump backwards, to a part that had a None
# state, go back and handle it again
self._redo_section(prev_i,code)
# _redo_section terminates as soon as it encounters an already
# handled Target, but there could have been a jump forward to
# another elided Target between prev_i and here
for t in self.elided_targets.keys() & self.pending_states.keys():
self._redo_section(self.elided_targets[t],code)
else:
unhandled_dests.append(d)
if unhandled_dests:
if unhandled_dests[0] in self.prior_states:
self.conform_prior_states(op_i,op.jump_ops,[self.prior_states[d] for d in unhandled_dests])
else:
pending = self.make_pending_state(op_i,op)
for d in unhandled_dests:
self.pending_states[d].append(pending)
if not op.conditional:
old = self.state
self.state = None
self.after_state_change(old)
def handle_lock_regs(self,op_i : int,op : LockRegs):
pass
def handle_unlock_regs(self,op_i : int,op : UnlockRegs):
pass
# occurs when merging states at a target we just reached
def conform_states_to(self,op_i : int,state : Sequence[U]) -> None:
pass
# occurs when merging states at a target we already went over
def conform_prior_states(self,op_i : int,jump_ops : int,states : Sequence[T]) -> None:
pass
def after_state_change(self,old_state : Optional[T]) -> None:
pass
def make_pending_state(self,op_i : int,op : IRJump) -> U:
raise NotImplementedError()
def make_prior_state(self,op_i : int) -> T:
raise NotImplementedError()
def recall_pending_state(self,op_i : int,state : U) -> T:
raise NotImplementedError()
def handle_op(self,op_i : int,code : IRCode) -> bool:
op = code[op_i]
if isinstance(op,Instr):
if self.state is not None:
self.handle_instr(op_i,op)
elif isinstance(op,InvalidateRegs):
if self.state is not None:
self.handle_invalidate_regs(op_i,op)
elif isinstance(op,CreateVar):
if self.state is not None:
self.handle_create_var(op_i,op)
elif isinstance(op,IndirectMod):
if self.state is not None:
self.handle_indirect_mod(op_i,op)
elif isinstance(op,Target):
ps = self.prior_states.get(op)
if ps is not None:
if self.state is not None:
self.conform_prior_states(op_i,0,[ps])
return False
states = self.pending_states.get(op)
if states:
old = self.state
del self.pending_states[op]
if self.state is None:
self.state = self.recall_pending_state(op_i,states[0])
states = states[1:]
if states:
self.conform_states_to(op_i,states)
self.after_state_change(old)
if self.state is not None:
self.prior_states[op] = self.make_prior_state(op_i)
try:
del self.elided_targets[op]
except KeyError:
pass
self.handle_target(op_i,op)
else:
self.elided_targets[op] = op_i
self.handle_elided_target(op_i,op)
elif isinstance(op,IRJump):
if self.state is not None:
self.handle_irjump(op_i,op,code)
elif isinstance(op,LockRegs):
if self.state is not None:
self.handle_lock_regs(op_i,op)
else:
assert isinstance(op,UnlockRegs)
if self.state is not None:
self.handle_unlock_regs(op_i,op)
return True
def next_i(self,i : int,code : Sequence[IROp]) -> Optional[int]:
return i + 1 if (i + 1) < len(code) else None
    def run(self,code : IRCode,start : int=-1) -> None:
        """Scan 'code' beginning at next_i(start, code) — with the default
        forward order this is index start+1, so start=-1 begins at 0 —
        dispatching each op via handle_op until handle_op returns False or
        next_i returns None.  The farthest index handled is tracked in
        self.farthest_i."""
        op_i = start
        while True:
            next_i = self.next_i(op_i,code)
            if next_i is None: break
            op_i = next_i
            if not self.handle_op(op_i,code): break
            self.farthest_i = max(op_i,self.farthest_i)
class VarState:
    """Per-lifetime tracking of pending reads during the backward interval
    scans: maps a Lifetime to the position just past its most recent read,
    to be closed off when the matching write is found."""
    def __init__(self,read_starts : Optional[Dict[Lifetime,int]]=None) -> None:
        if read_starts is None:
            read_starts = {}
        self.read_starts = read_starts # type: Dict[Lifetime,int]
    def apply(self,life : Lifetime,i : int,only_tracked=False) -> None:
        """Close the pending read of 'life' at position 'i' and add the
        resulting interval to the lifetime.  When no read is pending, a
        single-position interval is added instead, unless 'only_tracked'
        is true, in which case nothing is recorded."""
        start = self.read_starts.pop(life,None)
        if start is None:
            if only_tracked:
                return
            interval = Interval(i,i + 1)
        else:
            interval = Interval(i,start)
        life.intervals |= interval
    def apply_all(self,i : int) -> None:
        """Close every pending read at position 'i'."""
        while self.read_starts:
            life,start = self.read_starts.popitem()
            life.intervals |= Interval(i,start)
    def __repr__(self):
        return 'VarState({!r})'.format(self.read_starts)
class _CalcVarIntervalsCommon(FollowNonlinear[VarState,Tuple[VarState,int]]):
    """Base for the interval-calculation passes.  These passes scan the
    code backwards (see next_i below); a "read start" records where a
    pending read interval will end once the matching write is found."""
    def __init__(self) -> None:
        super().__init__(VarState())
    # Here, "write" means completely overwrite. In the case of partial write,
    # both "read" and "write" will be false.
    def create_var_life(self,var,_i,read,write):
        """Record one use of 'var' at position '_i'.  Subclasses override."""
        raise NotImplementedError()
    def handle_instr(self,op_i : int,op : Instr) -> None:
        """Merge the read/write direction of every occurrence of each
        variable among the instruction's arguments, then record the merged
        usage via create_var_life."""
        var_dirs = collections.defaultdict(ParamDir) # type: DefaultDict[Union[Var,Block],ParamDir]
        for var,pd in zip(op.args,op.op.param_dirs):
            if isinstance(var,(Var,Block)):
                var_dirs[var] |= pd
            elif isinstance(var,IndirectVar):
                assert not (isinstance(var.base,Block) or isinstance(var.index,Block))
                # the base and index of an indirect access are themselves
                # only read
                if isinstance(var.base,Var):
                    var_dirs[var.base].reads = True
                if isinstance(var.index,Var):
                    var_dirs[var.index].reads = True
        for var,pd in var_dirs.items():
            self.create_var_life(var,op_i,pd.reads,pd.writes)
    def handle_create_var(self,op_i : int,op : CreateVar) -> None:
        # a CreateVar counts as a complete write
        self.create_var_life(op.var,op_i,False,True)
    def handle_indirect_mod(self,op_i : int,op : IndirectMod) -> None:
        self.create_var_life(op.var,op_i,op.read,op.write)
    def handle_irjump(self,op_i : int,op : IRJump,code : IRCode) -> None:
        """Handle a jump.  If the base handler cleared the state
        (NOTE(review): FollowNonlinear.handle_irjump is defined elsewhere —
        confirm exactly when it does so), close all pending reads, then
        restart either from the union of the destinations' prior states or
        from the saved state."""
        assert self.state is not None
        state = self.state
        super().handle_irjump(op_i,op,code)
        if self.state is None:
            state.apply_all(op_i+1)
            prior = [self.prior_states.get(d) for d in op.dests]
            if prior[0]:
                # all destinations were already visited: every lifetime they
                # still track becomes a pending read starting just after the
                # jump
                assert all(prior)
                lives = set()
                for s in prior:
                    lives.update(s.read_starts)
                self.state = VarState({life: op_i + 1 for life in lives})
            else:
                assert not any(prior)
                self.state = state
    def make_prior_state(self,op_i : int) -> VarState:
        assert self.state is not None
        return VarState(self.state.read_starts.copy())
    def make_pending_state(self,op_i : int,op : IRJump) -> Tuple[VarState,int]:
        # a pending state also remembers the position of the jump that
        # created it
        assert self.state is not None
        return VarState(self.state.read_starts.copy()),op_i
    def recall_pending_state(self,op_i : int,state : Tuple[VarState,int]) -> VarState:
        return state[0]
    def conform_prior_states(self,op_i : int,jump_ops : int,states : Sequence[VarState]) -> None:
        """Merge the read-starts of previously-recorded states into the
        current state; lifetimes tracked only by the prior states start
        pending at op_i + 1."""
        assert self.state is not None
        for state in states:
            for life in (state.read_starts.keys() | self.state.read_starts.keys()):
                m_rs = self.state.read_starts.get(life)
                if m_rs is None:
                    assert life in state.read_starts
                    m_rs = op_i + 1
                self.state.read_starts[life] = m_rs
    def next_i(self,i : int,code : Sequence[IROp]) -> Optional[int]:
        # scan backwards
        return i - 1 if i > 0 else None
class _CalcVarIntervals(_CalcVarIntervalsCommon):
    """First (backward) pass: creates Lifetime objects and builds their
    interval sets."""
    def __init__(self):
        super().__init__()
        # maps a code position that we will need to back-track to, to the
        # set of lifetimes whose intervals must be extended there
        # (consumed by _CalcBackVarIntervals)
        self.back_starts = collections.defaultdict(set) # type: DefaultDict[int,Set[Lifetime]]
        # blocks whose aliases' intervals must be folded in afterwards (see
        # calc_var_intervals)
        self.block_vars = [] # type: List[Block]
    # Here, "write" means completely overwrite. In the case of partial write,
    # both "read" and "write" will be false.
    def create_var_life(self,var,_i,read,write):
        """Create 'var''s lifetime on first sight and extend it according
        to how it is used at position '_i'."""
        assert self.state is not None
        i = _i
        # the variable doesn't officially exist until after the instruction
        if write and not read: i += 1
        if var.lifetime is None:
            if isinstance(var,VarPart):
                # parts of a block alias the block's own lifetime
                if var.block.lifetime is None:
                    var.block.lifetime = VarLifetime()
                    self.block_vars.append(var.block)
                var.lifetime = AliasLifetime(
                    cast(VarLifetime,var.block.lifetime))
            else:
                var.lifetime = VarLifetime(dbg_symbol=var.dbg_symbol)
            if __debug__:
                var.lifetime.name = getattr(var,'name',None)
                var.lifetime.origin = getattr(var,'origin',None)
        if write:
            self.state.apply(var.lifetime,i)
            if isinstance(var,Block):
                # writing a whole block writes each of its parts
                for part in var:
                    self.create_var_life(part,_i,False,True)
        elif not read:
            # even if a variable is neither read-from nor written-to, it still
            # has to exist
            var.lifetime.intervals |= Interval(i,i+1)
        if read:
            self.state.read_starts.setdefault(var.lifetime,i + 1)
    def conform_states_to(self,op_i : int,states : Sequence[Tuple[VarState,int]]) -> None:
        assert self.state is not None
        for state in states:
            for life in self.state.read_starts:
                # This variable's lifetime needs to be propagated to a later
                # location. Since we are scanning the code backwards, we have
                # to do this in another pass
                self.back_starts[state[1]].add(life)
class _CalcBackVarIntervals(_CalcVarIntervalsCommon):
    """Second (backward) pass: extends intervals at the positions recorded
    in _CalcVarIntervals.back_starts."""
    def __init__(self,back_starts):
        super().__init__()
        # position -> set of lifetimes to extend there (from the first pass)
        self.back_starts = back_starts
    def create_var_life(self,var,i,read,write):
        assert self.state
        if isinstance(var,VarPart):
            # a part's usage also touches the owning block
            self.create_var_life(var.block,i,read,False)
        if not read: i += 1
        # the previous pass should have handled it by this point, so don't
        # track it any further
        assert var.lifetime is not None
        self.state.apply(var.lifetime,i,True)
    def handle_op(self,op_i : int,code : IRCode):
        """Same as the base implementation, but afterwards mark any
        lifetimes queued for this position as reads pending from op_i+1."""
        r = super().handle_op(op_i,code)
        lives = self.back_starts.get(op_i)
        if lives:
            assert self.state is not None
            for life in lives:
                self.state.read_starts.setdefault(life,op_i + 1)
        return r
def calc_var_intervals(code : IRCode) -> None:
    """Compute lifetime intervals for every variable in 'code'.

    Runs the first backward pass (_CalcVarIntervals) over the whole
    program, folds alias intervals into their owning blocks' lifetimes,
    and — when the first pass recorded positions that need back-tracking —
    runs the second backward pass (_CalcBackVarIntervals).

    Raises ValueError if a variable may be read before being written.
    """
    forward = _CalcVarIntervals()
    forward.run(code,len(code))
    # This only checks for read-violations from code that is reachable from
    # the start, which is fine, since code that isn't reachable from the
    # start, isn't reachable at all, and will be stripped in a later pass.
    assert forward.state
    if forward.state.read_starts:
        raise ValueError('one or more variables may be read-from before being written-to')
    for blk in forward.block_vars:
        assert blk.lifetime
        for alias in blk.lifetime.aliases:
            blk.lifetime.intervals |= alias.intervals
    if forward.back_starts:
        _CalcBackVarIntervals(forward.back_starts).run(code,len(code))
class ItvLocation:
    """The physical location of a value: optionally a register index
    and/or a stack location.  Either, both, or neither may be set."""
    def __init__(self,reg : Optional[int]=None,stack_loc : Optional[StackLocation]=None) -> None:
        self.reg = reg
        self.stack_loc = stack_loc
    def copy(self) -> 'ItvLocation':
        return ItvLocation(self.reg,self.stack_loc)
    def __bool__(self):
        # true when the value resides somewhere
        return not (self.reg is None and self.stack_loc is None)
    def __repr__(self):
        return 'ItvLocation({!r},{!r})'.format(self.reg,self.stack_loc)
    def __eq__(self,b):
        if not isinstance(b,ItvLocation):
            return NotImplemented
        return self.reg == b.reg and self.stack_loc == b.stack_loc
    def __ne__(self,b):
        r = self.__eq__(b)
        if r is NotImplemented:
            return r
        return not r
    def to_opt_ir(self,data_type : c_types.CType=c_types.t_void_ptr) -> Union[FixedRegister,StackItem,ArgStackItem,None]:
        """Return an IR value representing a value in this location.

        If this location is both a register and stack item, the return
        value will represent a register. If this location is neither, None
        is returned.
        """
        if self.reg is None:
            if self.stack_loc is None:
                return None
            return to_stack_item(self.stack_loc,data_type)
        return FixedRegister(self.reg,data_type)
    def to_ir(self,data_type : c_types.CType=c_types.t_void_ptr) -> Union[FixedRegister,StackItem,ArgStackItem]:
        """Return an IR value representing a value in this location.

        If this location is both a register and stack item, the return
        value will represent a register. If this location is neither, a
        ValueError will be raised.
        """
        result = self.to_opt_ir(data_type)
        if result is None:
            raise ValueError('this location is blank')
        return result
# When True (and running under __debug__), every @consistency_check-decorated
# LocationScan method re-validates the allocator's internal invariants after
# it runs.
LINEAR_SCAN_EXTRA_CHECKS = False
if __debug__ and LINEAR_SCAN_EXTRA_CHECKS:
    def do_consistency_check(self):
        # 'self' is a LocationScan: check that itv_locs, reg_pool,
        # stack_pool, active_r and active_s all agree with one another
        for life,loc in self.itv_locs.items():
            if loc.reg is not None:
                assert self.reg_pool[loc.reg] is life
            if loc.stack_loc is not None and loc.stack_loc.sect == StackSection.local:
                assert self.stack_pool[loc.stack_loc.index] is life
            if loc.reg is None and loc.stack_loc is None:
                assert self.cur_pos not in life.intervals or self.cur_pos == life.intervals.interval_at(self.cur_pos).start
        for itv,life in self.active_r:
            assert self.itv_locs[life].reg is not None
        for itv,life in self.active_s:
            sloc = self.itv_locs[life].stack_loc
            assert sloc is not None and sloc.sect == StackSection.local
    def consistency_check(f):
        # decorator: run the method, then validate (unless suspended —
        # see handle_create_var, which toggles .suspend around a
        # temporarily-inconsistent sequence)
        def inner(self,*args,**kwds):
            r = f(self,*args,**kwds)
            if not consistency_check.suspend:
                do_consistency_check(self)
            return r
        return inner
    consistency_check.suspend = False
else:
    # checks disabled: the decorator is a no-op
    def consistency_check(f):
        return f
class LocationScan:
    """Tracks the physical location (register and/or local stack slot) of
    every VarLifetime over a linear scan of the code.

    'active_r'/'active_s' hold the lifetimes currently occupying a
    register / a local stack slot, keyed by the end of their current
    interval so expired entries can be popped cheaply as the scan
    advances.
    """
    def __init__(self,regs : int,extra : ExtraState) -> None:
        assert regs > 0
        # active entries are sorted by the end of their current interval
        key = lambda x: x[0].end
        self.active_r = SortedList(key=key) # type: SortedList[Tuple[Interval[int],VarLifetime]]
        self.active_s = SortedList(key=key) # type: SortedList[Tuple[Interval[int],VarLifetime]]
        # reg_pool[i] is the lifetime occupying register i, or None if free
        self.reg_pool = [None] * regs # type: List[Optional[VarLifetime]]
        # The stack pool grows as needed. Unlike the register pool, the stack
        # pool is shared between branches and members are not removed until
        # all of their intervals have passed.
        # The stack pool only tracks local stack items. Variables passed by
        # stack will reside in a separate un-tracked area.
        # TODO: are functions allowed to write to the stack space used by
        # arguments? If so, we should do that.
        self.stack_pool = [] # type: List[Optional[VarLifetime]]
        self.itv_locs = collections.defaultdict(ItvLocation) # type: DefaultDict[VarLifetime,ItvLocation]
        self.cur_pos = 0
        self.extra = extra
        # registers that may not be allocated or moved into (see
        # LockRegs/UnlockRegs handling)
        self.locked_regs = set() # type: Set[int]
    def branch(self) -> 'LocationScan':
        """Return a copy of this scan for following one branch of a jump.
        The stack pool is shared with the copy; everything else is
        duplicated."""
        r = LocationScan.__new__(LocationScan)
        r.active_r = self.active_r.copy()
        r.active_s = self.active_s.copy()
        r.reg_pool = self.reg_pool[:]
        r.stack_pool = self.stack_pool # there is only ever one stack pool
        # noinspection PyArgumentList
        r.itv_locs = collections.defaultdict(ItvLocation,((life,loc.copy()) for life,loc in self.itv_locs.items()))
        r.cur_pos = self.cur_pos
        r.extra = self.extra.copy()
        r.locked_regs = self.locked_regs.copy()
        return r
    def _alloc_r(self,life,opts):
        # give 'life' any free, unlocked register in 'opts'; False when
        # none is available
        assert self.itv_locs[life].reg is None
        for i in range(len(self.reg_pool)):
            if self.reg_pool[i] is None and i in opts and i not in self.locked_regs:
                self.reg_pool[i] = life
                self.itv_locs[life].reg = i
                self.active_r.add_item((life.itv_at(self.cur_pos),life))
                return True
        return False
    def _move_r(self,life,opts):
        # move 'life' from its current register to any free, unlocked
        # register in 'opts'; False when none is available
        itv_l = self.itv_locs[life]
        assert itv_l.reg is not None
        for i in range(len(self.reg_pool)):
            if self.reg_pool[i] is None and i in opts and i not in self.locked_regs:
                self.reg_pool[itv_l.reg] = None
                self.reg_pool[i] = life
                self.itv_locs[life].reg = i
                return True
        return False
    def _alloc_s(self,life : VarLifetime,itv : Optional[Interval[int]]=None,reserve_only=False) -> bool:
        """Give 'life' a local stack slot (re-using its preferred slot when
        it has one), or just reserve the slot when 'reserve_only'.
        Returns False if it already has a stack location."""
        itvl = self.itv_locs[life]
        if itvl.stack_loc is not None: return False
        if life.preferred_stack_i is not None:
            assert self.stack_pool[life.preferred_stack_i] is life
            if not reserve_only:
                itvl.stack_loc = StackLocation(StackSection.local,life.preferred_stack_i)
        else:
            for i in range(len(self.stack_pool)):
                if self.stack_pool[i] is None:
                    self.stack_pool[i] = life
                    life.preferred_stack_i = i
                    if not reserve_only:
                        itvl.stack_loc = StackLocation(StackSection.local,i)
                    break
            else:
                # no free slot: grow the pool
                life.preferred_stack_i = len(self.stack_pool)
                if not reserve_only:
                    itvl.stack_loc = StackLocation(StackSection.local,len(self.stack_pool))
                self.stack_pool.append(life)
        if not reserve_only:
            if itv is None: itv = life.itv_at(self.cur_pos)
            self.active_s.add_item((cast(Interval[int],itv),life))
        return True
    def is_reg_free(self,r):
        """Return True when register 'r' is not occupied by any lifetime."""
        return self.reg_pool[r] is None
    def _update_r(self,life,on_loc_expire):
        # re-queue 'life' under its interval at the new position, or
        # release its register when no interval covers the position
        try:
            new_itv = life.itv_at(self.cur_pos)
        except ValueError:
            loc = self.itv_locs[life]
            assert loc.reg is not None
            self.reg_pool[loc.reg] = None
            loc.reg = None
            # if loc.stack_loc is not None, the next loop will call
            # on_loc_expire for this instance of VarLifetime
            if on_loc_expire and loc.stack_loc is None: on_loc_expire(life)
        else:
            self.active_r.add_item((new_itv,life))
    def _update_s(self,life,on_loc_expire):
        # stack-slot counterpart of _update_r
        try:
            new_itv = life.itv_at(self.cur_pos)
        except ValueError:
            loc = self.itv_locs[life]
            assert loc is not None
            assert loc.stack_loc is not None
            assert loc.stack_loc.sect == StackSection.local
            loc.stack_loc = None
            if on_loc_expire: on_loc_expire(life)
        else:
            self.active_s.add_item((new_itv,life))
    def _advance(self,pos,on_loc_expire):
        # expire or re-queue every active entry whose interval ends at or
        # before 'pos', then drop stack-pool entries that are fully past
        while self.active_r:
            itv,life = self.active_r[0]
            if itv.end > pos: break
            del self.active_r[0]
            self._update_r(life,on_loc_expire)
        while self.active_s:
            itv,life = self.active_s[0]
            if itv.end > pos: break
            del self.active_s[0]
            self._update_s(life,on_loc_expire)
        for i,life in enumerate(self.stack_pool):
            if life is not None and pos >= life.intervals.global_end:
                self.stack_pool[i] = None
    def _reverse(self,pos,on_loc_expire):
        # moving backwards: re-evaluate every active entry at the new
        # position
        self.cur_pos = pos
        old_a = list(self.active_r)
        del self.active_r[:]
        for itv,life in old_a:
            self._update_r(life,on_loc_expire)
        old_a = list(self.active_s)
        del self.active_s[:]
        for itv,life in old_a:
            self._update_s(life,on_loc_expire)
    @consistency_check
    def advance(self,pos: int,on_loc_expire: Optional[Callable[[VarLifetime],None]]) -> None:
        """Move the scan position to 'pos' (forwards or backwards),
        calling 'on_loc_expire' for each lifetime whose last location
        lapses."""
        old_pos = self.cur_pos
        self.cur_pos = pos
        if pos > old_pos:
            self._advance(pos,on_loc_expire)
        elif pos < old_pos:
            self._reverse(pos,on_loc_expire)
    @staticmethod
    def _remove_active(life,active):
        # remove the (interval, lifetime) entry for 'life' from an active
        # list, if present
        for i,item in enumerate(active):
            if item[1] == life:
                del active[i]
                break
    @consistency_check
    def free_reg(self,reg : int,alt_regs : Sequence[int]=()) -> Optional[VarLifetime]:
        """Free the given register by either moving the value to the stack or
        one of the registers in 'alt_regs' if any are available.

        If a value had to be moved, the return value is an instance of
        VarLifetime, indicating where the value was moved; Otherwise, the
        return value is None.
        """
        assert reg not in alt_regs
        life = self.reg_pool[reg]
        if life is None:
            return None
        life_l = self.itv_locs[life]
        self.reg_pool[reg] = None
        for ar in alt_regs:
            if self.reg_pool[ar] is None and ar not in self.locked_regs:
                life_l.reg = ar
                self.reg_pool[ar] = life
                break
        else:
            self._remove_active(life,self.active_r)
            life_l.reg = None
            if not self._alloc_s(life):
                # the value already had a copy in the stack
                life = None
        return life
    @consistency_check
    def load_reg(self,life : VarLifetime,opts : Container[int]=Filter()) -> Optional[VarLifetime]:
        """Load the given value into a register.

        If another value had to be saved to the stack, to free up a register,
        the return value will be an instance of VarLifetime, indicating where
        the value was saved. Otherwise the return value is None.

        The register will be one that is in 'opts'.
        """
        itv_l = self.itv_locs[life]
        # try a free register first (allocating or moving as appropriate)
        if len(self.active_r) < len(self.reg_pool) and (
            self._alloc_r if itv_l.reg is None else self._move_r)(life,opts):
            return None
        # otherwise spill, scanning from the lifetime with the farthest
        # interval end (active_r is sorted by end), restricted to 'opts'
        for i in range(len(self.active_r)-1,-1,-1):
            spill = self.active_r[i][1]
            spill_l = self.itv_locs[spill]
            assert spill_l.reg is not None
            if spill_l.reg in opts:
                if itv_l.reg is None:
                    self.active_r.add_item((life.itv_at(self.cur_pos),life))
                else:
                    self.reg_pool[itv_l.reg] = None
                self.reg_pool[spill_l.reg] = life
                itv_l.reg = spill_l.reg
                self._alloc_s(spill)
                spill_l.reg = None
                del self.active_r[i]
                return spill
        # should be unreachable: 'opts' must admit at least one occupied
        # register when no free one exists
        assert None
    @consistency_check
    def load_stack(self,life : VarLifetime,loc : StackLocation) -> None:
        """Load the given value into the stack.

        This is only allowed for non-local stack locations, since shuffling
        stack items is not supported.

        Non-local stack items are not tracked. Providing a stack location like
        this will simply prevent creating an extra copy in the local stack if
        the value gets moved to a register and then spilled.
        """
        assert self.itv_locs[life].stack_loc is None and loc.sect != StackSection.local
        self.itv_locs[life].stack_loc = loc
    @consistency_check
    def to_stack(self,life: VarLifetime,reserve_only: bool=False) -> bool:
        """Copy the value to the stack.

        This will place a value onto the stack, if it's not already there. This
        will not free a register.
        """
        if self.itv_locs[life].stack_loc is not None: return False
        self._alloc_s(life,reserve_only=reserve_only)
        return True
    # @consistency_check
    # def create_loc(self,life : VarLifetime) -> Optional[VarLifetime]:
    #     """Assign the value a location"""
    #
    #     assert self.itv_locs[life].reg is None and self.itv_locs[life].stack_loc is None
    #
    #     if len(self.active_r) == len(self.reg_pool):
    #         spill_itv,spill = self.active_r[-1]
    #         spill_l = self.itv_locs[spill]
    #         itv = life.itv_at(self.cur_pos)
    #         if spill_itv.end > itv.end:
    #             self.itv_locs[life].reg = spill_l.reg
    #             self._alloc_s(spill,spill_itv)
    #             spill_l.reg = None
    #             del self.active_r[-1]
    #             self.active_r.add_item((itv,life))
    #             return spill
    #
    #         self._alloc_s(life,itv)
    #     else:
    #         self._alloc_r(life,Filter())
    #
    #     return None
    @consistency_check
    def value_updated(self,life : VarLifetime,where : LocationType) -> None:
        """Indicate that a value was updated.

        This indicates a value was updated either in a register or the stack,
        and that if a value was stored in both, the other location is no longer
        valid.
        """
        itvl = self.itv_locs[life]
        if where == LocationType.register:
            if itvl.reg is None:
                raise ValueError('cannot update the register content of a variable that is not currently in a register')
            if itvl.stack_loc is not None:
                if itvl.stack_loc.sect == StackSection.local:
                    #self._stack_pool_remove(life,itvl.stack_loc.index)
                    self._remove_active(life,self.active_s)
                itvl.stack_loc = None
        else:
            assert where == LocationType.stack
            if itvl.stack_loc is None:
                raise ValueError('cannot update the address content of a variable that is not currently in memory')
            if itvl.reg is not None:
                self.reg_pool[itvl.reg] = None
                self._remove_active(life,self.active_r)
                itvl.reg = None
    @consistency_check
    def alloc_block(self,life: VarLifetime,size: int,reserve_only: bool=False) -> bool:
        """Allocate (or, with 'reserve_only', just reserve) a run of 'size'
        contiguous local stack slots for 'life'.  Returns False when it
        already has a stack location."""
        itvl = self.itv_locs[life]
        if itvl.stack_loc is not None: return False
        if life.preferred_stack_i is not None:
            assert all(self.stack_pool[life.preferred_stack_i + i] is life for i in range(size))
            if not reserve_only:
                itvl.stack_loc = StackLocation(StackSection.local,life.preferred_stack_i)
        else:
            # find the first run of 'size' free slots, extending the pool
            # as needed
            i = 0
            n = len(self.stack_pool)
            while i < n:
                for j in range(i,min(n,i+size)):
                    if self.stack_pool[j] is not None:
                        i = j + 1
                        break
                else:
                    break
            for j in range(i,i + size):
                if j == len(self.stack_pool):
                    self.stack_pool.append(life)
                else:
                    self.stack_pool[j] = life
            if not reserve_only:
                itvl.stack_loc = StackLocation(StackSection.local,i)
            life.preferred_stack_i = i
        if not reserve_only:
            self.active_s.add_item((life.itv_at(self.cur_pos),life))
        return True
    def interval_loc(self,itv : Lifetime):
        """Return the ItvLocation for 'itv', resolving an AliasLifetime to
        the VarLifetime it wraps."""
        if isinstance(itv,AliasLifetime):
            itv = itv.itv
        assert isinstance(itv,VarLifetime)
        return self.itv_locs[itv]
    def to_ir(self,itv : Lifetime,data_type : c_types.CType=c_types.t_void_ptr) -> Union[FixedRegister,StackItem,ArgStackItem]:
        """Return an IR value for 'itv''s current location (see
        ItvLocation.to_ir)."""
        return self.interval_loc(itv).to_ir(data_type)
def load_to_reg(alloc : LocationScan,itv : VarLifetime,allowed_reg : Container[int],cgen : IROpGen,code : IRCode2,val : Optional[Value]=None) -> FixedRegister:
    """Load 'itv' into one of 'allowed_reg' in 'alloc', appending to
    'code' any move needed to save a displaced value and — when 'val' is
    given — a move of 'val' into the chosen register.  Returns the chosen
    register as a FixedRegister."""
    displaced = alloc.load_reg(itv,Filter(allowed_reg))
    dest = alloc.interval_loc(itv)
    ir_dest = dest.to_ir()
    if displaced is not None:
        # save the displaced value to its new location before overwriting
        # the register
        d_loc = alloc.interval_loc(displaced)
        code.extend(ir_preallocated_to_ir2(cgen.move(ir_dest,d_loc.to_ir())))
        code.extend(annotate_symbol_loc(displaced,d_loc))
    if val is not None:
        code.extend(ir_preallocated_to_ir2(cgen.move(val,ir_dest)))
        code.extend(annotate_symbol_loc(itv,dest))
    return ir_dest
def alloc_stack(alloc,var,reserve_only=False):
    """Reserve (or allocate, per 'reserve_only') a stack location for
    'var' in 'alloc': whole blocks and block members get a multi-slot
    block allocation, plain variables a single slot."""
    if isinstance(var,Block):
        alloc.alloc_block(var.lifetime,len(var),reserve_only)
        return
    if isinstance(var,VarPart):
        alloc.alloc_block(var.block.lifetime,len(var.block),reserve_only)
        return
    alloc.to_stack(var.lifetime,reserve_only=reserve_only)
class JumpState:
    """A LocationScan snapshot taken at a jump, plus the output code link
    it was writing to.  'jump_ops' is the number of trailing ops in 'code'
    belonging to the jump itself; fix-up code is inserted before them.
    'branches' marks conditional/multi-destination jumps."""
    def __init__(self,alloc : LocationScan,code : IRCode2,jump_ops : int,branches : bool=False) -> None:
        assert jump_ops <= len(code)
        self.alloc = alloc
        self.code = code
        self.jump_ops = jump_ops
        self.branches = branches
    def extend_code(self,ops : IRCode2):
        """Append 'ops', inserting just before the jump's own ops when
        there are any."""
        if self.jump_ops:
            self.code[-self.jump_ops:-self.jump_ops] = ops
        else:
            self.code.extend(ops)
    def conform_to(self,cgen : IROpGen,other : LocationScan) -> None:
        """Move values so that they have the same locations in self.alloc as
        'other'."""
        # noinspection PyUnresolvedReferences
        for itv in self.alloc.itv_locs.keys() & other.itv_locs.keys():
            loc_self = self.alloc.itv_locs[itv]
            loc_o = other.itv_locs[itv]
            if not (loc_self and loc_o): continue
            if loc_self.reg != loc_o.reg and loc_o.reg is not None:
                # the value must end up in loc_o's register; evict the
                # current occupant first if needed
                displaced = self.alloc.free_reg(loc_o.reg)
                if displaced is not None:
                    d_loc = self.alloc.interval_loc(displaced)
                    self.extend_code(ir_preallocated_to_ir2(
                        cgen.move(loc_o.to_ir(),d_loc.to_ir())))
                    # NOTE(review): load_to_reg annotates the *displaced*
                    # lifetime here; annotating 'itv' may be a mistake —
                    # confirm against annotate_symbol_loc's contract
                    self.extend_code(annotate_symbol_loc(itv,d_loc))
                tmp = [] # type: IRCode2
                load_to_reg(self.alloc,itv,(loc_o.reg,),cgen,tmp,loc_self.to_ir())
                self.extend_code(tmp)
            if loc_self.stack_loc != loc_o.stack_loc:
                if loc_o.stack_loc is None:
                    self.alloc.value_updated(itv,LocationType.register)
                else:
                    # it should never be the case where an interval is placed
                    # in more than one stack location
                    assert loc_self.stack_loc is None
                    self.alloc.to_stack(itv)
                    self.extend_code(ir_preallocated_to_ir2(
                        cgen.move(loc_self.to_ir(),to_stack_item(loc_o.stack_loc))))
            if loc_self.reg != loc_o.reg:
                # 'other' keeps the value only on the stack: drop our
                # register copy
                assert loc_o.reg is None
                assert loc_self.stack_loc is not None
                self.alloc.free_reg(loc_self.reg)
def hassubclass(cls : Union[type,Tuple[type,...]],classinfo : Union[type,Tuple[type,...]]) -> Optional[type]:
    """Return the first class in 'cls' (a class or tuple of classes) that
    is a subclass of 'classinfo', or None when there is no match."""
    candidates = cls if isinstance(cls,tuple) else (cls,)
    for candidate in candidates:
        if issubclass(candidate,classinfo):
            return candidate
    return None
def process_indirection(cgen : IROpGen,instr : Instr,overload : Overload) -> Tuple[Instr,Overload]:
    """For every parameter that accepts an address but received an
    IndirectVar, delegate to cgen.process_indirection to rewrite the
    instruction.  Returns the (possibly unchanged) instruction and
    overload."""
    inds = [
        i for i,(param,arg) in enumerate(zip(overload.params,instr.args))
        if hassubclass(param,AddressType) and isinstance(arg,IndirectVar)]
    if inds:
        instr,overload = cgen.process_indirection(instr,overload,inds)
    return instr,overload
def ir_preallocated_to_ir2(code):
    """Convert a sequence of pre-allocated ops (Instr or Target) into
    second-stage IR ops; Targets pass through unchanged."""
    converted = []
    for op in code:
        if isinstance(op,Instr):
            converted.append(op.op.to_ir2(op.args))
            continue
        assert isinstance(op,Target)
        converted.append(op)
    return converted
class ListChainLink(List[T]):
    """A list that is also a node in a singly-linked chain of lists.

    new_link() splices a fresh, empty link in immediately after this one.
    """
    def __init__(self):
        super().__init__()
        # the following link in the chain, or None at the end
        self.next = None # type: Optional[ListChainLink[T]]
    def new_link(self):
        """Insert a new empty link right after this one and return it."""
        link = ListChainLink()
        link.next,self.next = self.next,link
        return link
class _RegAllocate(FollowNonlinear[LocationScan,JumpState]):
    def __init__(self,cgen : IROpGen,alloc : LocationScan) -> None:
        super().__init__(alloc)
        self.cgen = cgen
        # shared with 'alloc' (there is only one stack pool)
        self.stack_pool = alloc.stack_pool
        # output is built as a chain of list links so code for elided
        # targets can be spliced in later (see handle_elided_target)
        self.new_code = ListChainLink() # type: ListChainLink[IROp2]
        self.new_code_head = self.new_code # type: ListChainLink[IROp2]
        # code position of an elided target -> the output link saved there
        self.elided_links = {} # type: Dict[int,ListChainLink[IROp2]]
        self.active_symbols = set() # type: Set[str]
    @property
    def _ptr_size(self):
        """The target ABI's pointer size (cgen.abi.ptr_size)."""
        return self.cgen.abi.ptr_size
    def _load_to_block(self,block):
        # Ensure 'block' has its multi-slot stack allocation and return the
        # IR value for it.
        assert self.state is not None
        assert block.lifetime is not None
        self.state.alloc_block(block.lifetime,len(block))
        return self.state.to_ir(block.lifetime,block.data_type)
    def _copy_val_to_stack(self,state,val,itv,code):
        # Ensure 'itv' has a stack copy; when a slot had to be allocated and
        # 'code' is given, emit the move of 'val' into the new slot.
        moved = state.to_stack(itv)
        d_loc = state.interval_loc(itv)
        dest = to_stack_item(d_loc.stack_loc)
        if moved and code is not None:
            if isinstance(val,Var):
                # a Var's live copy is in its register at this point
                assert d_loc.reg is not None
                val = FixedRegister(d_loc.reg)
            code.extend(ir_preallocated_to_ir2(self.cgen.move(val,dest)))
            code.extend(annotate_symbol_loc(itv,d_loc))
        return dest
    def to_addr(self,val,life,reads):
        """Place 'val' (lifetime 'life') in memory and return the resulting
        IR address value; when 'reads', the current value is copied there."""
        if isinstance(val,Block):
            assert not reads
            return self._load_to_block(val)
        if isinstance(val,VarPart):
            assert not reads
            dest = self._load_to_block(val.block)
            assert isinstance(dest,StackItem)
            return StackItemPart(dest,val.offset * self._ptr_size,val.data_type)
        assert isinstance(life,VarLifetime)
        return self._copy_val_to_stack(self.state,val,life,self.new_code_head if reads else None)
    def to_reg(self,val,life,reads,allowed_regs=Filter()):
        """Place 'val' (lifetime 'life') in a register from 'allowed_regs'
        and return the FixedRegister; when 'reads', the current value is
        loaded into it."""
        assert not isinstance(val,(Block,VarPart)),(
            "'Block' and 'VarPart' instances cannot be put in registers")
        assert isinstance(life,VarLifetime)
        return load_to_reg(
            self.state,
            life,
            allowed_regs,
            self.cgen,
            self.new_code_head,
            val if reads else None)
    # if non_var_moves is not None, the value is to be written-to
    def move_for_param(self,i,p,new_args,reads,non_var_moves=None):
        """Ensure new_args[i] satisfies parameter type 'p', moving it into
        a register or address as required; 'reads' controls whether its
        current value must be preserved.  Writable non-Var arguments are
        queued in 'non_var_moves' for write-back after the instruction."""
        assert self.state is not None
        arg = new_args[i]
        if not isinstance(arg,p):
            if isinstance(arg,Var):
                itv = arg.lifetime
            else:
                # non-variable values get a throw-away lifetime covering
                # just this position
                itv = VarLifetime()
                itv.intervals |= Interval(self.state.cur_pos,self.state.cur_pos+1)
                if __debug__:
                    itv.origin = 'temporary lifetime'
            fr_type = hassubclass(p,FixedRegister)
            if fr_type:
                dest = self.to_reg(arg,itv,reads,cast(_RegisterMetaType,fr_type).allowed)
            else:
                assert hassubclass(p,AddressType)
                dest = self.to_addr(arg,itv,reads)
            if non_var_moves is not None and not isinstance(arg,Var):
                if not isinstance(arg,MutableValue):
                    raise ValueError('cannot write to a read-only value')
                non_var_moves.append((arg,dest))
            new_args[i] = dest
    def get_var_loc(self,var):
        """Return the IR value for 'var''s current location, preferring its
        register over its stack slot."""
        assert var.lifetime is not None
        itvl = self.state.interval_loc(var.lifetime)
        if itvl.reg is not None:
            return FixedRegister(itvl.reg)
        assert itvl.stack_loc is not None
        if isinstance(var,VarPart):
            # a part is addressed as an offset into its block's stack item
            assert itvl.stack_loc.sect == StackSection.local
            return StackItemPart(StackItem(itvl.stack_loc.index,var.block.data_type),var.offset*self._ptr_size,var.data_type)
        if isinstance(var,Block):
            return StackItem(itvl.stack_loc.index,var.data_type)
        return to_stack_item(itvl.stack_loc)
    def _on_loc_expire(self,life):
        # record that 'life' no longer has a location
        # (annotate_symbol_loc is defined elsewhere — presumably emits
        # debug/symbol annotations; confirm)
        self.new_code_head.extend(annotate_symbol_loc(life,ItvLocation()))
    def _supporting_instr(self,code : IRCode):
        """Emit pre-formed support code (e.g. write-back moves), resolving
        any Var/Block arguments to their current physical locations."""
        for op in code:
            if isinstance(op,Instr):
                op,overload = process_indirection(self.cgen,op,op.op.exact_match(op.args))
                new_args = [] # type: List[Any]
                for i,a in enumerate(op.args):
                    new_args.append(a)
                    if isinstance(a,Block):
                        new_args[i] = self._load_to_block(a)
                    elif isinstance(a,Var):
                        new_args[i] = self.get_var_loc(a)
                processed = Instr2(op.op,overload,new_args) # type: IROp2
            else:
                assert isinstance(op,Target)
                processed = op
            self.new_code_head.append(processed)
    def handle_instr(self,op_i : int,op : Instr):
        """Allocate locations for one instruction and emit its Instr2.

        Three passes over the arguments: (1) look up current locations to
        pick the best overload; (2) place every read-from argument (and
        reserve addresses for reserve-only ones); (3) after advancing the
        scan position, place written-to arguments and record which copies
        became stale.  Queued write-backs for non-Var destinations are
        emitted last.
        """
        assert self.state is not None
        tmp = self.state.extra.process_instr(op_i,op)
        if tmp is None: return
        op = tmp
        new_args = [] # type: List[Any]
        # First pass: don't actually do anything, just get the locations of the
        # arguments to pick the best overload.
        for i,a in enumerate(op.args):
            assert not isinstance(a,StackItem)
            pd = op.op.param_dirs[i]
            new_args.append(a)
            if not pd.reads:
                continue
            if isinstance(a,(Var,Block)):
                new_args[i] = self.get_var_loc(a)
        op,overload = process_indirection(self.cgen,op,op.op.best_match(new_args))
        new_args = []
        non_var_moves = [] # type: List[Any]
        # Second pass: every argument we read-from should already have a
        # location. Some arguments will need to be moved. Arguments that are
        # neither read-from nor written-to (as in the case of the first
        # argument of x86's 'lea') will still need a location.
        for i,a in enumerate(op.args):
            pd = op.op.param_dirs[i]
            new_args.append(a)
            if isinstance(a,FixedRegister):
                assert pd.writes,"trying to read from a specific register without using CreateVar is probably a mistake"
                # make sure the named register is free, saving whatever
                # occupied it
                displaced = self.state.free_reg(a.reg_index)
                if displaced is not None:
                    d_loc = self.state.interval_loc(displaced)
                    self.new_code_head.extend(ir_preallocated_to_ir2(
                        self.cgen.move(a,d_loc.to_ir())))
                    self.new_code_head.extend(annotate_symbol_loc(displaced,d_loc))
                continue
            if not pd.reads:
                if not pd.writes:
                    # neither read nor written: just reserve an address
                    assert hassubclass(overload.params[i],(AddressType,Target)),(
                        'only addresses can be reserved')
                    if isinstance(a,Block):
                        self.state.alloc_block(a.lifetime,len(a),True)
                        new_args[i] = StackItem(a.lifetime.preferred_stack_i,a.data_type)
                    elif isinstance(a,VarPart):
                        self.state.alloc_block(a.block.lifetime,len(a.block),True)
                        new_args[i] = StackItemPart(
                            StackItem(a.block.lifetime.preferred_stack_i,a.block.data_type),
                            a.offset * self._ptr_size,
                            a.data_type)
                    elif isinstance(a,Var):
                        assert isinstance(a.lifetime,VarLifetime)
                        self.state.to_stack(a.lifetime,True)
                        new_args[i] = StackItem(a.lifetime.preferred_stack_i,a.data_type)
                    elif not isinstance(a,(AddressType,Target)):
                        raise TypeError('can only reserve address for variables and address values')
                continue
            if isinstance(a,Block):
                new_args[i] = self._load_to_block(a)
            elif isinstance(a,Var):
                new_args[i] = self.get_var_loc(a)
            self.move_for_param(i,overload.params[i],new_args,True,non_var_moves if pd.writes else None)
        self.state.advance(op_i + 1,self._on_loc_expire)
        # Third pass: for the rest of the arguments, create a physical location
        for i,p in enumerate(overload.params):
            pd = op.op.param_dirs[i]
            if pd.writes:
                if not pd.reads:
                    self.move_for_param(i,p,new_args,False,non_var_moves)
                arg = op.args[i]
                if isinstance(arg,Var) and not isinstance(arg,VarPart):
                    assert isinstance(arg.lifetime,VarLifetime)
                    # the written copy invalidates the one in the other
                    # location
                    if hassubclass(p,FixedRegister):
                        self.state.value_updated(arg.lifetime,LocationType.register)
                    elif hassubclass(p,AddressType):
                        self.state.value_updated(arg.lifetime,LocationType.stack)
        self.new_code_head.append(Instr2(op.op,overload,new_args))
        # arguments written-to that represent specific locations need to be
        # actually written to those locations
        for src,dest in non_var_moves:
            self._supporting_instr(self.cgen.move(dest,src))
    def handle_invalidate_regs(self,op_i : int,op : InvalidateRegs):
        """Free every register in op.to_free (preferring the alternatives
        in op.pres), emitting moves that save any displaced values."""
        assert self.state is not None
        for r in op.to_free:
            displaced = self.state.free_reg(r,op.pres)
            if displaced is not None:
                disp_l = self.state.interval_loc(displaced)
                self.new_code_head.extend(
                    ir_preallocated_to_ir2(
                        self.cgen.move(FixedRegister(r),disp_l.to_ir())))
                self.new_code_head.extend(annotate_symbol_loc(displaced,disp_l))
    def handle_create_var(self,op_i : int,op : CreateVar):
        """Bind op.var's lifetime to the fixed location named by op.val
        (a specific register, or an argument stack slot)."""
        assert self.state is not None
        assert isinstance(op.var.lifetime,VarLifetime)
        if __debug__ and LINEAR_SCAN_EXTRA_CHECKS:
            # the state is briefly inconsistent between advance() and the
            # load below
            consistency_check.suspend = True
        # variables' lifetimes always start after the instruction
        self.state.advance(op_i + 1,self._on_loc_expire)
        if isinstance(op.val,FixedRegister):
            assert self.state.is_reg_free(op.val.reg_index)
            self.state.load_reg(op.var.lifetime,Filter((op.val.reg_index,)))
        else:
            assert isinstance(op.val,ArgStackItem)
            self.state.load_stack(
                op.var.lifetime,
                StackLocation(
                    StackSection.previous if op.val.prev_frame else StackSection.args,
                    op.val.index))
        self.new_code_head.extend(annotate_symbol_loc(op.var.lifetime,self.state.interval_loc(op.var.lifetime)))
        if __debug__ and LINEAR_SCAN_EXTRA_CHECKS:
            consistency_check.suspend = False
            do_consistency_check(self.state)
    def handle_indirect_mod(self,op_i : int,op : IndirectMod):
        """An out-of-band read and/or write of op.var happens at
        op.loc_type; make sure the variable exists there, and on a write
        invalidate its copy in the other location."""
        assert self.state is not None and (op.read or op.write)
        assert isinstance(op.var.lifetime,VarLifetime)
        if op.write:
            # variables' lifetimes always start after the instruction
            self.state.advance(op_i + 1,self._on_loc_expire)
        loc = self.state.interval_loc(op.var.lifetime)
        if op.loc_type == LocationType.stack:
            assert loc.reg is not None or not op.read
            if loc.stack_loc is None:
                self.to_addr(op.var,op.var.lifetime,op.read)
        else:
            assert op.loc_type == LocationType.register
            assert loc.stack_loc is not None or not op.read
            if loc.reg is None:
                self.to_reg(op.var,op.var.lifetime,op.read)
        if op.write:
            self.state.value_updated(op.var.lifetime,op.loc_type)
    def handle_target(self,op_i : int,op : Target):
        """Emit 'op' into the output; when backtracking over a previously
        elided target, rewind the output head to the link saved for it."""
        prev = self.elided_links.get(op_i)
        if prev is not None:
            assert self.backtracking(op_i)
            self.new_code_head = prev
        self.new_code_head.append(op)
    def handle_elided_target(self,op_i : int,op : Target):
        """A target skipped as unreachable: save the current output link
        and start a new one, so its code can be spliced in here if the
        target later becomes reachable."""
        if not self.backtracking(op_i):
            self.elided_links[op_i] = self.new_code_head
            self.new_code_head = self.new_code_head.new_link()
    def handle_lock_regs(self,op_i : int,op : LockRegs):
        """Mark op.regs as unavailable for allocation/moves."""
        assert self.state is not None
        self.state.locked_regs.update(op.regs)
def handle_unlock_regs(self,op_i : int,op : UnlockRegs):
    """Make previously locked registers available to the allocator again."""
    assert self.state is not None
    for reg in op.regs:
        self.state.locked_regs.discard(reg)
def conform_states_to(self,op_i : int,states : Sequence[JumpState]):
    """Reconcile the allocator states of all jumps landing at the current point."""
    assert self.state is not None and len(states)
    cond_states = [s for s in states if s.branches]
    if len(cond_states) == 1:
        # NOTE(review): this conforms to states[0] even though the branching
        # state is cond_states[0] — confirm the two coincide when there is
        # exactly one conditional state.
        JumpState(self.state,self.new_code_head,0).conform_to(self.cgen,states[0].alloc)
    elif cond_states:
        # for conditional jumps (including jumps with more than one
        # possible target), we can't move variables to different registers,
        # but because every variable gets only one stack position, we can
        # copy variables to the stack, to resolve variables being in
        # different places
        bad_vars = set() # type: Set[VarLifetime]
        for state in cond_states:
            for life,loc in self.state.itv_locs.items():
                if loc:
                    loc_b = state.alloc.itv_locs.get(life)
                    if loc_b and loc != loc_b:
                        # Disagreement: spill to the (unique) stack slot here...
                        bad_vars.add(life)
                        self._copy_val_to_stack(self.state,loc.to_ir(),life,self.new_code_head)
        for state in cond_states:
            for life in bad_vars:
                # ...and in every conditional predecessor as well.
                self._copy_val_to_stack(state.alloc,state.alloc.to_ir(life),life,state.code)
    # unconditional jumps are safe to modify
    for state in states:
        if not state.branches:
            state.alloc.advance(self.state.cur_pos,self._on_loc_expire)
            state.conform_to(self.cgen,self.state)
def conform_prior_states(self,op_i : int,jump_ops : int,states : Sequence[LocationScan]):
    """Make the current state match a previously recorded backward-target state."""
    assert self.state is not None
    assert len(states)
    if len(states) > 1:
        raise ValueError('a jump cannot have multiple backward targets unless the jump is the only way to reach the targets')
    JumpState(self.state,self.new_code_head,jump_ops).conform_to(self.cgen,states[0])
def make_prior_state(self,op_i : int) -> LocationScan:
    """Snapshot the current allocator state for use as a backward jump target."""
    assert self.state is not None
    return self.state.branch()
def make_pending_state(self,op_i : int,op : IRJump):
    """Record the state at a forward jump; subsequent code goes on a new link."""
    assert self.state is not None
    # A jump with more than one destination is treated like a conditional one.
    r = JumpState(self.state.branch(),self.new_code_head,op.jump_ops,op.conditional or len(op.dests) > 1)
    self.new_code_head = self.new_code_head.new_link()
    return r
def recall_pending_state(self,op_i : int,state : JumpState):
    """Resume from a recorded jump state, advanced to the current op index."""
    resumed = state.alloc.branch()
    resumed.advance(op_i,self._on_loc_expire)
    return resumed
def after_state_change(self,old_state):
    """Refresh GDB symbol-location annotations when the active state switches."""
    if not debug.GDB_JIT_SUPPORT: return
    if old_state:
        # Mark every interval of the old state as no longer located anywhere.
        for itv,loc in old_state.itv_locs.items():
            if loc:
                self.new_code_head.extend(annotate_symbol_loc(itv,ItvLocation()))
    if self.state:
        # Announce the locations tracked by the new state.
        for itv,loc in self.state.itv_locs.items():
            if loc:
                self.new_code_head.extend(annotate_symbol_loc(itv,loc))
def handle_op(self,op_i : int,code : IRCode):
    # Expire locations whose intervals ended before this instruction, then
    # dispatch to the specific per-op handler.
    if self.state is not None:
        self.state.advance(op_i,self._on_loc_expire)
    return super().handle_op(op_i,code)
def run(self,code : IRCode,start : int=-1):
    """Process *code*; restores the output chain head afterwards (re-entrant)."""
    prev_head = self.new_code_head
    try:
        super().run(code,start)
    finally:
        self.new_code_head = prev_head
def reg_allocate(cgen : IROpGen,code : IRCode,regs : int) -> Tuple[IRCode2,Set[int],int]:
    """Convert IRCode into IRCode2.

    This converts all instances of Var into FixedRegister or StackItem, and
    adds extra instructions to shuffle values between registers and the stack,
    as needed.

    As a side-effect, this will also remove unreachable code.

    Returns ``(code2, used_register_indices, stack_slot_count)``.
    """
    calc_var_intervals(code)
    ls = LocationScan(regs,cgen.allocater_extra_state())
    stack_pool = ls.stack_pool
    allocater = _RegAllocate(cgen,ls)
    allocater.run(code)
    # TODO: return actual registers used
    code2 = [] # type: IRCode2
    # Flatten the linked chain of emitted code links into a single list.
    nc = allocater.new_code # type: Optional[ListChainLink[IROp2]]
    while nc is not None:
        code2.extend(nc)
        nc = nc.next
    return code2,set(range(regs)),len(stack_pool)
def code_join(x):
    """Concatenate a non-empty sequence of code fragments into one value."""
    if isinstance(x[0], bytes):
        return b''.join(x)
    # Non-bytes fragments (e.g. AsmSequence) combine via their own "+" operator.
    return reduce(operator.add, x)
class AsmOp:
    """One assembled instruction: opcode, operands, encoded bytes, annotation."""
    __slots__ = ('op', 'args', 'binary', 'annot')

    def __init__(self, op, args, binary, annot=''):
        self.op = op
        self.args = args
        self.binary = binary
        self.annot = annot

    def __len__(self):
        # The op's length is the size of its machine-code encoding.
        return len(self.binary)

    def emit(self, addr):
        """Render this op as assembly text placed at address *addr*."""
        return self.op.assembly(self.args, addr, self.binary, self.annot)

    @property
    def inline_comment(self):
        # True for comment pseudo-ops meant to share a line with the prior op.
        return isinstance(self.op, CommentDesc) and self.op.inline
class AsmSequence(Sized):
    """A mutable run of AsmOp objects supporting len, "+", and "*"."""

    def __init__(self, ops=None):
        self.ops = ops or []

    def __len__(self):
        # Total byte length of all encoded ops.
        return sum((len(op.binary) for op in self.ops), 0)

    def __add__(self, other):
        if not isinstance(other, AsmSequence):
            return NotImplemented
        return AsmSequence(self.ops + other.ops)

    def __iadd__(self, other):
        if not isinstance(other, AsmSequence):
            return NotImplemented
        self.ops += other.ops
        return self

    def __mul__(self, count):
        if not isinstance(count, int):
            return NotImplemented
        return AsmSequence(self.ops * count)

    def __imul__(self, count):
        if not isinstance(count, int):
            return NotImplemented
        self.ops *= count
        return self

    def emit(self, base=0):
        """Render all ops as assembly text starting at address *base*."""
        lines = []
        addr = base
        for op in self.ops:
            text = op.emit(addr)
            if op.inline_comment:
                # Inline comments are appended to the line they annotate.
                assert lines
                lines[-1] = ' '.join((lines[-1], text))
            else:
                lines.append(text)
            addr += len(op)
        return '\n'.join(lines)
def asm_converter(op, f):
    """Wrap encoder *f* so its output is returned as a one-op AsmSequence."""
    def convert(*args):
        return AsmSequence([AsmOp(op, args, f(*args))])
    return convert
def make_asm_if_needed(instr, assembly):
    """Return *instr*, rewrapped to also emit assembly text when *assembly* is true."""
    if not assembly:
        return instr
    wrapped_overload = instr.overload.variant(
        instr.overload.params,
        asm_converter(instr.op, instr.overload.func))
    return Instr2(instr.op, wrapped_overload, instr.args)
class Tuning:
    """Code-generation tuning knobs (the "*_threshhold" values are element counts)."""
    # When True, emit add/sub instead of inc/dec instructions.
    prefer_addsub_over_incdec = True
    # Above these sizes, emit a loop instead of unrolled code.
    build_seq_loop_threshhold = 5
    unpack_seq_loop_threshhold = 5
    build_set_loop_threshhold = 5
    mem_copy_loop_threshhold = 9
class AbiRegister:
    """Base class for ABI-specific register types (used as the bound of cc_T)."""
    pass
cc_T = TypeVar('cc_T',bound=AbiRegister)

class CallingConvention(Generic[cc_T]):
    """Describes how one calling convention passes arguments and return values."""

    def __init__(self,r_ret: cc_T,r_pres: List[cc_T],r_scratch: List[cc_T],r_arg: List[cc_T],shadow: bool=False) -> None:
        self.r_ret = r_ret          # register holding the return value
        self.r_pres = r_pres        # callee-preserved registers
        self.r_scratch = r_scratch  # caller-saved scratch registers
        self.r_arg = r_arg          # registers carrying the first arguments
        # If True, stack space is reserved for function calls for all
        # arguments, even those that are passed in registers, such that the
        # called function could move those arguments to where they would be if
        # all arguments were passed by stack in the first place.
        self.shadow = shadow

    def get_arg(self,abi : 'BinaryAbi',i : int,prev_frame=False) -> MutableValue:
        """Return the location of argument *i*: a register if one is assigned,
        otherwise a stack slot."""
        if i < len(self.r_arg):
            return abi.reg_to_ir(self.r_arg[i])
        if not self.shadow:
            # Without shadow space, stack slots start counting after the
            # register-passed arguments.
            i -= len(self.r_arg)
        return ArgStackItem(i,prev_frame)

    def get_return(self,abi : 'BinaryAbi',prev_frame=False) -> MutableValue:
        """Return the location of the function's return value."""
        return abi.reg_to_ir(self.r_ret)
class Abi:
    """Base class for target ABIs."""
    # Set by subclasses to the OpGen type used to emit code.
    code_gen = None # type: Type[OpGen]

    def __init__(self,*,assembly=False):
        # When True, textual assembly is produced as well (see make_asm_if_needed).
        self.assembly = assembly
        self.tuning = Tuning()
class BinAbiMeta(type):
    """Metaclass that derives ``reg_indices`` from each class's ``all_regs``."""
    def __new__(mcs,name,bases,namespace):
        cls = type.__new__(mcs,name,bases,namespace) # type: Type[BinaryAbi]
        # Map each register to its position in all_regs.  The comprehension's
        # loop variable previously shadowed the class variable ("r"), which
        # worked only by accident of comprehension scoping; use distinct names.
        cls.reg_indices = {reg: i for i, reg in enumerate(cls.all_regs)}
        return cls
class BinaryAbi(Abi,metaclass=BinAbiMeta):
    """Base class for ABIs that emit binary machine code."""
    code_gen = None # type: Type[IROpGen]
    # presumably: target supports cmovcc-style conditional moves — confirm in subclasses
    has_cmovecc = False
    callconvs = None # type: Tuple[CallingConvention,CallingConvention]
    # registers should be ordered by preferred usage, in decreasing order
    all_regs = [] # type: List[AbiRegister]
    # The number of general-purpose registers. These must be located at the
    # front of "all_regs".
    gen_regs = 0
    # this is filled automatically by the metaclass
    reg_indices = {} # type: Dict[AbiRegister,int]
    # Byte sizes of the target's pointer and C integer types (set by subclasses).
    ptr_size = 0
    char_size = 0
    short_size = 0
    int_size = 0
    long_size = 0

    @classmethod
    def reg_to_ir(cls,reg):
        """Wrap an ABI register as an IR FixedRegister operand."""
        return FixedRegister(cls.reg_indices[reg])
def debug_gen_code(code):
    """Convert code into test source code for tests/test_intermediate.py"""
    class Namer:
        # Generates sequential names with a fixed prefix ("var_0", "var_1", ...).
        def __init__(self, pre):
            self.pre = pre
            self.count = 0
        def __call__(self):
            r = self.pre + str(self.count)
            self.count += 1
            return r

    def convert_val(x):
        # Render an IR value as source text, or None if it has no rendering.
        if isinstance(x, VarPart):
            return '{}[{}]'.format(blocks[x.block], x.offset)
        if isinstance(x, Var):
            return vars_[x]
        if isinstance(x, Target):
            return targets[x]
        if isinstance(x, Block):
            return blocks[x]
        if isinstance(x, IndirectVar):
            return 'ir.IndirectVar({},{},{},{})'.format(
                x.offset,
                convert_val(x.base),
                convert_val(x.index),
                x.scale)
        if isinstance(x, ArgStackItem):
            return 'ir.' + repr(x)
        return None

    targets = collections.defaultdict(Namer('target_'))
    vars_ = collections.defaultdict(Namer('var_'))
    blocks = collections.defaultdict(Namer('block_'))
    code_str = []
    for instr in code:
        if isinstance(instr, Instr):
            for pd, a in zip(instr.op.param_dirs, instr.args):
                if isinstance(a, Target): continue
                arg = convert_val(a)
                if arg is None: continue
                if pd.writes:
                    if pd.reads:
                        op = 'readwrite_op'
                    else:
                        op = 'write_op'
                elif pd.reads:
                    # BUG FIX: this branch previously re-tested pd.writes,
                    # which made 'read_op' unreachable.
                    op = 'read_op'
                else:
                    op = 'lea_op'
                line = 'ir.Instr({},{})'.format(op, arg)
                # Collapse consecutive duplicates.  BUG FIX: the old check
                # ("if code_str and ...") never appended the first line.
                if not code_str or line != code_str[-1]:
                    code_str.append(line)
        elif isinstance(instr, Target):
            code_str.append(targets[instr])
        elif isinstance(instr, IRJump):
            code_str.append('ir.IRJump([{}],{!r},0)'.format(','.join(targets[d] for d in instr.dests), instr.conditional))
        elif isinstance(instr, IndirectMod):
            code_str.append('ir.IndirectMod({},{!r},{!r},ir.LocationType.{})'.format(
                convert_val(instr.var),
                instr.read,
                instr.write,
                'register' if instr.loc_type == LocationType.register else 'stack'))
        elif isinstance(instr, CreateVar):
            code_str.append('ir.CreateVar({},ir.{!r})'.format(convert_val(instr.var), instr.val))
        elif isinstance(instr, (LockRegs, UnlockRegs, InvalidateRegs)):
            code_str.append('ir.{!r}'.format(instr))

    r = []
    for name in targets.values():
        r.append(' {} = ir.Target()'.format(name))
    for name in vars_.values():
        r.append(" {0} = ir.Var('{0}')".format(name))
    for b, name in blocks.items():
        r.append(' {} = ir.Block({})'.format(name, len(b.parts)))
    r.append(' code = [')
    for i, c in enumerate(code_str):
        comma = ','
        if i == len(code_str) - 1: comma = ']'
        r.append(' {}{}'.format(c, comma))
    return '\n'.join(r)
|
<reponame>veskoy/react_simple_blog
import React from "react";
import {Table, Pagination} from "react-bootstrap";
import {connect} from "react-redux";
import {push} from "react-router-redux";
import PostTableElement from "./PostTableElement";
import PostDeletePrompt from "./PostDeletePrompt";
// Paginated table of posts with a delete-confirmation modal.
export class PostTable extends React.Component {
    constructor(props) {
        super(props);
        // delete_show / delete_post drive the delete-confirmation modal.
        this.state = {
            delete_show: false,
            delete_post: {},
        };
        this.changePage = this.changePage.bind(this);
        this.showDelete = this.showDelete.bind(this);
        this.hideDelete = this.hideDelete.bind(this);
        this.postDelete = this.postDelete.bind(this);
    }

    render() {
        const {posts, page} = this.props;
        // Client-side pagination: render per_page posts starting at the
        // current page's offset.
        const per_page = 10;
        const pages = Math.ceil(posts.length / per_page);
        const start_offset = (page - 1) * per_page;
        let start_count = 0;
        return (
            <div>
                <Table responsive>
                    <thead>
                    <tr>
                        <th>№</th>
                        <th>Заглавие</th>
                        <th>Автор</th>
                        <th>Статус</th>
                        <th>Редактиране</th>
                        <th>Изтриване</th>
                    </tr>
                    </thead>
                    <tbody>
                    {posts.map((post, index) => {
                        // Only rows falling on the current page produce output;
                        // other iterations return undefined (rendered as nothing).
                        if (index >= start_offset && start_count < per_page) {
                            start_count++;
                            return (
                                <PostTableElement key={index} post={post} showDelete={this.showDelete}/>
                            );
                        }
                    })}
                    </tbody>
                </Table>
                <Pagination className="posts-pagination pull-right" bsSize="medium" maxButtons={10} first last next prev
                            boundaryLinks items={pages} activePage={page} onSelect={this.changePage}/>
                <PostDeletePrompt show={this.state.delete_show} post={this.state.delete_post}
                                  hideDelete={this.hideDelete} postDelete={this.postDelete}/>
            </div>
        );
    }

    // Navigate to the selected page via the router query string.
    changePage(page) {
        this.props.dispatch(push('/posts/?page=' + page));
    }

    // Open the delete-confirmation modal for *post*.
    showDelete(post) {
        this.setState({
            delete_show: true,
            delete_post: post,
        });
    }

    // Close the delete-confirmation modal and forget the selected post.
    hideDelete() {
        this.setState({
            delete_show: false,
            delete_post: {},
        });
    }

    // Dispatch the delete action for the selected post, then close the modal.
    postDelete() {
        this.props.dispatch({
            type: 'POSTS_DELETE',
            post_id: this.state.delete_post.id,
        });
        this.hideDelete();
    }
}
// Expose the post list and the current page (parsed from the router query
// string; defaults to 1) as component props.
function mapStateToProps(state) {
    const pageNumber = Number(state.routing.locationBeforeTransitions.query.page) || 1;
    return {
        posts: state.posts,
        page: pageNumber,
    };
}

export default connect(mapStateToProps)(PostTable);
|
class SPAUnloader {
    // Handle returned by setTimeout for the evergreen refresh; cleared on unload.
    private evergreenTimeout: number;
    // FIX: these two members were assigned in the constructor but never
    // declared, which is a compile error under TypeScript's strict checks.
    // Observable array of ministries shown in the EOD summary (knockout).
    private eodSummaryMinistries: any;
    // Observable flag set while report data is being fetched (knockout).
    private fetchingData: any;

    constructor() {
        this.eodSummaryMinistries = ko.observableArray([]);
        this.fetchingData = ko.observable(false);
    }

    public unloadPage = () => {
        this.showEodReport(false);
    }

    public unloadSummaryPage = () => {
        this.showEodSummaryReport(false);
        // Stop the pending evergreen refresh so it does not fire after unload.
        clearTimeout(this.evergreenTimeout);
    }

    private showEodReport = (show: boolean) => {
        // Implement logic to show or hide the EOD report
    }

    private showEodSummaryReport = (show: boolean) => {
        // Implement logic to show or hide the EOD summary report
    }
}
|
// Scraper state.
var run = 0;            // number of scroll iterations performed so far
var mails = {}          // set of collected e-mail addresses (key -> 1)
var count = 0;          // number of unique addresses collected
var LIMITED = 20000     // stop once this many addresses are collected
//setting
var PROJECT_ID = 1;
var PATH ="http://172.16.31.10/fbgrep/web/index.php/mails/store";
total = 3000; // number of scroll steps; adjust as needed
var form_count = 0;
// Inject jQuery into the page so $.post is available for uploads.
var js = document.createElement("script");
js.type = "text/javascript";
js.src = "https://code.jquery.com/jquery-2.1.4.min.js";
document.body.appendChild(js);
// Upload the collected addresses to the server, then clear the buffer.
function post() {
    var payload = {project_id: PROJECT_ID, mails: mails};
    $.post(PATH, payload, function (data) {
        console.log("done" + data);
    });
    reset();
}
function reset()
{
mails = {};
count = 0;
}
// Extract facebook usernames/ids from profile links inside *cont* and record
// them as "<name>@facebook.com" entries in the global `mails` set,
// incrementing `count` once per unique address.
function getEmails (cont) {
    var friendbutton = cont.getElementsByClassName("_ohe");
    for (var i = 0; i < friendbutton.length; i++) {
        var link = friendbutton[i].getAttribute("href");
        if (link && link.substr(0, 25) == "https://www.facebook.com/") {
            // Use an anchor element to parse the URL into path/query parts.
            var parser = document.createElement('a');
            parser.href = link;
            if (parser.pathname) {
                path = parser.pathname.substr(1);
                if (path == "profile.php") {
                    // Numeric profile: the id is the first query argument.
                    search = parser.search.substr(1);
                    var args = search.split('&');
                    email = args[0].split('=')[1] + "@facebook.com\n";
                } else {
                    email = parser.pathname.substr(1) + "@facebook.com\n";
                }
                if (mails[email] > 0) {
                    // BUG FIX: duplicates previously decremented `count`
                    // even though nothing had been added for this address,
                    // making the counter undercount unique addresses.
                    continue;
                }
                mails[email] = 1;
                count = count + 1;
            }
        }
    }
}
function moreScroll() {
var text="";
containerID = "BrowseResultsContainer"
if (run > 0) {
containerID = "fbBrowseScrollingPagerContainer" + (run-1);
}
var cont = document.getElementById(containerID);
if (cont) {
run++;
var id = run - 2;
if (id >= 0) {
setTimeout(function() {
containerID = "fbBrowseScrollingPagerContainer" + (id);
var delcont = document.getElementById(containerID);
if (delcont) {
getEmails(delcont);
delcont.parentNode.removeChild(delcont);
}
window.scrollTo(0, document.body.scrollHeight - 10);
}, 1000);
}
} else {
console.log("# " + containerID);
}
if (run < total && count < LIMITED) {
window.scrollTo(0, document.body.scrollHeight + 10);
setTimeout(moreScroll, 3000);
if(count > 100)
{
post();
}
}
console.log("--run :" + (run/total * 100) + "% count" + count)
}//1000为间隔时间,也可以根据情况定义
moreScroll();
|
<gh_stars>10-100
class TwitterBotTweetsService:
    """Collects a tweet together with the tweets it replies to, retweets or quotes."""

    def __init__(self, client):
        self.client = client
        # The authenticated account, used to skip the bot's own tweets.
        self.user = self.client.get_current_user()

    def get_all_related_tweets(self, tweet):
        """Return *tweet* plus the transitive chain of replied/retweeted/quoted tweets.

        Tweets posted by the bot itself yield an empty list.
        """
        if self.is_self_tweet(tweet):
            return []
        related = [tweet]
        reply_id = getattr(tweet, 'in_reply_to_tweet_id', None)
        if reply_id:
            related += self.get_all_related_tweets(self.client.get_tweet(reply_id))
        retweeted = getattr(tweet, 'retweeted_tweet', None)
        if retweeted:
            related += self.get_all_related_tweets(retweeted)
        quoted = getattr(tweet, 'quoted_tweet', None)
        if quoted:
            related += self.get_all_related_tweets(quoted)
        else:
            quoted_id = getattr(tweet, 'quoted_tweet_id_str', None)
            if quoted_id:
                related += self.get_all_related_tweets(self.client.get_tweet(quoted_id))
        return related

    def is_self_tweet(self, tweet):
        """True when *tweet* was posted by the bot's own account."""
        return tweet.user.id == self.user.id
|
<reponame>macintoshhelper/styled-components
// @flow
import * as GroupIDAllocator from '../GroupIDAllocator';
// Reset the allocator around every test so group IDs start from scratch.
beforeEach(GroupIDAllocator.resetGroupIds);
afterEach(GroupIDAllocator.resetGroupIds);

it('creates continuous group IDs', () => {
    // IDs are handed out sequentially starting at 1, and repeated lookups
    // for the same key are stable.
    const a = GroupIDAllocator.getGroupForId('a');
    const b = GroupIDAllocator.getGroupForId('b');
    expect(a).toBe(1);
    expect(b).toBe(2);
    const a2 = GroupIDAllocator.getGroupForId('a');
    expect(a2).toBe(a);
    const aId = GroupIDAllocator.getIdForGroup(a);
    expect(aId).toBe('a');
    // Mappings can be overridden explicitly, in both directions.
    GroupIDAllocator.setGroupForId('b', 99);
    expect(GroupIDAllocator.getIdForGroup(99)).toBe('b');
    expect(GroupIDAllocator.getGroupForId('b')).toBe(99);
});

it('throws early if the group ID is too large', () => {
    // Test for SMI overflow with SMIs
    GroupIDAllocator.setGroupForId('a', Math.pow(2, 31));
    expect(() => {
        GroupIDAllocator.getGroupForId('b');
    }).toThrowError(/reached the limit/i);
    // Test for SMI overflow with regular integers
    GroupIDAllocator.setGroupForId('a', Math.pow(2, 32));
    expect(() => {
        GroupIDAllocator.getGroupForId('b');
    }).toThrowError(/reached the limit/i);
});
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
##############################################################
# This script is used to compile StarRocks
# Usage:
# sh build.sh --help
# Eg:
# sh build.sh build all
# sh build.sh --be build Backend without clean
# sh build.sh --fe --clean clean and build Frontend and Spark Dpp application
# sh build.sh --fe --be --clean clean and build Frontend, Spark Dpp application and Backend
# sh build.sh --spark-dpp build Spark DPP application alone
#
# You need to make sure all thirdparty libraries have been
# compiled and installed correctly.
##############################################################
set -eo pipefail

# Resolve the absolute path of the repository root.
ROOT=`dirname "$0"`
ROOT=`cd "$ROOT"; pwd`

MACHINE_TYPE=$(uname -m)

export STARROCKS_HOME=${ROOT}
. ${STARROCKS_HOME}/env.sh

#build thirdparty libraries if necessary
if [[ ! -f ${STARROCKS_THIRDPARTY}/installed/lib/mariadb/libmariadbclient.a ]]; then
    echo "Thirdparty libraries need to be build ..."
    ${STARROCKS_THIRDPARTY}/build-thirdparty.sh
fi

# Use roughly a quarter of the cores (plus one) for parallel make.
PARALLEL=$[$(nproc)/4+1]
# Check args
# Print the help text and exit with a non-zero status.
usage() {
  echo "
Usage: $0 <options>
  Optional options:
     --be               build Backend
     --fe               build Frontend and Spark Dpp application
     --spark-dpp        build Spark DPP application
     --clean            clean and build target
     --with-gcov        build Backend with gcov, has an impact on performance
     --without-gcov     build Backend without gcov(default)

  Eg.
    $0                          build all
    $0 --be                     build Backend without clean
    $0 --fe --clean             clean and build Frontend and Spark Dpp application
    $0 --fe --be --clean        clean and build Frontend, Spark Dpp application and Backend
    $0 --spark-dpp              build Spark DPP application alone
  "
  exit 1
}
OPTS=$(getopt \
  -n $0 \
  -o '' \
  -o 'h' \
  -l 'be' \
  -l 'fe' \
  -l 'spark-dpp' \
  -l 'clean' \
  -l 'with-gcov' \
  -l 'without-gcov' \
  -l 'help' \
  -- "$@")

if [ $? != 0 ] ; then
    usage
fi

eval set -- "$OPTS"

BUILD_BE=
BUILD_FE=
BUILD_SPARK_DPP=
CLEAN=
RUN_UT=
WITH_GCOV=OFF
# AVX2 is enabled unless the caller disables it via the environment.
if [[ -z ${USE_AVX2} ]]; then
    USE_AVX2=ON
fi
HELP=0
if [ $# == 1 ] ; then
    # default: no flags given (only the "--" left by getopt) -> build everything
    BUILD_BE=1
    BUILD_FE=1
    BUILD_SPARK_DPP=1
    CLEAN=0
    RUN_UT=0
else
    BUILD_BE=0
    BUILD_FE=0
    BUILD_SPARK_DPP=0
    CLEAN=0
    RUN_UT=0
    while true; do
        case "$1" in
            --be) BUILD_BE=1 ; shift ;;
            --fe) BUILD_FE=1 ; shift ;;
            --spark-dpp) BUILD_SPARK_DPP=1 ; shift ;;
            --clean) CLEAN=1 ; shift ;;
            --ut) RUN_UT=1 ; shift ;;
            --with-gcov) WITH_GCOV=ON; shift ;;
            --without-gcov) WITH_GCOV=OFF; shift ;;
            -h) HELP=1; shift ;;
            --help) HELP=1; shift ;;
            --) shift ; break ;;
            *) echo "Internal error" ; exit 1 ;;
        esac
    done
fi

if [[ ${HELP} -eq 1 ]]; then
    usage
    exit
fi

# --clean alone would wipe build output without rebuilding anything.
if [ ${CLEAN} -eq 1 -a ${BUILD_BE} -eq 0 -a ${BUILD_FE} -eq 0 -a ${BUILD_SPARK_DPP} -eq 0 ]; then
    echo "--clean can not be specified without --fe or --be or --spark-dpp"
    exit 1
fi
echo "Get params:
    BUILD_BE        -- $BUILD_BE
    BUILD_FE        -- $BUILD_FE
    BUILD_SPARK_DPP -- $BUILD_SPARK_DPP
    CLEAN           -- $CLEAN
    RUN_UT          -- $RUN_UT
    WITH_GCOV       -- $WITH_GCOV
    USE_AVX2        -- $USE_AVX2
"

# Clean and build generated code
echo "Build generated code"
cd ${STARROCKS_HOME}/gensrc
if [ ${CLEAN} -eq 1 ]; then
    make clean
    rm -rf ${STARROCKS_HOME}/fe/fe-core/target
fi
# DO NOT using parallel make(-j) for gensrc
make
cd ${STARROCKS_HOME}

# The JVM server library path is architecture specific.
if [[ "${MACHINE_TYPE}" == "aarch64" ]]; then
    export LIBRARY_PATH=${JAVA_HOME}/jre/lib/aarch64/server/
else
    export LIBRARY_PATH=${JAVA_HOME}/jre/lib/amd64/server/
fi

# Clean and build Backend
if [ ${BUILD_BE} -eq 1 ] ; then
    CMAKE_BUILD_TYPE=${BUILD_TYPE:-Release}
    echo "Build Backend: ${CMAKE_BUILD_TYPE}"
    # gcov builds get a separate build dir so coverage artifacts don't mix
    # with regular build output.
    CMAKE_BUILD_DIR=${STARROCKS_HOME}/be/build_${CMAKE_BUILD_TYPE}
    if [ "${WITH_GCOV}" = "ON" ]; then
        CMAKE_BUILD_DIR=${STARROCKS_HOME}/be/build_${CMAKE_BUILD_TYPE}_gcov
    fi
    if [ ${CLEAN} -eq 1 ]; then
        rm -rf $CMAKE_BUILD_DIR
        rm -rf ${STARROCKS_HOME}/be/output/
    fi
    mkdir -p ${CMAKE_BUILD_DIR}
    cd ${CMAKE_BUILD_DIR}
    ${CMAKE_CMD} .. -DSTARROCKS_THIRDPARTY=${STARROCKS_THIRDPARTY} -DSTARROCKS_HOME=${STARROCKS_HOME} -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
        -DMAKE_TEST=OFF -DWITH_GCOV=${WITH_GCOV} -DUSE_AVX2=$USE_AVX2 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
    time make -j${PARALLEL}
    make install
    cd ${STARROCKS_HOME}
fi
cd ${STARROCKS_HOME}

# Assemble FE modules
FE_MODULES=
if [ ${BUILD_FE} -eq 1 -o ${BUILD_SPARK_DPP} -eq 1 ]; then
    if [ ${BUILD_SPARK_DPP} -eq 1 ]; then
        FE_MODULES="fe-common,spark-dpp"
    fi
    if [ ${BUILD_FE} -eq 1 ]; then
        # Building the FE implies building spark-dpp as well.
        FE_MODULES="fe-common,spark-dpp,fe-core"
    fi
fi

# Clean and build Frontend
if [ ${FE_MODULES}x != ""x ]; then
    echo "Build Frontend Modules: $FE_MODULES"
    cd ${STARROCKS_HOME}/fe
    if [ ${CLEAN} -eq 1 ]; then
        ${MVN_CMD} clean
    fi
    ${MVN_CMD} package -pl ${FE_MODULES} -DskipTests
    cd ${STARROCKS_HOME}
fi

# Build JDBC Bridge
echo "Build JDBC Bridge"
cd ${STARROCKS_HOME}/jdbc_bridge
if [ ${CLEAN} -eq 1 ]; then
    ${MVN_CMD} clean
fi
${MVN_CMD} package -DskipTests
cd ${STARROCKS_HOME}

# Clean and prepare output dir
STARROCKS_OUTPUT=${STARROCKS_HOME}/output/
mkdir -p ${STARROCKS_OUTPUT}
# Copy Frontend and Backend
if [ ${BUILD_FE} -eq 1 -o ${BUILD_SPARK_DPP} -eq 1 ]; then
    if [ ${BUILD_FE} -eq 1 ]; then
        install -d ${STARROCKS_OUTPUT}/fe/bin ${STARROCKS_OUTPUT}/fe/conf/ \
                   ${STARROCKS_OUTPUT}/fe/webroot/ ${STARROCKS_OUTPUT}/fe/lib/ \
                   ${STARROCKS_OUTPUT}/fe/spark-dpp/

        cp -r -p ${STARROCKS_HOME}/bin/*_fe.sh ${STARROCKS_OUTPUT}/fe/bin/
        cp -r -p ${STARROCKS_HOME}/bin/show_fe_version.sh ${STARROCKS_OUTPUT}/fe/bin/
        cp -r -p ${STARROCKS_HOME}/bin/common.sh ${STARROCKS_OUTPUT}/fe/bin/
        cp -r -p ${STARROCKS_HOME}/conf/fe.conf ${STARROCKS_OUTPUT}/fe/conf/
        cp -r -p ${STARROCKS_HOME}/conf/hadoop_env.sh ${STARROCKS_OUTPUT}/fe/conf/
        # Replace the FE lib dir wholesale so stale jars never linger.
        rm -rf ${STARROCKS_OUTPUT}/fe/lib/*
        cp -r -p ${STARROCKS_HOME}/fe/fe-core/target/lib/* ${STARROCKS_OUTPUT}/fe/lib/
        cp -r -p ${STARROCKS_HOME}/fe/fe-core/target/starrocks-fe.jar ${STARROCKS_OUTPUT}/fe/lib/
        cp -r -p ${STARROCKS_HOME}/webroot/* ${STARROCKS_OUTPUT}/fe/webroot/
        cp -r -p ${STARROCKS_HOME}/fe/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${STARROCKS_OUTPUT}/fe/spark-dpp/
        cp -r -p ${STARROCKS_THIRDPARTY}/installed/aliyun_oss_jars/* ${STARROCKS_OUTPUT}/fe/lib/
    elif [ ${BUILD_SPARK_DPP} -eq 1 ]; then
        install -d ${STARROCKS_OUTPUT}/fe/spark-dpp/
        rm -rf ${STARROCKS_OUTPUT}/fe/spark-dpp/*
        cp -r -p ${STARROCKS_HOME}/fe/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${STARROCKS_OUTPUT}/fe/spark-dpp/
    fi
fi

if [ ${BUILD_BE} -eq 1 ]; then
    install -d ${STARROCKS_OUTPUT}/be/bin \
               ${STARROCKS_OUTPUT}/be/conf \
               ${STARROCKS_OUTPUT}/be/lib/hadoop \
               ${STARROCKS_OUTPUT}/be/lib/jvm \
               ${STARROCKS_OUTPUT}/be/www \
               ${STARROCKS_OUTPUT}/udf/lib \
               ${STARROCKS_OUTPUT}/udf/include

    cp -r -p ${STARROCKS_HOME}/be/output/bin/* ${STARROCKS_OUTPUT}/be/bin/
    cp -r -p ${STARROCKS_HOME}/be/output/conf/be.conf ${STARROCKS_OUTPUT}/be/conf/
    cp -r -p ${STARROCKS_HOME}/be/output/conf/hadoop_env.sh ${STARROCKS_OUTPUT}/be/conf/
    cp -r -p ${STARROCKS_HOME}/be/output/lib/* ${STARROCKS_OUTPUT}/be/lib/
    cp -r -p ${STARROCKS_HOME}/be/output/www/* ${STARROCKS_OUTPUT}/be/www/
    cp -r -p ${STARROCKS_HOME}/be/output/udf/*.a ${STARROCKS_OUTPUT}/udf/lib/
    cp -r -p ${STARROCKS_HOME}/be/output/udf/include/* ${STARROCKS_OUTPUT}/udf/include/
    cp -r -p ${STARROCKS_HOME}/gensrc/build/gen_java/udf-class-loader.jar ${STARROCKS_OUTPUT}/be/lib
    cp -r -p ${STARROCKS_HOME}/jdbc_bridge/target/starrocks-jdbc-bridge-jar-with-dependencies.jar ${STARROCKS_OUTPUT}/be/lib
    cp -r -p ${STARROCKS_THIRDPARTY}/installed/hadoop/share/hadoop/common ${STARROCKS_OUTPUT}/be/lib/hadoop/
    cp -r -p ${STARROCKS_THIRDPARTY}/installed/hadoop/share/hadoop/hdfs ${STARROCKS_OUTPUT}/be/lib/hadoop/
    cp -r -p ${STARROCKS_THIRDPARTY}/installed/hadoop/lib/native ${STARROCKS_OUTPUT}/be/lib/hadoop/
    # note: do not use oracle jdk to avoid commercial dispute
    if [[ "${MACHINE_TYPE}" == "aarch64" ]]; then
        cp -r -p ${STARROCKS_THIRDPARTY}/installed/open_jdk/jre/lib/aarch64 ${STARROCKS_OUTPUT}/be/lib/jvm/
    else
        cp -r -p ${STARROCKS_THIRDPARTY}/installed/open_jdk/jre/lib/amd64 ${STARROCKS_OUTPUT}/be/lib/jvm/
    fi
    cp -r -p ${STARROCKS_THIRDPARTY}/installed/aliyun_oss_jars/* ${STARROCKS_OUTPUT}/be/lib/hadoop/hdfs/
fi

cp -r -p "${STARROCKS_HOME}/LICENSE.txt" "${STARROCKS_OUTPUT}/LICENSE.txt"
# Aggregate third-party license notices into a single NOTICE file.
build-support/gen_notice.py "${STARROCKS_HOME}/licenses,${STARROCKS_HOME}/licenses-binary" "${STARROCKS_OUTPUT}/NOTICE.txt" all

echo "***************************************"
echo "Successfully build StarRocks"
echo "***************************************"

# Allow callers to hook extra packaging steps after a successful build.
if [[ ! -z ${STARROCKS_POST_BUILD_HOOK} ]]; then
    eval ${STARROCKS_POST_BUILD_HOOK}
fi
exit 0
|
#!/bin/bash
# Copy the custom checks and confs in the /etc/datadog-agent folder
# Copy each conf YAML into the agent's config tree, expanding ${VAR}
# environment references (missing variables expand to the empty string).
find /conf.d -name '*.yaml' | while read line; do
    echo "'$line' -> '/etc/datadog-agent$line'"
    perl -p -e 's/\$\{(\w+)\}/(exists $ENV{$1}?$ENV{$1}:"")/eg' < "$line" > "/etc/datadog-agent$line"
done
# Copy custom python checks, preserving their directory layout.
find /checks.d -name '*.py' -exec cp --parents -fv {} /etc/datadog-agent/ \;
|
#!/bin/bash
# Script to upload files.
# This is a separate script so it can also be used manually to test uploads.
# Allow this script to be executed manually, which requires ALLSKY_HOME to be set.
if [ -z "${ALLSKY_HOME}" ] ; then
    export ALLSKY_HOME="$(realpath $(dirname "${BASH_ARGV0}")/..)"
fi
# Pull in shared variables, configuration, and the FTP credentials.
source "${ALLSKY_HOME}/variables.sh"
source "${ALLSKY_CONFIG}/config.sh"
source "${ALLSKY_SCRIPTS}/filename.sh"
source "${ALLSKY_CONFIG}/ftp-settings.sh"

# "--silent" must be the first argument; it suppresses status messages.
if [ "${1}" = "--silent" ] ; then
    SILENT="true"
    shift
else
    SILENT="false"
fi
ME="$(basename "${BASH_ARGV0}")"

# TODO: Use getopt() so arguments can be in any order
if [ $# -lt 3 ] ; then
    # When run manually, the unique_name (arg $4) normally won't be given.
    echo -en "${RED}"
    echo -n "*** Usage: ${ME} [--silent] file_to_upload directory destination_file_name [unique_name] [local_directory]"
    echo -e "${NC}"
    echo "Where:"
    echo " '--silent' doesn't display any status messages"
    echo " 'file_to_upload' is the path name of the file you want to upload."
    echo " 'directory' is the directory ON THE SERVER the file should be uploaded to."
    echo " 'destination_file_name' is the name the file should be called ON THE SERVER."
    echo " 'unique_name' is an optional, temporary name to use when uploading the file."
    echo " 'local_directory' is the name of an optional local directory the file should be"
    echo " copied to in addition to being uploaded."
    echo
    echo -n "For example: ${ME} keogram-20210710.jpg /keograms keogram.jpg"
    exit 1
fi
FILE_TO_UPLOAD="${1}"
if [ ! -f "${FILE_TO_UPLOAD}" ] ; then
    echo -en "${RED}"
    echo -n "*** ${ME}: ERROR: File to upload '${FILE_TO_UPLOAD}' not found!"
    echo -e "${NC}"
    exit 2
fi

REMOTE_DIR="${2}"
DESTINATION_FILE="${3}"

# Append a random suffix so concurrent uploads never collide on the temp name.
if [ "${4}" = "" ] ; then
    TEMP_NAME="x-${RANDOM}"
else
    TEMP_NAME="${4}-${RANDOM}"
fi

COPY_TO="${5}"
if [ "${COPY_TO}" != "" -a ! -d "${COPY_TO}" ] ; then
    echo -en "${RED}"
    echo -n "*** ${ME}: ERROR: '${COPY_TO}' directory not found!"
    echo -e "${NC}"
    exit 2
fi

# "put" to a temp name, then move the temp name to the final name.
# This is useful with slow uplinks where multiple lftp requests can be running at once,
# and only one lftp can upload the file at once, otherwise we get this error:
#     put: Access failed: 550 The process cannot access the file because it is being used by
#     another process. (image.jpg)
# Slow uplinks also cause problems with web servers that read the file as it's being uploaded.
LOG="${ALLSKY_TMP}/upload_log.txt"

# Convert to lowercase so we don't care if user specified upper or lowercase.
PROTOCOL="${PROTOCOL,,}"
if [[ "${PROTOCOL}" == "s3" ]] ; then
    # xxxxxx How do you tell it the DESTINATION_FILE name ?
    if [ "${SILENT}" = "false" -a "${ALLSKY_DEBUG_LEVEL}" -ge 3 ]; then
        echo "${ME}: Uploading ${FILE_TO_UPLOAD} to aws ${S3_BUCKET}/${REMOTE_DIR}"
    fi
    ${AWS_CLI_DIR}/aws s3 cp "${FILE_TO_UPLOAD}" s3://${S3_BUCKET}${REMOTE_DIR} --acl ${S3_ACL} > "${LOG}"
    RET=$?

elif [[ ${PROTOCOL} == "local" ]] ; then
    # "local" protocol: the destination is another directory on this machine.
    if [ "${SILENT}" = "false" -a "${ALLSKY_DEBUG_LEVEL}" -ge 3 ]; then
        echo "${ME}: Copying ${FILE_TO_UPLOAD} to ${REMOTE_DIR}/${DESTINATION_FILE}"
    fi
    cp "${FILE_TO_UPLOAD}" "${REMOTE_DIR}/${DESTINATION_FILE}"
    RET=$?

else # sftp/ftp
    # People sometimes have problems with ftp not working,
    # so save the commands we use so they can run lftp manually to debug.

    # If REMOTE_DIR isn't null (which it can be) and doesn't already have a trailing "/", append one.
    [ "${REMOTE_DIR}" != "" -a "${REMOTE_DIR: -1:1}" != "/" ] && REMOTE_DIR="${REMOTE_DIR}/"

    if [ "${SILENT}" = "false" -a "${ALLSKY_DEBUG_LEVEL}" -ge 3 ]; then
        echo "${ME}: FTP'ing ${FILE_TO_UPLOAD} to ${REMOTE_DIR}${DESTINATION_FILE}"
    fi
    LFTP_CMDS="${ALLSKY_TMP}/lftp_cmds.txt"

    # COMPATIBILITY CHECK. New names start with "REMOTE_".
    # If "REMOTE_HOST" doesn't exist assume the user has the old-style ftp-settings.sh file.
    # xxxxx THIS CHECK WILL GO AWAY IN THE FUTURE.
    if [ -z "${REMOTE_HOST}" ]; then
        REMOTE_HOST="${HOST}"
        REMOTE_PASSWORD="${PASSWORD}"
        REMOTE_USER="${USER}"
    fi

    # Write the lftp command script, then execute it below.
    (
        [ "${LFTP_COMMANDS}" != "" ] && echo ${LFTP_COMMANDS}
        # xxx TODO: escape single quotes in REMOTE_PASSWORD - how? With \ ?
        P="${REMOTE_PASSWORD}"

        # Sometimes have problems with "max-reties 1", so make it 2
        echo set net:max-retries 2
        echo set net:timeout 10
        echo "open --user '${REMOTE_USER}' --password '${P}' '${PROTOCOL}://${REMOTE_HOST}'"
        # unlikely, but just in case it's already there
        echo "rm -f '${REMOTE_DIR}${TEMP_NAME}'"
        echo "put '${FILE_TO_UPLOAD}' -o '${REMOTE_DIR}${TEMP_NAME}' || (echo 'put of ${FILE_TO_UPLOAD} failed!'; exit 1) || exit 2"
        echo "rm -f '${REMOTE_DIR}${DESTINATION_FILE}'"
        echo "mv '${REMOTE_DIR}${TEMP_NAME}' '${REMOTE_DIR}${DESTINATION_FILE}' || (echo 'mv of ${TEMP_NAME} to ${DESTINATION_FILE} in ${REMOTE_DIR} failed!'; exit 1) || exit 3"
        echo exit 0
    ) > "${LFTP_CMDS}"

    lftp -f "${LFTP_CMDS}" > "${LOG}" 2>&1
    RET=$?
    if [ ${RET} -ne 0 ] ; then
        # Dump everything needed to reproduce the failed transfer by hand.
        echo -en "${RED}"
        echo "*** ${ME}: ERROR:"
        echo "FILE_TO_UPLOAD='${FILE_TO_UPLOAD}'"
        echo "REMOTE_HOST='${REMOTE_HOST}'"
        echo "REMOTE_DIR='${REMOTE_DIR}'"
        echo "TEMP_NAME='${TEMP_NAME}'"
        echo "DESTINATION_FILE='${DESTINATION_FILE}'"
        echo -en "${NC}"
        echo
        cat "${LOG}"
        echo -e "\n${YELLOW}Commands used${NC} are in: ${GREEN}${LFTP_CMDS}${NC}"
    fi
fi
# If a local directory was also specified, copy the file there.
if [ ${RET} -eq 0 -a "${COPY_TO}" != "" ]; then
    if [ "${SILENT}" = "false" -a "${ALLSKY_DEBUG_LEVEL}" -ge 3 ]; then
        # No need to specify the file being copied again since we did so above.
        echo "${ME}: Also copying to ${COPY_TO}/${DESTINATION_FILE}"
    fi
    cp "${FILE_TO_UPLOAD}" "${COPY_TO}/${DESTINATION_FILE}"
    RET=$?
fi
exit ${RET}
|
<reponame>SachiraChin/Vulcan<gh_stars>0
-- Records one table row captured as part of a migration, preserving the
-- order (@ExecutionOrderIndex) in which entries must be replayed.
CREATE PROCEDURE [core].[base_MigrationEntry_Add]
    @MigrationId uniqueidentifier,
    @TableName varchar(64),
    @EntryJson nvarchar(max),
    @ExecutionOrderIndex int
AS
    insert into [MigrationEntries]([MigrationId], [TableName],[EntryJson], [ExecutionOrderIndex])
    values (@MigrationId, @TableName, @EntryJson, @ExecutionOrderIndex)
|
<filename>gis-regression-analysis-core/src/main/java/com/katus/test/aic/AIC.java<gh_stars>1-10
package com.katus.test.aic;
import com.katus.data.AbstractDataSet;
import com.katus.data.AbstractResultRecordWithInfo;
import com.katus.data.AbstractResultDataSet;
import com.katus.data.Record;
import com.katus.exception.DataException;
import com.katus.exception.DataSetConvertException;
import com.katus.exception.InvalidParamException;
import com.katus.regression.linear.AbstractLinearRegression;
import com.katus.regression.linear.MultipleLinearRegression;
import com.katus.test.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Random;
/**
 * Bandwidth-selection test based on the Akaike Information Criterion (AIC).
 *
 * <p>The test data set is split once into a training subset and a prediction
 * subset according to {@code trainingRatio}; for each candidate bandwidth a
 * regression is built and its AIC score recorded in {@code resultMap}.</p>
 *
 * @author <NAME>
 * @version 1.0, 2021-10-09
 */
public class AIC<R extends Record, RR extends AbstractResultRecordWithInfo<R>> implements Test {
    private static final Logger logger = LoggerFactory.getLogger(AIC.class);
    /** Full data set that gets split into training and prediction subsets. */
    private final AbstractDataSet<R> testDataSet;
    // NOTE(review): LinearRegressionBuilder is not in this file's import list --
    // presumably it lives in the same package; confirm.
    private final LinearRegressionBuilder<R, RR> linearRegressionBuilder;
    /** Fraction of records used for training (must be in (0, 1)). */
    private final double trainingRatio;
    /** Candidate bandwidths to score. */
    private final double[] bandwidths;
    /** Result-record class used to convert the prediction subset. */
    private final Class<RR> clazz;
    /** Maps each bandwidth to its AIC score; -1.0 marks a conversion failure. */
    private final Map<Double, Double> resultMap;
    // volatile: read outside the synchronized block (double-checked locking in test()).
    private volatile boolean test = false;
    private AIC(AbstractDataSet<R> testDataSet, LinearRegressionBuilder<R, RR> linearRegressionBuilder, double trainingRatio, double[] bandwidths, Class<RR> clazz) {
        this.testDataSet = testDataSet;
        this.linearRegressionBuilder = linearRegressionBuilder;
        this.trainingRatio = trainingRatio;
        this.bandwidths = bandwidths;
        this.clazz = clazz;
        this.resultMap = new LinkedHashMap<>();
    }
    /**
     * Runs the test if it has not run yet and returns the bandwidth-to-AIC map.
     *
     * @return map from bandwidth to AIC score (insertion ordered)
     */
    public Map<Double, Double> getAicResults() {
        test();
        return resultMap;
    }
    /**
     * Computes the AIC score for one regression result:
     * {@code log(RSS / n) + 2 * (k + 1) / n}, where n is the training-set size
     * and k the number of explanatory variables.
     *
     * @param resultDataSet predictions to score against their true values
     * @return the AIC score
     */
    private double aic(AbstractResultDataSet<R, RR> resultDataSet) {
        double squareSum = 0.0;
        for (int i = 0; i < resultDataSet.size(); i++) {
            RR record = resultDataSet.getRecord(i);
            double predictValue = record.prediction();
            double trueValue = record.y();
            squareSum += Math.pow(predictValue - trueValue, 2);
        }
        // Records not in the result set are the ones that were trained on.
        int trainingSize = testDataSet.size() - resultDataSet.size();
        return Math.log(squareSum / trainingSize) + 2.0 * (resultDataSet.xSize() + 1) / trainingSize;
    }
    @Override
    public void test() {
        // Double-checked locking: the split and the regressions run exactly once.
        if (!test) {
            synchronized (this) {
                if (!test) {
                    AbstractDataSet<R> trainingDataSet, predictDataSet;
                    try {
                        trainingDataSet = testDataSet.clone();
                        predictDataSet = testDataSet.clone();
                    } catch (CloneNotSupportedException e) {
                        logger.error("failed to clone dataset", e);
                        throw new DataException();
                    }
                    // Move the smaller share out of the larger clone so that the
                    // two subsets end up with ratio trainingRatio : (1 - trainingRatio).
                    int moveSize = (int) (Math.min(trainingRatio, 1 - trainingRatio) * testDataSet.size());
                    Random random = new Random();
                    if (trainingRatio >= 0.5) {
                        predictDataSet.clear();
                        for (int i = 0; i < moveSize; i++) {
                            predictDataSet.addRecord(trainingDataSet.removeRecord(random.nextInt(trainingDataSet.size())));
                        }
                    } else {
                        trainingDataSet.clear();
                        for (int i = 0; i < moveSize; i++) {
                            trainingDataSet.addRecord(predictDataSet.removeRecord(random.nextInt(predictDataSet.size())));
                        }
                    }
                    for (double bandwidth : bandwidths) {
                        AbstractLinearRegression<R, RR> regression;
                        try {
                            regression = linearRegressionBuilder.build(trainingDataSet, predictDataSet.convertToResultDataSet(clazz), bandwidth);
                            AbstractResultDataSet<R, RR> resultDataSet = regression.getResultDataSet();
                            resultMap.put(bandwidth, aic(resultDataSet));
                            // A MultipleLinearRegression is evaluated only for the
                            // first bandwidth; further iterations are skipped.
                            if (regression instanceof MultipleLinearRegression) {
                                break;
                            }
                        } catch (DataSetConvertException e) {
                            // Sentinel marking a failed conversion for this bandwidth.
                            resultMap.put(bandwidth, -1.0);
                        }
                    }
                    this.test = true;
                }
            }
        }
    }
    @Override
    public boolean pass() {
        // AIC is a ranking criterion, not a pass/fail test; always passes.
        test();
        return true;
    }
    /**
     * Fluent builder for {@link AIC} instances.
     */
    public static class AICBuilder<R extends Record, RR extends AbstractResultRecordWithInfo<R>> {
        private static final Logger logger = LoggerFactory.getLogger(AICBuilder.class);
        private AbstractDataSet<R> testDataSet;
        private LinearRegressionBuilder<R, RR> linearRegressionBuilder;
        /** Default training share: 70%. */
        private double trainingRatio = 0.7;
        private double[] bandwidths = new double[0];
        private Class<RR> clazz;
        /**
         * Validates the collected parameters and builds the AIC test.
         *
         * @return the configured AIC instance
         * @throws InvalidParamException if {@link #check()} fails
         */
        public AIC<R, RR> build() {
            if (!check()) {
                logger.error("aic params are invalid");
                throw new InvalidParamException();
            }
            return new AIC<>(testDataSet, linearRegressionBuilder, trainingRatio, bandwidths, clazz);
        }
        /**
         * @return true when all mandatory parameters are set and in range
         */
        public boolean check() {
            return testDataSet != null && linearRegressionBuilder != null && trainingRatio > 0 && trainingRatio < 1 && bandwidths.length > 0;
        }
        public AICBuilder<R, RR> testDataSet(AbstractDataSet<R> testDataSet) {
            this.testDataSet = testDataSet;
            return this;
        }
        public AICBuilder<R, RR> linearRegressionBuilder(LinearRegressionBuilder<R, RR> linearRegressionBuilder) {
            this.linearRegressionBuilder = linearRegressionBuilder;
            return this;
        }
        public AICBuilder<R, RR> trainingRatio(double trainingRatio) {
            this.trainingRatio = trainingRatio;
            return this;
        }
        public AICBuilder<R, RR> bandwidths(double... bandwidths) {
            this.bandwidths = bandwidths;
            return this;
        }
        public AICBuilder<R, RR> clazz(Class<RR> clazz) {
            this.clazz = clazz;
            return this;
        }
    }
}
|
package com.decathlon.ara.report.bean;
import lombok.Data;
@Data
public class Feature {
    /** Feature identifier from the Cucumber report JSON. */
    private String id;
    /** Human readable feature name. */
    private String name;
    /** URI of the .feature file; also the basis of the report file name. */
    private String uri;
    private String description;
    private String keyword;
    private Integer line;
    /** Never null: empty arrays by default so callers can iterate safely. */
    private Comment[] comments = new Comment[0];
    private Element[] elements = new Element[0];
    private Tag[] tags = new Tag[0];
    /**
     * Derives the HTML report file name from the feature URI by replacing
     * every character that is not a word character with '-'
     * (in a regex, \w already covers \d, so the \d is redundant but harmless).
     *
     * @return the report file name, ending in ".html"
     */
    public String getReportFileName() {
        // Simplified version of
        // net.masterthought.cucumber.json.Feature.setReportFileName(int jsonFileNo, Configuration configuration) :
        // * no support for multiple report.json files (we do not use that)
        // * nor for parallel execution by official Maven plugin (we use our Cucumber fork managing parallelism more efficiently and effectively)
        return uri.replaceAll("[^\\d\\w]", "-") + ".html";
    }
}
|
#!/usr/bin/env bash
# Print all arguments in blue, used to label the phases below.
info() {
  printf "\033[00;34m$@\033[0m\n"
}

# Install Homebrew if it is missing, otherwise update it and upgrade all
# installed formulae. Analytics collection is disabled either way.
update() {
  # Install Homebrew or make sure it's up to date.
  which -s brew
  if [[ $? != 0 ]] ; then
    info "Installing"
    # NOTE(review): Homebrew replaced this ruby-based installer with a bash
    # install.sh upstream -- confirm this URL still works.
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
  else
    info "Updating"
    brew update
    brew upgrade
  fi

  # Disable analytics.
  brew analytics off
}

# Core tools installed for every profile (-m as well as -i).
installEssentials() {
  info "Installing essentials"
  brew install fd
  brew install fish
  brew install fzf
  brew install git
  brew install gnupg
  brew install neovim
  brew install pinentry-mac
  brew install ripgrep
  brew install swig
  brew install vim
}
# Everyday development tools, installed in addition to the essentials (-i).
installBasics() {
  info "Installing basics"
  brew install bat
  brew install boost
  brew install clang-format
  brew install cloc
  brew install cmake
  brew install creduce
  brew install ctags
  brew install doxygen
  brew install ffmpeg
  brew install git-lfs
  brew install github/gh/gh
  brew install graphviz
  brew install gts
  brew install highlight
  brew install htop
  brew install hyperfine
  brew install imagemagick
  brew install lcov
  brew install libxml2
  brew install lua@5.3
  brew install ncdu
  brew install neofetch
  brew install ninja
  brew install node
  brew install pandoc
  brew install patchutils
  brew install python
  brew install re2c
  brew install redis
  brew install rsync
  brew install sccache
  brew install the_silver_searcher
  brew install tig
  brew install tmux
  brew install tree
  brew install valgrind
  brew install vbindiff
  brew install xz
  brew install zsh
}

# Rarely needed extras (-e).
installExtras() {
  info "Installing extras"
  brew install afl-fuzz
  brew install archey
  brew install binutils
  brew install coreutils
  brew install cppcheck
  brew install distcc
  brew install findutils
  brew install gdb
  brew install gnu-sed
  brew install gnutls
  brew install libiconv
  brew install mosh
  # NOTE(review): neovim is already installed by installEssentials; this is redundant.
  brew install neovim
  brew install radare2
  brew install ranger
  brew install shellcheck
  # NOTE(review): per-formula install options (--with-iri / --with-qt5) were
  # removed from Homebrew -- current versions reject these flags; confirm.
  brew install wget --with-iri
  brew install wireshark --with-qt5
}
# Symlink GUI apps into /Applications.
# NOTE(review): `brew linkapps` was removed from Homebrew -- this will fail on
# current versions; confirm whether this step is still needed.
linkApps() {
  info "Linking apps"
  brew linkapps
}

# Remove outdated downloads and cached files.
cleanup() {
  info "Cleanup"
  brew cleanup
}

# Show everything currently installed.
list() {
  info "List"
  brew list
}

# Print usage to stderr and exit with an error code.
help() {
  echo "Usage: $(basename "$0") [options]" >&2
  echo
  echo " -i, --install Install"
  echo " -e, --extras Install extras"
  echo " -u, --update Update brew and formulae"
  echo " -l, --list List installed formulae"
  echo " -m, --minimal Install just the essentials"
  echo
  exit 1
}

# Entry point: with no arguments show usage, otherwise handle each flag in order.
if [ $# -eq 0 ]; then
  help
else
  for i in "$@"
  do
    case $i in
      -m|--minimal)
        update
        installEssentials
        cleanup
        list
        shift
        ;;
      -i|--install)
        update
        installEssentials
        installBasics
        cleanup
        list
        shift
        ;;
      -e|--extras)
        update
        installExtras
        cleanup
        list
        shift
        ;;
      # NOTE(review): -f/--fix is currently a no-op; confirm intent.
      -f|--fix)
        shift
        ;;
      -u|--update)
        update
        cleanup
        shift
        ;;
      -l|--list)
        list
        shift
        ;;
      -c|--cask)
        # NOTE(review): installCasks is not defined anywhere in this script --
        # this branch will fail with "command not found"; confirm its source.
        installCasks
        linkApps
        cleanup
        list
        shift
        ;;
      *)
        help
        shift
        ;;
    esac
  done
fi
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from gcloud.core.utils import convert_readable_username
# Execution states a synchronization task can be in.
RUNNING = 'RUNNING'
SUCCEEDED = 'SUCCEEDED'
FAILED = 'FAILED'

# (value, human-readable label) choices for SyncTask.status.
SYNC_TASK_STATUS = [
    (RUNNING, _("执行中")),
    (SUCCEEDED, _("成功")),
    (FAILED, _("失败"))
]

# How the task was created: triggered manually or automatically by a deployment.
SYNC_TASK_CREATED = [
    ('manual', _("手动触发")),
    ('auto', _("部署自动触发"))
]


class SyncTask(models.Model):
    """A synchronization task for a remote package source.

    The task starts in RUNNING state (``start_time`` is set automatically on
    creation) and is closed via :meth:`finish_task`.
    """

    creator = models.CharField(_("执行者"), max_length=32, blank=True)
    create_method = models.CharField(_("创建方式"), max_length=32, default='manual', choices=SYNC_TASK_CREATED)
    start_time = models.DateTimeField(_("启动时间"), auto_now_add=True)
    finish_time = models.DateTimeField(_("结束时间"), null=True, blank=True)
    status = models.CharField(_("同步状态"), max_length=32, default=RUNNING, choices=SYNC_TASK_STATUS)
    details = models.TextField(_("同步详情信息"), blank=True)

    class Meta:
        verbose_name = _("远程包源同步任务 SyncTask")
        verbose_name_plural = _("远程包源同步任务 SyncTask")
        ordering = ['-id']

    @property
    def creator_name(self):
        """Creator's username converted to a readable display name."""
        return convert_readable_username(self.creator)

    @property
    def status_display(self):
        """Human-readable label of the current status."""
        return self.get_status_display()

    def finish_task(self, status, details=None):
        """Mark the task as finished with *status* and stamp the finish time.

        If *details* is given it is stored as the task's detail text.
        Persists the change immediately via ``save()``.
        """
        self.status = status
        self.finish_time = timezone.now()
        if details:
            self.details = details
        self.save()
|
import { Menu, Transition } from '@headlessui/react';
import { LogoutIcon, PencilIcon } from '@heroicons/react/outline';
import { getSession, signIn, signOut } from 'next-auth/react';
import { Fragment, useEffect, useState } from 'react';
/**
 * Avatar button with a dropdown menu: shows the current user, an edit-profile
 * link, the list of known ZITADEL sessions (clicking one re-signs-in with that
 * account as a login hint) and a logout entry.
 */
export default function ProfileImage({ user }: { user?: any | null }) {
  // Unwraps the session-search response; the session list lives under `result`.
  const sessionMap = (res: any): any[] => {
    return res.result;
  };
  // POSTs to `url` with the current session's bearer token and pipes the
  // parsed JSON response through `cb`.
  const fetcher = async (url: string, cb: any) => {
    const session = (await getSession()) as any;
    return fetch(`${url}`, {
      method: "POST",
      headers: {
        authorization: `Bearer ${session.accessToken}`,
      },
    })
      .then((res) => res.json())
      .then(cb);
  };
  const [sessions, setSessions] = useState<any[]>([]);
  // Load the user's sessions once on mount.
  useEffect(() => {
    fetcher(
      "https://api.zitadel.ch/auth/v1/users/me/sessions/_search",
      sessionMap
    ).then((sessions) => {
      if (sessions && sessions.length) {
        setSessions(sessions);
      }
    });
  }, []);
  // Starts a fresh sign-in pre-filled with the clicked session's login name.
  function signInWithHint(session: any): void {
    signIn(
      "zitadel",
      {
        callbackUrl: "/",
      },
      {
        login_hint: session.loginName,
      }
    );
  }
  return (
    <Menu as="div" className="relative inline-block text-left">
      <div>
        <Menu.Button className="flex items-center bg-zitadelblue-400 justify-center ml-4 transition-all h-8 w-8 rounded-full shadow-lg ring-2 ring-white ring-opacity-50 hover:ring-opacity-100">
          {user && user.image ? (
            <img
              className="h-8 w-8 rounded-full"
              src={user.image}
              alt="user avatar"
            />
          ) : (
            <span className="text-sm">
              {user ? user.name.substring(0, 1) : "A"}
            </span>
          )}
        </Menu.Button>
      </div>
      <Transition
        as={Fragment}
        enter="transition ease-out duration-100"
        enterFrom="transform opacity-0 scale-95"
        enterTo="transform opacity-100 scale-100"
        leave="transition ease-in duration-75"
        leaveFrom="transform opacity-100 scale-100"
        leaveTo="transform opacity-0 scale-95"
      >
        <Menu.Items className="absolute w-80 right-0 mt-5 origin-top-right bg-zitadelblue-400 divide-y divide-gray-500 rounded-md shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
          <div className="px-1 py-1 ">
            <div className="flex flex-col items-center py-4">
              <p>{user?.name}</p>
              <p className="text-gray-300 text-sm">{user?.email}</p>
            </div>
            <Menu.Item>
              {({ active }) => (
                <a
                  href="https://console.zitadel.ch/users/me"
                  target="_blank"
                  rel="noreferrer"
                  className={`${
                    active ? "bg-zitadelblue-300 text-white" : "text-gray-300"
                  } group flex rounded-md justify-center items-center w-full px-2 py-2 text-sm`}
                >
                  {active ? (
                    <PencilIcon className="w-5 h-5 mr-2" aria-hidden="true" />
                  ) : (
                    <PencilIcon className="w-5 h-5 mr-2" aria-hidden="true" />
                  )}
                  Edit Profile
                </a>
              )}
            </Menu.Item>
          </div>
          <div className="px-1 py-1 max-h-96 overflow-y-auto">
            {sessions.map((session, i) => (
              <Menu.Item key={`${session.userName}${i}`}>
                {({ active }) => (
                  <button
                    onClick={() => signInWithHint(session)}
                    className={`${
                      active ? "bg-zitadelblue-300 text-white" : "text-gray-300"
                    } group flex rounded-md items-center w-full px-2 py-2 text-sm`}
                  >
                    <div className="w-8 h-8 mr-2 flex items-center justify-center rounded-full bg-black bg-opacity-20">
                      <span className="text-sm">
                        {session ? session.displayName.substring(0, 1) : "A"}
                      </span>
                    </div>
                    <div className="flex flex-col justify-start">
                      <span className="text-left">{session.displayName}</span>
                      <span className="text-left text-sm">
                        {session.userName}
                      </span>
                      <span
                        className={`text-left text-sm ${
                          session.authState === "SESSION_STATE_ACTIVE"
                            ? "text-green-500"
                            : "text-red-500"
                        }`}
                      >
                        {session.authState === "SESSION_STATE_ACTIVE"
                          ? "active"
                          : "inactive"}
                      </span>
                    </div>
                  </button>
                )}
              </Menu.Item>
            ))}
          </div>
          <div className="px-1 py-1">
            <Menu.Item>
              {({ active }) => (
                <button
                  onClick={() => signOut()}
                  className={`${
                    active ? "bg-zitadelaccent-800 text-white" : "text-gray-300"
                  } group flex rounded-md justify-center items-center w-full px-2 py-2 text-sm`}
                >
                  {active ? (
                    <LogoutIcon
                      className="w-5 h-5 mr-2 text-violet-400"
                      aria-hidden="true"
                    />
                  ) : (
                    <LogoutIcon
                      className="w-5 h-5 mr-2 text-violet-400"
                      aria-hidden="true"
                    />
                  )}
                  Logout
                </button>
              )}
            </Menu.Item>
          </div>
        </Menu.Items>
      </Transition>
    </Menu>
  );
}
|
angular.module('jojs.auth', [])
.factory('authProvider', function() {
var user;
return {
setUser : function(aUser){
user = aUser;
},
isLoggedIn : function(){
return(user)? user : false;
}
};
})
.factory('PasswordStrength', ['$http', function($http) {
return {
score: function(password, callback) {
if (password)
{
var req = {
method: 'GET',
url: 'checkPassword',
params: {password : password},
paramSerializer: '$httpParamSerializerJQLike'
}
$http(req).then(function(success)
{
var passwordStrength = success.data;
callback && callback(passwordStrength);
}
);
}
else
{
callback && callback(-1);
}
}
};
}])
.directive('okPassword', ['PasswordStrength', function(PasswordStrength) {
return {
// restrict to only attribute and class
restrict: 'AC',
// use the NgModelController
require: 'ngModel',
// add the NgModelController as a dependency to your link function
link: function($scope, $element, $attrs, ngModelCtrl) {
$element.on('blur change keydown', function(evt) {
$scope.$evalAsync(function($scope) {
// update the $scope.password with the element's value
var password = $scope.password = $element.val();
PasswordStrength.score(password, function(score)
{
$scope.passwordStrength = score;
});
});
});
}
};
}]);
|
#!/bin/bash
# Test assertion helpers. They read $status and $output, which are expected to
# have been set by the caller (the convention of bats' `run`) -- confirm in use.

# Pass when the last command exited 0; otherwise print diagnostics, return 1.
assert_success() {
  if [[ "$status" != 0 ]]; then
    echo "expected: 0"
    echo "actual: $status"
    echo "output: $output"
    return 1
  fi
}

# Pass when the last command exited non-zero.
assert_failure() {
  if [[ "$status" == 0 ]]; then
    echo "expected: non-zero exit code"
    echo "actual: $status"
    echo "output: $output"
    return 1
  fi
}

# assert_equal EXPECTED ACTUAL -- string equality.
assert_equal() {
  if [[ "$1" != "$2" ]]; then
    echo "expected: $1"
    echo "actual: $2"
    return 1
  fi
}

# assert_not_equal UNEXPECTED ACTUAL -- string inequality.
assert_not_equal() {
  if [[ "$1" == "$2" ]]; then
    echo "unexpected: $1"
    echo "actual: $2"
    return 1
  fi
}

# assert_match REGEX STRING -- $1 is deliberately unquoted so it is treated as
# an extended regular expression by =~.
assert_match() {
  if [[ ! "$2" =~ $1 ]]; then
    echo "expected: $1"
    echo "actual: $2"
    return 1
  fi
}

# assert_not_match REGEX STRING -- negation of assert_match.
assert_not_match() {
  if [[ "$2" =~ $1 ]]; then
    echo "expected: $1"
    echo "actual: $2"
    return 1
  fi
}

# wait_for_process TOTAL_SECONDS SLEEP_SECONDS CMD -- re-evaluate CMD every
# SLEEP_SECONDS until it succeeds (return 0) or the budget runs out (return 1).
wait_for_process(){
  wait_time="$1"
  sleep_time="$2"
  cmd="$3"
  while [ "$wait_time" -gt 0 ]; do
    if eval "$cmd"; then
      return 0
    else
      sleep "$sleep_time"
      wait_time=$((wait_time-sleep_time))
    fi
  done
  return 1
}
|
#!/usr/bin/env bash
# Launcher for AdaLoGN evaluation on the ReClor dataset.
export DATASET_DIR=ReclorDataset
export TASK_NAME=LogiGraph
# First positional argument: path to the model checkpoint to load.
export MODEL_DIR=$1
export WANDB_DISABLED=true
export TOKENIZERS_PARALLELISM=false
export RUN_NAME=AdaLoGN_Reclor
# NOTE(review): DATASET_DIR was already exported above; this line is redundant.
export DATASET_DIR=$DATASET_DIR
export MODEL_TYPE=Roberta

# NOTE(review): $MODE is never set in this script, so `--$MODE` expands to a
# bare `--`; presumably it should be exported like the variables above
# (e.g. MODE=do_train) -- confirm against run_multiple_choice.py.
CUDA_VISIBLE_DEVICES=0 python run_multiple_choice.py \
  --run_name $RUN_NAME \
  --task_name $TASK_NAME \
  --model_name_or_path $MODEL_DIR \
  --data_dir $DATASET_DIR \
  --$MODE \
  --do_eval \
  --seed 123 \
  --model_type $MODEL_TYPE \
  --max_seq_length 384 \
  --per_device_eval_batch_size 16 \
  --per_device_train_batch_size 1 \
  --gradient_accumulation_steps 16 \
  --num_train_epochs 10 \
  --output_dir Checkpoints/$DATASET_DIR/$RUN_NAME \
  --logging_steps 200 \
  --learning_rate 7e-6 \
  --overwrite_output_dir \
  --evaluation_strategy epoch \
  --save_strategy epoch \
  --metric_for_best_model acc_dev \
  --gnn_layers_num 2 \
  --save_total_limit 2 \
  --dropout 0.1 \
  --warmup_ratio 0.1 \
  --pooling_type attention_pooling_with_gru
|
<filename>javafx-src/com/sun/webkit/dom/MouseEventImpl.java<gh_stars>1-10
/*
* Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.webkit.dom;
import org.w3c.dom.Node;
import org.w3c.dom.events.EventTarget;
import org.w3c.dom.events.MouseEvent;
import org.w3c.dom.views.AbstractView;
/**
 * {@link MouseEvent} implementation backed by a native WebKit peer object:
 * every accessor delegates to a static native method that reads the wrapped
 * peer handle.
 */
public class MouseEventImpl extends UIEventImpl implements MouseEvent {
    MouseEventImpl(long peer) {
        super(peer);
    }

    /** Wraps a native peer handle in a MouseEvent implementation. */
    static MouseEvent getImpl(long peer) {
        return (MouseEvent)create(peer);
    }

    // Attributes
    public int getScreenX() {
        return getScreenXImpl(getPeer());
    }
    native static int getScreenXImpl(long peer);

    public int getScreenY() {
        return getScreenYImpl(getPeer());
    }
    native static int getScreenYImpl(long peer);

    public int getClientX() {
        return getClientXImpl(getPeer());
    }
    native static int getClientXImpl(long peer);

    public int getClientY() {
        return getClientYImpl(getPeer());
    }
    native static int getClientYImpl(long peer);

    public boolean getCtrlKey() {
        return getCtrlKeyImpl(getPeer());
    }
    native static boolean getCtrlKeyImpl(long peer);

    public boolean getShiftKey() {
        return getShiftKeyImpl(getPeer());
    }
    native static boolean getShiftKeyImpl(long peer);

    public boolean getAltKey() {
        return getAltKeyImpl(getPeer());
    }
    native static boolean getAltKeyImpl(long peer);

    public boolean getMetaKey() {
        return getMetaKeyImpl(getPeer());
    }
    native static boolean getMetaKeyImpl(long peer);

    public short getButton() {
        return getButtonImpl(getPeer());
    }
    native static short getButtonImpl(long peer);

    public EventTarget getRelatedTarget() {
        return (EventTarget)NodeImpl.getImpl(getRelatedTargetImpl(getPeer()));
    }
    native static long getRelatedTargetImpl(long peer);

    public int getOffsetX() {
        return getOffsetXImpl(getPeer());
    }
    native static int getOffsetXImpl(long peer);

    public int getOffsetY() {
        return getOffsetYImpl(getPeer());
    }
    native static int getOffsetYImpl(long peer);

    public int getX() {
        return getXImpl(getPeer());
    }
    native static int getXImpl(long peer);

    public int getY() {
        return getYImpl(getPeer());
    }
    native static int getYImpl(long peer);

    public Node getFromElement() {
        return NodeImpl.getImpl(getFromElementImpl(getPeer()));
    }
    native static long getFromElementImpl(long peer);

    public Node getToElement() {
        return NodeImpl.getImpl(getToElementImpl(getPeer()));
    }
    native static long getToElementImpl(long peer);

    // Functions
    /**
     * Initializes this event, unwrapping the view and related target to their
     * native peers before delegating to the native implementation.
     */
    public void initMouseEvent(String type
        , boolean canBubble
        , boolean cancelable
        , AbstractView view
        , int detail
        , int screenX
        , int screenY
        , int clientX
        , int clientY
        , boolean ctrlKey
        , boolean altKey
        , boolean shiftKey
        , boolean metaKey
        , short button
        , EventTarget relatedTarget)
    {
        initMouseEventImpl(getPeer()
            , type
            , canBubble
            , cancelable
            , DOMWindowImpl.getPeer(view)
            , detail
            , screenX
            , screenY
            , clientX
            , clientY
            , ctrlKey
            , altKey
            , shiftKey
            , metaKey
            , button
            , NodeImpl.getPeer((NodeImpl)relatedTarget));
    }
    native static void initMouseEventImpl(long peer
        , String type
        , boolean canBubble
        , boolean cancelable
        , long view
        , int detail
        , int screenX
        , int screenY
        , int clientX
        , int clientY
        , boolean ctrlKey
        , boolean altKey
        , boolean shiftKey
        , boolean metaKey
        , short button
        , long relatedTarget);
}
|
def calculate_input_gradient(cache):
    """Backward pass of a 2-D convolution with respect to its input.

    Parameters
    ----------
    cache : tuple
        ``(x, w, dout, stride, pad)`` where
        ``x``      -- input, shape ``(N, C, H, W)``;
        ``w``      -- filters, shape ``(F, C, HH, WW)``;
        ``dout``   -- upstream gradient, shape ``(N, F, H_out, W_out)``;
        ``stride`` -- convolution stride;
        ``pad``    -- symmetric zero-padding applied to the input.

    Returns
    -------
    numpy.ndarray
        Gradient with respect to ``x``, shape ``(N, C, H, W)``.
    """
    import numpy as np  # local import: this snippet has no module-level imports

    # Retrieve necessary values from the cache
    x, w, dout, stride, pad = cache
    # Get dimensions of input and weights
    N, C, H, W = x.shape
    F, _, HH, WW = w.shape
    # The spatial loop must run over the OUTPUT positions (taken from dout),
    # not over the filter dimensions. The original looped over HH/WW and
    # sliced H x W windows, which both mismatches shapes and scatters the
    # gradient incorrectly.
    _, _, H_out, W_out = dout.shape

    # Accumulate into a padded buffer, then crop the padding off at the end.
    dx_padded = np.zeros((N, C, H + 2 * pad, W + 2 * pad), dtype=x.dtype)
    for i in range(N):
        for f in range(F):
            for j in range(H_out):
                for k in range(W_out):
                    # Output position (j, k) saw a HHxWW window of the input,
                    # so its upstream gradient flows back into that window,
                    # weighted by the whole filter w[f] (shape (C, HH, WW)).
                    dx_padded[i, :, j * stride:j * stride + HH, k * stride:k * stride + WW] += \
                        w[f] * dout[i, f, j, k]

    # Remove the padding from the gradient
    dx = dx_padded[:, :, pad:pad + H, pad:pad + W]
    return dx
|
package webshop.webservice
import CheckoutFlowIngredients._
import CheckoutFlowEvents._
import scala.concurrent.Future
/** Data (ingredient) types passed between the checkout flow steps. */
object CheckoutFlowIngredients {
  case class OrderId(orderId: String)
  case class Item(itemId: String)
  /** Items put on hold for an order plus opaque reservation data. */
  case class ReservedItems(items: List[Item], data: Array[Byte])
  case class ShippingAddress(address: String)
  case class PaymentInformation(info: String)
  /** Everything the shipping step needs to dispatch an order. */
  case class ShippingOrder(items: List[Item], data: Array[Byte], address: ShippingAddress)
}

/** Events emitted by / fed into the checkout flow. */
object CheckoutFlowEvents {
  case class OrderPlaced(orderId: OrderId, items: List[Item])
  case class PaymentInformationReceived(paymentInformation: PaymentInformation)
  case class ShippingAddressReceived(shippingAddress: ShippingAddress)
  /** Outcome of the reserve-items step. */
  sealed trait ReserveItemsOutput
  case class OrderHadUnavailableItems(unavailableItems: List[Item]) extends ReserveItemsOutput
  case class ItemsReserved(reservedItems: ReservedItems) extends ReserveItemsOutput
  /** Outcome of the payment step. */
  sealed trait MakePaymentOutput
  case class PaymentSuccessful(shippingOrder: ShippingOrder) extends MakePaymentOutput
  case class PaymentFailed() extends MakePaymentOutput
  case class ShippingConfirmed()
}

/** Contracts of the side-effecting interactions, implemented elsewhere. */
object CheckoutFlowInteractions {
  trait ReserveItems {
    def apply(orderId: OrderId, items: List[Item]): Future[ReserveItemsOutput]
  }
  trait MakePayment {
    def apply(processId: String, items: ReservedItems, address: ShippingAddress, payment: PaymentInformation): Future[MakePaymentOutput]
  }
  trait ShipItems {
    def apply(order: ShippingOrder): Future[ShippingConfirmed]
  }
}
|
-- Total salary of all employees whose name starts with 'C'.
SELECT SUM(salary)
FROM employees
WHERE name LIKE 'C%';
|
<filename>open-sphere-plugins/kml/src/main/java/io/opensphere/kml/envoy/KMLParserPool.java<gh_stars>10-100
package io.opensphere.kml.envoy;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.sax.SAXSource;
import org.apache.log4j.Logger;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequenceFactory;
import de.micromata.opengis.kml.v_2_2_0.Boundary;
import de.micromata.opengis.kml.v_2_2_0.Document;
import de.micromata.opengis.kml.v_2_2_0.Feature;
import de.micromata.opengis.kml.v_2_2_0.Folder;
import de.micromata.opengis.kml.v_2_2_0.Kml;
import de.micromata.opengis.kml.v_2_2_0.LinearRing;
import de.micromata.opengis.kml.v_2_2_0.Placemark;
import de.micromata.opengis.kml.v_2_2_0.Polygon;
import de.micromata.opengis.kml.v_2_2_0.gx.Track;
import io.opensphere.core.util.XMLUtilities;
import io.opensphere.core.util.jts.JTSUtilities;
import io.opensphere.core.util.xml.WrappedXMLReader;
import io.opensphere.kml.common.model.Processor;
import net.jcip.annotations.GuardedBy;
import net.jcip.annotations.ThreadSafe;
/**
 * A pool of parsers. Parsers are created lazily up to a capacity derived from
 * the processor count; callers block in {@link #process(InputStream)} until a
 * parser is available.
 */
@ThreadSafe
public final class KMLParserPool implements Processor<InputStream, Kml>
{
    /** Logger. */
    private static final Logger LOGGER = Logger.getLogger(KMLParserPool.class);

    /** The capacity of the pool. */
    private final int myCapacity;

    /** The pool itself. */
    @GuardedBy("myParserPool")
    private final LinkedBlockingQueue<Parser> myParserPool;

    /** The number of parsers created. */
    @GuardedBy("this")
    private int myParserCount;

    /**
     * Constructor.
     */
    public KMLParserPool()
    {
        // Leave two processors free for other work, but always allow at least one parser.
        myCapacity = Math.max(Runtime.getRuntime().availableProcessors() - 2, 1);
        myParserPool = new LinkedBlockingQueue<>(myCapacity);
    }

    @Override
    public Kml process(InputStream input) throws JAXBException
    {
        try
        {
            return takeParser().unmarshalKml(input);
        }
        catch (InterruptedException e)
        {
            // NOTE(review): the thread's interrupt status is not restored here
            // (no Thread.currentThread().interrupt()) and null is returned --
            // confirm callers tolerate a null result.
            LOGGER.error(e.getMessage());
        }
        return null;
    }

    /**
     * Takes a parser, blocking until one is available.
     *
     * @return A parser
     * @throws InterruptedException if interrupted while waiting
     */
    private Parser takeParser() throws InterruptedException
    {
        synchronized (this)
        {
            // Grow the pool lazily: only create a new parser when none are idle
            // and the capacity has not been reached yet.
            if (myParserPool.isEmpty() && myParserCount < myCapacity && myParserPool.offer(new Parser()))
            {
                myParserCount++;
                if (LOGGER.isDebugEnabled())
                {
                    LOGGER.debug("Parser count is now: " + myParserCount);
                }
            }
        }
        return myParserPool.take();
    }

    /**
     * A JAK Parser.
     */
    private final class Parser
    {
        /** The JAK Unmarshaller. */
        private Unmarshaller myUnmarshaller;

        /** The JAK XMLReader. */
        private XMLReader myXMLReader;

        /**
         * Constructor.
         */
        public Parser()
        {
            try
            {
                myUnmarshaller = JAXBContext.newInstance(Kml.class).createUnmarshaller();
                myXMLReader = new WrappedXMLReader(false, handler -> new KMLNamespaceFilterHandler(handler));
            }
            catch (RuntimeException | JAXBException | ParserConfigurationException | SAXException e)
            {
                LOGGER.error(e, e);
            }
        }

        /**
         * Convenience method to parse an InputStream using JAK.
         *
         * @param content The InputStream
         * @return The Kml object
         * @throws JAXBException If any unexpected errors occur while
         *             unmarshalling
         */
        public Kml unmarshalKml(InputStream content) throws JAXBException
        {
            Kml kml = null;
            try
            {
                if (content != null)
                {
                    String encoding = "UTF-8";
                    try
                    {
                        // NOTE(review): StreamUtilities is not in this file's
                        // import list -- confirm it resolves (same package or a
                        // missing import).
                        encoding = StreamUtilities.getEncoding(content);
                    }
                    catch (IOException e)
                    {
                        LOGGER.error(e.getMessage(), e);
                    }
                    if (LOGGER.isDebugEnabled())
                    {
                        LOGGER.debug("Using " + encoding + " encoding.");
                    }
                    InputSource input = new InputSource(content);
                    input.setEncoding(encoding);
                    SAXSource saxSource = new SAXSource(myXMLReader, input);
                    kml = (Kml)myUnmarshaller.unmarshal(saxSource);
                    replaceGeometries(kml);
                }
            }
            finally
            {
                // Always return this parser to the pool, even on failure.
                myParserPool.offer(this);
            }
            return kml;
        }

        /**
         * Replaces the jak geometries with any custom ones we have created.
         *
         * @param kml The kml to inspect, and if any geometries are encountered
         *            where we have custom ones, we will replace it with the
         *            custom one.
         */
        private void replaceGeometries(Kml kml)
        {
            if (kml.getFeature() instanceof Document)
            {
                Document document = (Document)kml.getFeature();
                replaceGeometries(document.getFeature());
            }
        }

        /**
         * Replaces the jak geometries with any custom ones we have created.
         *
         * @param features The features to inspect, and if any geometries are
         *            encountered where we have custom ones, we will replace it
         *            with the custom one.
         */
        private void replaceGeometries(List<Feature> features)
        {
            // Recurse into containers; rewrite Track and Polygon geometries on placemarks.
            for (Feature feature : features)
            {
                if (feature instanceof Document)
                {
                    replaceGeometries(((Document)feature).getFeature());
                }
                else if (feature instanceof Folder)
                {
                    replaceGeometries(((Folder)feature).getFeature());
                }
                else if (feature instanceof Placemark)
                {
                    Placemark placemark = (Placemark)feature;
                    if (placemark.getGeometry() instanceof Track)
                    {
                        // Round-trip the track through XML to convert the JAK
                        // type into our custom Track type.
                        Track jakTrack = (Track)placemark.getGeometry();
                        ByteArrayOutputStream output = new ByteArrayOutputStream();
                        try
                        {
                            XMLUtilities.writeXMLObject(jakTrack, output);
                            io.opensphere.kml.gx.Track customTrack = XMLUtilities.readXMLObject(
                                    new ByteArrayInputStream(output.toByteArray()), io.opensphere.kml.gx.Track.class);
                            placemark.setGeometry(customTrack);
                        }
                        catch (JAXBException e)
                        {
                            LOGGER.error(e, e);
                        }
                    }
                    else if (placemark.getGeometry() instanceof Polygon)
                    {
                        placemark.setGeometry(normalize((Polygon)placemark.getGeometry()));
                    }
                }
            }
        }

        /**
         * "Normalizes" the supplied polygon. KML polygons may be specified
         * erroneously with overlapping sections, which can cause failures
         * during rendering. The normalization will calculate the true external
         * boundary of the polygon without including any overlapped regions,
         * eliminating this issue.
         *
         * @param polygon the polygon to normalize.
         * @return the normalized polygon.
         */
        private Polygon normalize(final Polygon polygon)
        {
            // this seems very strange, but it seems to be the only way to get
            // around some very strange polygons:
            com.vividsolutions.jts.geom.Geometry jtsGeometry = convertToJTS(polygon).buffer(.001).buffer(-.001);
            if (jtsGeometry instanceof com.vividsolutions.jts.geom.Polygon)
            {
                return convertToKML((com.vividsolutions.jts.geom.Polygon)jtsGeometry);
            }
            // TODO: temporary fix because JTS Geometry.buffer() can sometimes
            // fail and return a badly formed multipolygon when a polygon
            // crosses the meridian, antimeridian, or equator - if the polygon
            // also overlaps, it may cause further issues with the kml loading
            // to return the polygon without normalizing it, so this needs to be
            // fixed in a better way later.
            return polygon;
        }

        /**
         * Converts the supplied JTS Polygon to a KML Polygon.
         *
         * @param jtsPolygon the JTS polygon to convert.
         * @return a KML Polygon created from the converted JTS polygon.
         */
        private Polygon convertToKML(com.vividsolutions.jts.geom.Polygon jtsPolygon)
        {
            Polygon polygon = new Polygon();
            polygon.setOuterBoundaryIs(convertToKmlBoundary(jtsPolygon.getExteriorRing()));
            for (int i = 0; i < jtsPolygon.getNumInteriorRing(); i++)
            {
                polygon.addToInnerBoundaryIs(convertToKmlBoundary(jtsPolygon.getInteriorRingN(i)));
            }
            return polygon;
        }

        /**
         * Converts the supplied KML Polygon to a JTS Polygon.
         *
         * @param polygon the KML polygon to convert.
         * @return a JTS Polygon created from the converted KML polygon.
         */
        private com.vividsolutions.jts.geom.Polygon convertToJTS(Polygon polygon)
        {
            com.vividsolutions.jts.geom.LinearRing jtsOuterRing = convertToJTSLinearRing(
                    polygon.getOuterBoundaryIs().getLinearRing());
            com.vividsolutions.jts.geom.LinearRing[] holes = polygon.getInnerBoundaryIs().stream()
                    .map(b -> convertToJTSLinearRing(b.getLinearRing())).toArray(com.vividsolutions.jts.geom.LinearRing[]::new);
            com.vividsolutions.jts.geom.Polygon jtsPolygon = new com.vividsolutions.jts.geom.Polygon(jtsOuterRing, holes,
                    JTSUtilities.GEOMETRY_FACTORY);
            return jtsPolygon;
        }

        /**
         * Converts the supplied JTS Line string to a KML {@link Boundary}.
         *
         * @param ring the ring to convert to a LinearRing.
         * @return a LinearRing generated from the supplied line string.
         */
        private Boundary convertToKmlBoundary(com.vividsolutions.jts.geom.LineString ring)
        {
            LinearRing kmlRing = new LinearRing();
            Arrays.stream(ring.getCoordinates()).forEach(c -> kmlRing.addToCoordinates(c.x, c.y, c.z));
            Boundary boundary = new Boundary();
            boundary.setLinearRing(kmlRing);
            return boundary;
        }

        /**
         * Converts the supplied KML {@link LinearRing} to a JTS
         * {@link com.vividsolutions.jts.geom.LinearRing} instance.
         *
         * @param ring the KML linear ring to convert.
         * @return a JTS linear ring generated from the supplied KML linear
         *         ring.
         */
        private com.vividsolutions.jts.geom.LinearRing convertToJTSLinearRing(LinearRing ring)
        {
            com.vividsolutions.jts.geom.Coordinate[] jtsCoordinates = ring.getCoordinates().stream()
                    .map(c -> new com.vividsolutions.jts.geom.Coordinate(c.getLongitude(), c.getLatitude(), c.getAltitude()))
                    .toArray(com.vividsolutions.jts.geom.Coordinate[]::new);
            CoordinateSequence sequence = CoordinateArraySequenceFactory.instance().create(jtsCoordinates);
            com.vividsolutions.jts.geom.LinearRing jtsOuterRing = new com.vividsolutions.jts.geom.LinearRing(sequence,
                    JTSUtilities.GEOMETRY_FACTORY);
            return jtsOuterRing;
        }
    }
}
|
// Public surface of the package: building blocks plus the ANN class, which is
// also the default export.
export { Signals } from './lib/signals';
export { Teacher } from './lib/teacher';
export { LayerType } from './lib/layers';
export { default, ANN } from './lib/ann';
export { activationFuncs } from './lib/activation-funcs';
|
#!/bin/bash
# Provision a machine with basic dev tools plus Docker, and give the current
# user access to the Docker daemon.
set -e

sudo apt-get update
# -y keeps the install non-interactive so the script can run unattended.
sudo apt-get install -y git vim docker.io docker-compose
# -a adds the user to the docker group. The original used -s, which is not a
# gpasswd option, and $user, which is normally unset; the login name is $USER.
sudo gpasswd -a "$USER" docker
|
def calculate_average(numbers):
    """Return the arithmetic mean of *numbers*.

    Args:
        numbers: An iterable of numeric values.

    Returns:
        The mean of the values.

    Raises:
        ValueError: If *numbers* is empty (the mean is undefined).
    """
    # Materialize once so we can both sum and count even if given an iterator.
    values = list(numbers)
    if not values:
        raise ValueError("cannot compute the average of an empty sequence")
    return sum(values) / len(values)


# The original named its parameter `list` (shadowing the builtin) and then
# called calculate_average(list) with the builtin *type* itself, which raised
# a TypeError at runtime. Use concrete sample data instead, and guard the
# demo so importing this module has no side effects.
if __name__ == "__main__":
    result = calculate_average([1, 2, 3, 4])
    print(result)
|
#!/bin/bash
# Sends a notification whenever someone is sending you a new, non HEARTBEAT-related message.

# Your Pushbullet API key here
APIKEY=""
# Your Callsign here
MYCALL=""

# Ensure the state file exists so the first `diff` below does not fail on a
# fresh system (the original errored until /tmp/js8c.old had been created).
touch /tmp/js8c.old

while true; do
    # Keep only directed (non-heartbeat) lines addressed to us.
    grep -v HEARTBEAT "${HOME}/.local/share/JS8Call/DIRECTED.TXT" |grep ": ${MYCALL}" > /tmp/js8c.new;
    # Anything present in the new snapshot but not the old one is a new message.
    NEW=$(diff /tmp/js8c.old /tmp/js8c.new |grep -o "[A-Z0-9]*: ${MYCALL}.*")
    cat /tmp/js8c.new > /tmp/js8c.old
    NEWCHARS=$(echo ${NEW} | wc -c)
    # `echo ""` still emits a newline, so "new content" means more than ~2 chars.
    if [ "${NEWCHARS}" -gt "3" ]; then
        echo 'New message';
        # Push a truncated (100 char) note via Pushbullet; fail quietly on timeout.
        curl https://api.pushbullet.com/api/pushes -u ${APIKEY}: \
            --output /dev/null --silent --max-time 5 \
            -X POST \
            -d type=note -d title="New JS8Call Message" \
            -d body="$(echo ${NEW} |head -c100)"
    fi
    sleep 10;
done
|
<gh_stars>0
package mapshaper
import (
"context"
"errors"
"os"
"os/exec"
)
// Mapshaper wraps a local mapshaper executable so callers can invoke it.
type Mapshaper struct {
	path string // filesystem path to the mapshaper binary
}
// NewMapshaper verifies that path refers to an existing non-directory file
// and returns a Mapshaper wrapping it. The ctx argument is currently unused
// but kept for signature symmetry with Call.
func NewMapshaper(ctx context.Context, path string) (*Mapshaper, error) {
	info, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	if info.IsDir() {
		// Go convention: error strings are lower-case and descriptive
		// (the original used "Invalid path").
		return nil, errors.New("invalid path: is a directory, not an executable")
	}
	ms := &Mapshaper{
		path: path,
	}
	return ms, nil
}
// Call runs the mapshaper binary with the supplied arguments and returns its
// standard output; cancelling ctx kills the child process.
func (ms *Mapshaper) Call(ctx context.Context, args ...string) ([]byte, error) {
	cmd := exec.CommandContext(ctx, ms.path, args...)
	return cmd.Output()
}
|
import {Profile} from './profile';
import {AuthorProfile} from './authorProfile';
import {Posting} from './posting';
import {Wallet} from './wallet';
import {Feed} from './feed';
import {SearchFeed} from './search';
import {PostDetails} from './post';
import {Notification} from './notification';
import {Login} from './login';
import {ResolveAuth} from './resolveAuth/ResolveAuth';
import {WelcomeScreen} from './application';
import {Settings} from './settings';
import {Signup} from './signup';
export {
Profile,
AuthorProfile,
Posting,
Wallet,
Feed,
SearchFeed,
PostDetails,
Notification,
Login,
ResolveAuth,
WelcomeScreen,
Settings,
Signup,
};
|
/* eslint-disable no-param-reassign */
import locale2 from 'locale2';
export default function locale(input, options = {}) {
if (locale2) options.locale = locale2.toLowerCase();
return input;
}
|
<reponame>ColFusion/PentahoKettle
package org.rzo.yajsw.os.posix;
import java.io.File;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.jboss.netty.logging.InternalLogger;
import org.rzo.yajsw.os.JavaHome;
/**
 * Posix implementation of {@link JavaHome}: resolves which java executable
 * the wrapper should launch, preferring an explicitly supplied binary and
 * falling back to the wrapper configuration, JAVA_HOME, and the PATH.
 */
public class PosixJavaHome implements JavaHome
{
    /** Wrapper configuration; never null after construction. */
    Configuration _config;
    /** Optional logger, injected via {@link #setLogger(InternalLogger)}. */
    InternalLogger _logger;

    /**
     * Constructor.
     *
     * @param config wrapper configuration; an empty configuration is used when null.
     */
    public PosixJavaHome(Configuration config)
    {
        if (config != null)
            _config = config;
        else
            _config = new BaseConfiguration();
    }

    /**
     * Resolves the java executable to launch.
     *
     * @param wrapperJava       candidate binary from the wrapper configuration (may be null)
     * @param customProcessName custom executable name; takes precedence over wrapperJava (may be null)
     * @return the first supplied candidate that exists and is executable,
     *         otherwise the result of the configuration/JAVA_HOME/PATH search.
     */
    public String findJava( String wrapperJava, String customProcessName )
    {
        File customProc = null;
        File wrapJava = null;

        // Search for JAVA if necessary ( nothing supplied )
        if ( wrapperJava == null && customProcessName == null )
            return findJava();

        customProc = ((customProcessName != null) ? new File( customProcessName ) : null);
        wrapJava = ((wrapperJava != null) ? new File( wrapperJava ) : null);

        // customProcessName takes precedences over wrapperJava
        if ( customProc != null && customProc.exists() && customProc.canExecute() ){
            return customProcessName;
        }
        else if ( wrapJava != null && wrapJava.exists() && wrapJava.canExecute() ){
            return wrapperJava;
        }
        else
            return findJava();
        // -old return wrapperJava == null ? "java" : wrapperJava;
    }

    /**
     * Searches the configuration, JAVA_HOME and the PATH for a usable java
     * binary. On success both wrapper.java.command and
     * wrapper.ntservice.java.command are updated with the absolute path.
     */
    private String findJava()
    {
        // Posix Version does not use wrapper.java.command like Win version does. ( whatever )
        // Find working java and equate to both
        File fJava = null;
        String java = null;

        // BUG FIX: the original called System.setProperty( "JAVA_HOME", "" ),
        // which *cleared* the JAVA_HOME system property as a side effect and
        // used its previous value. We only want to read the location, which on
        // POSIX systems lives in the process environment.
        String javaHome = System.getenv( "JAVA_HOME" );
        if ( javaHome == null )
            javaHome = "";

        // Candidate executables, in priority order.
        String javaFiles[] = new String[3];
        javaFiles[0] = _config.getString( "wrapper.java.command" );
        javaFiles[1] = _config.getString( "wrapper.ntservice.java.command" );
        javaFiles[2] = "java";
        for ( int idx = 0; (fJava == null && idx < javaFiles.length); idx++ )
        {
            String javaName;
            // loop 0: candidate as given; loop 1: candidate under JAVA_HOME/bin
            for ( int loop = 0; loop < 2; loop++ )
            {
                if ( javaFiles[idx] != null )
                {
                    javaName = ((loop == 0) ? javaFiles[idx] : javaHome + File.separator + "bin"
                        + File.separator + javaFiles[idx]);
                    File fJavaTmp = new File( javaName );
                    if ( fJavaTmp.exists() && fJavaTmp.canExecute() )
                    {
                        fJava = fJavaTmp;
                        break;
                    }
                }
            }
        }
        // if Regular java not found.... Search Path for JAVA's HOME
        if ( fJava == null )
        {
            // Check path for JAVA's HOME
            String home = findJavaHomeFromPath( null );
            if ( home != null )
            {
                String javaName;
                javaName = home + File.separator + "bin" + File.separator + "java";
                File fJavaTmp = new File( javaName );
                if ( fJavaTmp.exists() && fJavaTmp.canExecute() )
                {
                    fJava = fJavaTmp;
                }
            }
        }
        // if Regular java still not found.... bummer were done
        if ( fJava != null )
        {
            java = fJava.getAbsolutePath();
            // Posix Version does not use wrapper.java.command like Win version does. Update both
            _config.setProperty( "wrapper.java.command", java );
            _config.setProperty( "wrapper.ntservice.java.command", java );
        }
        if (java == null)
            java = _config.getString( "wrapper.java.command", "java" );
        return java;
    }

    /**
     * Returns a JAVA_HOME-like directory: the supplied candidate when it
     * exists, otherwise a jdk/jre ancestor derived from the PATH entries.
     */
    private String findJavaHomeFromPath( String javaHome )
    {
        if ( javaHome != null )
        {
            File fJavaHome = new File( javaHome );
            if ( fJavaHome.exists() )
                return javaHome;
        }
        // search java in environment path
        // BUG FIX: environment variable names are case-sensitive on POSIX;
        // the original looked up "path" and therefore always got null here.
        String pathEnv = System.getenv( "PATH" );
        if (pathEnv == null)
            return null;
        String[] paths = pathEnv.split( File.pathSeparator );
        for ( String path : paths )
        {
            if ( path.contains( "jdk" ) || path.contains( "jre" ) )
            {
                // A PATH entry such as /opt/jdk/bin containing "java" implies
                // its grandparent (/opt/jdk) is the JAVA_HOME.
                File fJavaHome = new File( path + File.separator + "java" );
                if ( fJavaHome.exists() )
                {
                    return fJavaHome.getParentFile().getParentFile().getAbsolutePath();
                }
            }
        }
        return null;
    }

    /** Injects the logger used by this instance. */
    public void setLogger(InternalLogger logger)
    {
        _logger = logger;
    }
}
|
package impl
import (
logging "gx/ipfs/QmbkT7eMTyXfpeyB3ZMxxcxg7XH8t6uXp49jqzz4HB7BGF/go-log"
"github.com/filecoin-project/go-filecoin/api"
"github.com/filecoin-project/go-filecoin/node"
)
// nodeAPI is the concrete implementation of the filecoin node API, fanning
// out to per-area sub-APIs (daemon, swarm).
type nodeAPI struct {
	node   *node.Node          // the running node this API fronts
	logger logging.EventLogger // event logger scoped to the API
	daemon *nodeDaemon         // daemon-control sub-API
	swarm  *nodeSwarm          // network/swarm sub-API
}

// Assert that nodeAPI fulfills the api.API interface.
var _ api.API = (*nodeAPI)(nil)
// New constructs a new instance of the API.
func New(node *node.Node) api.API {
api := &nodeAPI{
node: node,
logger: logging.Logger("api"),
}
api.daemon = newNodeDaemon(api)
api.swarm = newNodeSwarm(api)
return api
}
// Daemon returns the daemon-control portion of the API.
func (api *nodeAPI) Daemon() api.Daemon {
	return api.daemon
}

// Swarm returns the network/swarm portion of the API.
func (api *nodeAPI) Swarm() api.Swarm {
	return api.swarm
}
|
#!/bin/sh
# Launch the HyperDex search stress test against a 4-daemon cluster.
# The space declares an int key plus 32 indexed attribute columns (bit01..bit32).
SPACE="space search key int number attributes bit01, bit02, bit03, bit04, bit05, bit06, bit07, bit08, bit09, bit10, bit11, bit12, bit13, bit14, bit15, bit16, bit17, bit18, bit19, bit20, bit21, bit22, bit23, bit24, bit25, bit26, bit27, bit28, bit29, bit30, bit31, bit32 index bit01 index bit02 index bit03 index bit04 index bit05 index bit06 index bit07 index bit08 index bit09 index bit10 index bit11 index bit12 index bit13 index bit14 index bit15 index bit16 index bit17 index bit18 index bit19 index bit20 index bit21 index bit22 index bit23 index bit24 index bit25 index bit26 index bit27 index bit28 index bit29 index bit30 index bit31 index bit32 create 4 partitions tolerate 1 failures"
# runner.py boots the daemons and substitutes {HOST}/{PORT} for the coordinator.
exec python2 "${HYPERDEX_SRCDIR}"/test/runner.py --daemons=4 --space="${SPACE}" -- \
    "${HYPERDEX_BUILDDIR}"/test/search-stress-test --quiet -h {HOST} -p {PORT} -k int
|
#!/bin/bash
# Build Ruby 2.0.0-p594 for the cedar-14 stack inside the hone/ruby-builder image.
# Quote all path expansions so directories containing spaces do not split the
# arguments (the original left $OUTPUT_DIR/$CACHE_DIR and dirname unquoted).
source "$(dirname "$0")/../common.sh"
docker run -v "$OUTPUT_DIR:/tmp/output" -v "$CACHE_DIR:/tmp/cache" -e VERSION=2.0.0-p594 -e STACK=cedar-14 hone/ruby-builder:cedar-14
|
<gh_stars>0
import { isNumeric } from '../util/isNumeric';
import { Observable } from '../Observable';
import { async } from '../scheduler/async';
/**
 * Observable that emits sequential integers on a fixed period.
 * We need this JSDoc comment for affecting ESDoc.
 */
export class IntervalObservable extends Observable {
    /**
     * Invalid arguments silently fall back to the defaults (period 0, async
     * scheduler) rather than throwing.
     * @param {?=} period
     * @param {?=} scheduler
     */
    constructor(period = 0, scheduler = async) {
        super();
        this.period = period;
        this.scheduler = scheduler;
        // Guard: negative or non-numeric periods degrade to 0.
        if (!isNumeric(period) || period < 0) {
            this.period = 0;
        }
        // Guard: anything without a callable `schedule` degrades to async.
        if (!scheduler || typeof scheduler.schedule !== 'function') {
            this.scheduler = async;
        }
    }
    /**
     * Creates an Observable that emits sequential numbers every specified
     * interval of time, on a specified Scheduler.
     *
     * <span class="informal">Emits incremental numbers periodically in time.
     * </span>
     *
     * <img src="./img/interval.png" width="100%">
     *
     * `interval` returns an Observable that emits an infinite sequence of
     * ascending integers, with a constant interval of time of your choosing
     * between those emissions. The first emission is not sent immediately, but
     * only after the first period has passed. By default, this operator uses the
     * `async` Scheduler to provide a notion of time, but you may pass any
     * Scheduler to it.
     *
     * var numbers = Rx.Observable.interval(1000);
     * numbers.subscribe(x => console.log(x));
     *
     * @see {\@link timer}
     * @see {\@link delay}
     *
     * or the time unit determined by the scheduler's clock.
     * the emission of values, and providing a notion of "time".
     * interval.
     * @owner Observable
     * @param {?=} period
     * @param {?=} scheduler
     * @return {?}
     */
    static create(period = 0, scheduler = async) {
        return new IntervalObservable(period, scheduler);
    }
    /**
     * Scheduler work function: emits the current index and reschedules
     * itself. `this` here is the scheduler action, not the observable.
     * @param {?} state
     * @return {?}
     */
    static dispatch(state) {
        const { index, subscriber, period } = state;
        subscriber.next(index);
        // Stop rescheduling once the subscriber has unsubscribed/completed.
        if (subscriber.closed) {
            return;
        }
        state.index += 1;
        ((this)).schedule(state, period);
    }
    /**
     * Starts the periodic emission for one subscriber; the first value is
     * delivered only after the first period elapses.
     * @param {?} subscriber
     * @return {?}
     */
    _subscribe(subscriber) {
        const /** @type {?} */ index = 0;
        const /** @type {?} */ period = this.period;
        const /** @type {?} */ scheduler = this.scheduler;
        // Tie the scheduled work to the subscription so unsubscribe cancels it.
        subscriber.add(scheduler.schedule(IntervalObservable.dispatch, period, {
            index, subscriber, period
        }));
    }
}
|
<filename>framework/test/utils/configs/config_node.ts
/*
* Copyright © 2018 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { applicationConfigSchema } from '../../../src/schema/application_config_schema';
import { ApplicationConfig } from '../../../src/types';
/**
 * Builds a node ApplicationConfig by shallow-merging the supplied overrides
 * on top of the schema defaults (overrides win on key collisions).
 */
export const nodeConfig = (
	overriddenConfigProperties: Partial<ApplicationConfig> = {},
): ApplicationConfig =>
	Object.assign(
		{},
		applicationConfigSchema.default,
		overriddenConfigProperties,
	) as ApplicationConfig;
|
<gh_stars>0
/*
* Copyright (C) 2006-2011, SRI International (R)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#ifndef __OpenKarto_MetaClassHelper_h__
#define __OpenKarto_MetaClassHelper_h__
namespace karto
{
///** \addtogroup OpenKarto */
//@{
//@cond EXCLUDE
/**
 * Helper class to create MetaClass. Allows for setting attributes, parameters, constructors and base class information
 */
template <typename T>
class MetaClassHelper
{
public:
    /**
     * Constructor
     *
     * @param target meta class this helper populates
     */
    MetaClassHelper(MetaClass& target)
        : m_pMetaClass(&target)
        // NOTE(review): a MetaAttribute* initialized from a MetaClass* —
        // presumably MetaClass derives from MetaAttribute; confirm.
        , m_pAttributes(m_pMetaClass)
    {
    }

    /**
     * Add static base class
     */
    template <typename U>
    MetaClassHelper<T>& Base()
    {
        const MetaClass& baseClass = GetMetaClassByType<U>();
        karto::String baseName = baseClass.GetName();
        // Registering the same base twice is a programming error.
        karto_forEach(List<const MetaClass*>, &m_pMetaClass->m_BaseClasses)
        {
            assert((*iter)->GetName() != baseName);
        }
        m_pMetaClass->m_BaseClasses.Add(&baseClass);
        return *this;
    }

    /**
     * Add static attribute
     */
    MetaClassHelper<T>& Attribute(const karto::String& rAttributeName)
    {
        // Delegates to the value overload with an empty string value.
        return Attribute(rAttributeName, "");
    }

    /**
     * Add static attribute and value
     */
    template <typename U>
    MetaClassHelper<T>& Attribute(const karto::String& rAttributeName, const U& rValue)
    {
        // Duplicate attribute names are a programming error.
        assert(m_pAttributes && !m_pAttributes->HasAttribute(rAttributeName));
        m_pAttributes->AddAttribute(rAttributeName, rValue);
        return *this;
    }

    /**
     * Add Parameter - not implemented
     */
    template <typename F1, typename F2>
    MetaClassHelper<T>& Parameter(const karto::String& rParameterName, F1 accessor1, F2 accessor2)
    {
        return *this;
    }

    /**
     * Add constructor with zero arguments
     */
    MetaClassHelper<T>& Constructor0()
    {
        MetaConstructor* pConstructor = new MetaConstructorImpl0<T>;
        m_pMetaClass->m_Constructors.Add(pConstructor);
        return *this;
    }

    /**
     * Add constructor with one argument
     */
    template <typename A0>
    MetaClassHelper<T>& Constructor1()
    {
        MetaConstructor* pConstructor = new MetaConstructorImpl1<T, A0>;
        m_pMetaClass->m_Constructors.Add(pConstructor);
        return *this;
    }

    /**
     * Add constructor with two argument
     */
    template <typename A0, typename A1>
    MetaClassHelper<T>& Constructor2()
    {
        MetaConstructor* pConstructor = new MetaConstructorImpl2<T, A0, A1>;
        m_pMetaClass->m_Constructors.Add(pConstructor);
        return *this;
    }

private:
    MetaClass* m_pMetaClass;     // meta class being populated (not owned)
    MetaAttribute* m_pAttributes; // attribute sink for Attribute() calls
};
// @endcond
//@}
}
#endif // __OpenKarto_MetaClassHelper_h__
|
<filename>src/navigation/ProfileStack.js
/*
* Jira Ticket:
* Created Date: Wed, 4th Nov 2020, 08:52:21 am
* Author: <NAME>
* Email: <EMAIL>
* Copyright (c) 2020 The Distance
*/
import {createStackNavigator} from '@react-navigation/stack';
// Stack navigator dedicated to the profile flow; the consumer of this
// navigator registers the actual screens.
const ProfileStack = createStackNavigator();
export default ProfileStack;
|
<filename>src/save_db.c<gh_stars>0
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* load_db.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: rle <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/03/04 18:52:02 by mburson #+# #+# */
/* Updated: 2017/05/01 18:20:41 by rle ### ########.fr */
/* */
/* ************************************************************************** */
#include <ft_db.h>
/*
** db format:
** header
** fields
** names
** db struct
** db data
**
** is casting ssize_t to size_t bad??
*/
/*
** Writes the fixed-size header struct to fd.
** Returns 0 on success, -1 on error (g_error is set on a short write).
*/
static int	save_header(struct s_header *header, int fd)
{
	ssize_t	size;

	if (-1 == (size = write(fd, header, sizeof(*header))))
		return (-1);
	if (size != sizeof(*header))
	{
		g_error = "not everything got written\?\?";
		return (-1);
	}
	return (0);
}
/*
** Writes the field descriptor array to fd, followed by each field's name
** bytes. Returns 0 on success, -1 on error (g_error set on short writes).
*/
static int	save_header_fields(struct s_header *header, int fd)
{
	ssize_t	size;
	size_t	i;

	if (-1 == (size = write(fd, header->fields,
			sizeof(*header->fields) * header->field_count)))
		return (-1);
	if ((size_t)size != sizeof(*header->fields) * header->field_count)
	{
		g_error = "not everything got written\?\?";
		return (-1);
	}
	i = 0;
	while (i < header->field_count)
	{
		size = write(fd, header->fields[i].name, header->fields[i].name_size);
		if (size == -1)
			return (-1);
		if ((size_t)size != header->fields[i].name_size)
		{
			g_error = "not everything got written\?\?";
			return (-1);
		}
		i++;
	}
	return (0);
}
/*
** Writes the t_vec bookkeeping struct followed by its raw element data.
** Returns 0 on success, -1 on error (g_error set on short writes).
*/
static int	save_database(t_vec *db, int fd)
{
	ssize_t	size;

	if (-1 == (size = write(fd, db, sizeof(*db))))
		return (-1);
	if (size != sizeof(*db))
	{
		g_error = "not everything got written\?\?";
		return (-1);
	}
	if (-1 == (size = write(fd, db->data, db->elmnt_count * db->elmnt_size)))
		return (-1);
	if ((size_t)size != db->elmnt_count * db->elmnt_size)
	{
		g_error = "not everything got written\?\?";
		return (-1);
	}
	return (0);
}
/*
** Serializes the header, field descriptors, and database vector to disk.
** With no CLI argument the default file is used; one argument names the
** output file; more than one argument is an error.
** Returns 0 on success, -1 on failure (g_error may be set).
**
** Fixes over the original: `file` is always initialized (the old if/else-if
** chain left it indeterminate for argc < 1), and the fd is closed on every
** failure path (the old `|| close(fd)` chain leaked it when a save_* step
** failed, because || short-circuits).
*/
int	save_db(struct s_header *header, t_vec *db, int argc, char **argv)
{
	int		fd;
	char	*file;

	file = DEFAULT_FILE;
	if (argc == 2)
		file = argv[1];
	else if (argc > 2)
	{
		g_error = "bad useage in save (how did this even happen\?\?)";
		return (-1);
	}
	if (-1 == (fd = open(file, O_WRONLY | O_CREAT | O_TRUNC, 0666)))
		return (-1);
	if (-1 == save_header(header, fd)
		|| -1 == save_header_fields(header, fd)
		|| -1 == save_database(db, fd))
	{
		close(fd);
		return (-1);
	}
	if (-1 == close(fd))
		return (-1);
	return (0);
}
|
<reponame>x5z5c5/weboasis-repo.github.io<gh_stars>1-10
/* micropolisJS. Adapted by <NAME> from Micropolis.
*
* This code is released under the GNU GPL v3, with some additional terms.
* Please see the files LICENSE and COPYING for details. Alternatively,
* consult http://micropolisjs.graememcc.co.uk/LICENSE and
* http://micropolisjs.graememcc.co.uk/COPYING
*
*/
// Convert between pixel coordinates and 16x16-tile world coordinates.
Micro.pixToWorld = function(p) {
  return p >> 4; // divide by 16
};

Micro.worldToPix = function(w) {
  return w << 4; // multiply by 16
};
// Attempt to move 45° towards the desired direction, either
// clockwise or anticlockwise, whichever gets us there quicker.
// Directions are encoded 1..8 and wrap around.
Micro.turnTo = function(presentDir, desiredDir) {
  if (presentDir === desiredDir)
    return presentDir;
  if (presentDir < desiredDir) {
    // select clockwise or anticlockwise
    if (desiredDir - presentDir < 4) presentDir++;
    else presentDir--;
  } else {
    if (presentDir - desiredDir < 4) presentDir--;
    else presentDir++;
  }
  // Wrap around the 1..8 compass range.
  if (presentDir > 8) presentDir = 1;
  if (presentDir < 1) presentDir = 8;
  return presentDir;
};
// Trivial wrapper around Math.abs.
Micro.absoluteValue = function(x) {
  return Math.abs(x);
};

// Returns the tile value at pixel coordinates (x, y), or -1 when the
// corresponding world coordinate is outside the map bounds.
Micro.getTileValue = function(map, x, y) {
  var wX = Micro.pixToWorld(x);
  var wY = Micro.pixToWorld(y);
  if (wX < 0 || wX >= map.width || wY < 0 || wY >= map.height) return -1;
  return map.getTileValue(wX, wY);
};
// Choose the best direction to get from the origin to the destination
// If the destination is equidistant in both x and y deltas, a diagonal
// will be chosen, otherwise the most 'dominant' difference will be selected
// (so if a destination is 4 units north and 2 units east, north will be chosen).
// This code seems to always choose south if we're already there which seems like
// a bug
// Maps the computed sector index (0..12) onto a direction code (0 = none).
Micro.directionTable = [0, 3, 2, 1, 3, 4, 5, 7, 6, 5, 7, 8, 1];
Micro.getDir = function(orgX, orgY, destX, destY) {
  var deltaX = destX - orgX;
  var deltaY = destY - orgY;
  var i;
  // Pick the base sector from the signs of the deltas...
  if (deltaX < 0) {
    if (deltaY < 0) { i = 11; } else { i = 8; }
  } else {
    if (deltaY < 0) { i = 2; } else { i = 5; }
  }
  deltaX = Math.abs(deltaX);
  deltaY = Math.abs(deltaY);
  // ...then nudge towards the dominant axis when one delta clearly wins.
  if (deltaX * 2 < deltaY) i++;
  else if (deltaY * 2 < deltaX) i--;
  if (i < 0 || i > 12) i = 0;
  return Micro.directionTable[i];
};
// Manhattan distance between the two points.
Micro.absoluteDistance = function(orgX, orgY, destX, destY) {
  var deltaX = destX - orgX;
  var deltaY = destY - orgY;
  return Math.abs(deltaX) + Math.abs(deltaY);
};
// True for tiles that carry power or rail across water (the "wet"
// infrastructure tiles: power lines, rails, bridges); false otherwise.
Micro.checkWet = function(tileValue) {
  return tileValue === Tile.HPOWER || tileValue === Tile.VPOWER ||
         tileValue === Tile.HRAIL || tileValue === Tile.VRAIL ||
         tileValue === Tile.BRWH || tileValue === Tile.BRWV;
};
// Destroys the map tile at pixel coordinates (ox, oy): fires zones,
// reverts wet/bridge tiles to river, and leaves an explosion tile on
// ordinary combustible land. No-op outside the map or on bare terrain.
Micro.destroyMapTile = function(spriteManager, map, blockMaps, ox, oy) {
  var x = Micro.pixToWorld(ox);
  var y = Micro.pixToWorld(oy);
  if (!map.testBounds(x, y)) return;
  var tile = map.getTile(x, y);
  var tileValue = tile.getValue();
  // Values below TREEBASE are terrain that cannot be destroyed.
  if (tileValue < Tile.TREEBASE) return;
  if (!tile.isCombustible()) {
    // Non-combustible road tiles (bridges) collapse into river.
    if (tileValue >= Tile.ROADBASE && tileValue <= Tile.LASTROAD) map.setTo(x, y, new Micro.Tile(Tile.RIVER));
    return;
  }
  if (tile.isZone()) {
    Micro.fireZone(map, x, y, blockMaps);
    if (tileValue > Tile.RZB) spriteManager.makeExplosionAt(ox, oy);
  }
  // Wet infrastructure reverts to river; dry land gets an explosion tile.
  if (Micro.checkWet(tileValue)) map.setTo(x, y, new Micro.Tile(Tile.RIVER));
  else map.setTo(x, y, new Micro.Tile(Tile.TINYEXP, Tile.BULLBIT | Tile.ANIMBIT));
};
// Manhattan distance between two points (same metric as absoluteDistance).
Micro.getDistance = function(x1, y1, x2, y2) {
  return Math.abs(x1 - x2) + Math.abs(y1 - y2);
};

// Two sprites collide when both are active (frame !== 0) and within 30
// pixels Manhattan distance of each other.
Micro.checkSpriteCollision = function(s1, s2) {
  return s1.frame !== 0 && s2.frame !== 0 && Micro.getDistance(s1.x, s1.y, s2.x, s2.y) < 30;
};
|
<filename>javascript/extractor/src/com/semmle/js/extractor/test/NumericSeparatorTests.java
package com.semmle.js.extractor.test;
import static org.junit.Assert.*;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.semmle.jcorn.ESNextParser;
import com.semmle.jcorn.Options;
import com.semmle.jcorn.SyntaxError;
import com.semmle.js.ast.ExpressionStatement;
import com.semmle.js.ast.Literal;
import com.semmle.js.ast.Program;
import org.junit.Test;
/**
 * Parser tests for numeric-separator literals (e.g. {@code 0b01_00}).
 */
public class NumericSeparatorTests {
  /**
   * Parses {@code src} as an ESNext program. When {@code numVal} is non-null
   * the source must parse to a single expression statement whose literal
   * equals it; when {@code numVal} is null the parse must fail with a
   * {@link SyntaxError}.
   */
  private void test(String src, Integer numVal) {
    try {
      Program p = new ESNextParser(new Options().esnext(true), src, 0).parse();
      assertNotNull(numVal);
      assertEquals(1, p.getBody().size());
      assertTrue(p.getBody().get(0) instanceof ExpressionStatement);
      ExpressionStatement exprStmt = (ExpressionStatement) p.getBody().get(0);
      assertTrue(exprStmt.getExpression() instanceof Literal);
      assertEquals(numVal.longValue(), ((Literal) exprStmt.getExpression()).getValue());
    } catch (SyntaxError e) {
      // A SyntaxError is only acceptable when the case expected failure.
      assertNull(e.toString(), numVal);
    }
  }

  @Test
  public void test() {
    // Underscores are legal only between digits: a bare, doubled, or
    // trailing underscore must be rejected (null expected value).
    test("0b_", null);
    test("0b0_1", 0b01);
    test("0B0_1", 0b01);
    test("0b0_10", 0b010);
    test("0B0_10", 0b010);
    test("0b01_0", 0b010);
    test("0B01_0", 0b010);
    test("0b01_00", 0b0100);
    test("0B01_00", 0b0100);
    test("0b0__0", null);
    test("0b0_", null);
  }
}
|
<gh_stars>1-10
/*****************************************************************************/
/* */
/* stmt.h */
/* */
/* Parse a statement */
/* */
/* */
/* */
/* (C) 1998-2008 <NAME> */
/* Roemerstrasse 52 */
/* D-70794 Filderstadt */
/* EMail: <EMAIL> */
/* */
/* */
/* This software is provided 'as-is', without any expressed or implied */
/* warranty. In no event will the authors be held liable for any damages */
/* arising from the use of this software. */
/* */
/* Permission is granted to anyone to use this software for any purpose, */
/* including commercial applications, and to alter it and redistribute it */
/* freely, subject to the following restrictions: */
/* */
/* 1. The origin of this software must not be misrepresented; you must not */
/* claim that you wrote the original software. If you use this software */
/* in a product, an acknowledgment in the product documentation would be */
/* appreciated but is not required. */
/* 2. Altered source versions must be plainly marked as such, and must not */
/* be misrepresented as being the original software. */
/* 3. This notice may not be removed or altered from any source */
/* distribution. */
/* */
/*****************************************************************************/
#ifndef STMT_H
#define STMT_H

/*****************************************************************************/
/*                                   Code                                    */
/*****************************************************************************/

/* Top-level entry point of the statement parser. */
int Statement (int* PendingToken);
/* Statement parser. Returns 1 if the statement does a return/break, returns
** 0 otherwise. If the PendingToken pointer is not NULL, the function will
** not skip the terminating token of the statement (closing brace or
** semicolon), but store true if there is a pending token, and false if there
** is none. The token is always checked, so there is no need for the caller to
** check this token, it must be skipped, however. If the argument pointer is
** NULL, the function will skip the token.
*/

/* End of stmt.h */

#endif
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build-phase script.
# NOTE(review): this file uses bash features ([[ ]], arrays, `function`)
# under a /bin/sh shebang; it works where sh is bash-compatible (macOS) —
# confirm before running under a strict POSIX sh.
set -e
set -u
set -o pipefail

# Report the failing script line through Xcode's error format.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Resolves $1 against BUILT_PRODUCTS_DIR (full path, then basename, then as
# given), rsyncs it into the app's Frameworks folder, strips architectures
# the target is not building for, and re-signs the result.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Stages the dSYM in DERIVED_FILES_DIR, strips unused architectures, then
# moves it to DWARF_DSYM_FOLDER_PATH (or touches a placeholder so Xcode's
# input/output tracking stays satisfied).
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# into BUILT_PRODUCTS_DIR so bitcode symbolication keeps working.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# No-op when code signing is disabled in the build settings. With parallel
# signing enabled the codesign invocation is backgrounded ("&") and awaited
# at the end of the script.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes slices of $1 that are not in $ARCHS. Communicates via
# STRIP_BINARY_RETVAL: 1 = binary is usable (stripped or already fine),
# 0 = no slice matched the build architectures (warning emitted).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the vendored frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/EasyTransitions-iOS/EasyTransitions.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/EasyTransitions-iOS/EasyTransitions.framework"
fi
# Wait for any backgrounded codesign jobs started under parallel signing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
// Registration page
pagesLoadData.register = {
  link: '?page=part.register',
  onLoad: function()
  {
    Iridium.Init.launch(content);
    // Attach and enable the captcha widget used by the registration form.
    Captcha.set(document.getElementById('captcha-img'), document.getElementById('captcha-bar'));
    Captcha.enable();
    document.getElementById('register-form').addEventListener('submit', function(e)
    {
      e.preventDefault();
      var _ = this;
      // POST the form data together with the current captcha hash.
      Iridium.Net.post(
        this.getAttribute('action'),
        Iridium.merge(getFormData(_), { captcha_hash: Captcha.getHash() }),
        function(result)
        {
          if(result.error)
          {
            var error = result.error;
            // Input filter error
            // Highlight the input field with error
            if(error.exception_class === 'InputFilterException')
            {
              displayFieldError(_.elements[error.specific.value_name]);
            }
            else if(error.exception_class === 'OperationException'
              || (error.exception_class === 'RestrictionException'
              && error.specific.restriction_class === 'CaptchaRestriction'))
            {
              // Show the message inline: fade in, hold ~2s, fade out, clear.
              var errorContainer = document.getElementById('error-message');
              errorContainer.innerHTML = error.message;
              new Iridium.Animation({
                element: errorContainer,
                duration: 300,
                animation: 'fadeIn',
                onStop: function()
                {
                  setTimeout(function()
                  {
                    new Iridium.Animation({
                      element: errorContainer,
                      duration: 300,
                      animation: 'fadeOut',
                      onStop: function() { errorContainer.innerHTML = ''; }
                    })
                  }, 2000);
                }
              });
            }
            else { displayError(lang.reg.reg, error.message); }
            return;
          }
          // 0 = registered; 1 = registered and confirmation e-mail sent.
          if(result === 0 || result === 1)
          {
            var content = '<p>' + lang.reg.success;
            if(result === 1) { content += ' ' + lang.reg.successEmail; }
            new Iridium.Popup({
              header: lang.reg.successHeader,
              content: content + '</p>',
              windowClass: 'window-success',
              closeButton: true,
              closeButtonContent: getLoadedIcon('cross'),
              onHide: function() { loadPage('login'); }
            }).show();
          }
        },
        Iridium.Net.DataType.JSON,
        // NOTE(review): '' joins type and data with no separator — probably
        // meant ' '. Left unchanged here (runtime string).
        function(type, data) { console.error(type + '' + data); }
      );
    });
  },
  onClose: function() { Captcha.disable(); }
};
|
import React, { Component } from 'react';
import { BrowserRouter as Router, Switch, Route} from 'react-router-dom';
// components
import { Nav } from './components/Nav';
// views
import Home from './views/home';
import AddShirts from './views/shirts';
import AddPants from './views/pants';
import AddColors from './views/colors';
/**
 * Root application component: renders the navigation bar and the
 * client-side routed views.
 */
class App extends Component {
  render() {
    return (
      <div id="main">
        {/* NOTE(review): <Nav /> sits OUTSIDE <Router>; if Nav renders
            react-router <Link>s it will throw at runtime — confirm Nav
            uses plain anchors, or move it inside the Router. */}
        <Nav />
        <Router>
          <Switch>
            {/* `exact` keeps '/' from shadowing the other paths. */}
            <Route path='/' exact component={Home} />
            <Route path='/add-shirts' exact component={AddShirts} />
            <Route path='/add-pants' exact component={AddPants} />
            <Route path='/add-colors' exact component={AddColors} />
          </Switch>
        </Router>
      </div>
    );
  }
}
export default App;
|
// Minimal Express service exposing the current server time.
const express = require('express');

const app = express();

// GET /api/date -> { date: "<ISO-8601 timestamp>" }
const dateHandler = (req, res) => {
  res.json({
    date: new Date().toISOString(),
  });
};
app.get('/api/date', dateHandler);

// Honour a platform-provided port, defaulting to 5000 for local runs.
const port = process.env.PORT || 5000;
app.listen(port, () => console.log(`Listening on port ${port}`));
|
/**
* Copyright 2014 isandlaTech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.psem2m.isolates.ui.admin.api;
import javax.swing.Icon;
/**
* @author isandlatech (www.isandlatech.com) - ogattaz
*
*/
/**
 * Service for registering and removing panels in the isolates admin UI.
 */
public interface IUiAdminSvc {

    /**
     * Creates and registers a new UiAdminPanel.
     *
     * @param aName
     *            the name of the new UiAdminPanel
     * @param aTip
     *            the tip of the new UiAdminPanel. null is accepted.
     * @param icon
     *            the icon of the new UiAdminPanel (presumably may be null
     *            like the tip — confirm with implementations)
     * @param aControler
     *            the controller associated with the new panel
     * @param aLocation
     *            the location of the panel in the admin UI
     * @return the instance of the added UiAdminPanel
     * @throws Exception
     *             if the panel cannot be created or registered
     */
    IUiAdminPanel newUiAdminPanel(String aName, String aTip, Icon icon,
            IUiAdminPanelControler aControler, EUiAdminPanelLocation aLocation)
            throws Exception;

    /**
     * Removes a previously added UiAdminPanel.
     *
     * @param aUiAdminPanel
     *            the panel to remove
     */
    void removeUiAdminPanel(IUiAdminPanel aUiAdminPanel);
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package co.edu.uniandes.csw.series.ejb;
import co.edu.uniandes.csw.series.entities.PersonajeEntity;
import co.edu.uniandes.csw.series.entities.SerieEntity;
import co.edu.uniandes.csw.series.exceptions.BusinessLogicException;
import co.edu.uniandes.csw.series.persistence.SeriePersistence;
import java.util.List;
import javax.ejb.Stateless;
import javax.inject.Inject;
/**
 * Business-logic bean for {@link SerieEntity} operations.
 *
 * @author <NAME>
 */
@Stateless
public class SerieLogic {

    // Persistence gateway for series (CDI injection).
    @Inject
    private SeriePersistence persistence;

    /**
     * Looks up a series by its id.
     *
     * @param id the series id
     * @return the matching entity, or null when none exists
     */
    public SerieEntity getSerie(Long id) {
        return persistence.find(id);
    }

    /**
     * Creates a new series after validating the business rules: the name must
     * be unique and the description must be longer than 30 characters.
     *
     * @param entity the series to persist
     * @return the persisted entity
     * @throws BusinessLogicException if the name already exists, or the
     *         description is missing or 30 characters or shorter
     */
    public SerieEntity createSerie(SerieEntity entity) throws BusinessLogicException {
        SerieEntity refEntity = persistence.findByName(entity.getName());
        if (refEntity != null) {
            throw new BusinessLogicException("La serie con nombre " + entity.getName() + " ya existe.");
        }
        // Guard against a null description (previously an unhandled
        // NullPointerException) and report an accurate message: the rule
        // rejects descriptions of 30 characters or fewer. The original text
        // also lacked the space after the name and misspelled "descripción".
        if (entity.getDescription() == null || entity.getDescription().length() <= 30) {
            throw new BusinessLogicException("La serie " + entity.getName()
                    + " tiene una descripción de 30 caracteres o menos");
        }
        return persistence.create(entity);
    }

    /**
     * Deletes a series.
     *
     * NOTE(review): the guard throws when the series has NO personajes, i.e.
     * only series that still have characters can be deleted. That matches the
     * original message but looks inverted relative to the usual "no delete
     * while children exist" rule — confirm the intended business rule.
     *
     * @param entity the series to delete
     * @throws BusinessLogicException if the series has no personajes
     */
    public void deleteSerie(SerieEntity entity) throws BusinessLogicException {
        List<PersonajeEntity> personajes = entity.getPersonajes();
        if (personajes.isEmpty()) {
            throw new BusinessLogicException("La serie " + entity.getName() + " debe tener personajes para ser eliminada.");
        }
        persistence.delete(entity.getId());
    }
}
|
<filename>resources/js/components/ChangePasswordDialog.js
import React, { useState } from 'react';
import Button from '@material-ui/core/Button';
import Dialog from '@material-ui/core/Dialog';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import DialogContentText from '@material-ui/core/DialogContentText';
import DialogTitle from '@material-ui/core/DialogTitle';
import TextField from '@material-ui/core/TextField';
import { makeStyles } from '@material-ui/core/styles';
import Grid from '@material-ui/core/Grid';
import CircularProgress from '@material-ui/core/CircularProgress';
import { Alert } from '@material-ui/lab';
import Typography from "@material-ui/core/Typography";
import Box from "@material-ui/core/Box";
import {
useHistory
} from "react-router-dom";
// JSS styles for the change-password dialog (Material-UI v4 makeStyles hook).
const useStyles = makeStyles((theme) => ({
  root: {
    // Uniform spacing and a fixed width for every direct child of the root.
    '& > *': {
      margin: theme.spacing(1),
      width: '25ch',
    },
  },
  // Placeholder rule kept so `classes.button` resolves where it is referenced.
  button: {
  },
  alert: {
    // Center the alert's message block and stack its lines vertically.
    '& .MuiAlert-message': {
      padding: 0,
      display: 'flex',
      justifyContent: 'center',
      flexDirection: 'column'
    },
    '& .MuiTypography-caption': {
      marginBottom: 0
    },
    '& .MuiAlert-icon': {
      padding: 0
    }
  }
}));
function ChangePasswordDialog({ isOpen, hideChangePasswordDialog, idTaikhoan, showSuccessChangePasswordDialogOpen }) {
const classes = useStyles();
const [password1, setPassword1] = useState('');
const [password2, setPassword2] = useState('');
const [errors, setErrors] = useState([]);
const [isLoading, setLoader] = useState(false);
let history = useHistory();
function handlePassword1Change(e) {
setPassword1(e.target.value);
}
function handlePassword2Change(e) {
setPassword2(e.target.value);
}
function changePassword(e) {
e.preventDefault();
if(password1 != password2) {
let errors = [];
errors.push("Mật khẩu không khớp");
setErrors([...errors]);
return;
}
if(password1 > 0) {
setLoader(true);
let token = localStorage.getItem("token");
let data = {
idTaikhoan,
password : <PASSWORD>
}
//send ajax
fetch('/api/changepassword', {
method: 'post',
headers: {
'X-CSRF-TOKEN': document.querySelector('meta[name="csrf-token"]').getAttribute('content'),
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + token
},
body: JSON.stringify(data)
})
.then((response) => response.json())
.then((data) => {
setLoader(false);
localStorage.removeItem('token');
hideChangePasswordDialogChild();
showSuccessChangePasswordDialogOpen();
})
.catch((error) => {
if (error.status == 401) {
localStorage.removeItem("token");
history.push('/dangnhap');
}
});
}
}
function hideChangePasswordDialogChild() {
setErrors([]);
hideChangePasswordDialog();
}
return (
<div>
<Dialog
open={isOpen}
aria-labelledby="alert-dialog-title"
aria-describedby="alert-dialog-description"
// fullWidth={true}
// maxWidth="xs"
>
<DialogTitle id="alert-dialog-title">{"Thay Đổi Mật Khẩu"}</DialogTitle>
<DialogContent>
<Grid container spacing={1} justify="center" direction="column" alignItems="center">
<Grid item sm={12}>
<TextField
label="Mật khẩu mới"
type="password"
fullWidth
variant="outlined"
onChange={handlePassword1Change}
size="small"
/>
</Grid>
<Grid item sm={12}>
<TextField
label="Mật khẩu xác nhận"
type="password"
fullWidth
variant="outlined"
onChange={handlePassword2Change}
size="small"
/>
</Grid>
</Grid>
{!!errors && errors.length > 0 ?
<Box mt={1}>
<Alert severity="error" className={classes.alert}>
{errors.map(i =>
<Typography variant="caption" display="block" gutterBottom>
- {i}
</Typography>)}
</Alert></Box> : ''
}
</DialogContent>
<DialogActions>
<Button variant="outlined" color="primary" className={classes.button} onClick={changePassword} disabled={isLoading}>
{isLoading ? <CircularProgress color="primary" size={24} className={classes.loader} /> : "Thay đổi"}
</Button>
<Button variant="outlined" onClick={hideChangePasswordDialogChild} autoFocus>
Hủy Bỏ
</Button>
</DialogActions>
</Dialog >
</div >
);
}
export default ChangePasswordDialog;
|
#
# Control the system
#
# modules
msu_require "console"
# shutting down the computer, with ease
# ${1} - duration. After how long to stop the computer.
# Shut down the computer, with ease.
# ${1} - duration: after how long to stop the computer (defaults to "now").
function down() {
  local duration="${1:-now}"
  # Ask for confirmation if we are shutting down NOW.
  if [ "${duration}" == "now" ]
  then
    yes_no "shutdown right now" || return 1
  fi
  # BUGFIX: quote the expansion so the argument cannot undergo word
  # splitting or globbing before reaching shutdown.
  sudo shutdown -P "${duration}"
}
|
<filename>src/index.js
class Rotator {
  /**
   * Main rotator class: tracks a rotation angle driven by mouse/touch drags.
   */
  constructor() {
    /**
     * Event handlers for rotation.
     * @type {((this: Rotator, alpha: number) => void)[]}
     */
    this.rotationHandlers = [];
    /**
     * The current rotation in degrees. 0 points right; positive values go
     * counter-clockwise.
     * @type {number}
     */
    this.currentAlpha = 0;
    /**
     * If the rotator is currently in rotation state.
     * @type {boolean}
     */
    this.rotating = false;
    /**
     * The rotation when the rotation process is started.
     * @type {number}
     * @private
     */
    this._rotationStart = 0;
    /**
     * The alpha of the mouse/finger when the rotation process is started.
     * @type {number}
     * @private
     */
    this._userRotationStart = 0;
  }

  /**
   * Attach touch events to element.
   * @param {HTMLElement} element The element to add the listeners to
   */
  touchEvents(element) {
    /** @type {typeof HTMLElement.addEventListener} */
    const on = element.on || element.addEventListener;
    // Convert the first touch point to coordinates relative to the element's
    // center (x grows right, y grows up).
    function calcXY(/** @type {TouchEvent} */ e) {
      const centerX = element.offsetWidth / 2;
      const centerY = element.offsetHeight / 2;
      const relativeX = e.touches[0].clientX - element.offsetLeft;
      const relativeY = e.touches[0].clientY - element.offsetTop;
      return [relativeX - centerX, centerY - relativeY];
    }
    on("touchstart", e => {
      e.preventDefault();
      this.rotateStart(...calcXY(e));
    });
    on("touchmove", e => {
      if (!this.rotating) return;
      e.preventDefault();
      this.rotateMove(...calcXY(e));
    });
    on("touchend", e => {
      e.preventDefault();
      this.rotateStop();
    });
  }

  /**
   * Attach mouse events to element.
   * @param {HTMLElement} element The element to add the listeners to
   */
  mouseEvents(element) {
    const on = element.on || element.addEventListener;
    // Same center-relative conversion as in touchEvents, for mouse events.
    function calcXY(/** @type {MouseEvent} */ e) {
      const centerX = element.offsetWidth / 2;
      const centerY = element.offsetHeight / 2;
      const relativeX = e.clientX - element.offsetLeft;
      const relativeY = e.clientY - element.offsetTop;
      return [relativeX - centerX, centerY - relativeY];
    }
    on("mousedown", e => {
      e.preventDefault();
      this.rotateStart(...calcXY(e));
    });
    on("mousemove", e => {
      if (!this.rotating) return;
      e.preventDefault();
      this.rotateMove(...calcXY(e));
    });
    on("mouseup", e => {
      e.preventDefault();
      this.rotateStop();
    });
  }

  /**
   * Initializes the rotation process.
   * Important: X and Y are values relative to the center of the object positive to top-right.
   * @param {number} x X value of current position
   * @param {number} y Y value of current position
   */
  rotateStart(x, y) {
    this.rotating = true;
    this._rotationStart = this.currentAlpha;
    this._userRotationStart = this._calcAlpha(x, y);
  }

  /**
   * Rotates to the specified X and Y values relative to the starting position. Only works if rotateStart was called before.
   * Important: X and Y are values relative to the center of the object positive to top-right.
   * @param {number} x X value of current position
   * @param {number} y Y value of current position
   */
  rotateMove(x, y) {
    if (!this.rotating) return;
    this.currentAlpha = (this._calcAlpha(x, y) - this._userRotationStart + this._rotationStart) % 360;
    this.rotationHandlers.forEach(h => h.call(this, this.currentAlpha));
  }

  /**
   * Stops the rotation process.
   */
  rotateStop() {
    this.rotating = false;
    this._rotationStart = 0;
    this._userRotationStart = 0;
  }

  /**
   * Calc the alpha relative to center point.
   * @param {number} x
   * @param {number} y
   * @returns {number} Calculated alpha value
   * @private
   */
  _calcAlpha(x, y) {
    // atan yields (-90, 90) for the right half-plane; shift into [0, 360)
    // depending on the quadrant.
    let a = 180 * Math.atan(y / x) / Math.PI;
    // FIX: the original had two branches for the left half-plane that both
    // added 180 (`a = a + 180` and `a += 180`); merged into one condition.
    if (!this._isPos(x)) a += 180;      // quadrants II and III
    else if (!this._isPos(y)) a += 360; // quadrant IV
    return a;
  }

  /**
   * Check if number is positive (zero counts as positive).
   * @param {number} num
   * @returns {boolean} If number is positive
   * @private
   */
  _isPos(num) {
    return num == Math.abs(num);
  }

  /**
   * Add a new rotation handler to the list. Will be executed when the element is rotated.
   * @param {(this: Rotator, alpha: number) => void} func Event handler
   */
  addRotationHandler(func) {
    this.rotationHandlers.push(func);
  }

  /**
   * Removes a rotation handler from the list.
   * @param {(this: Rotator, alpha: number) => void} func Event handler
   */
  removeRotationHandler(func) {
    const index = this.rotationHandlers.indexOf(func);
    if (index > -1) this.rotationHandlers.splice(index, 1);
  }

  /**
   * Add all events to the specified element.
   * @param {HTMLElement|string} element The element to use
   * @returns {Rotator} The generated rotator
   */
  static hookToElement(element) {
    if (typeof element == "string") {
      element = document.getElementById(element) || document.querySelector(element);
    }
    const rotator = new Rotator();
    rotator.touchEvents(element);
    rotator.mouseEvents(element);
    rotator.addRotationHandler(alpha => {
      element.style.transform = `rotate(${(0 - alpha).toFixed(2)}deg)`;
      element.style.transformOrigin = "center center";
    });
    return rotator;
  }
}
|
//index.js
// Get the app instance.
var app = getApp()
var dialog = require("../../utils/dialog.js")
var wxNotificationCenter = require("../../utils/WxNotificationCenter.js")
Page({
  data: {
    contentList:[],
    currentType:wx.getStorageSync('currentType'),
    types:[]
  },
  // Load the list for the first visible type.
  onLoad:function(){
    this.setData({
      types:wx.getStorageSync('types') ? wx.getStorageSync('types') : app.globalData.types
    })
    // No current type persisted yet: pick the first type flagged as visible
    // (every() stops at the first callback returning false).
    if(!this.data.currentType){
      let that = this
      this.data.types.every(function(item){
        if(item.is_show){
          wx.setStorageSync('currentType', item.value)
          that.setData({currentType:item.value})
          return false
        }else{
          return true
        }
      })
    }
    if(this.data.currentType){
      this.getList(this.data.currentType)
    }
    // Register the notification listener.
    wxNotificationCenter.addNotification("typesChangeNotification",this.typesChangeNotificationHandler,this)
  },
  // Handle the notification sent by the type-editing page when the type
  // labels were modified: re-read the persisted types and reload the list.
  typesChangeNotificationHandler:function(){
    this.setData({
      types:wx.getStorageSync('types'),
      currentType:wx.getStorageSync('currentType')
    })
    this.getList(wx.getStorageSync('currentType'))
  },
  // Fetch the picture list for the given type and store it in contentList.
  getList:function(type){
    dialog.loading()
    var that = this
    // Request the data.
    wx.request({
      url:app.globalData.api.dbmeizhiurl+"pic"+"?type="+type,
      success:function(ret){
        ret = ret['data']
        if(ret['code'] == 0 ){
          that.setData({
            contentList:ret['data']
          })
        }else{
          setTimeout(function(){
            dialog.toast("网络超时啦~")
          },1)
        }
      },
      complete:function(){
        wx.stopPullDownRefresh()
        setTimeout(function(){
          dialog.hide()
        },1000)
      }
    })
  },
  onPullDownRefresh:function(){
    this.getList(this.data.currentType)
  },
  // Tap on one of the title bar items: switch the current type and reload.
  changeType:function(e){
    var type = e.currentTarget.dataset.value
    if(type == this.data.currentType){
      return;
    }
    this.setData({currentType:type})
    app.globalData.currentType = type
    this.getList(type)
  },
  gotoTypeEdit:function(e){
    wx.navigateTo({
      url: '../types/types?id=1',
    })
  },
  // Open the album page, extracting the numeric id from the item's URL-like id.
  gotoAlbum:function(e){
    console.log("gotoAlbum");
    let param = e.currentTarget.dataset,
    index = param.id.lastIndexOf("\/"),
    title = param.title,
    // id=param.id.replace(/[^0-9]/ig,"")
    id = param.id.substring(index + 1, param.id.length).replace(/[^0-9]/ig,"");
    console.log("param: " + param);
    console.log("title: " + title);
    console.log("id: " + id);
    var url = "../album/album?title="+title+"&id="+id;
    console.log("ready");
    wx.navigateTo({
      url:url,
      success: function(res){
        console.log('跳转到news页面成功')// success
      },
      fail: function() {
        console.log('跳转到news页面失败') // fail
      },
      complete: function() {
        console.log('跳转到news页面完成') // complete
      }
    })
  }
})
|
# Set up the model
# NOTE(review): relies on Sequential/Dense (keras) and X_train/y_train being
# defined earlier in the file — confirm the imports and data prep upstream.
model = Sequential()
# Input layer sized to the training features; two hidden layers of 64 ReLU units.
model.add(Dense(64, input_shape=(X_train.shape[1],), activation='relu'))
model.add(Dense(64, activation='relu'))
# Single linear output unit — a regression head.
model.add(Dense(1))
# Compile the model for regression (MSE loss, Adam optimizer).
model.compile(loss='mean_squared_error', optimizer='adam')
# Fit the model
model.fit(X_train, y_train, epochs=20, batch_size=32)
|
# Source this script! (the eval must run in the CALLER's shell so the
# docker-env variables persist after the script finishes)
minikube start
# Point the local docker CLI at the Docker daemon inside the minikube VM.
eval $(minikube docker-env)
|
import {
TransactionCreateOptions,
TransactionAllOptions,
TransactionCalculateInstallmentsAmountOptions,
TransactionFindOptions,
TransactionCaptureOptions,
TransactionRefundOptions
} from './options';
import { Transaction, CalculateInstallmentsAmount, CardHashKey } from './responses';
// Ambient type declarations for the pagarme client's transactions namespace.
declare module 'pagarme' {
  export namespace client {
    export namespace transactions {
      // List transactions matching the given filters.
      function all(
        opts: any,
        body: TransactionAllOptions
      ): Promise<Transaction[]>;
      function calculateInstallmentsAmount(
        opts: any,
        body: TransactionCalculateInstallmentsAmountOptions
      ): Promise<CalculateInstallmentsAmount>;
      function capture(opts: TransactionCaptureOptions): Promise<Transaction>;
      function cardHashKey(opts: any): Promise<CardHashKey>;
      function collectPayment(opts: any, body: any): any;
      function create(opts: TransactionCreateOptions): Promise<Transaction>;
      // NOTE(review): since T is constrained to extend TransactionFindOptions,
      // the conditional below always resolves to Transaction[] — the intended
      // single-Transaction branch is unreachable; confirm the intended type.
      function find<T extends TransactionFindOptions>(
        opts: any,
        body: T
      ): Promise<
        T extends TransactionFindOptions ? Transaction[] : Transaction
      >;
      // Overloads: refund with options only, or with opts + options.
      function refund(body: TransactionRefundOptions): Promise<Transaction>;
      function refund(
        opts: any,
        body: TransactionRefundOptions
      ): Promise<Transaction>;
      function reprocess(opts: any, body: any): any;
      function update(opts: any, body: any): any;
    }
  }
}
|
"use strict";
// NOTE: webpack-generated chunk for resources/js/vue/views/About.vue.
// Do not edit by hand — regenerate via the build instead.
(self["webpackChunk"] = self["webpackChunk"] || []).push([["resources_js_vue_views_About_vue"],{

// Script part of the SFC (empty options object — the view is template-only).
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=script&lang=js":
/*!**********************************************************************************************************************************************************************************************!*\
  !*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=script&lang=js ***!
  \**********************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({});
/***/ }),

// Compiled render function for the SFC template (static nodes hoisted).
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5":
/*!**************************************************************************************************************************************************************************************************************************************************************************!*\
  !*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5 ***!
  \**************************************************************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "render": () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.esm-bundler.js");
var _hoisted_1 = {
  "class": "container mx-auto mt-5"
};
var _hoisted_2 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("h1", {
  "class": "text-center mb-3"
}, "About", -1
/* HOISTED */
);
var _hoisted_3 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("p", null, "Lorem ipsum dolor sit, amet consectetur adipisicing elit. Provident itaque nam sed vero, blanditiis velit libero. Maiores eius voluptatum dolores illum repellat quidem itaque error dignissimos voluptas ea. Delectus, illum. Nemo odit repellat, quos voluptate quia minima numquam porro laborum saepe qui ad iure suscipit consectetur labore officia doloribus quae velit voluptatem. Repellat quidem nam nobis odit soluta eaque laborum.", -1
/* HOISTED */
);
var _hoisted_4 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("p", null, "Lorem ipsum dolor sit amet consectetur, adipisicing elit. Vero corrupti iusto inventore tenetur explicabo ab neque debitis optio, animi soluta illum odio veniam placeat, doloremque nihil voluptas in omnis est.", -1
/* HOISTED */
);
var _hoisted_5 = [_hoisted_2, _hoisted_3, _hoisted_4];
function render(_ctx, _cache, $props, $setup, $data, $options) {
  return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_1, _hoisted_5);
}
/***/ }),

// SFC assembly module: attaches the compiled render function to the options.
/***/ "./resources/js/vue/views/About.vue":
/*!******************************************!*\
  !*** ./resources/js/vue/views/About.vue ***!
  \******************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _About_vue_vue_type_template_id_7b08b7f5__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./About.vue?vue&type=template&id=7b08b7f5 */ "./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5");
/* harmony import */ var _About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./About.vue?vue&type=script&lang=js */ "./resources/js/vue/views/About.vue?vue&type=script&lang=js");
_About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].render = _About_vue_vue_type_template_id_7b08b7f5__WEBPACK_IMPORTED_MODULE_0__.render
/* hot reload */
if (false) {}
_About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].__file = "resources/js/vue/views/About.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),

// Re-export shim for the ?vue&type=script request.
/***/ "./resources/js/vue/views/About.vue?vue&type=script&lang=js":
/*!******************************************************************!*\
  !*** ./resources/js/vue/views/About.vue?vue&type=script&lang=js ***!
  \******************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "default": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__["default"])
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_About_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./About.vue?vue&type=script&lang=js */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=script&lang=js");
/***/ }),

// Re-export shim for the ?vue&type=template request.
/***/ "./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5":
/*!************************************************************************!*\
  !*** ./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5 ***!
  \************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "render": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_About_vue_vue_type_template_id_7b08b7f5__WEBPACK_IMPORTED_MODULE_0__.render)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_About_vue_vue_type_template_id_7b08b7f5__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./About.vue?vue&type=template&id=7b08b7f5 */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/About.vue?vue&type=template&id=7b08b7f5");
/***/ })

}]);
|
import pyxel
from random import randint
class Circle:
    """A randomly positioned circle outline for a Pyxel screen.

    The circle's x is anywhere on the screen width; y keeps a ``min_radius``
    margin from the top/bottom edges; the radius is random but never smaller
    than ``min_radius``.
    """

    def __init__(self, screen_width, screen_height, min_radius):
        self._x = randint(0, screen_width)
        self._y = randint(min_radius, screen_height - min_radius)
        # BUGFIX: the original `randint(...) - 4` could produce a radius below
        # min_radius (even 1); clamp so the contract of min_radius holds.
        self._r = max(
            min_radius,
            randint(min_radius, min(screen_width, screen_height) // 2) - 4,
        )
        self._col = 7  # Pyxel color code for the circle (white)

    def draw(self):
        """Draw the circle outline on the current Pyxel frame."""
        pyxel.circb(self._x, self._y, self._r, self._col)
# Example usage
screen_width = 160
screen_height = 120
min_radius = 5
# Open a 160x120 window, draw one random circle outline, and show the frame
# (pyxel.show blocks until the window is closed).
pyxel.init(screen_width, screen_height)
circle = Circle(screen_width, screen_height, min_radius)
circle.draw()
pyxel.show()
|
<filename>src/main/java/org/openbaton/vnfm/generic/utils/LogUtils.java<gh_stars>10-100
/*
* Copyright (c) 2015-2018 Open Baton (http://openbaton.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openbaton.vnfm.generic.utils;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import org.openbaton.catalogue.mano.record.VNFCInstance;
import org.openbaton.catalogue.mano.record.VirtualNetworkFunctionRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Service;
/** Created by mpa on 14.12.16. */
@Service
@ConfigurationProperties
public class LogUtils {
private static Gson parser = new GsonBuilder().setPrettyPrinting().create();
@Value("${vnfm.ems.script.logpath:/var/log/openbaton/scriptsLog/}")
private String scriptsLogPath;
@Value("${vnfm.ems.script.old:60}")
private int old;
public int getOld() {
return old;
}
public void setOld(int old) {
this.old = old;
}
private Logger log = LoggerFactory.getLogger(LogUtils.class);
public void init() {
if (old > 0) {
File f = new File(scriptsLogPath);
f.mkdirs();
}
}
public synchronized void saveLogToFile(
VirtualNetworkFunctionRecord virtualNetworkFunctionRecord,
String script,
VNFCInstance vnfcInstance1,
String output)
throws IOException {
saveLogToFile(virtualNetworkFunctionRecord, script, vnfcInstance1, output, false);
}
public synchronized void saveLogToFile(
VirtualNetworkFunctionRecord virtualNetworkFunctionRecord,
String script,
VNFCInstance vnfcInstance1,
String output,
boolean error)
throws IOException {
log.debug("Old is: " + old);
if (old > 0) {
String path = "";
if (!error) {
path =
scriptsLogPath
+ virtualNetworkFunctionRecord.getName()
+ "/"
+ vnfcInstance1.getHostname()
+ ".log";
} else {
path =
scriptsLogPath
+ virtualNetworkFunctionRecord.getName()
+ "/"
+ vnfcInstance1.getHostname()
+ "-error.log";
}
File f = new File(path);
log.debug("The full log path is: " + path);
if (!f.exists()) {
f.getParentFile().mkdirs();
f.createNewFile();
}
if (!error) {
Files.write(
Paths.get(path),
("Output of Script : " + script + "\n\n").getBytes(),
StandardOpenOption.APPEND);
Files.write(
Paths.get(path),
parser
.fromJson(output, JsonObject.class)
.get("output")
.getAsString()
.replaceAll("\\\\n", "\n")
.getBytes(),
StandardOpenOption.APPEND);
log.debug(
"Wrote "
+ parser
.fromJson(output, JsonObject.class)
.get("output")
.getAsString()
.replaceAll("\\\\n", "\n")
+ " on file "
+ Paths.get(path));
} else {
Files.write(
Paths.get(path),
("Error log of Script : " + script + "\n\n").getBytes(),
StandardOpenOption.APPEND);
Files.write(
Paths.get(path),
parser
.fromJson(output, JsonObject.class)
.get("err")
.getAsString()
.replaceAll("\\\\n", "\n")
.getBytes(),
StandardOpenOption.APPEND);
}
Files.write(
Paths.get(path),
"\n\n\n~~~~~~~~~~~~~~~~~~~~~~~~~\n#########################\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n"
.getBytes(),
StandardOpenOption.APPEND);
}
}
}
|
def get_max_square_matrix(x):
    """Find the largest all-ones square submatrix in a binary matrix.

    Classic dynamic programme: ``dp[i][j]`` holds the side length of the
    largest all-ones square whose bottom-right corner is at ``(i, j)``.

    Args:
        x: 2D list of 0/1 values (rows of equal length).

    Returns:
        Tuple ``(size, i, j)``: the side length of the largest square and the
        row/column of its bottom-right corner. Returns ``(0, 0, 0)`` for an
        empty matrix (the original raised ``IndexError``).
    """
    if not x or not x[0]:
        return 0, 0, 0
    rows, cols = len(x), len(x[0])
    dp = [[0] * cols for _ in range(rows)]
    # First row and column: a square ending there can only be the cell itself.
    for i in range(rows):
        dp[i][0] = x[i][0]
    for j in range(cols):
        dp[0][j] = x[0][j]
    for i in range(1, rows):
        for j in range(1, cols):
            if x[i][j] == 1:
                dp[i][j] = min(dp[i][j - 1], dp[i - 1][j], dp[i - 1][j - 1]) + 1
    # Locate the first maximum in row-major order (matches original behavior).
    best, best_i, best_j = dp[0][0], 0, 0
    for i in range(rows):
        for j in range(cols):
            if dp[i][j] > best:
                best, best_i, best_j = dp[i][j], i, j
    return best, best_i, best_j
|
#!/bin/bash
# Cleans badly generated Genia data in the training corpus, so the tree
# itself never has to be touched.
# Essentially, deletes the lines where we have no lemma (detected as two
# consecutive whitespace characters). For now this only happens on entries
# that start with a single letter.
# ${1} - path to the corpus file to clean in place.
# BUGFIX: quote "$1" so paths containing spaces or glob characters work.
sed '/^.*[[:space:]][[:space:]]/d' "$1" > "$1.temp"
mv "$1.temp" "$1"
|
#!/usr/bin/env bash
# Vagrant provisioning script: scaffolds a minimal React/Redux project in
# /vagrant with Webpack + Babel (ES2015/react/stage-1 presets) and a tiny
# app skeleton (entry point, root component, empty reducer, static assets).
echo "Installing react"
cd /vagrant
echo " creating package.json"
rm -f package.json
npm init --force >/dev/null 2>&1
# Runtime dependencies.
echo " installing react"
npm install react --save >/dev/null 2>&1
echo " installing react-dom"
npm install react-dom --save >/dev/null 2>&1
echo " installing redux"
npm install redux --save >/dev/null 2>&1
echo " installing react-redux"
npm install react-redux --save >/dev/null 2>&1
echo " installing lodash"
npm install lodash --save >/dev/null 2>&1
# the core logic of Babel
echo " installing babel-core"
npm install babel-core --save-dev >/dev/null 2>&1
# You can use Babel with Webpack easily
echo " installing babel-loader"
npm install babel-loader --save-dev >/dev/null 2>&1
# Out of the box Babel doesn't do anything.
# In order to actually do anything to your code you need to enable plugins.
# There are two types of plugins: syntax and transform.
# Syntax allow Babel to parse additional syntax whereas transform apply transformations.
# This way the code that is using future syntax can get transformed back to JavaScript older environments can understand.
# To make it easier to consume plugins, Babel supports the concept of presets.
# Each preset comes with a set of plugins so you don't have to wire them up separately.
echo " installing babel-preset-es2015"
npm install babel-preset-es2015 --save-dev >/dev/null 2>&1
echo " installing babel-preset-react"
npm install babel-preset-react --save-dev >/dev/null 2>&1
echo " installing babel-preset-stage-1"
npm install babel-preset-stage-1 --save >/dev/null 2>&1
# You could pass Babel settings through Webpack (i.e., babel?presets[]=react,presets[]=es2015),
# but then it would be just for Webpack only.
# That's why we are going to push our Babel settings to this specific dotfile.
echo " installing .babelrc"
cat > .babelrc << EOF
{
"presets": ["react", "es2015", "stage-1"]
}
EOF
# Webpack config: bundle app/index.js into public/bundle.js, run all
# non-node_modules sources through babel-loader, and serve public/ from the
# dev server on 192.168.33.60:3000 (the Vagrant private-network address —
# TODO confirm it matches the Vagrantfile).
echo " installing webpack.config.js"
cat > webpack.config.js << EOF
module.exports = {
entry: [
'./app/index.js'
],
output: {
path: __dirname + "/public",
publicPath: '/',
filename: 'bundle.js'
},
module: {
loaders: [{
exclude: /node_modules/,
loader: 'babel'
}]
},
resolve: {
extensions: ['', '.js', '.jsx']
},
devServer: {
historyApiFallback: true,
contentBase: './public',
host: '192.168.33.60',
port: 3000
}
};
EOF
echo " installing index.html"
mkdir -p public
cat > public/index.html << EOF
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" href="/style/style.css">
</head>
<body>
<div class="container"></div>
</body>
<script src="/bundle.js"></script>
</html>
EOF
# Scaffold the application source tree: actions, a root component, the root
# reducer and the ReactDOM entry point wired to a Redux store.
mkdir -p app
mkdir -p app/actions
touch app/actions/index.js
mkdir -p app/components
cat > app/components/app.js << EOF
import React from 'react';
import { Component } from 'react';
export default class App extends Component {
render() {
return (
<div>React simple starter</div>
);
}
}
EOF
mkdir -p app/reducers
cat > app/reducers/index.js << EOF
import { combineReducers } from 'redux';
const rootReducer = combineReducers({
state: (state = {}) => state
});
export default rootReducer;
EOF
cat > app/index.js << EOF
import React from 'react';
import ReactDOM from 'react-dom';
import { Provider } from 'react-redux';
import { createStore, applyMiddleware } from 'redux';
import App from './components/app';
import reducers from './reducers';
const createStoreWithMiddleware = applyMiddleware()(createStore);
ReactDOM.render(
<Provider store={createStoreWithMiddleware(reducers)}>
<App />
</Provider>
, document.querySelector('.container'));
EOF
# Empty stylesheet referenced by index.html.
mkdir -p public/style
echo > public/style/style.css
echo "react installed"
|
#!/bin/bash
# Regression checks for a file derived from a GTF annotation:
#  1) the number of distinct transcript_ids among exon records in $1 must
#     equal the number of lines in $2 (reported via diff of the two counts);
#  2) $2 must not contain "-1" placeholder values (the two greps print any
#     offending lines).
# Usage: $0 <annotation.gtf> <derived-output-file>
# Fix: quote "$1"/"$2" so paths containing spaces do not break the pipeline.
echo "TEST: # of lines $1 $2"
# Count unique transcript_id values in exon features (attributes are field 9).
fgrep -e ' exon ' "$1" | cut -f 9 | perl -ne 'chomp; $_=~/transcript_id "([^"]+)";/; print "$1\n";' | sort -u | wc -l > expected_wc
wc -l "$2" | cut -d' ' -f 1 > new_wc
diff expected_wc new_wc
echo "TEST: grep for -1 #1 $2"
fgrep -e '-1,' "$2"
echo "TEST: grep for -1 #2 $2"
fgrep -e '"-1' "$2"
|
#!/usr/bin/env bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# Repository root: one directory above this script's location.
SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
# Locate k8s.io/code-generator: prefer the vendored copy, otherwise fall back
# to a sibling checkout next to this repository. Overridable via $CODEGEN_PKG.
CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}"; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}
# generate the code with:
# --output-base because this script should also be able to run inside the vendor dir of
# k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir
# instead of the $GOPATH directly. For normal projects this can be dropped.
# Generates deepcopy functions, a typed clientset, informers and listers for
# the swarm:v1alpha1 API group into the .../generated package.
"${CODEGEN_PKG}"/generate-groups.sh "deepcopy,client,informer,lister" \
github.com/marcosQuesada/k8s-swarm/services/controller/internal/infra/k8s/generated github.com/marcosQuesada/k8s-swarm/services/controller/internal/infra/k8s/apis \
swarm:v1alpha1 \
--output-base "$(dirname "${BASH_SOURCE[0]}")/" \
--go-header-file "${SCRIPT_ROOT}"/hack/boilerplate.go.txt
|
package com.estafet.boostcd.feature.api.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
/**
 * JPA entity recording which version of a microservice is deployed in an
 * environment, and when. Uniqueness is enforced per (ENV_ID, MICROSERVICE).
 */
@Entity
@Table(name = "ENV_MICROSERVICE", uniqueConstraints = {
        @UniqueConstraint(columnNames = {"ENV_ID", "MICROSERVICE"}, name = "ENV_MICROSERVICE_KEY") })
public class EnvMicroservice {

    @Id
    @SequenceGenerator(name = "ENV_MICROSERVICE_ID_SEQ", sequenceName = "ENV_MICROSERVICE_ID_SEQ", allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ENV_MICROSERVICE_ID_SEQ")
    @Column(name = "ENV_MICROSERVICE_ID")
    private Long id;

    // Logical name of the microservice.
    @Column(name = "MICROSERVICE", nullable = false)
    private String microservice;

    @Column(name = "VERSION", nullable = false)
    private String version;

    // Stored as a string; the format is decided by the writer — TODO confirm.
    @Column(name = "DEPLOYED_DATE", nullable = true)
    private String deployedDate;

    @ManyToOne
    @JoinColumn(name = "ENV_ID", nullable = false, referencedColumnName = "ENV_ID", foreignKey = @ForeignKey(name = "MICROSERVICE_TO_ENV_FK"))
    private Env env;

    public String getDeployedDate() {
        return deployedDate;
    }

    public void setDeployedDate(String deployedDate) {
        this.deployedDate = deployedDate;
    }

    public String getMicroservice() {
        return microservice;
    }

    public void setMicroservice(String microservice) {
        this.microservice = microservice;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public Env getEnv() {
        return env;
    }

    public void setEnv(Env microserviceEnv) {
        this.env = microserviceEnv;
    }

    /** Hash based only on the database id, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        return result;
    }

    /** Entities are equal iff their database ids are equal (both-null ids compare equal). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        EnvMicroservice other = (EnvMicroservice) obj;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        return true;
    }

    public static EnvMicroserviceBuilder builder() {
        return new EnvMicroserviceBuilder();
    }

    /** Copy the mutable deployment fields (version, deployedDate) from another record. */
    public void update(EnvMicroservice envMicroservice) {
        this.version = envMicroservice.version;
        this.deployedDate = envMicroservice.deployedDate;
    }

    @Override
    public String toString() {
        return "EnvMicroservice [id=" + id + ", microservice=" + microservice + ", env=" + env + "]";
    }

    /** Fluent builder for {@link EnvMicroservice}; id, env are left unset. */
    public static class EnvMicroserviceBuilder {
        private String version;
        private String deployedDate;
        private String microservice;

        private EnvMicroserviceBuilder() { }

        public EnvMicroserviceBuilder setMicroservice(String microservice) {
            this.microservice = microservice;
            return this;
        }

        public EnvMicroserviceBuilder setVersion(String version) {
            this.version = version;
            return this;
        }

        public EnvMicroserviceBuilder setDeployedDate(String deployedDate) {
            this.deployedDate = deployedDate;
            return this;
        }

        public EnvMicroservice build() {
            EnvMicroservice envMicroservice = new EnvMicroservice();
            envMicroservice.setMicroservice(microservice);
            envMicroservice.setDeployedDate(deployedDate);
            envMicroservice.setVersion(version);
            return envMicroservice;
        }
    }

    /**
     * Returns true when this record's version is greater than or equal to the
     * other's. Both records must describe the same microservice.
     *
     * @throws RuntimeException when the records describe different microservices
     */
    public boolean isGreaterOrEqualThan(EnvMicroservice other) {
        if (microservice.equals(other.microservice)) {
            return new Version(version).isGreaterOrEqualThan(new Version(other.version));
        }
        // Fixed typo in the original message ("Canot").
        throw new RuntimeException("Cannot compare " + microservice + " with " + other);
    }

    /**
     * Returns true when this record's version is less than or equal to the
     * other's. Both records must describe the same microservice.
     *
     * @throws RuntimeException when the records describe different microservices
     */
    public boolean isLessThanOrEqual(EnvMicroservice other) {
        if (microservice.equals(other.microservice)) {
            return new Version(version).isLessThanOrEqual(new Version(other.version));
        }
        // Fixed typo in the original message ("Canot").
        throw new RuntimeException("Cannot compare " + microservice + " with " + other);
    }
}
|
package com.gw.presentation.internal.di.component;
import com.gw.presentation.internal.di.PerActivity;
import com.gw.presentation.internal.di.module.ActivityModule;
import com.gw.presentation.internal.di.module.DecisionModule;
import com.gw.presentation.internal.di.module.ForecastModule;
import com.gw.presentation.view.activity.ForecastActivity;
import dagger.Component;
/**
 * Dagger component scoped to the forecast screen ({@code @PerActivity}).
 * Depends on the app-wide {@code ApplicationComponent} and adds the
 * activity- and feature-level modules needed to inject {@link ForecastActivity}.
 *
 * Created by vadym on 12.05.17.
 */
@PerActivity
@Component(dependencies = ApplicationComponent.class, modules = {ActivityModule.class, ForecastModule.class})
public interface ForecastComponent {
// Field-injects the forecast screen's dependencies.
void inject(ForecastActivity forecastActivity);
}
|
#!/bin/bash
# Copyright 2021 rdugan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Temporarily switch the given GPU's fan control mode, then restore it.
# Fixes: quote all variable expansions, and verify the hwmon node exists
# before writing to it (the original silently failed on a bad GPU index).
cd "$(dirname "$0")"
if [ $# -lt 1 ];
then
  echo "Usage: $0 <GPU> [DURATION]"
  echo "Example: $0 1 5"
  exit 1;
fi
# Seconds to stay in mode 0 before restoring (default 3).
duration=3
if [ $# = 2 ];
then
  duration=$2
fi
hwmon="/sys/class/hwmon/hwmon$1"
if [ ! -d "$hwmon" ]; then
  echo "Error: $hwmon does not exist" >&2
  exit 1
fi
# pwm1_enable: 0 then 2 — per the kernel hwmon sysfs ABI, 0 typically means
# "no fan speed control" and 2 "automatic control", but semantics are
# driver-specific — confirm for the target driver.
echo "0" > "$hwmon/pwm1_enable"
sleep "$duration"
echo "2" > "$hwmon/pwm1_enable"
|
<reponame>msabramo/django-chuck<filename>modules/oracle/chuck_module.py
# Module metadata: human-readable description of this django-chuck module
# (presumably displayed by the chuck tooling when listing modules — confirm
# against the framework).
description = """
Adds Oracle database settings to your project.
For more information, visit:
http://cx-oracle.sourceforge.net/
"""
|
#!/usr/bin/env bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
# By default this builds packages for the Pi Two and Three only, since the NEON support
# this allows makes calculations many times faster. To support the Pi One or Zero, pass
# PI_ONE as the first argument to the script, for example:
# tensorflow/tools/ci_build/pi/build_raspberry_pi.sh PI_ONE
#
# To install the cross-compilation support for Python this script needs on Ubuntu Trusty, run
# something like these steps, after backing up your original /etc/apt/sources.list file:
#
# dpkg --add-architecture armhf
# echo 'deb [arch=armhf] http://ports.ubuntu.com/ trusty main restricted universe multiverse' >> /etc/apt/sources.list.d/armhf.list
# echo 'deb [arch=armhf] http://ports.ubuntu.com/ trusty-updates main restricted universe multiverse' >> /etc/apt/sources.list.d/armhf.list
# echo 'deb [arch=armhf] http://ports.ubuntu.com/ trusty-security main restricted universe multiverse' >> /etc/apt/sources.list.d/armhf.list
# echo 'deb [arch=armhf] http://ports.ubuntu.com/ trusty-backports main restricted universe multiverse' >> /etc/apt/sources.list.d/armhf.list
# sed -i 's#deb http://archive.ubuntu.com/ubuntu/#deb [arch=amd64] http://archive.ubuntu.com/ubuntu/#g' /etc/apt/sources.list
# apt-get update
# apt-get install -y libpython-all-dev:armhf
#
# Make sure you have an up to date version of the Bazel build tool installed too.
# Run TensorFlow's ./configure non-interactively, accepting all defaults,
# with XLA disabled.
export TF_ENABLE_XLA=0
yes '' | ./configure
# Fix for curl build problem in 32-bit, see https://stackoverflow.com/questions/35181744/size-of-array-curl-rule-01-is-negative
sudo sed -i 's/define CURL_SIZEOF_LONG 8/define CURL_SIZEOF_LONG 4/g' /usr/include/curl/curlbuild.h
sudo sed -i 's/define CURL_SIZEOF_CURL_OFF_T 8/define CURL_SIZEOF_CURL_OFF_T 4/g' /usr/include/curl/curlbuild.h
# The system-installed OpenSSL headers get pulled in by the latest BoringSSL
# release on this configuration, so move them before we build:
if [ -d /usr/include/openssl ]; then
sudo mv /usr/include/openssl /usr/include/openssl.original
fi
WORKSPACE_PATH=`pwd`
# Build the OpenBLAS library, which is faster than Eigen on the Pi Zero/One.
# TODO(petewarden) - It would be nicer to move this into the main Bazel build
# process if we can maintain a build file for this.
# Download the Raspberry Pi cross-compilation toolchain (pinned commit).
TOOLCHAIN_INSTALL_PATH=/tmp/toolchain_install/
sudo rm -rf ${TOOLCHAIN_INSTALL_PATH}
mkdir ${TOOLCHAIN_INSTALL_PATH}
cd ${TOOLCHAIN_INSTALL_PATH}
curl -L https://github.com/raspberrypi/tools/archive/0e906ebc527eab1cdbf7adabff5b474da9562e9f.tar.gz -o toolchain.tar.gz
tar xzf toolchain.tar.gz
mv tools-0e906ebc527eab1cdbf7adabff5b474da9562e9f/ tools
CROSSTOOL_CC=${TOOLCHAIN_INSTALL_PATH}/tools/arm-bcm2708/arm-rpi-4.9.3-linux-gnueabihf/bin/arm-linux-gnueabihf-gcc
OPENBLAS_SRC_PATH=/tmp/openblas_src/
sudo rm -rf ${OPENBLAS_SRC_PATH}
git clone https://github.com/xianyi/OpenBLAS ${OPENBLAS_SRC_PATH}
cd ${OPENBLAS_SRC_PATH}
# The commit after this introduced Fortran compile issues. In theory they should
# be solvable using NOFORTRAN=1 on the make command, but my initial tries didn't
# work, so pinning to the last know good version.
git checkout 5a6a2bed9aff0ba8a18651d5514d029c8cae336a
# If this path is changed, you'll also need to update
# cxx_builtin_include_directory in third_party/toolchains/cpus/arm/CROSSTOOL.tpl
OPENBLAS_INSTALL_PATH=/tmp/openblas_install/
make CC=${CROSSTOOL_CC} FC=${CROSSTOOL_CC} HOSTCC=gcc TARGET=ARMV6
make PREFIX=${OPENBLAS_INSTALL_PATH} install
# Choose per-target compiler flags: ARMv6 + OpenBLAS for the Pi One/Zero,
# ARMv7 + NEON for the Pi Two/Three.
if [[ $1 == "PI_ONE" ]]; then
PI_COPTS="--copt=-march=armv6 --copt=-mfpu=vfp
--copt=-DUSE_GEMM_FOR_CONV --copt=-DUSE_OPENBLAS
--copt=-isystem --copt=${OPENBLAS_INSTALL_PATH}/include/
--copt=-std=gnu11 --copt=-DS_IREAD=S_IRUSR --copt=-DS_IWRITE=S_IWUSR
--linkopt=-L${OPENBLAS_INSTALL_PATH}/lib/
--linkopt=-l:libopenblas.a"
echo "Building for the Pi One/Zero, with no NEON support"
WHEEL_ARCH=linux_armv6l
else
PI_COPTS='--copt=-march=armv7-a --copt=-mfpu=neon-vfpv4
--copt=-std=gnu11 --copt=-DS_IREAD=S_IRUSR --copt=-DS_IWRITE=S_IWUSR
--copt=-O3 --copt=-fno-tree-pre
--copt=-U__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1
--copt=-U__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2
--copt=-U__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8'
WHEEL_ARCH=linux_armv7l
echo "Building for the Pi Two/Three, with NEON acceleration"
fi
# We need to pass down the environment variable with a possible alternate Python
# include path for Python 3.x builds to work.
export CROSSTOOL_PYTHON_INCLUDE_PATH
cd ${WORKSPACE_PATH}
bazel build -c opt ${PI_COPTS} \
--config=monolithic \
--copt=-funsafe-math-optimizations --copt=-ftree-vectorize \
--copt=-fomit-frame-pointer --cpu=armeabi \
--crosstool_top=@local_config_arm_compiler//:toolchain \
--define tensorflow_mkldnn_contraction_kernel=0 \
--verbose_failures \
//tensorflow:libtensorflow.so \
//tensorflow:libtensorflow_framework.so \
//tensorflow/tools/benchmark:benchmark_model \
//tensorflow/tools/pip_package:build_pip_package
OUTDIR=output-artifacts
mkdir -p "${OUTDIR}"
echo "Final outputs will go to ${OUTDIR}"
# Build a universal wheel.
BDIST_OPTS="--universal" \
bazel-bin/tensorflow/tools/pip_package/build_pip_package "${OUTDIR}"
# Rename the wheel so its platform tag matches the target Pi architecture.
OLD_FN=$(ls "${OUTDIR}" | grep -m 1 \.whl)
SUB='s/tensorflow-([^-]+)-([^-]+)-.*/tensorflow-\1-\2-none-'${WHEEL_ARCH}'.whl/; print'
NEW_FN=$(echo "${OLD_FN}" | perl -ne "${SUB}")
mv "${OUTDIR}/${OLD_FN}" "${OUTDIR}/${NEW_FN}"
cp bazel-bin/tensorflow/tools/benchmark/benchmark_model "${OUTDIR}"
cp bazel-bin/tensorflow/libtensorflow.so "${OUTDIR}"
cp bazel-bin/tensorflow/libtensorflow_framework.so "${OUTDIR}"
echo "Output can be found here:"
find "${OUTDIR}"
|
<reponame>p32929/AndroidEasySQL-Library
package p32929.androideasysql_library;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.DatabaseErrorHandler;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import java.util.ArrayList;
/**
* Created by p32929 on 7/8/18.
*/
/*
MIT License
Copyright (c) 2018 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/**
 * Thin fluent wrapper around {@link SQLiteOpenHelper} for a single table.
 * Callers define columns, then insert/query/update/delete rows without
 * writing SQL by hand. Column numbers in the public API are 1-based; the
 * implicit auto-increment "ID" column is column 0.
 *
 * Fixes vs. the original: matchColumns() now lazily initialises the
 * database like every other query method (it previously NPE'd when called
 * first), and doneDataAdding() is simplified.
 */
public class EasyDB extends SQLiteOpenHelper {

    // Variables
    private String DATABASE_NAME, TABLE_NAME = "DEMO_TABLE", SQL = "";
    private ArrayList<Column> columns = new ArrayList<>();
    private SQLiteDatabase writableDatabase;
    private ContentValues contentValues = new ContentValues();
    private boolean initedDb = false;

    // ---- Insert ----

    /** Stage a string value for the given 1-based column. */
    public EasyDB addData(int columnNumber, String data) {
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columns.get(columnNumber - 1).columnName, data);
        return this;
    }

    public EasyDB addData(String columnName, String data) {
        columnName = columnName.replaceAll(" ", "_");
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columnName, data);
        return this;
    }

    public EasyDB addData(int columnNumber, int data) {
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columns.get(columnNumber - 1).columnName, data);
        return this;
    }

    public EasyDB addData(String columnName, int data) {
        columnName = columnName.replaceAll(" ", "_");
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columnName, data);
        return this;
    }

    /**
     * Insert the values staged via addData() as one row and reset the
     * staging buffer. Returns false when the insert fails.
     */
    public boolean doneDataAdding() {
        long result = writableDatabase.insert(TABLE_NAME, null, contentValues);
        contentValues = new ContentValues();
        return result != -1;
    }

    // ---- Read ----

    /** Cursor over every row of the table. */
    public Cursor getAllData() {
        if (!initedDb || writableDatabase == null) initDatabase();
        Cursor res = writableDatabase.rawQuery("select * from " + TABLE_NAME, null);
        return res;
    }

    /** Cursor over every row, ordered by the given column (0 = ID column). */
    public Cursor getAllDataOrderedBy(int columnNumber, boolean ascending) {
        String postfix = ascending ? "" : " DESC ";
        String colNam = columnNumber == 0 ? " ID " : columns.get(columnNumber - 1).columnName;
        if (!initedDb || writableDatabase == null) initDatabase();
        Cursor res = writableDatabase.rawQuery("select * from " + TABLE_NAME + " ORDER BY " + colNam + postfix, null);
        return res;
    }

    /** One row by primary key, or null when not found. */
    public Cursor getOneRowData(int rowID) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String[] allColNames = getAllColumns();
        Cursor cursor = writableDatabase.query(TABLE_NAME,
                allColNames, allColNames[0] + "=?",
                new String[]{String.valueOf(rowID)},
                null, null, null, "1");
        if (cursor.getCount() > 0) {
            return cursor;
        } else {
            return null;
        }
    }

    /** @deprecated use {@link #searchInColumn(int, String, int)} instead. */
    @Deprecated
    public Cursor getOneRowData(int columnNumber, String value) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String[] allColNames = getAllColumns();
        Cursor cursor = writableDatabase.query(TABLE_NAME,
                allColNames, allColNames[columnNumber] + "=?",
                new String[]{value},
                null, null, null, "1");
        if (cursor.getCount() > 0) {
            return cursor;
        } else {
            return null;
        }
    }

    /** @deprecated use {@link #searchInColumn(String, String, int)} instead. */
    @Deprecated
    public Cursor getOneRowData(String columnName, String value) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String[] allColNames = getAllColumns();
        Cursor cursor = writableDatabase.query(TABLE_NAME,
                allColNames, " " + columnName + " " + "=?",
                new String[]{value},
                null, null, null, "1");
        if (cursor.getCount() > 0) {
            return cursor;
        } else {
            return null;
        }
    }

    /**
     * Rows whose given column equals valueToSearch, capped at limit
     * (-1 for no limit). Returns null when nothing matches.
     */
    public Cursor searchInColumn(int columnNumber, String valueToSearch, int limit) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String[] allColNames = getAllColumns();
        Cursor cursor = writableDatabase.query(TABLE_NAME,
                allColNames, allColNames[columnNumber] + "=?",
                new String[]{valueToSearch},
                null, null, null, limit == -1 ? null : String.valueOf(limit));
        if (cursor.getCount() > 0) {
            return cursor;
        } else {
            return null;
        }
    }

    public Cursor searchInColumn(String columnName, String valueToSearch, int limit) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String[] allColNames = getAllColumns();
        Cursor cursor = writableDatabase.query(TABLE_NAME,
                allColNames, " " + columnName + " " + "=?",
                new String[]{valueToSearch},
                null, null, null, limit == -1 ? null : String.valueOf(limit));
        if (cursor.getCount() > 0) {
            return cursor;
        } else {
            return null;
        }
    }

    /**
     * True when at least one row matches ALL given column/value pairs.
     * Fix: lazily initialise the database first, consistent with the other
     * query methods (the original could NPE when this was the first call).
     */
    public boolean matchColumns(String[] columnsToMatch, String[] valuesToMatch) {
        if (!initedDb || writableDatabase == null) initDatabase();
        String query = "";
        for (int i = 0; i < columnsToMatch.length; i++) {
            query += columnsToMatch[i] + " = ? ";
            if (i != columnsToMatch.length - 1) {
                query += " AND ";
            }
        }
        Cursor cursor = writableDatabase.query(TABLE_NAME, columnsToMatch, query, valuesToMatch, null, null, null);
        return cursor.getCount() > 0;
    }

    // ---- Update ----

    /** Stage a string value for an update; commit with {@link #rowID(int)}. */
    public EasyDB updateData(int columnNumber, String data) {
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columns.get(columnNumber - 1).columnName, data);
        return this;
    }

    public EasyDB updateData(int columnNumber, int data) {
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columns.get(columnNumber - 1).columnName, data);
        return this;
    }

    public EasyDB updateData(String columnName, String data) {
        columnName = columnName.replaceAll(" ", "_");
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columnName, data);
        return this;
    }

    public EasyDB updateData(String columnName, int data) {
        columnName = columnName.replaceAll(" ", "_");
        if (!initedDb || writableDatabase == null) initDatabase();
        contentValues.put(columnName, data);
        return this;
    }

    /** Apply the staged update values to the row with the given primary key. */
    public boolean rowID(int id) {
        try {
            return writableDatabase.update(TABLE_NAME, contentValues, "id = ?", new String[]{String.valueOf(id)}) > 0;
        } catch (Exception e) {
            return false;
        }
    }

    // ---- Delete ----

    public boolean deleteRow(int id) {
        SQLiteDatabase db = this.getWritableDatabase();
        return db.delete(TABLE_NAME, "id = ?", new String[]{String.valueOf(id)}) == 1;
    }

    public boolean deleteRow(int columnNumber, int valueToMatch) {
        SQLiteDatabase db = this.getWritableDatabase();
        return db.delete(TABLE_NAME, columns.get(columnNumber - 1).columnName + " = ?", new String[]{String.valueOf(valueToMatch)}) == 1;
    }

    public boolean deleteRow(int columnNumber, String valueToMatch) {
        SQLiteDatabase db = this.getWritableDatabase();
        return db.delete(TABLE_NAME, columns.get(columnNumber - 1).columnName + " = ?", new String[]{valueToMatch}) == 1;
    }

    public boolean deleteRow(String columnName, int valueToMatch) {
        SQLiteDatabase db = this.getWritableDatabase();
        return db.delete(TABLE_NAME, columnName + " = ?", new String[]{String.valueOf(valueToMatch)}) == 1;
    }

    public boolean deleteRow(String columnName, String valueToMatch) {
        SQLiteDatabase db = this.getWritableDatabase();
        return db.delete(TABLE_NAME, columnName + " = ?", new String[]{valueToMatch}) == 1;
    }

    public void deleteAllDataFromTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.execSQL("delete from " + TABLE_NAME);
    }

    // ---- Schema definition ----

    /** Set the table name (spaces replaced by underscores). */
    public EasyDB setTableName(String tableName) {
        this.TABLE_NAME = tableName.replaceAll(" ", "_");
        return this;
    }

    public EasyDB addColumn(Column column) {
        columns.add(column);
        return this;
    }

    public EasyDB addColumn(String columnName, String... columnDataTypes) {
        Column column = new Column(columnName, columnDataTypes);
        columns.add(column);
        return this;
    }

    /** Build the CREATE TABLE statement from the declared columns and open the DB. */
    public EasyDB doneTableColumn() {
        SQL = " CREATE TABLE " + TABLE_NAME + " ( ID INTEGER PRIMARY KEY AUTOINCREMENT, ";
        for (int i = 0; i < columns.size(); i++) {
            SQL += " " + columns.get(i).columnName + " " + columns.get(i).columnDataType + " ";
            if (i == columns.size() - 1) {
                SQL += " ) ";
            } else {
                SQL += " , ";
            }
        }
        if (!initedDb || writableDatabase == null) initDatabase();
        return this;
    }

    // ---- Helpers ----

    /** All column names, with the implicit "ID" column at index 0. */
    public String[] getAllColumns() {
        String[] allColNames = new String[columns.size() + 1];
        allColNames[0] = "ID";
        for (int i = 0; i < columns.size(); i++) {
            allColNames[i + 1] = columns.get(i).columnName;
        }
        return allColNames;
    }

    // Lazily open the writable database; triggers onCreate() on first open.
    private void initDatabase() {
        writableDatabase = getWritableDatabase();
        initedDb = true;
    }

    // ---- Factories ----

    public static EasyDB init(Context context, String dbName) {
        if (!dbName.endsWith(".db"))
            dbName += ".db";
        dbName = dbName.replaceAll(" ", "_");
        return new EasyDB(context, dbName, null, 1);
    }

    public static EasyDB init(Context context, String dbName, int version) {
        if (!dbName.endsWith(".db"))
            dbName += ".db";
        dbName = dbName.replaceAll(" ", "_");
        return new EasyDB(context, dbName, null, version);
    }

    public static EasyDB init(Context context, String dbName, SQLiteDatabase.CursorFactory factory, int version) {
        if (!dbName.endsWith(".db"))
            dbName += ".db";
        dbName = dbName.replaceAll(" ", "_");
        return new EasyDB(context, dbName, factory, version);
    }

    public static EasyDB init(Context context, String dbName, SQLiteDatabase.CursorFactory factory, int version, DatabaseErrorHandler errorHandler) {
        if (!dbName.endsWith(".db"))
            dbName += ".db";
        dbName = dbName.replaceAll(" ", "_");
        return new EasyDB(context, dbName, factory, version, errorHandler);
    }

    // ---- SQLiteOpenHelper callbacks ----

    @Override
    public void onCreate(SQLiteDatabase db) {
        this.writableDatabase = db;
        db.execSQL(SQL);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // Destructive upgrade: all existing data is dropped.
        db.execSQL(" DROP TABLE IF EXISTS " + TABLE_NAME);
        onCreate(db);
    }

    // Saving, just in case :)
    // Codes below this might once or never be used :D
    private Context context;
    private SQLiteDatabase.CursorFactory factory;
    private int version;
    private DatabaseErrorHandler errorHandler;

    private EasyDB(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) {
        super(context, name, factory, version);
        //
        this.context = context;
        this.DATABASE_NAME = name;
        this.factory = factory;
        this.version = version;
    }

    private EasyDB(Context context, String name, SQLiteDatabase.CursorFactory factory, int version, DatabaseErrorHandler errorHandler) {
        super(context, name, factory, version, errorHandler);
        //
        this.context = context;
        this.DATABASE_NAME = name;
        this.factory = factory;
        this.version = version;
        this.errorHandler = errorHandler;
    }
}
|
import tensorflow as tf
from tensorflow import keras

# Build a simple binary text classifier: embedding -> LSTM -> sigmoid.
model = keras.Sequential()
# BUG FIX: keras.layers.Embedding requires both the vocabulary size
# (input_dim) and the embedding dimension (output_dim); the original call
# passed only input_dim=1000 and would raise at construction time.
model.add(keras.layers.Embedding(input_dim=1000, output_dim=64, input_length=50))
model.add(keras.layers.LSTM(64))
model.add(keras.layers.Dense(1, activation='sigmoid'))

# Compile the model:
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Train the model (x_train/y_train must be defined earlier in the program —
# they are not defined in this snippet):
model.fit(x_train, y_train, epochs=10)

# Evaluate the model:
score = model.evaluate(x_test, y_test)
|
// Doxygen-generated member index for struct droid::Runtime::Messaging::FBS::FSingle:
// each entry maps a member name to its anchor in the generated HTML page.
// Generated file — do not edit by hand.
var structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single =
[
    [ "__assign", "structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single.html#a40e8b1e3e58b9a94ab592a6090d1c0b5", null ],
    [ "__init", "structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single.html#aabe2f946b422bc761a239b5fef589ff0", null ],
    [ "ByteBuffer", "structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single.html#a336c4bfa75ca958721aa5739fd6b062c", null ],
    [ "Range", "structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single.html#ae5fa7537c0908ea534a2c14fc12bda43", null ],
    [ "Value", "structdroid_1_1_runtime_1_1_messaging_1_1_f_b_s_1_1_f_single.html#a2d26ec694c716fb09fe0d1c5333cccff", null ]
];
|
import { Injectable } from '@angular/core';
@Injectable()
export class CommonService {
  constructor() {}

  /**
   * Decode a URL-encoded string. '+' is treated as an encoded space, and a
   * handful of characters that are often left percent-encoded
   * (! ' ( ) * ~) are normalised before decoding. Returns '' for falsy input.
   */
  urldecode(str) {
    if (!str) { return ''; }
    const prepared = str
      .replace( /\+/g, '%20' )
      .replace( /\%21/g, '!' )
      .replace( /\%27/g, "'" )
      .replace( /\%28/g, '(' )
      .replace( /\%29/g, ')' )
      .replace( /\%2A/g, '*' )
      .replace( /\%7E/g, '~' );
    return decodeURIComponent(prepared);
  }

  /**
   * Invert a hex colour. When bw is truthy, return plain black or white
   * depending on the perceived brightness of the input (YIQ-style
   * weighting); otherwise return the channel-wise inverted colour.
   */
  invertColor(hex, bw) {
    if (hex.indexOf('#') === 0) {
      hex = hex.slice(1);
    }
    // Expand shorthand 3-digit hex to the full 6-digit form.
    if (hex.length === 3) {
      hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2];
    }
    if (hex.length !== 6) {
      throw new Error('Invalid HEX color.');
    }
    const red = parseInt(hex.slice(0, 2), 16);
    const green = parseInt(hex.slice(2, 4), 16);
    const blue = parseInt(hex.slice(4, 6), 16);
    if (bw) {
      const luminance = red * 0.299 + green * 0.587 + blue * 0.114;
      return luminance > 186 ? '#000000' : '#FFFFFF';
    }
    // Invert each channel, re-encode as hex, and zero-pad every component.
    const parts = [255 - red, 255 - green, 255 - blue]
      .map(channel => this.padZero(channel.toString(16)));
    return '#' + parts[0] + parts[1] + parts[2];
  }

  /** Left-pad with zeros to two characters, keeping only the last two. */
  padZero(str) {
    const width = 2;
    const padding = new Array(width).join('0');
    return (padding + str).slice(-width);
  }
}
|
# Elasticsearch geo_distance query: documents whose pin.location lies within
# 200 km of (lat 40, lon 70), using the in-memory bounding-box optimisation.
# BUG FIX: the original request body had one extra closing brace, making the
# JSON payload malformed.
curl -XGET http://127.0.0.1:9200/test-mindex/_search -d '{
  "query": {
    "geo_distance": {
      "pin.location": {
        "lat": 40,
        "lon": 70
      },
      "distance": "200km",
      "optimize_bbox": "memory"
    }
  }
}'
|
package org.nekperu15739.oauth;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
/**
 * Spring Boot entry point that also acts as an OAuth2 resource server
 * ({@code @EnableResourceServer}), exposing a single /user endpoint.
 */
@RestController
@EnableResourceServer
@SpringBootApplication
public class ResourceServerApplication {
// Standard Spring Boot bootstrap.
public static void main(String[] args) {
SpringApplication.run(ResourceServerApplication.class, args);
}
// Returns a fixed placeholder payload; presumably guarded by the resource
// server's security filter chain — confirm against the security config.
@GetMapping("/user")
public String user() {
return "userInfo";
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.