text stringlengths 1 1.05M |
|---|
#!/bin/ksh -p
#
# This file and its contents are supplied under the terms of the
# Common Development and Distribution License ("CDDL"), version 1.0.
# You may only use this file in accordance with the terms of version
# 1.0 of the CDDL.
#
# A full copy of the text of the CDDL should have accompanied this
# source. A copy of the CDDL is also available via the Internet at
# http://www.illumos.org/license/CDDL.
#
#
# Copyright 2017, loli10K <ezomori.nozomu@gmail.com>. All rights reserved.
# Copyright 2019 Joyent, Inc.
#
. $STF_SUITE/include/libtest.shlib
#
# DESCRIPTION:
# 'zfs diff -F' shows different object types correctly.
#
# STRATEGY:
# 1. Prepare a dataset
# 2. Create different objects and verify 'zfs diff -F' shows the correct type
#
verify_runnable "both"
# Tear-down hook (registered via log_onexit below): destroy the test
# dataset together with any snapshots left behind by a failed check,
# then remove the scratch file that captured 'zfs diff' output.
function cleanup
{
log_must zfs destroy -r "$DATASET"
rm -f "$FILEDIFF"
}
#
# Verify object at $path is of type $symbol using 'zfs diff -F'
# Valid types are:
# * B (Block device)
# * C (Character device)
# * / (Directory)
# * > (Door)
# * | (Named pipe)
# * @ (Symbolic link)
# * P (Event port)
# * = (Socket)
# * F (Regular file)
#
function verify_object_class # <path> <symbol>
{
	# Function-local variables (typeset) so we don't clobber any
	# globals of the same name elsewhere in the suite.
	typeset path="$1"
	typeset symbol="$2"
	# Diff the two snapshots into the scratch file; $FILEDIFF is quoted
	# so the test also works when $TESTDIR contains spaces.
	log_must eval "zfs diff -F $TESTSNAP1 $TESTSNAP2 > \"$FILEDIFF\""
	# Pick the type column (field 2) of the diff line whose last field
	# is exactly the path we created.
	diffsym="$(nawk -v path="$path" '$NF == path { print $2 }' < "$FILEDIFF")"
	if [[ "$diffsym" != "$symbol" ]]; then
		log_fail "Unexpected type for $path ('$diffsym' != '$symbol')"
	else
		log_note "Object $path type is correctly displayed as '$symbol'"
	fi
	# Destroy both snapshots so the next check can reuse the same names.
	log_must zfs destroy "$TESTSNAP1"
	log_must zfs destroy "$TESTSNAP2"
}
log_assert "'zfs diff -F' should show different object types correctly."
log_onexit cleanup
# Dataset and snapshot names used throughout the test.
DATASET="$TESTPOOL/$TESTFS/fs"
TESTSNAP1="$DATASET@snap1"
TESTSNAP2="$DATASET@snap2"
# Scratch file that captures raw 'zfs diff -F' output for parsing.
FILEDIFF="$TESTDIR/zfs-diff.txt"
# Reuse the device numbers of /dev/null for the device nodes created
# below (stat -c %t/%T print the major/minor numbers).
MAJOR=$(stat -c %t /dev/null)
MINOR=$(stat -c %T /dev/null)
# 1. Prepare a dataset
log_must zfs create $DATASET
MNTPOINT="$(get_prop mountpoint $DATASET)"
# devices=on so the mknod-created nodes are usable on this dataset.
log_must zfs set devices=on $DATASET
log_must zfs set xattr=on $DATASET
# 2. Create different objects and verify 'zfs diff -F' shows the correct type
# Each section below follows the same pattern: snapshot, create one object,
# snapshot again, verify its type letter. verify_object_class destroys both
# snapshots at the end, so the same snapshot names can be reused.
# 2. F (Regular file)
log_must zfs snapshot "$TESTSNAP1"
log_must touch "$MNTPOINT/file"
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/file" "F"
# 2. @ (Symbolic link)
log_must zfs snapshot "$TESTSNAP1"
log_must ln -s "$MNTPOINT/file" "$MNTPOINT/link"
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/link" "@"
# 2. B (Block device)
log_must zfs snapshot "$TESTSNAP1"
log_must mknod "$MNTPOINT/bdev" b $MAJOR $MINOR
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/bdev" "B"
# 2. C (Character device)
log_must zfs snapshot "$TESTSNAP1"
log_must mknod "$MNTPOINT/cdev" c $MAJOR $MINOR
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/cdev" "C"
# 2. | (Named pipe)
log_must zfs snapshot "$TESTSNAP1"
log_must mknod "$MNTPOINT/fifo" p
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/fifo" "|"
# 2. / (Directory)
log_must zfs snapshot "$TESTSNAP1"
log_must mkdir "$MNTPOINT/dir"
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/dir" "/"
# 2. = (Socket)
# The helper binary creates a UNIX-domain socket at the given path.
log_must zfs snapshot "$TESTSNAP1"
log_must $STF_SUITE/tests/functional/cli_root/zfs_diff/socket "$MNTPOINT/sock"
log_must zfs snapshot "$TESTSNAP2"
verify_object_class "$MNTPOINT/sock" "="
log_pass "'zfs diff -F' shows different object types correctly."
|
<filename>mc-runtime/src/main/java/org/n52/movingcode/runtime/processors/r/RSessionInfo.java
/**
* Copyright (C) 2012 52°North Initiative for Geospatial Open Source Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.movingcode.runtime.processors.r;
import org.rosuda.REngine.REXPMismatchException;
import org.rosuda.REngine.Rserve.RConnection;
import org.rosuda.REngine.Rserve.RserveException;
/**
 * Utility for querying version and session information from a remote R
 * session over an Rserve {@link RConnection}.
 */
public class RSessionInfo {

    /**
     * Returns the R version string (the value of
     * {@code R.version[["version.string"]]}) of the session behind the
     * given connection.
     *
     * @param rCon open Rserve connection
     * @return the version string as printed by R
     * @throws RserveException if evaluation on the server fails
     * @throws REXPMismatchException if the result cannot be read as a string
     */
    public static String getVersion(RConnection rCon) throws RserveException, REXPMismatchException {
        return getConsoleOutput(rCon, "R.version[[\"version.string\"]]");
    }

    /**
     * Returns the full {@code sessionInfo()} console output of the session
     * behind the given connection.
     *
     * <p>Made {@code static} for consistency with
     * {@link #getVersion(RConnection)}; the method uses no instance state,
     * and existing instance-style calls still compile.
     *
     * @param rCon open Rserve connection
     * @return the multi-line session info text
     * @throws RserveException if evaluation on the server fails
     * @throws REXPMismatchException if the result cannot be read as a string
     */
    public static String getSessionInfo(RConnection rCon) throws RserveException, REXPMismatchException {
        return getConsoleOutput(rCon, "sessionInfo()");
    }

    /**
     * Evaluates {@code cmd} in R and captures everything it prints to the
     * console as a single newline-joined string.
     */
    private static String getConsoleOutput(RConnection rCon, String cmd) throws RserveException, REXPMismatchException {
        return rCon.eval("paste(capture.output(print(" + cmd + ")),collapse='\\n')").asString();
    }
}
|
#!/bin/bash
# Build script: restores the project and packages the .NET Core Lambda
# function into a deployable zip.
# Install zip on Debian-based OS, since the microsoft/dotnet container
# doesn't ship zip by default.
if [[ -f /etc/debian_version ]]
then
    # Use apt-get rather than apt: apt's CLI is explicitly documented as
    # unstable for scripting and prints a warning when used in scripts.
    apt-get -qq update
    apt-get -qq -y install zip
fi
dotnet restore
dotnet lambda package --configuration release --framework netcoreapp2.1 --output-package bin/release/netcoreapp2.1/deploy-package.zip
|
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
from .models import Circle # Import the Circle model from your Django application
def circle_id_exists(circle_id):
    """Model-field validator: ensure a Circle with ``circle_id`` exists.

    Raises:
        ValidationError: if no Circle row has the given primary key.
    """
    # exists() lets the database stop at the first match, which is cheaper
    # than count() when only a boolean answer is needed.
    if not Circle.objects.filter(id=circle_id).exists():
        raise ValidationError(
            _('The circle does not exist.'),
        )
import os
from pathlib import Path
class initialize_config_dir:
    """Context manager that ensures a configuration directory exists.

    Entering the context creates ``config_dir`` (including parents) if it
    does not already exist; exiting performs no cleanup.
    """

    def __init__(self, config_dir):
        # Path of the directory to create on __enter__.
        self.config_dir = config_dir

    def __enter__(self):
        # Create the configuration directory; no error if it already exists.
        os.makedirs(self.config_dir, exist_ok=True)
        # Return self so "with initialize_config_dir(...) as ctx:" yields
        # the manager instead of None (the original returned None, which
        # made the "as" form useless).
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Nothing to clean up; returning None lets any exception propagate.
        pass
def compose(config_name, overrides):
    """Assemble a minimal configuration mapping.

    Returns a dict with the configuration name under ``'config_name'`` and
    the override mapping under ``'overrides'``.
    """
    return {'config_name': config_name, 'overrides': overrides}
# Example usage
# NOTE(review): '/path/to/config' is a placeholder — point it at a real,
# writable location before running; __enter__ creates it via os.makedirs.
config_path = Path('/path/to/config')
config_dir_name = 'my_config'
config_name = 'example_config'
overrides = {'key1': 'value1', 'key2': 'value2'}
# Create the directory, then build the configuration mapping inside the context.
with initialize_config_dir(config_dir=str(config_path / config_dir_name)):
    cfg = compose(config_name=config_name, overrides=overrides)
    print(cfg)
<filename>lib/data_import/my_drama_list/extractor/details.rb<gh_stars>0
module DataImport
  class MyDramaList
    module Extractor
      # Extracts structured metadata (titles, synopsis, dates, genres, ...)
      # from a single MyDramaList details page, parsed with Nokogiri.
      class Details
        # Maps the site's country names to internal country codes.
        COUNTRY_CODES = {
          'South Korea' => 'sk',
          'Japan' => 'jp',
          'China' => 'cn',
          'Taiwan' => 'tw',
          'Hong Kong' => 'hk',
          'Thailand' => 'th'
        }.freeze
        # Maps the site's country names to ISO 639-1 language codes.
        LANGUAGE_CODES = {
          'South Korea' => 'ko',
          'Japan' => 'ja',
          'China' => 'zh',
          'Taiwan' => 'zh',
          'Hong Kong' => 'zh',
          'Thailand' => 'th'
        }.freeze
        # Maps the site's "Type" field to internal subtype symbols.
        SUBTYPES = {
          'Drama' => :drama,
          'Movie' => :movie,
          'Drama Special' => :special
        }.freeze

        include Helpers

        attr_reader :dom

        # @param html [String] raw HTML of a MyDramaList details page
        def initialize(html)
          @dom = Nokogiri::HTML(html)
        end

        # Localized titles keyed by "<language>_<country>".
        def titles
          # The page's H1 ends with "(<year>)"; strip that suffix off.
          main_title = dom.at_css('h1').content
            .gsub("(#{start_date.try(:year)})", '').strip
          titles = {}
          titles["en_#{country}"] = main_title
          titles["#{language}_#{country}"] = details['Native title']
          titles
        end

        # Key (into #titles) of the title treated as canonical.
        def canonical_title
          "en_#{country}"
        end

        # Alternative titles, listed on the page separated by semicolons.
        def abbreviated_titles
          (details['Also Known as'] || '').split(';').map(&:strip)
        end

        # Synopsis paragraphs joined by blank lines.
        def synopsis
          # Double quotes so "\n\n" is a real blank line; the original
          # single-quoted '\n\n' inserted literal backslash-n characters.
          dom.css('.show-synopsis > p').map(&:content).join("\n\n")
        end

        # Number of episodes; the site omits the field for single releases.
        def episode_count
          (details['Episodes'] || 1).to_i
        end

        # Episode length in minutes, parsed from e.g. "1 hr. 10 min.".
        # Returns nil when the page has no parsable Duration field
        # (the original raised NoMethodError on a missing field).
        def episode_length
          str = details['Duration']
          parts = str && str.match(/(?:(?<hr>\d+) hr. )?(?<min>\d+) min./)
          return nil unless parts
          (parts['hr'].to_i * 60) + parts['min'].to_i
        end

        # :drama / :movie / :special, per SUBTYPES.
        def subtype
          SUBTYPES[details['Type']]
        end

        # Full-size poster URL derived from the cover thumbnail.
        def poster_image
          original_for dom.at_css('.cover img')['src']
        end

        # First air date (or sole release date).
        def start_date
          dates[0]
        end

        # Last air date; nil for single-date releases.
        def end_date
          dates[1]
        end

        # Internal country code, per COUNTRY_CODES.
        def country
          COUNTRY_CODES[details['Country']]
        end

        # All extracted fields as a Hash keyed by attribute name.
        def to_h
          %i[
            titles canonical_title abbreviated_titles synopsis episode_count
            episode_length subtype poster_image start_date end_date country
          ].map { |k| [k, send(k)] }.to_h
        end

        # Genre names listed on the page.
        def genres
          dom.css('.show-genres > a').map(&:content).map(&:strip)
        end

        private

        # The page's key/value detail rows as a Hash, memoized.
        def details
          @details ||= dom.css('.show-details .txt-block').map { |row|
            # Grab the header, strip the whitespace and colon
            key = row.at_css('h4').content.strip.sub(/:\z/, '')
            # This little XPath query basically queries for *all* nodes (bare
            # text included) which are not H4 tags, so we get all the content in
            # the row. Once we grab the content, we strip it of whitespace,
            # nuke the blank entries, and drop the array if it's singular.
            value = row.xpath('node()[not(self::h4)]').map(&:content)
              .map(&:strip)
            value.reject!(&:blank?)
            value = value.one? ? value.first : value
            [key, value]
          }.to_h
        end

        # [start, end] dates: a single release date, an aired range, or [].
        def dates
          if details.include? 'Release Date'
            [Date.parse(details['Release Date'])]
          elsif details.include? 'Aired'
            details['Aired'].split('to').map { |d| Date.parse(d) }
          else
            []
          end
        end

        # ISO 639-1 language code, per LANGUAGE_CODES.
        def language
          LANGUAGE_CODES[details['Country']]
        end
      end
    end
  end
end
|
"use strict";

// Generated icon-data module (CommonJS output of an ES-module build).
// Describes the "star-half" SVG glyph as a {viewBox, children} tree that
// an icon renderer turns into <svg>/<path> elements. Do not edit by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.starHalf = void 0;
var starHalf = {
  "viewBox": "0 0 896 1792",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M832 32v1339l-449 236q-22 12-40 12-21 0-31.5-14.5t-10.5-35.5q0-6 2-20l86-500-364-354q-25-27-25-48 0-37 56-46l502-73 225-455q19-41 49-41z"
    }
  }]
};
exports.starHalf = starHalf;
<gh_stars>0
from my_portfolio import views
from django.urls import path
from django.contrib import admin
# URL routes for the portfolio site: the admin site plus one view per
# top-level page. Pattern strings are unchanged from the original.
urlpatterns = [
    path('admin/', admin.site.urls, name="admin"),
    path('', views.index_view, name="home"),
    path('Experience/', views.experience_view, name="experience"),
    path('Projects/', views.projects_view, name="projects"),
    path('Certification/', views.certification_view, name="certification"),
    path('Research/', views.research_view, name="research"),
    path('Education/', views.education_view, name="education"),
]
|
#!/usr/bin/env bash
##############################################################################
## Hardening for TCP wrappers
##############################################################################
## Files modified
##
## /etc/hosts.allow
## /etc/hosts.deny
##
##############################################################################
## License
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License along
## with this program; if not, write to the Free Software Foundation, Inc.,
## 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
## References
##
## https://bitbucket.org/carlisle/hardening-ks/raw/master/centos7/c7-tcp_wrappers.cfg
##
##############################################################################
## Notes
## Not all services use tcp_wrappers.
## ldd <binary> | grep libwrap will show if its used.
## Firewall rules are more reliable. tcp_wrappers is expected to be phased out.
##
##############################################################################
#timestamp
echo "** security_hardening_tcp_wrappers.sh START" $(date +%F-%H%M-%S)
# Ensure packages are installed
yum install -y tcp_wrappers
##################
## SET VARIABLES
##################
# Directory where pristine copies of the modified files are kept, and
# where the replacement files are staged before deployment.
BACKUPDIR=/root/KShardening
#################
## BACKUP FILES
#################
# -f force, -p preserve mode/ownership/timestamps, -d don't dereference links.
if [ ! -d "${BACKUPDIR}" ]; then mkdir -p ${BACKUPDIR}; fi
/bin/cp -fpd /etc/hosts.allow ${BACKUPDIR}/hosts.allow-DEFAULT
/bin/cp -fpd /etc/hosts.deny ${BACKUPDIR}/hosts.deny-DEFAULT
####################
## WRITE NEW FILES
####################
# New files are staged in BACKUPDIR first, then copied into /etc below.
################
## hosts.allow
################
cat > ${BACKUPDIR}/hosts.allow << 'EOFALLOW'
#
# hosts.allow This file contains access rules which are used to
# allow or deny connections to network services that
# either use the tcp_wrappers library or that have been
# started through a tcp_wrappers-enabled xinetd.
#
# See 'man 5 hosts_options' and 'man 5 hosts_access'
# for information on rule syntax.
# See 'man tcpd' for information on tcp_wrappers
#
# Allow allow services to localhost
ALL: 127.0.0.1 LOCAL localhost
# Allow ssh from all ip addresses
sshd: ALL
# Allow ssh from only 192.168.0.0/16
#sshd: 192.168.
EOFALLOW
################
## hosts.deny
################
cat > ${BACKUPDIR}/hosts.deny << 'EOFDENY'
#
# hosts.deny This file contains access rules which are used to
# deny connections to network services that either use
# the tcp_wrappers library or that have been
# started through a tcp_wrappers-enabled xinetd.
#
# The rules in this file can also be set up in
# /etc/hosts.allow with a 'deny' option instead.
#
# See 'man 5 hosts_options' and 'man 5 hosts_access'
# for information on rule syntax.
# See 'man tcpd' for information on tcp_wrappers
#
# Deny all services from all ip addresses by default
ALL: ALL
#spawn command upon deny, in this case send email
#ALL: ALL : spawn (/bin/echo -e \`/bin/date\` "\n%c attempted connection to %s and was denied"\ | /bin/mail -s "%c denied to %s" admin@doman.com ) &
EOFDENY
#####################
## DEPLOY NEW FILES
#####################
# Install the staged files with root ownership and world-readable mode,
# matching the defaults for /etc/hosts.{allow,deny}.
/bin/cp -f ${BACKUPDIR}/hosts.allow /etc/hosts.allow
chown root:root /etc/hosts.allow
chmod 644 /etc/hosts.allow
/bin/cp -f ${BACKUPDIR}/hosts.deny /etc/hosts.deny
chown root:root /etc/hosts.deny
chmod 644 /etc/hosts.deny
#timestamp
echo "** security_hardening_tcp_wrappers.sh COMPLETE" $(date +%F-%H%M-%S)
|
#!/bin/bash
#
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Load the test setup defined in the parent directory
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${CURRENT_DIR}/../integration_test_setup.sh" \
|| { echo "integration_test_setup.sh not found!" >&2; exit 1; }
test_result_recorded() {
mkdir result_recorded && cd result_recorded
rm -rf fetchrepo
mkdir fetchrepo
cd fetchrepo
cat > rule.bzl <<'EOF'
def _rule_impl(ctx):
ctx.symlink(ctx.attr.build_file, "BUILD")
return {"build_file": ctx.attr.build_file, "extra_arg": "foobar"}
trivial_rule = repository_rule(
implementation = _rule_impl,
attrs = { "build_file" : attr.label() },
)
EOF
cat > ext.BUILD <<'EOF'
genrule(
name = "foo",
outs = ["foo.txt"],
cmd = "echo bar > $@",
)
EOF
touch BUILD
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "trivial_rule")
trivial_rule(
name = "ext",
build_file = "//:ext.BUILD",
)
EOF
bazel clean --expunge
bazel build --experimental_repository_resolved_file=../repo.bzl @ext//... \
> "${TEST_log}" 2>&1 || fail "Expected success"
bazel shutdown
# We expect the additional argument to be reported to the user...
expect_log 'extra_arg.*foobar'
# ...as well as the location of the definition.
expect_log 'fetchrepo/WORKSPACE:2'
# Verify that bazel can read the generated repo.bzl file and that it contains
# the expected information
cd ..
echo; cat repo.bzl; echo; echo
mkdir analysisrepo
mv repo.bzl analysisrepo
cd analysisrepo
touch WORKSPACE
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
[ genrule(
name = "out",
outs = ["out.txt"],
cmd = "echo %s > $@" % entry["repositories"][0]["attributes"]["extra_arg"],
) for entry in resolved if entry["original_rule_class"] == "//:rule.bzl%trivial_rule"
]
[ genrule(
name = "origcount",
outs = ["origcount.txt"],
cmd = "echo %s > $@" % len(entry["original_attributes"])
) for entry in resolved if entry["original_rule_class"] == "//:rule.bzl%trivial_rule"
]
EOF
bazel build :out :origcount || fail "Expected success"
grep "foobar" `bazel info bazel-genfiles`/out.txt \
|| fail "Did not find the expected value"
[ $(cat `bazel info bazel-genfiles`/origcount.txt) -eq 2 ] \
|| fail "Not the correct number of original attributes"
}
test_git_return_value() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
export GIT_CONFIG_NOSYSTEM=YES
mkdir extgit
(cd extgit && git init \
&& git config user.email 'me@example.com' \
&& git config user.name 'E X Ample' )
echo Hello World > extgit/hello.txt
(cd extgit
git add .
git commit --author="A U Thor <author@example.com>" -m 'initial commit'
git tag mytag)
# Check out the external git repository at the given tag, and record
# the return value of the git rule.
mkdir tagcheckout
cd tagcheckout
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
new_git_repository(
name="ext",
remote="file://${EXTREPODIR}/extgit/.git",
tag="mytag",
build_file_content="exports_files([\"hello.txt\"])",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
# Now add an additional commit to the upstream repository and
# force update the tag
echo CHANGED > extgit/hello.txt
(cd extgit
git add .
git commit --author="A U Thor <author@example.com>" -m 'change hello.txt'
git tag -f mytag)
# Verify that the recorded resolved information is what we expect. In
# particular, verify that we don't get the new upstream commit.
mkdir analysisrepo
cd analysisrepo
cp ../repo.bzl .
cat > workspace.bzl <<'EOF'
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
load("//:repo.bzl", "resolved")
def repo():
for entry in resolved:
if entry["original_attributes"]["name"] == "ext":
new_git_repository(**(entry["repositories"][0]["attributes"]))
EOF
cat > WORKSPACE <<'EOF'
load("//:workspace.bzl", "repo")
repo()
EOF
cat > BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
bazel build //:out
grep "Hello World" `bazel info bazel-genfiles`/out.txt \
|| fail "ext not taken at the right commit"
grep "CHANGED" `bazel info bazel-genfiles`/out.txt \
&& fail "not taking the frozen commit" || :
}
test_git_follow_branch() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
export GIT_CONFIG_NOSYSTEM=YES
mkdir extgit
(cd extgit && git init \
&& git config user.email 'me@example.com' \
&& git config user.name 'E X Ample' )
echo Hello World > extgit/hello.txt
(cd extgit
git add .
git commit --author="A U Thor <author@example.com>" -m 'initial commit')
# Check out the external git repository at the given branch, and record
# the return value of the git rule.
mkdir branchcheckout
cd branchcheckout
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
new_git_repository(
name="ext",
remote="file://${EXTREPODIR}/extgit/.git",
branch="master",
build_file_content="exports_files([\"hello.txt\"])",
)
EOF
cat > BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel build :out
grep "CHANGED" `bazel info bazel-genfiles`/out.txt \
&& fail "Unexpected content in out.txt" || :
cd ..
echo; cat repo.bzl; echo
# Now add an additional commit to the upstream repository
echo CHANGED > extgit/hello.txt
(cd extgit
git add .
git commit --author="A U Thor <author@example.com>" -m 'change hello.txt')
# First verify that `bazel sync` sees the new commit (we don't record it).
cd branchcheckout
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir
bazel build :out
grep "CHANGED" `bazel info bazel-genfiles`/out.txt \
|| fail "sync did not update the external repository"
bazel shutdown
cd ..
echo
# Verify that the recorded resolved information is what we expect. In
# particular, verify that we don't get the new upstream commit.
mkdir analysisrepo
cd analysisrepo
cp ../repo.bzl .
cat > workspace.bzl <<'EOF'
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
load("//:repo.bzl", "resolved")
def repo():
for entry in resolved:
if entry["original_attributes"]["name"] == "ext":
new_git_repository(**(entry["repositories"][0]["attributes"]))
EOF
cat > WORKSPACE <<'EOF'
load("//:workspace.bzl", "repo")
repo()
EOF
cat > BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
bazel build //:out
grep "Hello World" `bazel info bazel-genfiles`/out.txt \
|| fail "ext not taken at the right commit"
grep "CHANGED" `bazel info bazel-genfiles`/out.txt \
&& fail "not taking the frozen commit" || :
}
test_sync_follows_git_branch() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
export GIT_CONFIG_NOSYSTEM=YES
rm -f gitdir
mkdir gitdir
(cd gitdir && git init \
&& git config user.email 'me@example.com' \
&& git config user.name 'E X Ample' )
echo Hello World > gitdir/hello.txt
(cd gitdir
git add .
git commit --author="A U Thor <author@example.com>" -m 'initial commit')
echo Hello Stable World > gitdir/hello.txt
(cd gitdir
git checkout -b stable
git add .
git commit --author="A U Thor <author@example.com>" -m 'stable commit')
# Follow the stable branch of the git repository
mkdir followbranch
cat > followbranch/WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
new_git_repository(
name="ext",
remote="file://${EXTREPODIR}/gitdir/.git",
branch="stable",
build_file_content="exports_files([\"hello.txt\"])",
)
EOF
cat > followbranch/BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
(cd followbranch && bazel build :out \
&& cat `bazel info bazel-genfiles`/out.txt > "${TEST_log}")
expect_log 'Hello Stable World'
# New upstream commits on the branch followed
echo CHANGED > gitdir/hello.txt
(cd gitdir
git checkout stable
git add .
git commit --author="A U Thor <author@example.com>" -m 'stable commit')
# Verify that sync followed by build gets the correct version
(cd followbranch && bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir && bazel build :out \
&& cat `bazel info bazel-genfiles`/out.txt > "${TEST_log}")
expect_log 'CHANGED'
expect_not_log 'Hello Stable World'
}
test_http_return_value() {
# Verify that for an http_archive rule declared WITHOUT a sha256, running
# `bazel sync` with --experimental_repository_resolved_file records the
# archive's sha256 in the resolved file.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
# Build a minimal external archive a.zip containing an empty package.
mkdir -p a
touch a/WORKSPACE
touch a/BUILD
touch a/f.txt
zip a.zip a/*
# head -c 64 keeps just the hex digest from sha256sum's output.
expected_sha256="$(sha256sum "${EXTREPODIR}/a.zip" | head -c 64)"
rm -rf a
# http_archive rule doesn't specify the sha256 attribute
mkdir -p main
cat > main/WORKSPACE <<EOF
workspace(name = "main")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name="a",
strip_prefix="a",
urls=["file://${EXTREPODIR}/a.zip"],
)
EOF
touch main/BUILD
cd main
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=../repo.bzl
# The resolved file must contain the archive's actual hash.
grep ${expected_sha256} ../repo.bzl || fail "didn't return commit"
}
test_sync_calls_all() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir sync_calls_all && cd sync_calls_all
rm -rf fetchrepo
mkdir fetchrepo
rm -f repo.bzl
cd fetchrepo
cat > rule.bzl <<'EOF'
def _rule_impl(ctx):
ctx.file("foo.bzl", """
it = "foo"
other = "bar"
""")
ctx.file("BUILD", "")
return {"comment" : ctx.attr.comment }
trivial_rule = repository_rule(
implementation = _rule_impl,
attrs = { "comment" : attr.string() },
)
EOF
touch BUILD
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "trivial_rule")
trivial_rule(name = "a", comment = "bootstrap")
load("@a//:foo.bzl", "it")
trivial_rule(name = "b", comment = it)
trivial_rule(name = "c", comment = it)
load("@c//:foo.bzl", "other")
trivial_rule(name = "d", comment = other)
EOF
bazel clean --expunge
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
touch WORKSPACE
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
names = [entry["original_attributes"]["name"]
for entry in resolved
if "native" not in entry]
[
genrule(
name = name,
outs = [ "%s.txt" % (name,) ],
cmd = "echo %s > $@" % (name,),
) for name in names
]
EOF
bazel build :a :b :c :d || fail "Expected all 4 repositories to be present"
}
test_sync_call_invalidates() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir sync_call_invalidates && cd sync_call_invalidates
rm -rf fetchrepo
mkdir fetchrepo
rm -f repo.bzl
touch BUILD
cat > rule.bzl <<'EOF'
def _rule_impl(ctx):
ctx.file("BUILD", """
genrule(
name = "it",
outs = ["it.txt"],
cmd = "echo hello world > $@",
)
""")
ctx.file("WORKSPACE", "")
trivial_rule = repository_rule(
implementation = _rule_impl,
attrs = {},
)
EOF
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "trivial_rule")
trivial_rule(name = "a")
trivial_rule(name = "b")
EOF
bazel build @a//... @b//...
echo; echo sync run; echo
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
touch WORKSPACE
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
names = [entry["original_attributes"]["name"]
for entry in resolved
if "native" not in entry]
[
genrule(
name = name,
outs = [ "%s.txt" % (name,) ],
cmd = "echo %s > $@" % (name,),
) for name in names
]
EOF
bazel build :a :b || fail "Expected both repositories to be present"
}
test_sync_load_errors_reported() {
# Verify that a load() of a non-existent .bzl file in the WORKSPACE makes
# `bazel sync` fail and that the offending label is reported to the user.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
rm -rf fetchrepo
mkdir fetchrepo
cd fetchrepo
cat > WORKSPACE <<'EOF'
load("//does/not:exist.bzl", "randomfunction")
radomfunction(name="foo")
EOF
# NOTE(review): "radomfunction" above looks like a typo for
# "randomfunction", but it is harmless here — the load() fails first, and
# that failure is exactly what this test asserts on.
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir > "${TEST_log}" 2>&1 && fail "Expected failure" || :
expect_log '//does/not:exist.bzl'
}
test_sync_reporting() {
# Verify that debug and error messages in starlark functions are reported.
# Also verify that the fact that the repository is fetched is reported as well.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
rm -rf fetchrepo
mkdir fetchrepo
cd fetchrepo
cat > rule.bzl <<'EOF'
def _broken_rule_impl(ctx):
print("DEBUG-message")
fail("Failure-message")
broken_rule = repository_rule(
implementation = _broken_rule_impl,
attrs = {},
)
EOF
touch BUILD
cat >> $(create_workspace_with_default_repos WORKSPACE) <<'EOF'
load("//:rule.bzl", "broken_rule")
broken_rule(name = "broken")
EOF
bazel sync --curses=yes --experimental_ui_actions_shown=100 --distdir=${EXTREPODIR}/test_WORKSPACE/distdir > "${TEST_log}" 2>&1 && fail "expected failure" || :
expect_log 'Fetching @broken'
expect_log "DEBUG-message"
expect_log "Failure-message"
}
test_indirect_call() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
rm -rf fetchrepo
mkdir fetchrepo
cd fetchrepo
touch BUILD
cat > rule.bzl <<'EOF'
def _trivial_rule_impl(ctx):
ctx.file("BUILD","genrule(name='hello', outs=['hello.txt'], cmd=' echo hello world > $@')")
trivial_rule = repository_rule(
implementation = _trivial_rule_impl,
attrs = {},
)
EOF
cat > indirect.bzl <<'EOF'
def call(fn_name, **args):
fn_name(**args)
EOF
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "trivial_rule")
load("//:indirect.bzl", "call")
call(trivial_rule, name="foo")
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
touch WORKSPACE
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
ruleclass = "".join([entry["original_rule_class"] for entry in resolved if entry["original_attributes"]["name"]=="foo"])
genrule(
name = "ruleclass",
outs = ["ruleclass.txt"],
cmd = "echo %s > $@" % (ruleclass,)
)
EOF
bazel build //:ruleclass
cat `bazel info bazel-genfiles`/ruleclass.txt > ${TEST_log}
expect_log '//:rule.bzl%trivial_rule'
expect_not_log 'fn_name'
}
test_resolved_file_reading() {
# Verify that the option to read a resolved file instead of the WORKSPACE
# file works as expected.
EXTREPODIR=`pwd`
export GIT_CONFIG_NOSYSTEM=YES
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir extgit
(cd extgit && git init \
&& git config user.email 'me@example.com' \
&& git config user.name 'E X Ample' )
echo Hello World > extgit/hello.txt
(cd extgit
git add .
git commit --author="A U Thor <author@example.com>" -m 'initial commit'
git tag mytag)
mkdir main
cd main
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
new_git_repository(
name="ext",
remote="file://${EXTREPODIR}/extgit/.git",
branch="master",
build_file_content="exports_files([\"hello.txt\"])",
)
EOF
cat > BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=resolved.bzl
echo; cat resolved.bzl; echo
bazel clean --expunge
echo 'Do not use any more' > WORKSPACE
bazel build \
--experimental_resolved_file_instead_of_workspace=`pwd`/resolved.bzl \
:out || fail "Expected success with resolved file replacing WORKSPACE"
rm WORKSPACE && touch WORKSPACE # bazel info needs a valid WORKSPACE
grep 'Hello World' `bazel info bazel-genfiles`/out.txt \
|| fail "Did not find the expected output"
}
test_label_resolved_value() {
# Verify that label arguments in a repository rule end up in the resolved
# file in a parsable form.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir ext
echo Hello World > ext/file.txt
zip ext.zip ext/*
mkdir main
cd main
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name="ext",
strip_prefix="ext",
urls=["file://${EXTREPODIR}/ext.zip"],
build_file="@//:exit.BUILD",
)
EOF
echo 'exports_files(["file.txt"])' > exit.BUILD
cat > BUILD <<'EOF'
genrule(
name = "local",
outs = ["local.txt"],
srcs = ["@ext//:file.txt"],
cmd = "cp $< $@",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=resolved.bzl
rm WORKSPACE
touch WORKSPACE
echo; cat resolved.bzl; echo
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl \
//:local || fail "Expected success"
grep World `bazel info bazel-genfiles`/local.txt \
|| fail "target not built correctly"
}
test_resolved_file_not_remembered() {
# Verify that the --experimental_resolved_file_instead_of_workspace option
# does not leak into a subsequent sync
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
export GIT_CONFIG_NOSYSTEM=YES
rm -f gitdir
mkdir gitdir
(cd gitdir && git init \
&& git config user.email 'me@example.com' \
&& git config user.name 'E X Ample' )
echo Hello Stable World > gitdir/hello.txt
(cd gitdir
git checkout -b stable
git add .
git commit --author="A U Thor <author@example.com>" -m 'stable commit')
# The project follows the stable branch of the git repository
mkdir followbranch
cat > followbranch/WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
new_git_repository(
name="ext",
remote="file://${EXTREPODIR}/gitdir/.git",
branch="stable",
build_file_content="exports_files([\"hello.txt\"])",
)
EOF
cat > followbranch/BUILD <<'EOF'
genrule(
name = "out",
outs = ["out.txt"],
srcs = ["@ext//:hello.txt"],
cmd = "cp $< $@",
)
EOF
(cd followbranch \
&& bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=resolved.bzl)
# New upstream commits on the branch followed
echo CHANGED > gitdir/hello.txt
(cd gitdir
git checkout stable
git add .
git commit --author="A U Thor <author@example.com>" -m 'stable commit')
cd followbranch
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl :out
cat `bazel info bazel-genfiles`/out.txt > "${TEST_log}"
expect_log 'Hello Stable World'
expect_not_log 'CHANGED'
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=resolved.bzl
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl :out
cat `bazel info bazel-genfiles`/out.txt > "${TEST_log}"
expect_log 'CHANGED'
expect_not_log 'Hello Stable World'
}
create_sample_repository() {
# Create, in the current directory, a repository that creates an external
# repository `foo` containing
# - a file with fixed data, generated by ctx.file,
# - a BUILD file linked from the main repository,
# - a symlink to ., and
# - a dangling absolute and reproducible symlink.
# Used by the hash-reproducibility tests below.
touch BUILD
cat > rule.bzl <<'EOF'
def _trivial_rule_impl(ctx):
ctx.symlink(ctx.attr.build_file, "BUILD")
ctx.file("data.txt", "some data")
ctx.execute(["ln", "-s", ".", "self_link"])
ctx.execute(["ln", "-s", "/does/not/exist", "dangling"])
trivial_rule = repository_rule(
implementation = _trivial_rule_impl,
attrs = { "build_file" : attr.label() },
)
EOF
echo '# fixed contents' > BUILD.remote
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "trivial_rule")
trivial_rule(name="foo", build_file="@//:BUILD.remote")
EOF
}
test_hash_included_and_reproducible() {
# Verify that a hash of the output directory is included, that
# the hash is invariant under
# - change of the working directory, and
# - the current time.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
rm -rf fetchrepoA
mkdir fetchrepoA
cd fetchrepoA
create_sample_repository
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
touch WORKSPACE
# Extract the recorded output_tree_hash for `foo` from the resolved file.
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
hashes = [entry["repositories"][0]["output_tree_hash"]
for entry in resolved if entry["original_attributes"]["name"]=="foo"]
[genrule(
name="hash",
outs=["hash.txt"],
cmd="echo '%s' > $@" % (hash,),
) for hash in hashes]
EOF
bazel build //:hash
cp `bazel info bazel-genfiles`/hash.txt hashA.txt
cat hashA.txt > "${TEST_log}"
[ `cat hashA.txt | wc -c` -gt 2 ] \
|| fail "A hash of reasonable length expected"
bazel clean --expunge
rm repo.bzl
# Fetch the identical sample repository again from a different directory and
# at a later time; the recorded hash must come out the same.
rm -rf fetchrepoB
mkdir fetchrepoB
cd fetchrepoB
create_sample_repository
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=../repo.bzl
bazel shutdown
cd ..
echo; cat repo.bzl; echo
bazel build //:hash
cp `bazel info bazel-genfiles`/hash.txt hashB.txt
cat hashB.txt > "${TEST_log}"
diff hashA.txt hashB.txt || fail "Expected hash to be reproducible"
}
test_non_reproducibility_detected() {
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
# Verify that a non-reproducible rule is detected by hash verification
mkdir repo
cd repo
touch BUILD
cat > rule.bzl <<'EOF'
def _time_rule_impl(ctx):
ctx.execute(["bash", "-c", "date +%s > timestamp"])
time_rule = repository_rule(
implementation = _time_rule_impl,
attrs = {},
)
EOF
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "time_rule")
time_rule(name="timestamprepo")
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_resolved_file=resolved.bzl
# Re-sync verifying against the hash just recorded: the timestamp-based repo
# regenerates differently, so this sync is expected to fail.
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir --experimental_repository_hash_file=`pwd`/resolved.bzl \
--experimental_verify_repository_rules='//:rule.bzl%time_rule' \
> "${TEST_log}" 2>&1 && fail "expected failure" || :
expect_log "timestamprepo.*hash"
}
test_chain_resolved() {
# Verify that a chain of dependencies in external repositories is reflected
# in the resolved file in such a way, that the resolved file can be used.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
# Repository `rulerepo` provides the rule used to define repository `a`.
mkdir rulerepo
cat > rulerepo/rule.bzl <<'EOF'
def _rule_impl(ctx):
ctx.file("data.txt", "Hello World")
ctx.file("BUILD", "exports_files(['data.txt'])")
trivial_rule = repository_rule(
implementation = _rule_impl,
attrs = {},
)
EOF
touch rulerepo/BUILD
zip rule.zip rulerepo/*
rm -rf rulerepo
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name="rulerepo",
strip_prefix="rulerepo",
urls=["file://${EXTREPODIR}/rule.zip"],
)
load("@rulerepo//:rule.bzl", "trivial_rule")
trivial_rule(name="a")
EOF
cat > BUILD <<'EOF'
genrule(
name = "local",
srcs = ["@a//:data.txt"],
outs = ["local.txt"],
cmd = "cp $< $@",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=resolved.bzl
bazel clean --expunge
echo; cat resolved.bzl; echo
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl \
//:local || fail "Expected success"
}
test_usage_order_respected() {
# Verify that if one rule uses a file from another (without any load
# statement between), then still the resolved file is such that it can
# be used as a workspace replacement.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir datarepo
echo 'Pure data' > datarepo/data.txt
zip datarepo.zip datarepo/*
rm -rf datarepo
# `metadatarepo` holds the BUILD file that `datarepo` is given, creating a
# use-before-definition ordering in the WORKSPACE below.
mkdir metadatarepo
echo 'exports_files(["data.txt"])' > metadatarepo/datarepo.BUILD
touch metadatarepo/BUILD
zip metadatarepo.zip metadatarepo/*
rm -rf metadatarepo
mkdir main
cd main
cat > WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name="datarepo",
strip_prefix="datarepo",
urls=["file://${EXTREPODIR}/datarepo.zip"],
build_file="@metadatarepo//:datarepo.BUILD",
)
http_archive(
name="metadatarepo",
strip_prefix="metadatarepo",
urls=["file://${EXTREPODIR}/metadatarepo.zip"],
)
EOF
cat > BUILD <<'EOF'
genrule(
name = "local",
srcs = ["@datarepo//:data.txt"],
outs = ["local.txt"],
cmd = "cp $< $@",
)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=resolved.bzl
bazel clean --expunge
echo; cat resolved.bzl; echo
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl \
//:local || fail "Expected success"
}
test_order_reproducible() {
# Verify that the order of repositories in the resolved file is reproducible
# and does not depend on the parameters or timing of the actual rules.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir main
cd main
cat > rule.bzl <<'EOF'
def _impl(ctx):
ctx.execute(["/bin/sh", "-c", "sleep %s" % (ctx.attr.sleep,)])
ctx.file("data", "some test data")
ctx.file("BUILD", "exports_files(['data'])")
sleep_rule = repository_rule(
implementation = _impl,
attrs = {"sleep": attr.int()},
)
EOF
cat > BUILD <<'EOF'
load("//:repo.bzl", "resolved")
genrule(
name = "order",
outs = ["order.txt"],
cmd = ("echo '%s' > $@" %
([entry["original_attributes"]["name"] for entry in resolved],)),
)
EOF
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "sleep_rule")
sleep_rule(name="a", sleep=1)
sleep_rule(name="c", sleep=3)
sleep_rule(name="b", sleep=5)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=repo.bzl
bazel build //:order
cp `bazel info bazel-genfiles`/order.txt order-first.txt
bazel clean --expunge
# Same repositories, but with the sleep times permuted so that the rules
# finish in a different wall-clock order; the recorded order must not change.
cat > WORKSPACE <<'EOF'
load("//:rule.bzl", "sleep_rule")
sleep_rule(name="a", sleep=5)
sleep_rule(name="c", sleep=3)
sleep_rule(name="b", sleep=1)
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=repo.bzl
bazel build //:order
cp `bazel info bazel-genfiles`/order.txt order-second.txt
echo; cat order-first.txt; echo; cat order-second.txt; echo
diff order-first.txt order-second.txt \
|| fail "expected order to be reproducible"
}
test_non_starlarkrepo() {
# Verify that entries in the WORKSPACE that are not starlark repositories
# are correctly reported in the resolved file.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir local
touch local/WORKSPACE
echo Hello World > local/data.txt
echo 'exports_files(["data.txt"])' > local/BUILD
mkdir newlocal
echo Pure data > newlocal/data.txt
mkdir main
cd main
mkdir target_to_be_bound
echo More data > target_to_be_bound/data.txt
echo 'exports_files(["data.txt"])' > target_to_be_bound/BUILD
# Exercise the three native (non-Starlark) workspace entries:
# local_repository, new_local_repository, and bind.
cat > WORKSPACE <<'EOF'
local_repository(name="thisislocal", path="../local")
new_local_repository(name="newlocal", path="../newlocal",
build_file_content='exports_files(["data.txt"])')
bind(name="bound", actual="//target_to_be_bound:data.txt")
EOF
cat > BUILD <<'EOF'
genrule(
name = "it",
srcs = ["@thisislocal//:data.txt", "@newlocal//:data.txt",
"//external:bound"],
outs = ["it.txt"],
cmd = "cat $(SRCS) > $@",
)
EOF
bazel build //:it || fail "Expected success"
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=resolved.bzl
echo > WORKSPACE # remove workspace, only work from the resolved file
bazel clean --expunge
echo; cat resolved.bzl; echo
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl \
//:it || fail "Expected success"
}
test_hidden_symbols() {
# Verify that the resolved file can be used for building, even if it
# legitimately contains a private symbol
mkdir main
cd main
cat > BUILD <<'EOF'
genrule(
name = "it",
srcs = ["@foo//:data.txt"],
outs = ["it.txt"],
cmd = "cp $< $@",
)
EOF
# _THE_DATA and _repo are private to repo.bzl; the resolved file must still
# capture the repository they produce.
cat > repo.bzl <<'EOF'
_THE_DATA="42"
def _data_impl(ctx):
ctx.file("BUILD", "exports_files(['data.txt'])")
ctx.file("data.txt", ctx.attr.data)
_repo = repository_rule(
implementation = _data_impl,
attrs = { "data" : attr.string() },
)
def data_repo(name):
_repo(name=name, data=_THE_DATA)
EOF
cat > WORKSPACE <<'EOF'
load("//:repo.bzl", "data_repo")
data_repo("foo")
EOF
bazel build --experimental_repository_resolved_file=resolved.bzl //:it
echo > WORKSPACE # remove workspace, only work from the resolved file
bazel clean --expunge
echo; cat resolved.bzl; echo
bazel build --experimental_resolved_file_instead_of_workspace=resolved.bzl \
//:it || fail "Expected success"
}
test_toolchain_recorded() {
# Verify that the registration of toolchains and execution platforms is
# recorded in the resolved file
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
# The external repository only registers a toolchain and a platform.
mkdir ext
touch ext/BUILD
cat > ext/toolchains.bzl <<'EOF'
def ext_toolchains():
native.register_toolchains("@ext//:toolchain")
native.register_execution_platforms("@ext//:platform")
EOF
tar cvf ext.tar ext
rm -rf ext
mkdir main
cd main
cat >> WORKSPACE <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name="ext",
strip_prefix="ext",
urls=["file://${EXTREPODIR}/ext.tar"],
)
load("@ext//:toolchains.bzl", "ext_toolchains")
ext_toolchains()
EOF
touch BUILD
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=resolved.bzl
echo; cat resolved.bzl; echo
grep 'register_toolchains.*ext//:toolchain' resolved.bzl \
|| fail "tool chain not registered in resolved file"
grep 'register_execution_platforms.*ext//:platform' resolved.bzl \
|| fail "execution platform not registered in resolved file"
}
test_local_config_platform_recorded() {
# Verify that the auto-generated local_config_platform repo is
# recorded in the resolved file
mkdir main
cd main
cat >> WORKSPACE <<EOF
EOF
cat > BUILD <<'EOF'
genrule(
name = "it",
srcs = ["data.txt"],
outs = ["it.txt"],
cmd = "cp $< $@",
)
EOF
touch data.txt
# Turn on the new host platforms so the repository is used.
bazel build \
--incompatible_auto_configure_host_platform \
--experimental_repository_resolved_file=resolved.bzl \
-- \
//:it
echo; cat resolved.bzl; echo
# Fixed failure message: it previously read "local_config_platform in
# resolved file", describing success instead of the failure.
grep 'local_config_platform' resolved.bzl \
|| fail "local_config_platform not found in resolved file"
}
test_definition_location_recorded() {
# Verify that for Starlark repositories the location of the definition
# is recorded in the resolved file.
EXTREPODIR=`pwd`
tar xvf ${TEST_SRCDIR}/test_WORKSPACE_files/archives.tar
mkdir ext
touch ext/BUILD
tar cvf ext.tar ext
rm -rf ext
mkdir main
cd main
touch BUILD
# Define the repository through a two-level load chain so the recorded
# definition stack contains WORKSPACE, foo.bzl, and bar.bzl locations.
mkdir -p first/path
cat > first/path/foo.bzl <<'EOF'
load("//:another/directory/bar.bzl", "bar")
def foo():
bar()
EOF
mkdir -p another/directory
cat > another/directory/bar.bzl <<EOF
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def bar():
http_archive(
name = "ext",
url = "file://${EXTREPODIR}/ext.tar",
)
EOF
cat > WORKSPACE <<'EOF'
load("//:first/path/foo.bzl", "foo")
foo()
EOF
bazel sync --distdir=${EXTREPODIR}/test_WORKSPACE/distdir \
--experimental_repository_resolved_file=resolved.bzl
echo; cat resolved.bzl; echo
cat > BUILD <<'EOF'
load("//:finddef.bzl", "finddef")
genrule(
name = "ext_def",
outs = ["ext_def.txt"],
cmd = "echo '%s' > $@" % (finddef("ext"),),
)
EOF
cat > finddef.bzl <<'EOF'
load("//:resolved.bzl", "resolved")
def finddef(name):
for repo in resolved:
if repo["original_attributes"]["name"] == name:
return repo["definition_information"]
EOF
bazel build //:ext_def
cat `bazel info bazel-genfiles`/ext_def.txt > "${TEST_log}"
expect_log "WORKSPACE:3"
expect_log "first/path/foo.bzl:4"
expect_log "another/directory/bar.bzl:4"
}
run_suite "workspace_resolved_test tests"
|
import numpy as np
import pytest
from src.rdppy import filter as rdp
# Ramer-Douglas-Peucker fixtures: "input" is a polyline, "threshold" the
# simplification tolerance, "output" the expected per-point keep-mask
# (True = the point survives simplification).
test_cases = [
{
"input": [],
"threshold": 0.0,
"output": [],
},
{
"input": [(0, 0)],
"threshold": 10.0,
"output": [True],
},
{
"input": [(0, 0), (4, 0)],
"threshold": 10.0,
"output": [True, True],
},
{
"input": [(0, 0), (4, 0), (3, 1)],
"threshold": 10.0,
"output": [True, False, True],
},
{
"input": [(0, 0), (4, 0), (3, 1)],
"threshold": 1.4,
"output": [True, True, True],
},
{
"input": [(0, 0), (4, 0), (3, 1)],
"threshold": 1.5,
"output": [True, False, True],
},
{
"input": [
(0, 0),
(4, 0),
(0, 1),
(1, 1),
(1, 2),
(2, 2),
(2, 3),
(3, 3),
(3, 4),
(5, 4),
],
"threshold": 0.3,
"output": [True, True, True, True, True, True, True, True, True, True],
},
{
"input": [
(0, 0),
(4, 0),
(0, 1),
(1, 1),
(1, 2),
(2, 2),
(2, 3),
(3, 3),
(3, 4),
(5, 4),
],
"threshold": 0.9,
"output": [True, True, True, False, False, False, False, False, True, True],
},
{
"input": [
(0, 0),
(4, 0),
(0, 1),
(1, 1),
(1, 2),
(2, 2),
(2, 3),
(3, 3),
(3, 4),
(5, 4),
],
"threshold": 1.1,
"output": [True, True, True, False, False, False, False, False, False, True],
},
{
"input": [(0, 0), (1, 0), (0, 0)],
"threshold": 0.5,
"output": [True, True, True],
},
{
"input": [(0, 0), (1, 0), (0, 0)],
"threshold": 1.5,
"output": [True, False, True],
},
]
@pytest.mark.parametrize(
    "points,threshold,expected",
    [(d["input"], d["threshold"], d["output"]) for d in test_cases],
)
def test_rdp(points, threshold, expected):
    """RDP keeps exactly the points whose expected mask entry is True.

    The first parameter was renamed from ``input``, which shadowed the
    builtin of the same name.
    """
    points = np.array(points)
    expected = np.array(expected)
    assert np.all(rdp(points, threshold) == expected)
|
#!/bin/bash
# Fetch the MOOSE framework and initialize only the libmesh submodule.
# Abort immediately if any step fails; previously a failed `cd /app` or
# clone was ignored and the remaining commands ran against the wrong tree.
set -e
cd /app
git clone https://github.com/idaholab/moose.git --verbose
cd moose
git checkout master
git submodule init libmesh
git submodule update --recursive libmesh
|
import numpy as np
class Mesh:
    """Cubic mesh of side `size` that accumulates particle charge density."""

    def __init__(self, size):
        self.size = size
        # charge_density[x, y, z] holds the total deposited charge per node.
        self.charge_density = np.zeros((size, size, size))

    def weight_particles_charge_to_mesh(self, particle_arrays):
        """Deposit charge * weight of every particle onto its mesh node.

        Raises ValueError if any particle position lies outside the mesh.
        Positions are assumed to be integer node coordinates — TODO confirm
        against the caller.
        """
        for particles in particle_arrays:
            for (x, y, z), weight in zip(particles.positions, particles.weights):
                inside = (0 <= x < self.size
                          and 0 <= y < self.size
                          and 0 <= z < self.size)
                if not inside:
                    raise ValueError("Position is out of meshgrid bounds")
                self.charge_density[x, y, z] += particles.charge * weight
<reponame>ccrpjournal/clinicalresearch
/*
* Copyright (c) 2017 Public Library of Science
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
//var s;
(function ($) {
  // Global Foundation settings; override points are documented at
  // http://foundation.zurb.com/docs/javascript.html#configure-on-the-fly
  $(document).foundation({
    // Tooltips
    tooltip: {
      'wrap': 'word',
      // 'disable_for_touch': 'true',
      tip_template: function (selector, content) {
        return '<span data-selector="' + selector + '" class="'
            + Foundation.libs.tooltip.settings.tooltip_class.substring(1)
            + '">' + content + '</span>';
      }
    },
    // reveal is used for the figure viewer modal
    reveal: {
      animation: false
    }
  });

  // Fallback for browsers without native `required` support: reject empty
  // search form submissions by hand.
  var searchCheck = function () {
    if (!Modernizr.input.required) {
      $("form[name='searchForm']").submit(function () {
        var searchTerm = $("#search").val();
        if (!searchTerm) return false;
      });
    }
  };

  $(document).ready(function () {
    // searchCheck is called purely for its side effect; its (undefined)
    // result was previously stored in an unused variable.
    searchCheck();
    // hover delay for menu
    hover_delay.init();
    // placeholder style change
    placeholder_style.init();
    // initialize tooltip_hover for everything
    tooltip_hover.init();
  });
})(jQuery);
// NOTE(review): both strings were UTF-8 French text mis-decoded through a
// Thai code page (e.g. "รฉ" for "é", "โ" for "—"); restored here.
const title = 'EZcomments — Ajoutez des commentaires à votre site en un éclair.';
const description = 'Version Pro gratuite pendant 14 jours, annulable à tout moment.';

// Default SEO configuration: canonical URL plus Open Graph metadata
// (presumably consumed by next-seo's DefaultSeo — confirm at the call site).
const SEO = {
  title,
  description,
  canonical: 'https://ezcomments.vercel.app',
  openGraph: {
    type: 'website',
    locale: 'fr_FR',
    url: 'https://ezcomments.vercel.app',
    title,
    description,
    images: [
      {
        url: 'https://ezcomments.vercel.app/og.png',
        alt: title,
        width: 1280,
        height: 720
      }
    ]
  }
};

export default SEO;
# Script to safely checkout to different branch
# Usage: /bin/bash change-branch.sh <branch-name>
target="$1"
THISFILE=${BASH_SOURCE[0]}
DIR="$( cd "$( dirname "${THISFILE}" )" && pwd -P )"
# Provides the `red`/`green` helpers; presumably it also opens fd 3 used by
# the `2>&3` redirections below — confirm in libs/colours.sh.
source "$DIR/../libs/colours.sh";
if [ "$#" -lt 1 ] ; then
{ red "\n[error] At least one argument expected, like: \n\n /bin/bash $0 \"branch-name\" \n"; } 2>&3
exit 1;
fi
# Guard script; presumably aborts or warns when there are uncommitted changes.
/bin/bash $DIR/is-commited.sh
git checkout $target;
# git checkout may fail silently here (no set -e), so verify the result.
if [ "$(git rev-parse --abbrev-ref HEAD)" != $target ]; then
{ red "[error] checkout to '$target' - failed"; } 2>&3
exit 1
fi
{ green "[ok] checkout to '$target' - success"; } 2>&3
class Anagram():
def __init__(self, string):
self.string = string
def find_anagrams(self):
result = set()
for i in range(len(self.string)):
for j in range(i + 1, len(self.string) + 1):
result.add("".join(sorted(self.string[i:j])))
return list(result) |
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# NOTE(review): auto-generated file. The script deliberately exits right
# below because it was generated on Windows: the PATH entries that follow
# still contain Windows-style ';' separators and drive letters and must be
# fixed by hand before the early `exit` is removed.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit
if [ -z "$PATH" ]; then
PATH=C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin
else
PATH=C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
LD_LIBRARY_PATH=
else
LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
HD_PWD='C:/Users/PC/Documents/VUT/Digitlnelektronika1/Cvika/Digital-electronics-1-Project/Door lock system/Door lock system.runs/impl_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
# Runs one implementation step through ISEWrap.sh, appending to the run log;
# aborts the whole script on the first failing step.
EAStep()
{
$ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
if [ $? -ne 0 ]
then
exit
fi
}
# pre-commands:
/bin/touch .init_design.begin.rst
EAStep vivado -log top.vdi -applog -m64 -product Vivado -messageDb vivado.pb -mode batch -source top.tcl -notrace
|
<filename>OpenBCI_GUI/libraries/controlP5/src/controlP5/ControllerLayoutElement.java
package controlP5;
/**
* controlP5 is a processing gui library.
*
* 2006-2015 by <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*
* @author <NAME> (http://www.sojamo.de)
* @modified 04/14/2016
* @version 2.2.6
*
*/
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Serializable snapshot of one controller inside a saved layout.
 *
 * <p>Stores the controller's concrete class so the controller can be
 * re-created when a layout is loaded; the live controller reference itself
 * is transient and therefore not written during serialization.
 */
class ControllerLayoutElement implements Serializable, Cloneable {

	private static final long serialVersionUID = -5006855922546529005L;

	// Not serialized: the live controller is re-bound after deserialization.
	private transient ControllerInterface<?> controller;

	private Class<?> type;

	// NOTE(review): populated empty and never read in this class — presumably
	// a property bag for layout attributes filled elsewhere; confirm usage.
	private Map<String,Object> values;

	ControllerLayoutElement(ControllerInterface<?> theController) {
		controller = theController;
		type = theController.getClass();
		values = new HashMap<String,Object>();
	}

	// NOTE(review): empty and never called here — looks like a placeholder
	// for recursively applying layout state; confirm before removing.
	private void cascade(Object theObject) {
	}
}
package com.example.lsireneva.todoapp.models;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import com.example.lsireneva.todoapp.activities.MainActivity;
import java.util.ArrayList;
/**
* Created by <NAME>
*/
/**
 * Thin SQLite persistence layer for to-do tasks.
 *
 * <p>Call {@link #connectTodoDB()} before any other operation and
 * {@link #closeTodoDB()} when finished; all reads and writes go through the
 * single writable database handle obtained at connect time. Not thread-safe.
 */
public class TodoDatabase {
    private SQLiteDatabase sqlTodoDB;
    private TodoDatabaseHelper dbHelper;
    private Context context;

    private static final String TABLE_TODOTASK = "todoTaskTable";
    private static final String DATABASE_NAME = "todoDatabase";
    private static final int DATABASE_VERSION = 4;

    // TodoTaskTable Columns
    private static final String KEY_TASK_ID = "id";
    private static final String KEY_TASKNAME_TEXT = "taskName";
    private static final String KEY_TASKPRIORITYLEVEL = "taskPriorityLevel";
    private static final String KEY_TASKDUEDATE_LONG = "taskDueDate";
    private static final String KEY_TASKNOTES_TEXT = "taskNotes";
    private static final String KEY_TASKSTATUS_TEXT = "taskStatus";

    public TodoDatabase(Context context) {
        this.context = context;
    }

    /** Opens (creating or upgrading if necessary) the writable database. */
    public void connectTodoDB() {
        dbHelper = new TodoDatabaseHelper(context, DATABASE_NAME, null, DATABASE_VERSION);
        sqlTodoDB = dbHelper.getWritableDatabase();
    }

    /** Closes the helper and its underlying database handle. */
    public void closeTodoDB() {
        dbHelper.close();
    }

    /**
     * Inserts a task. The primary key is intentionally omitted from the
     * ContentValues: SQLite auto-increments the INTEGER PRIMARY KEY column.
     */
    public void writeTasks(MainActivity.ToDoTask toDoTask) {
        Log.d("TODOAPP", "writeTasks = " + toDoTask.taskName);
        Log.d("TODOAPP", "writeTasks = " + toDoTask.taskDueDate);
        Log.d("TODOAPP", "writeTasks = " + toDoTask.taskNotes);
        Log.d("TODOAPP", "writeTasks = " + toDoTask.taskPriority);
        Log.d("TODOAPP", "writeTasks = " + toDoTask.taskStatus);
        ContentValues values = new ContentValues();
        sqlTodoDB.beginTransaction();
        try {
            values.put(KEY_TASKNAME_TEXT, toDoTask.taskName);
            values.put(KEY_TASKDUEDATE_LONG, toDoTask.taskDueDate);
            values.put(KEY_TASKNOTES_TEXT, toDoTask.taskNotes);
            values.put(KEY_TASKPRIORITYLEVEL, toDoTask.taskPriority);
            values.put(KEY_TASKSTATUS_TEXT, toDoTask.taskStatus);
            sqlTodoDB.insert(TABLE_TODOTASK, null, values);
            sqlTodoDB.setTransactionSuccessful();
        } catch (Exception e) {
            Log.d("TODOAPP", "Error while trying to add task to database");
        } finally {
            sqlTodoDB.endTransaction();
        }
    }

    /** Updates every column of the task row identified by {@code taskID}. */
    public void updateTask(MainActivity.ToDoTask toDoTask) {
        Log.d("TODOAPP", "updateTask");
        Log.d("TODOAPP", "updateTask ID=" + toDoTask.taskID);
        Log.d("TODOAPP", "updateTask Name=" + toDoTask.taskName);
        ContentValues values = new ContentValues();
        sqlTodoDB.beginTransaction();
        try {
            values.put(KEY_TASKNAME_TEXT, toDoTask.taskName);
            values.put(KEY_TASKDUEDATE_LONG, toDoTask.taskDueDate);
            values.put(KEY_TASKNOTES_TEXT, toDoTask.taskNotes);
            values.put(KEY_TASKPRIORITYLEVEL, toDoTask.taskPriority);
            values.put(KEY_TASKSTATUS_TEXT, toDoTask.taskStatus);
            // Which row to update, based on the ID
            String selection = KEY_TASK_ID + " LIKE ?";
            String[] selectionArgs = {String.valueOf(toDoTask.taskID)};
            sqlTodoDB.update(
                    TABLE_TODOTASK,
                    values,
                    selection,
                    selectionArgs);
            sqlTodoDB.setTransactionSuccessful();
        } catch (Exception e) {
            Log.d("TODOAPP", "Error while trying to update task to database");
        } finally {
            sqlTodoDB.endTransaction();
        }
    }

    /** Deletes the task row identified by {@code taskID}. */
    public void deleteTask(MainActivity.ToDoTask toDoTask) {
        Log.d("TODOAPP", "deleteTask");
        Log.d("TODOAPP", "deleteTask ID=" + toDoTask.taskID);
        Log.d("TODOAPP", "deleteTask Name=" + toDoTask.taskName);
        sqlTodoDB.beginTransaction();
        try {
            // Which row to delete, based on the ID
            String selection = KEY_TASK_ID + " LIKE ?";
            String[] selectionArgs = {String.valueOf(toDoTask.taskID)};
            sqlTodoDB.delete(TABLE_TODOTASK, selection, selectionArgs);
            sqlTodoDB.setTransactionSuccessful();
        } catch (Exception e) {
            Log.d("TODOAPP", "Error while trying to delete task from database");
        } finally {
            sqlTodoDB.endTransaction();
        }
    }

    /**
     * Returns all tasks, ordered High, Medium, Low by priority.
     *
     * <p>On any read error, the tasks collected so far are returned.
     */
    public ArrayList<MainActivity.ToDoTask> getAllTasks() {
        Log.d("TODOAPP", "getAllTasks()");
        ArrayList<MainActivity.ToDoTask> tasks = new ArrayList<>();
        // CASE expression maps the textual priority onto a sortable rank.
        String query = "SELECT * " + "from " + TABLE_TODOTASK + " WHERE " + KEY_TASKPRIORITYLEVEL + " IN ('High', 'Medium','Low') ORDER BY CASE " + KEY_TASKPRIORITYLEVEL +
                " WHEN 'High' THEN 1 WHEN 'Medium' THEN 2 WHEN 'Low' THEN 3 END," + KEY_TASKPRIORITYLEVEL + "; ";
        Cursor cursor = sqlTodoDB.rawQuery(query, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    MainActivity.ToDoTask newTask = new MainActivity.ToDoTask();
                    // Use getColumnIndexOrThrow consistently for every column
                    // (the ID lookup previously used getColumnIndex, which
                    // silently yields -1 on a missing column).
                    newTask.taskID = cursor.getInt(cursor.getColumnIndexOrThrow(KEY_TASK_ID));
                    newTask.taskName = cursor.getString(cursor.getColumnIndexOrThrow(KEY_TASKNAME_TEXT));
                    newTask.taskDueDate = cursor.getLong(cursor.getColumnIndexOrThrow(KEY_TASKDUEDATE_LONG));
                    newTask.taskNotes = cursor.getString(cursor.getColumnIndexOrThrow(KEY_TASKNOTES_TEXT));
                    newTask.taskPriority = cursor.getString(cursor.getColumnIndexOrThrow(KEY_TASKPRIORITYLEVEL));
                    newTask.taskStatus = cursor.getString(cursor.getColumnIndexOrThrow(KEY_TASKSTATUS_TEXT));
                    Log.d("TODOAPP", "newTask.taskID=" + newTask.taskID);
                    Log.d("TODOAPP", "newTask.taskName=" + newTask.taskName);
                    Log.d("TODOAPP", "newTask.taskDueDate=" + newTask.taskDueDate);
                    Log.d("TODOAPP", "newTask.taskNotes=" + newTask.taskNotes);
                    Log.d("TODOAPP", "newTask.taskPriority=" + newTask.taskPriority);
                    Log.d("TODOAPP", "newTask.taskSTATUS=" + newTask.taskStatus);
                    tasks.add(newTask);
                } while (cursor.moveToNext());
            }
        } catch (Exception e) {
            Log.d("TODOAPP", "Error while trying to get tasks from database");
        } finally {
            if (cursor != null && !cursor.isClosed()) {
                cursor.close();
            }
        }
        return tasks;
    }

    /** Creates the task table; drops and recreates it on any version change. */
    public static class TodoDatabaseHelper extends SQLiteOpenHelper {
        public TodoDatabaseHelper(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) {
            super(context, name, factory, version);
        }

        @Override
        public void onCreate(SQLiteDatabase db) {
            Log.d("TODOAPP", "--- onCreate database ---");
            String CREATE_TODOAPP_TABLE = "CREATE TABLE " + TABLE_TODOTASK +
                    "(" +
                    KEY_TASK_ID + " INTEGER PRIMARY KEY," + // Define a primary key
                    KEY_TASKNAME_TEXT + " TEXT," +
                    KEY_TASKDUEDATE_LONG + " INTEGER," +
                    KEY_TASKNOTES_TEXT + " TEXT," +
                    KEY_TASKPRIORITYLEVEL + " TEXT," +
                    KEY_TASKSTATUS_TEXT + " TEXT" +
                    ")";
            db.execSQL(CREATE_TODOAPP_TABLE);
        }

        @Override
        public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            Log.d("TODOAPP", "--- onUpgrade ---");
            // Destructive migration: existing tasks are lost on upgrade.
            if (oldVersion != newVersion) {
                db.execSQL("DROP TABLE IF EXISTS " + TABLE_TODOTASK);
                onCreate(db);
            }
        }
    }
}
|
<html>
<head>
<title>My page</title>
</head>
<body>
<div class="auth-info">
<!-- Authentication information -->
</div>
<nav class="menu">
<!-- Navigation menu -->
</nav>
<footer class="copyright">
<p>Copyright © 2020 My Company Name</p>
</footer>
</body>
</html> |
<reponame>fsancheztemprano/chess-lite<gh_stars>0
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { TestBed } from '@angular/core/testing';
import { stubAdministrationServiceProvider } from '../../../services/administration.service.stub';
import { RoleManagementService } from './role-management.service';
describe('RoleManagementService', () => {
let service: RoleManagementService;
beforeEach(() => {
// HttpClientTestingModule intercepts HTTP calls and the administration
// service is replaced by its stub, keeping the test fully isolated.
TestBed.configureTestingModule({
imports: [HttpClientTestingModule],
providers: [stubAdministrationServiceProvider],
});
service = TestBed.inject(RoleManagementService);
});
// Smoke test: the service can be constructed from the testing injector.
it('should be created', () => {
expect(service).toBeTruthy();
});
});
|
package playlists
import (
"regexp"
"strings"
"github.com/liampulles/banger/pkg/library"
)
var Registered map[string]PlaylistCollector = map[string]PlaylistCollector{
"post-punk": PostPunk,
"dummy": Dummy,
"nineties-alternative": NinetiesAlternative,
}
// alphanumeric matches every run of characters that are not ASCII letters
// or digits. MustCompile replaces Compile-with-ignored-error: a bad pattern
// now panics at init instead of surfacing later as a nil-pointer call.
var alphanumeric = regexp.MustCompile("[^a-zA-Z0-9]+")

// PlaylistCollector selects the tracks belonging to one playlist.
type PlaylistCollector func([]library.Track) []library.Track

// toRegular canonicalises a string for loose comparison: it strips all
// non-alphanumeric characters and upper-cases the rest.
func toRegular(in string) string {
	regular := alphanumeric.ReplaceAllString(in, "")
	return strings.ToUpper(regular)
}
|
-- Return the single row with the greatest value of `field`.
-- NOTE(review): `table` is a reserved word in most SQL dialects — quote it
-- or substitute the real table name before running.
SELECT *
FROM table
ORDER BY field DESC
LIMIT 1;
#!/usr/bin/env bash
# Generate all derived sources (schema, types, node-type, finder), then
# compile the whole project with the TypeScript build-mode compiler.
npm run build:schema
npm run build:types
npm run build:node-type
npm run build:finder
tsc -b
|
from typing import List
class TaskManager:
    """Ordered collection of tasks stored as (description, done) tuples."""

    def __init__(self):
        # Each element is (description: str, completed: bool).
        self.tasks = []

    def add_task(self, description: str):
        """Append a new, uncompleted task; reject empty descriptions."""
        if not description:
            print("Error: Task description cannot be empty.")
            return
        self.tasks.append((description, False))

    def complete_task(self, task_index: int):
        """Mark the task at 0-based task_index as done."""
        if 0 <= task_index < len(self.tasks):
            description, _ = self.tasks[task_index]
            self.tasks[task_index] = (description, True)
        else:
            print("Error: Invalid task index.")

    def list_tasks(self) -> List[str]:
        """Render every task as '[x] 1. desc' / '[ ] 1. desc' (1-based)."""
        rendered = []
        for number, (description, done) in enumerate(self.tasks, start=1):
            marker = "[x]" if done else "[ ]"
            rendered.append(f"{marker} {number}. {description}")
        return rendered

    def remove_task(self, task_index: int):
        """Delete the task at 0-based task_index."""
        if 0 <= task_index < len(self.tasks):
            del self.tasks[task_index]
        else:
            print("Error: Invalid task index.")
def main():
    """Run a tiny task-manager REPL: add / complete / list / remove / exit."""
    task_manager = TaskManager()
    while True:
        command = input("Enter command: ").split()
        # Bug fix: a blank input line produced an empty list, and command[0]
        # raised IndexError; treat it as an invalid command instead.
        if not command:
            print("Error: Invalid command.")
            continue
        if command[0] == "add":
            if len(command) > 1:
                task_manager.add_task(" ".join(command[1:]))
            else:
                print("Error: Task description cannot be empty.")
        elif command[0] == "complete":
            if len(command) > 1:
                try:
                    # User commands are 1-based; TaskManager is 0-based.
                    task_index = int(command[1]) - 1
                    task_manager.complete_task(task_index)
                except ValueError:
                    print("Error: Invalid task index.")
            else:
                print("Error: Please provide a task index.")
        elif command[0] == "list":
            for task in task_manager.list_tasks():
                print(task)
        elif command[0] == "remove":
            if len(command) > 1:
                try:
                    task_index = int(command[1]) - 1
                    task_manager.remove_task(task_index)
                except ValueError:
                    print("Error: Invalid task index.")
            else:
                print("Error: Please provide a task index.")
        elif command[0] == "exit":
            break
        else:
            print("Error: Invalid command.")
if __name__ == "__main__":
main() |
<filename>src/store/reducers/http.js
import ACTIONTYPE from '../action.types'
// Reducer for HTTP state: merge the payload for plain http actions,
// replace the whole state on reset, otherwise return the state unchanged.
export function http(state = {}, action) {
  if (action.type === ACTIONTYPE.http) {
    return Object.assign({}, state, action.payload)
  }
  if (action.type === ACTIONTYPE.resetHttp) {
    return action.payload
  }
  return state
}
<reponame>DevTeamSolv/solv-boilerplate
/*
* Sale Messages
*
* This contains all the text for the Sale component.
*/
import { defineMessages } from 'react-intl';
export default defineMessages({
  // "Token Sale" heading for the Sale container.
  header: {
    id: 'app.containers.Sale.header',
    defaultMessage: 'Token Sale',
  },
  // Presale announcement text.
  coming: {
    id: 'app.containers.Sale.coming',
    defaultMessage: 'Presale Coming Soon',
  },
  // Call-to-action label.
  join: {
    id: 'app.containers.Sale.join',
    defaultMessage: 'Join The Solution',
  },
});
|
#!/bin/bash -e
# Emit a syslog marker for the TAF snap's device-service deployment step.
logger "INFO:snap-TAF: deploy-device-service"
#!/usr/bin/env bash
# Launch distributed testing via torch.distributed.launch.
#
# Usage: dist_test.sh <config> <checkpoint> <num_gpus> [extra test.py args...]
# PORT may be overridden via the environment (default 29500).

# Allow PYTHON to be overridden from the environment; fall back to the
# original hard-coded interpreter path for backward compatibility.
PYTHON=${PYTHON:-/home/detao//anaconda3/bin/python}
CONFIG=$1
CHECKPOINT=$2
GPUS=$3
PORT=${PORT:-29500}

# Quote all expansions so configs/checkpoints with spaces and extra
# arguments ("${@:4}") are forwarded intact.
$PYTHON -m torch.distributed.launch --nproc_per_node="$GPUS" --master_port="$PORT" \
    "$(dirname "$0")"/test.py "$CONFIG" "$CHECKPOINT" --launcher pytorch "${@:4}"
# Guard against a partially upgraded installation: every powerlevel10k file
# must carry the same internal version stamp (12).
if [[ $__p9k_sourced != 12 ]]; then
  >&2 print -P ""
  >&2 print -P "[%F{1}ERROR%f]: Corrupted powerlevel10k installation."
  >&2 print -P ""
  if (( ${+functions[antigen]} )); then
    # Typo fix: "folowing" -> "following".
    >&2 print -P "If using %Bantigen%b, run the following command to fix:"
    >&2 print -P ""
    >&2 print -P " %F{2}antigen%f reset"
    if [[ -d ~/.antigen ]]; then
      >&2 print -P ""
      >&2 print -P "If it doesn't help, try this:"
      >&2 print -P ""
      >&2 print -P " %F{2}rm%f -rf %U~/.antigen%u"
    fi
  else
    >&2 print -P "Try resetting cache in your plugin manager or"
    >&2 print -P "reinstalling powerlevel10k from scratch."
  fi
  >&2 print -P ""
  return 1
fi
# Abort unless the running zsh is at least 5.1, printing diagnostics that help
# the user find and switch to a new-enough zsh.
if ! autoload -Uz is-at-least || ! is-at-least 5.1; then
  # Anonymous function keeps the helper locals out of the global scope.
  () {
    >&2 echo -E "You are using ZSH version $ZSH_VERSION. The minimum required version for Powerlevel10k is 5.1."
    >&2 echo -E "Type 'echo \$ZSH_VERSION' to see your current zsh version."
    local def=${SHELL:c:A}
    local cur=${${ZSH_ARGZERO#-}:c:A}
    local cur_v="$($cur -c 'echo -E $ZSH_VERSION' 2>/dev/null)"
    if [[ $cur_v == $ZSH_VERSION && $cur != $def ]]; then
      >&2 echo -E "The shell you are currently running is likely $cur."
    fi
    # Look for a `zsh` on $PATH that is new enough and suggest switching.
    local other=${${:-zsh}:c}
    if [[ -n $other ]] && $other -c 'autoload -Uz is-at-least && is-at-least 5.1' &>/dev/null; then
      local other_v="$($other -c 'echo -E $ZSH_VERSION' 2>/dev/null)"
      if [[ -n $other_v && $other_v != $ZSH_VERSION ]]; then
        >&2 echo -E "You have $other with version $other_v but this is not what you are using."
        if [[ -n $def && $def != ${other:A} ]]; then
          >&2 echo -E "To change your user shell, type the following command:"
          >&2 echo -E ""
          # Register the shell in /etc/shells first if it is not listed yet.
          if [[ "$(grep -F $other /etc/shells 2>/dev/null)" != $other ]]; then
            >&2 echo -E " echo ${(q-)other} | sudo tee -a /etc/shells"
          fi
          >&2 echo -E " chsh -s ${(q-)other}"
        fi
      fi
    fi
  }
  return 1
fi
# Load the remaining pieces of the powerlevel10k implementation.
builtin source "${__p9k_root_dir}/internal/configure.zsh"
builtin source "${__p9k_root_dir}/internal/worker.zsh"
builtin source "${__p9k_root_dir}/internal/parser.zsh"
builtin source "${__p9k_root_dir}/internal/icons.zsh"
# For compatibility with Powerlevel9k. It's not recommended to use mnemonic color
# names in the configuration except for colors 0-7 as these are standard.
# Maps mnemonic color names to 256-color codes, zero-padded to three digits.
typeset -grA __p9k_colors=(
  black 000 red 001 green 002 yellow 003
  blue 004 magenta 005 cyan 006 white 007
  grey 008 maroon 009 lime 010 olive 011
  navy 012 fuchsia 013 aqua 014 teal 014
  silver 015 grey0 016 navyblue 017 darkblue 018
  blue3 020 blue1 021 darkgreen 022 deepskyblue4 025
  dodgerblue3 026 dodgerblue2 027 green4 028 springgreen4 029
  turquoise4 030 deepskyblue3 032 dodgerblue1 033 darkcyan 036
  lightseagreen 037 deepskyblue2 038 deepskyblue1 039 green3 040
  springgreen3 041 cyan3 043 darkturquoise 044 turquoise2 045
  green1 046 springgreen2 047 springgreen1 048 mediumspringgreen 049
  cyan2 050 cyan1 051 purple4 055 purple3 056
  blueviolet 057 grey37 059 mediumpurple4 060 slateblue3 062
  royalblue1 063 chartreuse4 064 paleturquoise4 066 steelblue 067
  steelblue3 068 cornflowerblue 069 darkseagreen4 071 cadetblue 073
  skyblue3 074 chartreuse3 076 seagreen3 078 aquamarine3 079
  mediumturquoise 080 steelblue1 081 seagreen2 083 seagreen1 085
  darkslategray2 087 darkred 088 darkmagenta 091 orange4 094
  lightpink4 095 plum4 096 mediumpurple3 098 slateblue1 099
  wheat4 101 grey53 102 lightslategrey 103 mediumpurple 104
  lightslateblue 105 yellow4 106 darkseagreen 108 lightskyblue3 110
  skyblue2 111 chartreuse2 112 palegreen3 114 darkslategray3 116
  skyblue1 117 chartreuse1 118 lightgreen 120 aquamarine1 122
  darkslategray1 123 deeppink4 125 mediumvioletred 126 darkviolet 128
  purple 129 mediumorchid3 133 mediumorchid 134 darkgoldenrod 136
  rosybrown 138 grey63 139 mediumpurple2 140 mediumpurple1 141
  darkkhaki 143 navajowhite3 144 grey69 145 lightsteelblue3 146
  lightsteelblue 147 darkolivegreen3 149 darkseagreen3 150 lightcyan3 152
  lightskyblue1 153 greenyellow 154 darkolivegreen2 155 palegreen1 156
  darkseagreen2 157 paleturquoise1 159 red3 160 deeppink3 162
  magenta3 164 darkorange3 166 indianred 167 hotpink3 168
  hotpink2 169 orchid 170 orange3 172 lightsalmon3 173
  lightpink3 174 pink3 175 plum3 176 violet 177
  gold3 178 lightgoldenrod3 179 tan 180 mistyrose3 181
  thistle3 182 plum2 183 yellow3 184 khaki3 185
  lightyellow3 187 grey84 188 lightsteelblue1 189 yellow2 190
  darkolivegreen1 192 darkseagreen1 193 honeydew2 194 lightcyan1 195
  red1 196 deeppink2 197 deeppink1 199 magenta2 200
  magenta1 201 orangered1 202 indianred1 204 hotpink 206
  mediumorchid1 207 darkorange 208 salmon1 209 lightcoral 210
  palevioletred1 211 orchid2 212 orchid1 213 orange1 214
  sandybrown 215 lightsalmon1 216 lightpink1 217 pink1 218
  plum1 219 gold1 220 lightgoldenrod2 222 navajowhite1 223
  mistyrose1 224 thistle1 225 yellow1 226 lightgoldenrod1 227
  khaki1 228 wheat1 229 cornsilk1 230 grey100 231
  grey3 232 grey7 233 grey11 234 grey15 235
  grey19 236 grey23 237 grey27 238 grey30 239
  grey35 240 grey39 241 grey42 242 grey46 243
  grey50 244 grey54 245 grey58 246 grey62 247
  grey66 248 grey70 249 grey74 250 grey78 251
  grey82 252 grey85 253 grey89 254 grey93 255)
# For compatibility with Powerlevel9k.
#
# Type `getColorCode background` or `getColorCode foreground` to see the list of predefined colors.
function getColorCode() {
eval "$__p9k_intro"
if (( ARGC == 1 )); then
case $1 in
foreground)
local k
for k in "${(k@)__p9k_colors}"; do
local v=${__p9k_colors[$k]}
print -rP -- "%F{$v}$v - $k%f"
done
return 0
;;
background)
local k
for k in "${(k@)__p9k_colors}"; do
local v=${__p9k_colors[$k]}
print -rP -- "%K{$v}$v - $k%k"
done
return 0
;;
esac
fi
echo "Usage: getColorCode background|foreground" >&2
return 1
}
# _p9k_declare <type> <uppercase-name> [default]...
#
# Copies a user-facing POWERLEVEL9K_* option into the internal _<name>
# parameter, converting per <type>:
#   -b  boolean ("true" -> 1, anything else -> 0)
#   -a  array (default values must be preceded by '--')
#   -i  integer    -F  float    -s  scalar
#   -e  scalar with ${(g::)} escape processing applied
function _p9k_declare() {
  # Does the source parameter exist?
  local -i set=$+parameters[$2]
  # Nothing to do when the option is unset and no default was supplied.
  (( ARGC > 2 || set )) || return 0
  case $1 in
    -b)
      if (( set )); then
        [[ ${(P)2} == true ]] && typeset -gi _$2=1 || typeset -gi _$2=0
      else
        typeset -gi _$2=$3
      fi
      ;;
    -a)
      local -a v=("${(@P)2}")
      if (( set )); then
        eval "typeset -ga _${(q)2}=(${(@qq)v})";
      else
        if [[ $3 != '--' ]]; then
          echo "internal error in _p9k_declare " "${(qqq)@}" >&2
        fi
        eval "typeset -ga _${(q)2}=(${(@qq)*[4,-1]})"
      fi
      ;;
    -i)
      # Integer typeset evaluates its RHS arithmetically, so `$2` here
      # resolves to the value of the parameter named by $2.
      (( set )) && typeset -gi _$2=$2 || typeset -gi _$2=$3
      ;;
    -F)
      (( set )) && typeset -gF _$2=$2 || typeset -gF _$2=$3
      ;;
    -s)
      (( set )) && typeset -g _$2=${(P)2} || typeset -g _$2=$3
      ;;
    -e)
      if (( set )); then
        local v=${(P)2}
        typeset -g _$2=${(g::)v}
      else
        typeset -g _$2=${(g::)3}
      fi
      ;;
    *)
      echo "internal error in _p9k_declare " "${(qqq)@}" >&2
  esac
}
# _p9k_read_word <file>
#
# Reads the first whitespace-delimited word of <file> into _p9k__ret, caching
# the result keyed by the file's mtime. Returns success iff the word is
# non-empty.
function _p9k_read_word() {
  local -a stat
  # mtime of the file; -1 when it cannot be stat'ed.
  zstat -A stat +mtime -- $1 2>/dev/null || stat=(-1)
  local cached=$_p9k__read_word_cache[$1]
  if [[ $cached == $stat[1]:* ]]; then
    # Cache hit: the file's mtime is unchanged since the last read.
    _p9k__ret=${cached#*:}
  else
    local rest
    _p9k__ret=
    { read _p9k__ret rest <$1 } 2>/dev/null
    # Strip a trailing CR so DOS line endings don't leak into the result.
    _p9k__ret=${_p9k__ret%$'\r'}
    _p9k__read_word_cache[$1]=$stat[1]:$_p9k__ret
  fi
  [[ -n $_p9k__ret ]]
}
# Populates _p9k__cwd, _p9k__cwd_a and the _p9k__parent_* arrays describing
# the ancestor directories of the current working directory together with
# their mtimes. This state backs the _p9k_glob/_p9k_upglob caches.
function _p9k_fetch_cwd() {
  _p9k__cwd=${(%):-%/}
  _p9k__cwd_a=${${_p9k__cwd:A}:-.}
  case $_p9k__cwd in
    ~|/|.)
      # Home, root or unknown cwd: nothing to scan upwards.
      _p9k__parent_dirs=()
      _p9k__parent_mtimes=()
      _p9k__parent_mtimes_i=()
      _p9k__parent_mtimes_s=
      return
      ;;
    ~/*)
      local parent=~/
      local parts=(${(s./.)_p9k__cwd#$parent})
      ;;
    *)
      local parent=/
      local parts=(${(s./.)_p9k__cwd})
      ;;
  esac
  local MATCH
  # Build the list of ancestors, deepest first.
  _p9k__parent_dirs=(${(@)${:-{$#parts..1}}/(#m)*/$parent${(pj./.)parts[1,MATCH]}})
  if ! zstat -A _p9k__parent_mtimes +mtime -- $_p9k__parent_dirs 2>/dev/null; then
    # Stat failed: mark every ancestor's mtime as unknown.
    _p9k__parent_mtimes=(${(@)parts/*/-1})
  fi
  # "index:mtime" pairs, plus the same data flattened into a single string.
  _p9k__parent_mtimes_i=(${(@)${:-{1..$#parts}}/(#m)*/$MATCH:$_p9k__parent_mtimes[MATCH]})
  _p9k__parent_mtimes_s="$_p9k__parent_mtimes_i"
}
# Usage: _p9k_glob parent_dir_index pattern
#
# parent_dir_index indexes _p9k__parent_dirs.
#
# Returns the number of matches.
#
# Pattern cannot have slashes.
#
# Example: _p9k_glob 3 '*.csproj'
function _p9k_glob() {
  local dir=$_p9k__parent_dirs[$1]
  local cached=$_p9k__glob_cache[$dir/$2]
  # Cache hit only when the directory's recorded mtime is unchanged.
  if [[ $cached == $_p9k__parent_mtimes[$1]:* ]]; then
    return ${cached##*:}
  fi
  local -a stat
  zstat -A stat +mtime -- $dir 2>/dev/null || stat=(-1)
  # (N) nullglob, (:t) tails only: count matching entries in $dir.
  local files=($dir/$~2(N:t))
  _p9k__glob_cache[$dir/$2]="$stat[1]:$#files"
  return $#files
}
# Usage: _p9k_upglob pattern
#
# Returns index within _p9k__parent_dirs or 0 if there is no match.
#
# Pattern cannot have slashes. Never matches in / or ~. Search stops before reaching / or ~.
#
# Example: _p9k_upglob '*.csproj'
function _p9k_upglob() {
  local cached=$_p9k__upsearch_cache[$_p9k__cwd/$1]
  if [[ -n $cached ]]; then
    # Fast path: every parent mtime recorded in the cache entry still matches.
    if [[ $_p9k__parent_mtimes_s == ${cached% *}(| *) ]]; then
      return ${cached##* }
    fi
    cached=(${(s: :)cached})
    # The last element of the cache entry is the previously returned index.
    local last_idx=$cached[-1]
    cached[-1]=()
    local -i i
    # Re-check only the parents whose mtime changed since the cached scan.
    for i in ${(@)${cached:|_p9k__parent_mtimes_i}%:*}; do
      _p9k_glob $i $1 && continue
      _p9k__upsearch_cache[$_p9k__cwd/$1]="${_p9k__parent_mtimes_i[1,i]} $i"
      return i
    done
    if (( i != last_idx )); then
      _p9k__upsearch_cache[$_p9k__cwd/$1]="${_p9k__parent_mtimes_i[1,$#cached]} $last_idx"
      return last_idx
    fi
    # Resume the upward scan past the last directory already checked.
    i=$(($#cached + 1))
  else
    local -i i=1
  fi
  for ((; i <= $#_p9k__parent_mtimes; ++i)); do
    _p9k_glob $i $1 && continue
    _p9k__upsearch_cache[$_p9k__cwd/$1]="${_p9k__parent_mtimes_i[1,i]} $i"
    return i
  done
  # No ancestor matched; remember that, too.
  _p9k__upsearch_cache[$_p9k__cwd/$1]="$_p9k__parent_mtimes_s 0"
  return 0
}
# If we execute `print -P $1`, how many characters will be printed on the last line?
# Assumes that `%{%}` and `%G` don't lie.
#
# _p9k_prompt_length '' => 0
# _p9k_prompt_length 'abc' => 3
# _p9k_prompt_length $'abc\nxy' => 2
# _p9k_prompt_length $'\t' => 8
# _p9k_prompt_length '%F{red}abc' => 3
# _p9k_prompt_length $'%{a\b%Gb%}' => 1
function _p9k_prompt_length() {
local COLUMNS=1024
local -i x y=$#1 m
if (( y )); then
while (( ${${(%):-$1%$y(l.1.0)}[-1]} )); do
x=y
(( y *= 2 ));
done
local xy
while (( y > x + 1 )); do
m=$(( x + (y - x) / 2 ))
typeset ${${(%):-$1%$m(l.x.y)}[-1]}=$m
done
fi
_p9k__ret=$x
}
# Unit suffixes for _p9k_human_readable_bytes, in increasing powers of 1024.
typeset -gr __p9k_byte_suffix=('B' 'K' 'M' 'G' 'T' 'P' 'E' 'Z' 'Y')
# 42 => 42B
# 1536 => 1.5K
function _p9k_human_readable_bytes() {
  # Float with two decimal places.
  typeset -F 2 n=$1
  local suf
  for suf in $__p9k_byte_suffix; do
    (( n < 100 )) && break
    (( n /= 1024 ))
  done
  # Strip trailing zeros and a dangling decimal point before appending suffix.
  _p9k__ret=${${n%%0#}%.}$suf
}
# Serialize the named parameters in a form that can be sourced back.
# `typeset -p` is used on zsh >= 5.4; older versions get a manual fallback.
if is-at-least 5.4; then
  function _p9k_print_params() { typeset -p -- "$@" }
else
  # Cannot use `typeset -p` unconditionally because of bugs in zsh.
  function _p9k_print_params() {
    local name
    for name; do
      case $parameters[$name] in
        array*)
          print -r -- "$name=(" "${(@q)${(@P)name}}" ")"
          ;;
        association*)
          # Cannot use "${(@q)${(@kvP)name}}" because of bugs in zsh.
          local kv=("${(@kvP)name}")
          print -r -- "$name=(" "${(@q)kv}" ")"
          ;;
        *)
          print -r -- "$name=${(q)${(P)name}}"
          ;;
      esac
    done
  }
fi
# Determine if the passed segment is used in the prompt
#
# Pass the name of the segment to this function to test for its presence in
# either the LEFT or RIGHT prompt arrays.
# * $1: The segment to be tested.
_p9k_segment_in_use() {
  # (I) subscript flag yields the index of the last match (0 when absent);
  # the segment's "_joined" variant also counts as present.
  (( $_POWERLEVEL9K_LEFT_PROMPT_ELEMENTS[(I)$1(|_joined)] ||
     $_POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS[(I)$1(|_joined)] ))
}
# Caching allows storing array-to-array associations. It should be used like this:
#
# if ! _p9k_cache_get "$key1" "$key2"; then
# # Compute val1 and val2 and then store them in the cache.
# _p9k_cache_set "$val1" "$val2"
# fi
# # Here ${_p9k__cache_val[1]} and ${_p9k__cache_val[2]} are $val1 and $val2 respectively.
#
# Limitations:
#
# * Calling _p9k_cache_set without arguments clears the cache entry. Subsequent calls to
# _p9k_cache_get for the same key will return an error.
# * There must be no intervening _p9k_cache_get calls between the associated _p9k_cache_get
# and _p9k_cache_set.
_p9k_cache_set() {
  # Uncomment to see cache misses.
  # echo "caching: ${(@0q)_p9k__cache_key} => (${(q)@})" >&2
  # Values are NUL-joined; the trailing "0" marks the entry as present.
  _p9k_cache[$_p9k__cache_key]="${(pj:\0:)*}0"
  _p9k__cache_val=("$@")
  # Request a flush of the persistent cache.
  _p9k__state_dump_scheduled=1
}
_p9k_cache_get() {
  _p9k__cache_key="${(pj:\0:)*}"
  local v=$_p9k_cache[$_p9k__cache_key]
  # Drop the trailing "0" marker and split on NUL; success iff entry exists.
  [[ -n $v ]] && _p9k__cache_val=("${(@0)${v[1,-2]}}")
}
# Same as _p9k_cache_set but for the in-memory (non-persisted) cache.
_p9k_cache_ephemeral_set() {
  # Uncomment to see cache misses.
  # echo "caching: ${(@0q)_p9k__cache_key} => (${(q)@})" >&2
  _p9k__cache_ephemeral[$_p9k__cache_key]="${(pj:\0:)*}0"
  _p9k__cache_val=("$@")
}
# Same as _p9k_cache_get but for the in-memory (non-persisted) cache.
_p9k_cache_ephemeral_get() {
  _p9k__cache_key="${(pj:\0:)*}"
  local v=$_p9k__cache_ephemeral[$_p9k__cache_key]
  [[ -n $v ]] && _p9k__cache_val=("${(@0)${v[1,-2]}}")
}
# _p9k_cache_stat_get <label> [file]...
#
# Two-level cache lookup for values derived from the given files: keyed first
# by their stat metadata (inode/mtime/size/mode) and, when that changed, by an
# md5 fingerprint of their contents. On success the cached values are left in
# _p9k__cache_val. Returns 1 when there is no usable cache entry (or no md5
# tool is available).
_p9k_cache_stat_get() {
  local -H stat
  local label=$1 f
  shift
  _p9k__cache_stat_meta=
  _p9k__cache_stat_fprint=
  # Build the stat-metadata key from all files that can be stat'ed.
  for f; do
    if zstat -H stat -- $f 2>/dev/null; then
      _p9k__cache_stat_meta+="${(q)f} $stat[inode] $stat[mtime] $stat[size] $stat[mode]; "
    fi
  done
  if _p9k_cache_get $0 $label meta "$@"; then
    if [[ $_p9k__cache_val[1] == $_p9k__cache_stat_meta ]]; then
      # Stat metadata unchanged: reuse the cached fingerprint and values.
      _p9k__cache_stat_fprint=$_p9k__cache_val[2]
      local -a key=($0 $label fprint "$@" "$_p9k__cache_stat_fprint")
      _p9k__cache_fprint_key="${(pj:\0:)key}"
      shift 2 _p9k__cache_val
      return 0
    else
      # Metadata changed: keep the old values reachable via the fingerprint.
      local -a key=($0 $label fprint "$@" "$_p9k__cache_val[2]")
      _p9k__cache_ephemeral[${(pj:\0:)key}]="${(pj:\0:)_p9k__cache_val[3,-1]}0"
    fi
  fi
  # Recompute the content fingerprint with whichever md5 tool exists.
  if (( $+commands[md5] )); then
    _p9k__cache_stat_fprint="$(md5 -- $* 2>&1)"
  elif (( $+commands[md5sum] )); then
    _p9k__cache_stat_fprint="$(md5sum -b -- $* 2>&1)"
  else
    return 1
  fi
  local meta_key=$_p9k__cache_key
  if _p9k_cache_ephemeral_get $0 $label fprint "$@" "$_p9k__cache_stat_fprint"; then
    # Contents unchanged: refresh the metadata entry with the new stat info.
    _p9k__cache_fprint_key=$_p9k__cache_key
    _p9k__cache_key=$meta_key
    _p9k_cache_set "$_p9k__cache_stat_meta" "$_p9k__cache_stat_fprint" "$_p9k__cache_val[@]"
    shift 2 _p9k__cache_val
    return 0
  fi
  _p9k__cache_fprint_key=$_p9k__cache_key
  _p9k__cache_key=$meta_key
  return 1
}
# Store values under both keys prepared by the last _p9k_cache_stat_get.
_p9k_cache_stat_set() {
  _p9k_cache_set "$_p9k__cache_stat_meta" "$_p9k__cache_stat_fprint" "$@"
  _p9k__cache_key=$_p9k__cache_fprint_key
  _p9k_cache_ephemeral_set "$@"
}
# _p9k_param prompt_foo_BAR BACKGROUND red
#
# Resolves a segment style option into _p9k__ret, trying the most specific
# parameter first: _POWERLEVEL9K_<SEGMENT>_<STATE>_<OPT>, then
# _POWERLEVEL9K_<SEGMENT>_<OPT>, then _POWERLEVEL9K_<OPT>, then the supplied
# default ($3). Results are memoized in _p9k_cache.
_p9k_param() {
  local key="_p9k_param ${(pj:\0:)*}"
  _p9k__ret=$_p9k_cache[$key]
  if [[ -n $_p9k__ret ]]; then
    # Strip the "." sentinel appended when the entry was stored.
    _p9k__ret[-1,-1]=''
  else
    if [[ ${1//-/_} == (#b)prompt_([a-z0-9_]#)(*) ]]; then
      # //İ/I undoes Turkish-locale uppercasing of 'i'. (Fix: this dotted
      # capital I had been corrupted by a mojibake round-trip, making the
      # replacement a no-op.)
      local var=_POWERLEVEL9K_${${(U)match[1]}//İ/I}$match[2]_$2
      if (( $+parameters[$var] )); then
        _p9k__ret=${(P)var}
      else
        var=_POWERLEVEL9K_${${(U)match[1]%_}//İ/I}_$2
        if (( $+parameters[$var] )); then
          _p9k__ret=${(P)var}
        else
          var=_POWERLEVEL9K_$2
          if (( $+parameters[$var] )); then
            _p9k__ret=${(P)var}
          else
            _p9k__ret=$3
          fi
        fi
      fi
    else
      local var=_POWERLEVEL9K_$2
      if (( $+parameters[$var] )); then
        _p9k__ret=${(P)var}
      else
        _p9k__ret=$3
      fi
    fi
    _p9k_cache[$key]=${_p9k__ret}.
  fi
}
# _p9k_get_icon prompt_foo_BAR BAZ_ICON quix
#
# Resolves an icon for a segment into _p9k__ret, consulting style overrides
# via _p9k_param and the global `icons` table; memoized in _p9k_cache.
_p9k_get_icon() {
  local key="_p9k_get_icon ${(pj:\0:)*}"
  _p9k__ret=$_p9k_cache[$key]
  if [[ -n $_p9k__ret ]]; then
    # Strip the "." sentinel appended when the entry was stored.
    _p9k__ret[-1,-1]=''
  else
    # A leading \1 byte marks a literal value that bypasses the lookup.
    if [[ $2 == $'\1'* ]]; then
      _p9k__ret=${2[2,-1]}
    else
      _p9k_param "$1" "$2" ${icons[$2]-$'\1'$3}
      if [[ $_p9k__ret == $'\1'* ]]; then
        _p9k__ret=${_p9k__ret[2,-1]}
      else
        _p9k__ret=${(g::)_p9k__ret}
        [[ $_p9k__ret != $'\b'? ]] || _p9k__ret="%{$_p9k__ret%}" # penance for past sins
      fi
    fi
    _p9k_cache[$key]=${_p9k__ret}.
  fi
}
# Canonicalizes a color spec (decimal code, #hex, or name) into _p9k__ret.
_p9k_translate_color() {
  if [[ $1 == <-> ]]; then  # decimal color code: 255
    # Zero-pad to three digits.
    _p9k__ret=${(l.3..0.)1}
  elif [[ $1 == '#'[[:xdigit:]]## ]]; then  # hexadecimal color code: #ffffff
    # Lowercase; //ı/i undoes Turkish-locale lowercasing of 'I'. (Fix: this
    # dotless ı had been corrupted by a mojibake round-trip.)
    _p9k__ret=${${(L)1}//ı/i}
  else  # named color: red
    # Strip prefixes if there are any.
    _p9k__ret=$__p9k_colors[${${${1#bg-}#fg-}#br}]
  fi
}
# _p9k_color prompt_foo_BAR BACKGROUND red
#
# Resolves a segment color option and canonicalizes it; memoized.
_p9k_color() {
  local key="_p9k_color ${(pj:\0:)*}"
  _p9k__ret=$_p9k_cache[$key]
  if [[ -n $_p9k__ret ]]; then
    # Strip the "." sentinel appended when the entry was stored.
    _p9k__ret[-1,-1]=''
  else
    _p9k_param "$@"
    _p9k_translate_color $_p9k__ret
    _p9k_cache[$key]=${_p9k__ret}.
  fi
}
# _p9k_vcs_style CLEAN REMOTE_BRANCH
#
# Builds the %-escape style string for one piece of VCS segment content
# (background from the VCS state, foreground from the most specific
# _POWERLEVEL9K_VCS_* override); memoized in _p9k_cache.
_p9k_vcs_style() {
  local key="$0 ${(pj:\0:)*}"
  _p9k__ret=$_p9k_cache[$key]
  if [[ -n $_p9k__ret ]]; then
    _p9k__ret[-1,-1]=''
  else
    local style=%b # TODO: support bold
    # Background is determined by the VCS state (CLEAN/MODIFIED/...).
    _p9k_color prompt_vcs_$1 BACKGROUND "${__p9k_vcs_states[$1]}"
    _p9k_background $_p9k__ret
    style+=$_p9k__ret
    # Foreground: state+kind override, then kind override, then default.
    local var=_POWERLEVEL9K_VCS_${1}_${2}FORMAT_FOREGROUND
    if (( $+parameters[$var] )); then
      _p9k_translate_color "${(P)var}"
    else
      var=_POWERLEVEL9K_VCS_${2}FORMAT_FOREGROUND
      if (( $+parameters[$var] )); then
        _p9k_translate_color "${(P)var}"
      else
        _p9k_color prompt_vcs_$1 FOREGROUND "$_p9k_color1"
      fi
    fi
    _p9k_foreground $_p9k__ret
    _p9k__ret=$style$_p9k__ret
    _p9k_cache[$key]=${_p9k__ret}.
  fi
}
_p9k_background() {
  # Translate a background color into its prompt escape: %K{color}, or %k
  # (reset) when the color is empty.
  if [[ -n $1 ]]; then
    _p9k__ret="%K{$1}"
  else
    _p9k__ret="%k"
  fi
}
_p9k_foreground() {
  # Translate a foreground color into %F{color}, or %f (reset) when empty.
  # Note: `%1F` would be more efficient than `%F{1}` but triggers a zsh bug:
  # `%1F{2}` gets percent-expanded as if it were `%F{2}`. Hence the long form.
  if [[ -n $1 ]]; then
    _p9k__ret="%F{$1}"
  else
    _p9k__ret="%f"
  fi
}
_p9k_escape_style() {
  # Styles containing '}' must be wrapped so that later ${...} splicing is
  # not terminated early by the brace; anything else passes through as-is.
  if [[ $1 == *'}'* ]]; then
    _p9k__ret='${:-"'$1'"}'
  else
    _p9k__ret=$1
  fi
}
_p9k_escape() {
[[ $1 == *["~!#\`\$^&*()\\\"'<>?{}[]"]* ]] && _p9k__ret="\${(Q)\${:-${(qqq)${(q)1}}}}" || _p9k__ret=$1
}
# * $1: Name of the function that was originally invoked.
# Necessary, to make the dynamic color-overwrite mechanism work.
# * $2: Background color.
# * $3: Foreground color.
# * $4: An identifying icon.
# * $5: 1 to to perform parameter expansion and process substitution.
# * $6: If not empty but becomes empty after parameter expansion and process substitution,
# the segment isn't rendered.
# * $7: Content.
_p9k_left_prompt_segment() {
  # Builds (and caches, keyed by segment name/colors/icon/position) a
  # parameter-expansion template "$p" that renders one left-prompt segment.
  if ! _p9k_cache_get "$0" "$1" "$2" "$3" "$4" "$_p9k__segment_index"; then
    _p9k_color $1 BACKGROUND $2
    local bg_color=$_p9k__ret
    _p9k_background $bg_color
    local bg=$_p9k__ret
    _p9k_color $1 FOREGROUND $3
    local fg_color=$_p9k__ret
    _p9k_foreground $fg_color
    local fg=$_p9k__ret
    local style=%b$bg$fg
    # "_"-suffixed locals are escaped for later splicing into ${...}.
    local style_=${style//\}/\\\}}
    _p9k_get_icon $1 LEFT_SEGMENT_SEPARATOR
    local sep=$_p9k__ret
    _p9k_escape $_p9k__ret
    local sep_=$_p9k__ret
    _p9k_get_icon $1 LEFT_SUBSEGMENT_SEPARATOR
    _p9k_escape $_p9k__ret
    local subsep_=$_p9k__ret
    local icon_
    if [[ -n $4 ]]; then
      _p9k_get_icon $1 $4
      _p9k_escape $_p9k__ret
      icon_=$_p9k__ret
    fi
    _p9k_get_icon $1 LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL
    local start_sep=$_p9k__ret
    [[ -n $start_sep ]] && start_sep="%b%k%F{$bg_color}$start_sep"
    _p9k_get_icon $1 LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL $sep
    _p9k_escape $_p9k__ret
    local end_sep_=$_p9k__ret
    _p9k_get_icon $1 WHITESPACE_BETWEEN_LEFT_SEGMENTS ' '
    local space=$_p9k__ret
    _p9k_get_icon $1 LEFT_LEFT_WHITESPACE $space
    local left_space=$_p9k__ret
    [[ $left_space == *%* ]] && left_space+=$style
    _p9k_get_icon $1 LEFT_RIGHT_WHITESPACE $space
    _p9k_escape $_p9k__ret
    local right_space_=$_p9k__ret
    [[ $right_space_ == *%* ]] && right_space_+=$style_
    # Placeholders substituted at render time with the runtime separators.
    local s='<_p9k__s>' ss='<_p9k__ss>'
    local -i non_hermetic=0
    # Segment separator logic:
    #
    # if [[ $_p9k__bg == NONE ]]; then
    #   1
    # elif (( joined )); then
    #   2
    # elif [[ $bg_color == (${_p9k__bg}|${_p9k__bg:-0}) ]]; then
    #   3
    # else
    #   4
    # fi
    local t=$(($#_p9k_t - __p9k_ksh_arrays))
    _p9k_t+=$start_sep$style$left_space # 1
    _p9k_t+=$style # 2
    if [[ -n $fg_color && $fg_color == $bg_color ]]; then
      # Same fg and bg: flip the subsegment separator color so it stays visible.
      if [[ $fg_color == $_p9k_color1 ]]; then
        _p9k_foreground $_p9k_color2
      else
        _p9k_foreground $_p9k_color1
      fi
      _p9k_t+=%b$bg$_p9k__ret$ss$style$left_space # 3
    else
      _p9k_t+=%b$bg$ss$style$left_space # 3
    fi
    _p9k_t+=%b$bg$s$style$left_space # 4
    # Arithmetic condition that decides whether this segment joins the previous.
    local join="_p9k__i>=$_p9k_left_join[$_p9k__segment_index]"
    _p9k_param $1 SELF_JOINED false
    if [[ $_p9k__ret == false ]]; then
      if (( _p9k__segment_index > $_p9k_left_join[$_p9k__segment_index] )); then
        join+="&&_p9k__i<$_p9k__segment_index"
      else
        join=
      fi
    fi
    local p=
    p+="\${_p9k__n::=}"
    p+="\${\${\${_p9k__bg:-0}:#NONE}:-\${_p9k__n::=$((t+1))}}" # 1
    if [[ -n $join ]]; then
      p+="\${_p9k__n:=\${\${\$(($join)):#0}:+$((t+2))}}" # 2
    fi
    if (( __p9k_sh_glob )); then
      p+="\${_p9k__n:=\${\${(M)\${:-x$bg_color}:#x\$_p9k__bg}:+$((t+3))}}" # 3
      # NOTE(review): `x\$${_p9k__bg:-0}` expands _p9k__bg at build time here;
      # the non-sh_glob branch below defers it (`\${_p9k__bg:-0}`) — confirm
      # against upstream whether the extra `$` is intended.
      p+="\${_p9k__n:=\${\${(M)\${:-x$bg_color}:#x\$${_p9k__bg:-0}}:+$((t+3))}}" # 3
    else
      p+="\${_p9k__n:=\${\${(M)\${:-x$bg_color}:#x(\$_p9k__bg|\${_p9k__bg:-0})}:+$((t+3))}}" # 3
    fi
    p+="\${_p9k__n:=$((t+4))}" # 4
    _p9k_param $1 VISUAL_IDENTIFIER_EXPANSION '${P9K_VISUAL_IDENTIFIER}'
    # Command substitution in user expansions makes the prompt non-hermetic.
    [[ $_p9k__ret == (|*[^\\])'$('* ]] && non_hermetic=1
    local icon_exp_=${_p9k__ret:+\"$_p9k__ret\"}
    _p9k_param $1 CONTENT_EXPANSION '${P9K_CONTENT}'
    [[ $_p9k__ret == (|*[^\\])'$('* ]] && non_hermetic=1
    local content_exp_=${_p9k__ret:+\"$_p9k__ret\"}
    if [[ ( $icon_exp_ != '"${P9K_VISUAL_IDENTIFIER}"' && $icon_exp_ == *'$'* ) ||
          ( $content_exp_ != '"${P9K_CONTENT}"' && $content_exp_ == *'$'* ) ]]; then
      p+="\${P9K_VISUAL_IDENTIFIER::=$icon_}"
    fi
    local -i has_icon=-1 # maybe
    if [[ $icon_exp_ != '"${P9K_VISUAL_IDENTIFIER}"' && $icon_exp_ == *'$'* ]]; then
      p+='${_p9k__v::='$icon_exp_$style_'}'
    else
      [[ $icon_exp_ == '"${P9K_VISUAL_IDENTIFIER}"' ]] && _p9k__ret=$icon_ || _p9k__ret=$icon_exp_
      if [[ -n $_p9k__ret ]]; then
        p+="\${_p9k__v::=$_p9k__ret"
        [[ $_p9k__ret == *%* ]] && p+=$style_
        p+="}"
        has_icon=1 # definitely yes
      else
        has_icon=0 # definitely no
      fi
    fi
    p+="\${_p9k__c::=$content_exp_}"
    # _p9k__e: two digits — "content non-empty?" and "icon non-empty?".
    p+='${_p9k__e::=${${_p9k__'${_p9k__line_index}l${${1#prompt_}%%[A-Z_]#}'+00}:-'
    if (( has_icon == -1 )); then
      p+='${${(%):-$_p9k__c%1(l.1.0)}[-1]}${${(%):-$_p9k__v%1(l.1.0)}[-1]}}'
    else
      p+='${${(%):-$_p9k__c%1(l.1.0)}[-1]}'$has_icon'}'
    fi
    p+='}}+}'
    p+='${${_p9k__e:#00}:+${${_p9k_t[$_p9k__n]/'$ss'/$_p9k__ss}/'$s'/$_p9k__s}'
    _p9k_param $1 ICON_BEFORE_CONTENT ''
    if [[ $_p9k__ret != false ]]; then
      # Icon precedes content (the default for the left prompt).
      _p9k_param $1 PREFIX ''
      _p9k__ret=${(g::)_p9k__ret}
      _p9k_escape $_p9k__ret
      p+=$_p9k__ret
      [[ $_p9k__ret == *%* ]] && local -i need_style=1 || local -i need_style=0
      if (( has_icon != 0 )); then
        _p9k_color $1 VISUAL_IDENTIFIER_COLOR $fg_color
        _p9k_foreground $_p9k__ret
        _p9k__ret=%b$bg$_p9k__ret
        _p9k__ret=${_p9k__ret//\}/\\\}}
        [[ $_p9k__ret != $style_ || $need_style == 1 ]] && p+=$_p9k__ret
        p+='${_p9k__v}'
        _p9k_get_icon $1 LEFT_MIDDLE_WHITESPACE ' '
        if [[ -n $_p9k__ret ]]; then
          _p9k_escape $_p9k__ret
          # NOTE(review): likely missing '$' before _p9k__ret — as written this
          # compares the literal string "_p9k__ret" and is always false; the
          # parallel branch below uses `$_p9k__ret`. Confirm against upstream.
          [[ _p9k__ret == *%* ]] && _p9k__ret+=$style_
          p+='${${(M)_p9k__e:#11}:+'$_p9k__ret'}'
        fi
      elif (( need_style )); then
        p+=$style_
      fi
      p+='${_p9k__c}'$style_
    else
      # Content precedes icon.
      _p9k_param $1 PREFIX ''
      _p9k__ret=${(g::)_p9k__ret}
      _p9k_escape $_p9k__ret
      p+=$_p9k__ret
      [[ $_p9k__ret == *%* ]] && p+=$style_
      p+='${_p9k__c}'$style_
      if (( has_icon != 0 )); then
        local -i need_style=0
        _p9k_get_icon $1 LEFT_MIDDLE_WHITESPACE ' '
        if [[ -n $_p9k__ret ]]; then
          _p9k_escape $_p9k__ret
          [[ $_p9k__ret == *%* ]] && need_style=1
          p+='${${(M)_p9k__e:#11}:+'$_p9k__ret'}'
        fi
        _p9k_color $1 VISUAL_IDENTIFIER_COLOR $fg_color
        _p9k_foreground $_p9k__ret
        _p9k__ret=%b$bg$_p9k__ret
        _p9k__ret=${_p9k__ret//\}/\\\}}
        [[ $_p9k__ret != $style_ || $need_style == 1 ]] && p+=$_p9k__ret
        p+='$_p9k__v'
      fi
    fi
    _p9k_param $1 SUFFIX ''
    _p9k__ret=${(g::)_p9k__ret}
    _p9k_escape $_p9k__ret
    p+=$_p9k__ret
    [[ $_p9k__ret == *%* && -n $right_space_ ]] && p+=$style_
    p+=$right_space_
    # Publish this segment's separators/background for the next segment.
    p+='${${:-'
    p+="\${_p9k__s::=%F{$bg_color\}$sep_}\${_p9k__ss::=$subsep_}\${_p9k__sss::=%F{$bg_color\}$end_sep_}"
    p+="\${_p9k__i::=$_p9k__segment_index}\${_p9k__bg::=$bg_color}"
    p+='}+}'
    p+='}'
    _p9k_param $1 SHOW_ON_UPGLOB ''
    _p9k_cache_set "$p" $non_hermetic $_p9k__ret
  fi
  # SHOW_ON_UPGLOB: skip the segment unless a matching file exists upwards.
  if [[ -n $_p9k__cache_val[3] ]]; then
    _p9k__has_upglob=1
    _p9k_upglob $_p9k__cache_val[3] && return
  fi
  _p9k__non_hermetic_expansion=$_p9k__cache_val[2]
  (( $5 )) && _p9k__ret=\"$7\" || _p9k_escape $7
  if [[ -z $6 ]]; then
    _p9k__prompt+="\${\${:-\${P9K_CONTENT::=$_p9k__ret}$_p9k__cache_val[1]"
  else
    # Render only when $6 expands to a non-empty string.
    _p9k__prompt+="\${\${:-\"$6\"}:+\${\${:-\${P9K_CONTENT::=$_p9k__ret}$_p9k__cache_val[1]}"
  fi
}
# The same as _p9k_left_prompt_segment above but for the right prompt.
_p9k_right_prompt_segment() {
  # Builds (and caches) a parameter-expansion template "$p" that renders one
  # right-prompt segment; mirror image of _p9k_left_prompt_segment.
  if ! _p9k_cache_get "$0" "$1" "$2" "$3" "$4" "$_p9k__segment_index"; then
    _p9k_color $1 BACKGROUND $2
    local bg_color=$_p9k__ret
    _p9k_background $bg_color
    local bg=$_p9k__ret
    local bg_=${_p9k__ret//\}/\\\}}
    _p9k_color $1 FOREGROUND $3
    local fg_color=$_p9k__ret
    _p9k_foreground $fg_color
    local fg=$_p9k__ret
    local style=%b$bg$fg
    # "_"-suffixed locals are escaped for later splicing into ${...}.
    local style_=${style//\}/\\\}}
    _p9k_get_icon $1 RIGHT_SEGMENT_SEPARATOR
    local sep=$_p9k__ret
    _p9k_escape $_p9k__ret
    local sep_=$_p9k__ret
    _p9k_get_icon $1 RIGHT_SUBSEGMENT_SEPARATOR
    local subsep=$_p9k__ret
    [[ $subsep == *%* ]] && subsep+=$style
    local icon_
    if [[ -n $4 ]]; then
      _p9k_get_icon $1 $4
      _p9k_escape $_p9k__ret
      icon_=$_p9k__ret
    fi
    _p9k_get_icon $1 RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL $sep
    local start_sep=$_p9k__ret
    [[ -n $start_sep ]] && start_sep="%b%k%F{$bg_color}$start_sep"
    _p9k_get_icon $1 RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL
    _p9k_escape $_p9k__ret
    local end_sep_=$_p9k__ret
    _p9k_get_icon $1 WHITESPACE_BETWEEN_RIGHT_SEGMENTS ' '
    local space=$_p9k__ret
    _p9k_get_icon $1 RIGHT_LEFT_WHITESPACE $space
    local left_space=$_p9k__ret
    [[ $left_space == *%* ]] && left_space+=$style
    _p9k_get_icon $1 RIGHT_RIGHT_WHITESPACE $space
    _p9k_escape $_p9k__ret
    local right_space_=$_p9k__ret
    [[ $right_space_ == *%* ]] && right_space_+=$style_
    # Placeholders substituted at render time.
    local w='<_p9k__w>' s='<_p9k__s>'
    local -i non_hermetic=0
    # Segment separator logic:
    #
    # if [[ $_p9k__bg == NONE ]]; then
    #   1
    # elif (( joined )); then
    #   2
    # elif [[ $_p9k__bg == (${bg_color}|${bg_color:-0}) ]]; then
    #   3
    # else
    #   4
    # fi
    local t=$(($#_p9k_t - __p9k_ksh_arrays))
    _p9k_t+=$start_sep$style$left_space # 1
    _p9k_t+=$w$style # 2
    _p9k_t+=$w$style$subsep$left_space # 3
    _p9k_t+=$w%F{$bg_color}$sep$style$left_space # 4
    # Arithmetic condition deciding whether this segment joins the previous.
    local join="_p9k__i>=$_p9k_right_join[$_p9k__segment_index]"
    _p9k_param $1 SELF_JOINED false
    if [[ $_p9k__ret == false ]]; then
      if (( _p9k__segment_index > $_p9k_right_join[$_p9k__segment_index] )); then
        join+="&&_p9k__i<$_p9k__segment_index"
      else
        join=
      fi
    fi
    local p=
    p+="\${_p9k__n::=}"
    p+="\${\${\${_p9k__bg:-0}:#NONE}:-\${_p9k__n::=$((t+1))}}" # 1
    if [[ -n $join ]]; then
      p+="\${_p9k__n:=\${\${\$(($join)):#0}:+$((t+2))}}" # 2
    fi
    if (( __p9k_sh_glob )); then
      p+="\${_p9k__n:=\${\${(M)\${:-x\$_p9k__bg}:#x${(b)bg_color}}:+$((t+3))}}" # 3
      p+="\${_p9k__n:=\${\${(M)\${:-x\$_p9k__bg}:#x${(b)bg_color:-0}}:+$((t+3))}}" # 3
    else
      p+="\${_p9k__n:=\${\${(M)\${:-x\$_p9k__bg}:#x(${(b)bg_color}|${(b)bg_color:-0})}:+$((t+3))}}" # 3
    fi
    p+="\${_p9k__n:=$((t+4))}" # 4
    _p9k_param $1 VISUAL_IDENTIFIER_EXPANSION '${P9K_VISUAL_IDENTIFIER}'
    # Command substitution in user expansions makes the prompt non-hermetic.
    [[ $_p9k__ret == (|*[^\\])'$('* ]] && non_hermetic=1
    local icon_exp_=${_p9k__ret:+\"$_p9k__ret\"}
    _p9k_param $1 CONTENT_EXPANSION '${P9K_CONTENT}'
    [[ $_p9k__ret == (|*[^\\])'$('* ]] && non_hermetic=1
    local content_exp_=${_p9k__ret:+\"$_p9k__ret\"}
    if [[ ( $icon_exp_ != '"${P9K_VISUAL_IDENTIFIER}"' && $icon_exp_ == *'$'* ) ||
          ( $content_exp_ != '"${P9K_CONTENT}"' && $content_exp_ == *'$'* ) ]]; then
      p+="\${P9K_VISUAL_IDENTIFIER::=$icon_}"
    fi
    local -i has_icon=-1 # maybe
    if [[ $icon_exp_ != '"${P9K_VISUAL_IDENTIFIER}"' && $icon_exp_ == *'$'* ]]; then
      p+="\${_p9k__v::=$icon_exp_$style_}"
    else
      [[ $icon_exp_ == '"${P9K_VISUAL_IDENTIFIER}"' ]] && _p9k__ret=$icon_ || _p9k__ret=$icon_exp_
      if [[ -n $_p9k__ret ]]; then
        p+="\${_p9k__v::=$_p9k__ret"
        [[ $_p9k__ret == *%* ]] && p+=$style_
        p+="}"
        has_icon=1 # definitely yes
      else
        has_icon=0 # definitely no
      fi
    fi
    p+="\${_p9k__c::=$content_exp_}"
    # _p9k__e: two digits — "content non-empty?" and "icon non-empty?".
    p+='${_p9k__e::=${${_p9k__'${_p9k__line_index}r${${1#prompt_}%%[A-Z_]#}'+00}:-'
    if (( has_icon == -1 )); then
      p+='${${(%):-$_p9k__c%1(l.1.0)}[-1]}${${(%):-$_p9k__v%1(l.1.0)}[-1]}}'
    else
      p+='${${(%):-$_p9k__c%1(l.1.0)}[-1]}'$has_icon'}'
    fi
    p+='}}+}'
    p+='${${_p9k__e:#00}:+${_p9k_t[$_p9k__n]/'$w'/$_p9k__w}'
    _p9k_param $1 ICON_BEFORE_CONTENT ''
    if [[ $_p9k__ret != true ]]; then
      # Content precedes icon (the default for the right prompt).
      _p9k_param $1 PREFIX ''
      _p9k__ret=${(g::)_p9k__ret}
      _p9k_escape $_p9k__ret
      p+=$_p9k__ret
      [[ $_p9k__ret == *%* ]] && p+=$style_
      p+='${_p9k__c}'$style_
      if (( has_icon != 0 )); then
        local -i need_style=0
        _p9k_get_icon $1 RIGHT_MIDDLE_WHITESPACE ' '
        if [[ -n $_p9k__ret ]]; then
          _p9k_escape $_p9k__ret
          [[ $_p9k__ret == *%* ]] && need_style=1
          p+='${${(M)_p9k__e:#11}:+'$_p9k__ret'}'
        fi
        _p9k_color $1 VISUAL_IDENTIFIER_COLOR $fg_color
        _p9k_foreground $_p9k__ret
        _p9k__ret=%b$bg$_p9k__ret
        _p9k__ret=${_p9k__ret//\}/\\\}}
        [[ $_p9k__ret != $style_ || $need_style == 1 ]] && p+=$_p9k__ret
        p+='$_p9k__v'
      fi
    else
      # Icon precedes content.
      _p9k_param $1 PREFIX ''
      _p9k__ret=${(g::)_p9k__ret}
      _p9k_escape $_p9k__ret
      p+=$_p9k__ret
      [[ $_p9k__ret == *%* ]] && local -i need_style=1 || local -i need_style=0
      if (( has_icon != 0 )); then
        _p9k_color $1 VISUAL_IDENTIFIER_COLOR $fg_color
        _p9k_foreground $_p9k__ret
        _p9k__ret=%b$bg$_p9k__ret
        _p9k__ret=${_p9k__ret//\}/\\\}}
        [[ $_p9k__ret != $style_ || $need_style == 1 ]] && p+=$_p9k__ret
        p+='${_p9k__v}'
        _p9k_get_icon $1 RIGHT_MIDDLE_WHITESPACE ' '
        if [[ -n $_p9k__ret ]]; then
          _p9k_escape $_p9k__ret
          # NOTE(review): likely missing '$' before _p9k__ret — always false as
          # written; the parallel branch above uses `$_p9k__ret`. Confirm upstream.
          [[ _p9k__ret == *%* ]] && _p9k__ret+=$style_
          p+='${${(M)_p9k__e:#11}:+'$_p9k__ret'}'
        fi
      elif (( need_style )); then
        p+=$style_
      fi
      p+='${_p9k__c}'$style_
    fi
    _p9k_param $1 SUFFIX ''
    _p9k__ret=${(g::)_p9k__ret}
    _p9k_escape $_p9k__ret
    p+=$_p9k__ret
    # Publish separators/background for the next (leftward) segment.
    p+='${${:-'
    if [[ -n $fg_color && $fg_color == $bg_color ]]; then
      # Same fg and bg: flip the separator color so it stays visible.
      if [[ $fg_color == $_p9k_color1 ]]; then
        _p9k_foreground $_p9k_color2
      else
        _p9k_foreground $_p9k_color1
      fi
    else
      _p9k__ret=$fg
    fi
    _p9k__ret=${_p9k__ret//\}/\\\}}
    p+="\${_p9k__w::=${right_space_:+$style_}$right_space_%b$bg_$_p9k__ret}"
    p+='${_p9k__sss::='
    p+=$style_$right_space_
    [[ $right_space_ == *%* ]] && p+=$style_
    if [[ -n $end_sep_ ]]; then
      p+="%k%F{$bg_color\}$end_sep_$style_"
    fi
    p+='}'
    p+="\${_p9k__i::=$_p9k__segment_index}\${_p9k__bg::=$bg_color}"
    p+='}+}'
    p+='}'
    _p9k_param $1 SHOW_ON_UPGLOB ''
    _p9k_cache_set "$p" $non_hermetic $_p9k__ret
  fi
  # SHOW_ON_UPGLOB: skip the segment unless a matching file exists upwards.
  if [[ -n $_p9k__cache_val[3] ]]; then
    _p9k__has_upglob=1
    _p9k_upglob $_p9k__cache_val[3] && return
  fi
  _p9k__non_hermetic_expansion=$_p9k__cache_val[2]
  (( $5 )) && _p9k__ret=\"$7\" || _p9k_escape $7
  if [[ -z $6 ]]; then
    _p9k__prompt+="\${\${:-\${P9K_CONTENT::=$_p9k__ret}$_p9k__cache_val[1]"
  else
    # Render only when $6 expands to a non-empty string.
    _p9k__prompt+="\${\${:-\"$6\"}:+\${\${:-\${P9K_CONTENT::=$_p9k__ret}$_p9k__cache_val[1]}"
  fi
}
# Dispatch to the left- or right-side segment builder for the side currently
# being rendered (_p9k__prompt_side).
function _p9k_prompt_segment() { "_p9k_${_p9k__prompt_side}_prompt_segment" "$@" }
# Public API wrapper: forwards to the `p10k segment` command.
function p9k_prompt_segment() { p10k segment "$@" }
# Sets _p9k__ret to the version printed by `python --version` (e.g. "3.11.4").
# Fails when python is missing or the output doesn't start with "Python ".
function _p9k_python_version() {
  _p9k_cached_cmd 1 python --version || return
  [[ $_p9k__ret == (#b)Python\ ([[:digit:].]##)* ]] && _p9k__ret=$match[1]
}
################################################################
# Prompt Segment Definitions
################################################################
################################################################
# Anaconda Environment
prompt_anaconda() {
  # Shows the active conda environment, optionally prefixed with the python
  # version when ANACONDA_SHOW_PYTHON_VERSION is enabled.
  local msg
  if _p9k_python_version; then
    P9K_ANACONDA_PYTHON_VERSION=$_p9k__ret
    if (( _POWERLEVEL9K_ANACONDA_SHOW_PYTHON_VERSION )); then
      # Escape % so the version is safe inside prompt expansion.
      msg="${P9K_ANACONDA_PYTHON_VERSION//\%/%%} "
    fi
  else
    unset P9K_ANACONDA_PYTHON_VERSION
  fi
  # Environment name is the last path component of CONDA_PREFIX
  # (or the legacy CONDA_ENV_PATH).
  local p=${CONDA_PREFIX:-$CONDA_ENV_PATH}
  msg+="$_POWERLEVEL9K_ANACONDA_LEFT_DELIMITER${${p:t}//\%/%%}$_POWERLEVEL9K_ANACONDA_RIGHT_DELIMITER"
  _p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PYTHON_ICON' 0 '' "$msg"
}
_p9k_prompt_anaconda_init() {
  # Show the segment only while a conda environment is active.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${CONDA_PREFIX:-$CONDA_ENV_PATH}'
}
################################################################
# AWS Profile
prompt_aws() {
  # Shows the active AWS profile, taken (in order of preference) from
  # AWS_VAULT, AWSUME_PROFILE, AWS_PROFILE and AWS_DEFAULT_PROFILE.
  local aws_profile="${AWS_VAULT:-${AWSUME_PROFILE:-${AWS_PROFILE:-$AWS_DEFAULT_PROFILE}}}"
  # Fix: `state` must be local. Previously it leaked into the global scope,
  # so a class suffix from one render could persist into later renders
  # where no class pattern matched.
  local pat class state
  for pat class in "${_POWERLEVEL9K_AWS_CLASSES[@]}"; do
    if [[ $aws_profile == ${~pat} ]]; then
      # Upper-case the class for the segment state suffix; the substitution
      # normalizes the dotted capital I produced by (U) in Turkish locales.
      [[ -n $class ]] && state=_${${(U)class}//ฤฐ/I}
      break
    fi
  done
  _p9k_prompt_segment "$0$state" red white 'AWS_ICON' 0 '' "${aws_profile//\%/%%}"
}
_p9k_prompt_aws_init() {
  # Show the segment only when some AWS profile variable is set.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${AWS_VAULT:-${AWSUME_PROFILE:-${AWS_PROFILE:-$AWS_DEFAULT_PROFILE}}}'
}
################################################################
# Current Elastic Beanstalk environment
prompt_aws_eb_env() {
  # Shows the active Elastic Beanstalk environment for the nearest ancestor
  # directory that contains .elasticbeanstalk. _p9k_upglob leaves the index
  # of that ancestor in $?.
  _p9k_upglob .elasticbeanstalk && return
  local dir=$_p9k__parent_dirs[$?]
  # `eb list` is slow, so the result is cached keyed on config.yml's stat.
  if ! _p9k_cache_stat_get $0 $dir/.elasticbeanstalk/config.yml; then
    local env
    env="$(command eb list 2>/dev/null)" || env=
    # The active environment is the line prefixed with "* ".
    env="${${(@M)${(@f)env}:#\* *}#\* }"
    _p9k_cache_stat_set "$env"
  fi
  [[ -n $_p9k__cache_val[1] ]] || return
  _p9k_prompt_segment "$0" black green 'AWS_EB_ICON' 0 '' "${_p9k__cache_val[1]//\%/%%}"
}
_p9k_prompt_aws_eb_env_init() {
  # Show the segment only when the eb CLI is installed.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[eb]'
}
################################################################
# Segment to indicate background jobs with an icon.
prompt_background_jobs() {
  # Shows an icon when background jobs exist; optionally also the job count
  # (always, or only when it differs from 1).
  local -i len=$#_p9k__prompt _p9k__has_upglob
  local msg
  if (( _POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE )); then
    if (( _POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE_ALWAYS )); then
      msg='${(%):-%j}'
    else
      # Hide the number when there is exactly one job.
      msg='${${(%):-%j}:#1}'
    fi
  fi
  # Condition: visible only while %j (job count) is non-zero.
  _p9k_prompt_segment $0 "$_p9k_color1" cyan BACKGROUND_JOBS_ICON 1 '${${(%):-%j}:#0}' "$msg"
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
################################################################
# Segment that indicates usage level of current partition.
prompt_disk_usage() {
  # Shows disk usage of the filesystem containing the current directory.
  # Emits three mutually exclusive sub-segments; the async worker below
  # sets exactly one of the *_critical/_warning/_normal condition params.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0_CRITICAL red white DISK_ICON 1 '$_p9k__disk_usage_critical' '$_p9k__disk_usage_pct%%'
  _p9k_prompt_segment $0_WARNING yellow $_p9k_color1 DISK_ICON 1 '$_p9k__disk_usage_warning' '$_p9k__disk_usage_pct%%'
  if (( ! _POWERLEVEL9K_DISK_USAGE_ONLY_WARNING )); then
    _p9k_prompt_segment $0_NORMAL $_p9k_color1 yellow DISK_ICON 1 '$_p9k__disk_usage_normal' '$_p9k__disk_usage_pct%%'
  fi
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_disk_usage_init() {
  # State shared with the async worker.
  typeset -g _p9k__disk_usage_pct=
  typeset -g _p9k__disk_usage_normal=
  typeset -g _p9k__disk_usage_warning=
  typeset -g _p9k__disk_usage_critical=
  _p9k__async_segments_compute+='_p9k_worker_invoke disk_usage "_p9k_prompt_disk_usage_compute ${(q)_p9k__cwd_a}"'
}
_p9k_prompt_disk_usage_compute() {
  # Schedules the async refresh; requires df in PATH.
  (( $+commands[df] )) || return
  _p9k_worker_async "_p9k_prompt_disk_usage_async ${(q)1}" _p9k_prompt_disk_usage_sync
}
_p9k_prompt_disk_usage_async() {
  # Worker side: parse the capacity column (5th field, 2nd line) of
  # `df -P $1`, classify against the warning/critical thresholds.
  local pct=${${=${(f)"$(df -P $1 2>/dev/null)"}[2]}[5]%%%}
  # Bail out unless we got a sane 0-100 value that actually changed.
  [[ $pct == <0-100> && $pct != $_p9k__disk_usage_pct ]] || return
  _p9k__disk_usage_pct=$pct
  _p9k__disk_usage_normal=
  _p9k__disk_usage_warning=
  _p9k__disk_usage_critical=
  if (( _p9k__disk_usage_pct >= _POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL )); then
    _p9k__disk_usage_critical=1
  elif (( _p9k__disk_usage_pct >= _POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL )); then
    _p9k__disk_usage_warning=1
  elif (( ! _POWERLEVEL9K_DISK_USAGE_ONLY_WARNING )); then
    _p9k__disk_usage_normal=1
  fi
  # Ship the updated params back to the main shell and request a redraw.
  _p9k_print_params \
    _p9k__disk_usage_pct \
    _p9k__disk_usage_normal \
    _p9k__disk_usage_warning \
    _p9k__disk_usage_critical
  echo -E - 'reset=1'
}
_p9k_prompt_disk_usage_sync() {
  # Main-shell side: apply and forward the worker's reply.
  eval $REPLY
  _p9k_worker_reply $REPLY
}
# Reads the first line of file $1 into _p9k__ret.
# Succeeds only if a non-empty value was read; on any failure (empty
# argument, unreadable file, empty file) _p9k__ret is left empty.
function _p9k_read_file() {
  _p9k__ret=''
  if [[ -n $1 ]]; then
    IFS='' read -r _p9k__ret <$1
  fi
  [[ -n $_p9k__ret ]]
}
# Legacy fvm layout: an `fvm` symlink in an ancestor directory pointing
# into .../versions/<version>/bin/flutter. Renders the version on match.
function _p9k_fvm_old() {
  _p9k_upglob fvm && return 1
  local fvm=$_p9k__parent_dirs[$?]/fvm
  if [[ -L $fvm ]]; then
    if [[ ${fvm:A} == (#b)*/versions/([^/]##)/bin/flutter ]]; then
      _p9k_prompt_segment prompt_fvm blue $_p9k_color1 FLUTTER_ICON 0 '' ${match[1]//\%/%%}
      return 0
    fi
  fi
  return 1
}
# Modern fvm layout: a .fvm/flutter_sdk symlink in an ancestor directory
# pointing into .../versions/<version>. Renders the version on match.
function _p9k_fvm_new() {
  _p9k_upglob .fvm && return 1
  local sdk=$_p9k__parent_dirs[$?]/.fvm/flutter_sdk
  if [[ -L $sdk ]]; then
    if [[ ${sdk:A} == (#b)*/versions/([^/]##) ]]; then
      _p9k_prompt_segment prompt_fvm blue $_p9k_color1 FLUTTER_ICON 0 '' ${match[1]//\%/%%}
      return 0
    fi
  fi
  return 1
}
prompt_fvm() {
  # Prefer the modern .fvm/flutter_sdk layout; fall back to the legacy
  # fvm-symlink layout when the modern one is absent.
  if ! _p9k_fvm_new; then
    _p9k_fvm_old
  fi
}
_p9k_prompt_fvm_init() {
  # Show the segment only when the fvm CLI is installed.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[fvm]'
}
################################################################
# Segment that displays the battery status in levels and colors
prompt_battery() {
  # Linux/Android: recompute synchronously each prompt. On macOS the args
  # are filled in by the async worker (see _p9k_prompt_battery_init).
  [[ $_p9k_os == (Linux|Android) ]] && _p9k_prompt_battery_set_args
  (( $#_p9k__battery_args )) && _p9k_prompt_segment "${_p9k__battery_args[@]}"
}
_p9k_prompt_battery_init() {
  typeset -ga _p9k__battery_args=()
  if [[ $_p9k_os == OSX && $+commands[pmset] == 1 ]]; then
    # macOS: poll pmset from the async worker.
    _p9k__async_segments_compute+='_p9k_worker_invoke battery _p9k_prompt_battery_compute'
    return
  fi
  # Disable the segment permanently when no battery device is present.
  if [[ $_p9k_os != (Linux|Android) ||
        -z /sys/class/power_supply/(CMB*|BAT*|battery)/(energy_full|charge_full|charge_counter)(#qN) ]]; then
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
  fi
}
_p9k_prompt_battery_compute() {
  _p9k_worker_async _p9k_prompt_battery_async _p9k_prompt_battery_sync
}
_p9k_prompt_battery_async() {
  # Worker side: recompute and reply only when the args actually changed.
  local prev="${(pj:\0:)_p9k__battery_args}"
  _p9k_prompt_battery_set_args
  [[ "${(pj:\0:)_p9k__battery_args}" == $prev ]] && return 1
  _p9k_print_params _p9k__battery_args
  echo -E - 'reset=2'
}
_p9k_prompt_battery_sync() {
  # Main-shell side: apply and forward the worker's reply.
  eval $REPLY
  _p9k_worker_reply $REPLY
}
_p9k_prompt_battery_set_args() {
  # Computes the battery segment arguments into _p9k__battery_args.
  # Resulting state is one of CHARGED, CHARGING, LOW, DISCONNECTED;
  # `remain` is an estimate of time remaining ("..." while calculating).
  _p9k__battery_args=()
  local state remain
  local -i bat_percent
  case $_p9k_os in
    OSX)
      (( $+commands[pmset] )) || return
      # Second line of `pmset -g batt` describes the internal battery.
      local raw_data=${${(Af)"$(pmset -g batt 2>/dev/null)"}[2]}
      [[ $raw_data == *InternalBattery* ]] || return
      remain=${${(s: :)${${(s:; :)raw_data}[3]}}[1]}
      [[ $remain == *no* ]] && remain="..."
      [[ $raw_data =~ '([0-9]+)%' ]] && bat_percent=$match[1]
      case "${${(s:; :)raw_data}[2]}" in
        'charging'|'finishing charge'|'AC attached')
          if (( bat_percent == 100 )); then
            state=CHARGED
            remain=''
          else
            state=CHARGING
          fi
        ;;
        'discharging')
          (( bat_percent < _POWERLEVEL9K_BATTERY_LOW_THRESHOLD )) && state=LOW || state=DISCONNECTED
        ;;
        *)
          state=CHARGED
          remain=''
        ;;
      esac
    ;;
    Linux|Android)
      # See https://sourceforge.net/projects/acpiclient.
      local -a bats=( /sys/class/power_supply/(CMB*|BAT*|battery)/(FN) )
      (( $#bats )) || return
      local -i energy_now energy_full power_now
      local -i is_full=1 is_calculating is_charging
      local dir
      for dir in $bats; do
        local -i pow=0 full=0
        if _p9k_read_file $dir/(energy_full|charge_full|charge_counter)(N); then
          # Fix: expand _p9k__ret with `$` (was `${full::=_p9k__ret}`); the
          # old form only worked by accident through arithmetic evaluation
          # of the variable name and was inconsistent with the line below.
          (( energy_full += ${full::=$_p9k__ret} ))
        fi
        # Ignore implausibly large power readings (9+ digits).
        if _p9k_read_file $dir/(power|current)_now(N) && (( $#_p9k__ret < 9 )); then
          (( power_now += ${pow::=$_p9k__ret} ))
        fi
        if _p9k_read_file $dir/(energy|charge)_now(N); then
          (( energy_now += _p9k__ret ))
        elif _p9k_read_file $dir/capacity(N); then
          # Only a percentage is available: derive charge from capacity.
          (( energy_now += _p9k__ret * full / 100. + 0.5 ))
        fi
        _p9k_read_file $dir/status(N) && local bat_status=$_p9k__ret || continue
        [[ $bat_status != Full ]] && is_full=0
        [[ $bat_status == Charging ]] && is_charging=1
        [[ $bat_status == (Charging|Discharging) && $pow == 0 ]] && is_calculating=1
      done
      (( energy_full )) || return
      bat_percent=$(( 100. * energy_now / energy_full + 0.5 ))
      (( bat_percent > 100 )) && bat_percent=100
      if (( is_full || (bat_percent == 100 && is_charging) )); then
        state=CHARGED
      else
        if (( is_charging )); then
          state=CHARGING
        elif (( bat_percent < _POWERLEVEL9K_BATTERY_LOW_THRESHOLD )); then
          state=LOW
        else
          state=DISCONNECTED
        fi
        if (( power_now > 0 )); then
          # Estimate time to full (charging) or to empty (discharging).
          (( is_charging )) && local -i e=$((energy_full - energy_now)) || local -i e=energy_now
          local -i minutes=$(( 60 * e / power_now ))
          (( minutes > 0 )) && remain=$((minutes/60)):${(l#2##0#)$((minutes%60))}
        elif (( is_calculating )); then
          remain="..."
        fi
      fi
    ;;
    *)
      return 0
    ;;
  esac
  # Hide the segment above the per-state threshold.
  (( bat_percent >= _POWERLEVEL9K_BATTERY_${state}_HIDE_ABOVE_THRESHOLD )) && return
  local msg="$bat_percent%%"
  [[ $_POWERLEVEL9K_BATTERY_VERBOSE == 1 && -n $remain ]] && msg+=" ($remain)"
  # Pick icon, background and foreground from per-state stage/level arrays,
  # indexed proportionally to the charge percentage.
  local icon=BATTERY_ICON
  local var=_POWERLEVEL9K_BATTERY_${state}_STAGES
  local -i idx="${#${(@P)var}}"
  if (( idx )); then
    (( bat_percent < 100 )) && idx=$((bat_percent * idx / 100 + 1))
    icon=$'\1'"${${(@P)var}[idx]}"
  fi
  local bg=$_p9k_color1
  local var=_POWERLEVEL9K_BATTERY_${state}_LEVEL_BACKGROUND
  local -i idx="${#${(@P)var}}"
  if (( idx )); then
    (( bat_percent < 100 )) && idx=$((bat_percent * idx / 100 + 1))
    bg="${${(@P)var}[idx]}"
  fi
  local fg=$_p9k_battery_states[$state]
  local var=_POWERLEVEL9K_BATTERY_${state}_LEVEL_FOREGROUND
  local -i idx="${#${(@P)var}}"
  if (( idx )); then
    (( bat_percent < 100 )) && idx=$((bat_percent * idx / 100 + 1))
    fg="${${(@P)var}[idx]}"
  fi
  _p9k__battery_args=(prompt_battery_$state "$bg" "$fg" $icon 0 '' $msg)
}
################################################################
# Public IP segment
prompt_public_ip() {
  # Shows the public IP resolved by the async worker. When a VPN interface
  # pattern is configured, a VPN-icon variant is shown while the VPN is up.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  local ip='${_p9k__public_ip:-$_POWERLEVEL9K_PUBLIC_IP_NONE}'
  if [[ -n $_POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE ]]; then
    _p9k_prompt_segment "$0" "$_p9k_color1" "$_p9k_color2" PUBLIC_IP_ICON 1 '${_p9k__public_ip_not_vpn:+'$ip'}' $ip
    _p9k_prompt_segment "$0" "$_p9k_color1" "$_p9k_color2" VPN_ICON 1 '${_p9k__public_ip_vpn:+'$ip'}' $ip
  else
    _p9k_prompt_segment "$0" "$_p9k_color1" "$_p9k_color2" PUBLIC_IP_ICON 1 $ip $ip
  fi
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_public_ip_init() {
  typeset -g _p9k__public_ip=
  typeset -gF _p9k__public_ip_next_time=0
  _p9k__async_segments_compute+='_p9k_worker_invoke public_ip _p9k_prompt_public_ip_compute'
}
_p9k_prompt_public_ip_compute() {
  # Re-resolve only after the previous result has expired.
  (( EPOCHREALTIME >= _p9k__public_ip_next_time )) || return
  _p9k_worker_async _p9k_prompt_public_ip_async _p9k_prompt_public_ip_sync
}
_p9k_prompt_public_ip_async() {
  # Worker side: resolve the public IP with the configured methods
  # (dig/curl/wget), in order, stopping at the first plausible address.
  local ip method
  local -F start=EPOCHREALTIME
  # On failure, retry in 5 seconds; on success, after PUBLIC_IP_TIMEOUT.
  local -F next='start + 5'
  # Fix: the method list was expanded twice in this loop, so every failing
  # method was queried twice per refresh, doubling network latency when
  # offline. Each configured method should be tried exactly once.
  for method in $_POWERLEVEL9K_PUBLIC_IP_METHODS; do
    case $method in
      dig)
        if (( $+commands[dig] )); then
          ip="$(dig +tries=1 +short -4 A myip.opendns.com @resolver1.opendns.com 2>/dev/null)"
          # dig prints diagnostics starting with ';' on failure.
          [[ $ip == ';'* ]] && ip=
          if [[ -z $ip ]]; then
            # Fall back to IPv6 when there is no IPv4 connectivity.
            ip="$(dig +tries=1 +short -6 AAAA myip.opendns.com @resolver1.opendns.com 2>/dev/null)"
            [[ $ip == ';'* ]] && ip=
          fi
        fi
      ;;
      curl)
        if (( $+commands[curl] )); then
          ip="$(curl --max-time 5 -w '\n' "$_POWERLEVEL9K_PUBLIC_IP_HOST" 2>/dev/null)"
        fi
      ;;
      wget)
        if (( $+commands[wget] )); then
          ip="$(wget -T 5 -qO- "$_POWERLEVEL9K_PUBLIC_IP_HOST" 2>/dev/null)"
        fi
      ;;
    esac
    # Sanity check: accept only characters that occur in IPv4/IPv6 text.
    [[ $ip =~ '^[0-9a-f.:]+$' ]] || ip=''
    if [[ -n $ip ]]; then
      next=$((start + _POWERLEVEL9K_PUBLIC_IP_TIMEOUT))
      break
    fi
  done
  _p9k__public_ip_next_time=$next
  _p9k_print_params _p9k__public_ip_next_time
  # Request a prompt refresh only when the IP actually changed.
  [[ $_p9k__public_ip == $ip ]] && return
  _p9k__public_ip=$ip
  _p9k_print_params _p9k__public_ip
  echo -E - 'reset=1'
}
_p9k_prompt_public_ip_sync() {
  # Main-shell side: apply and forward the worker's reply.
  eval $REPLY
  _p9k_worker_reply $REPLY
}
################################################################
# Context: user@hostname (who am I and where am I)
prompt_context() {
  # Shows user@hostname. Base state is DEFAULT, SUDO, REMOTE or REMOTE_SUDO;
  # a ROOT variant is additionally emitted and selected at render time via
  # the %# prompt escape.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  local content
  if [[ $_POWERLEVEL9K_ALWAYS_SHOW_CONTEXT == 0 && -n $DEFAULT_USER && $P9K_SSH == 0 ]]; then
    local user="${(%):-%n}"
    if [[ $user == $DEFAULT_USER ]]; then
      # Default local user: show the bare user name instead of the template.
      content="${user//\%/%%}"
    fi
  fi
  local state
  if (( P9K_SSH )); then
    if [[ -n "$SUDO_COMMAND" ]]; then
      state="REMOTE_SUDO"
    else
      state="REMOTE"
    fi
  elif [[ -n "$SUDO_COMMAND" ]]; then
    state="SUDO"
  else
    state="DEFAULT"
  fi
  local cond
  # Two segments: the computed state, shown when %# is '%' (non-root), and
  # ROOT, shown when %# is '#'. The :# filters make the conditions disjoint.
  for state cond in $state '${${(%):-%#}:#\#}' ROOT '${${(%):-%#}:#\%}'; do
    local text=$content
    if [[ -z $text ]]; then
      # Use the per-state template when defined, else the generic one.
      local var=_POWERLEVEL9K_CONTEXT_${state}_TEMPLATE
      if (( $+parameters[$var] )); then
        text=${(P)var}
        text=${(g::)text}
      else
        text=$_POWERLEVEL9K_CONTEXT_TEMPLATE
      fi
    fi
    _p9k_prompt_segment "$0_$state" "$_p9k_color1" yellow '' 0 "$cond" "$text"
  done
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
instant_prompt_context() {
  # During instant prompt, skip the segment when it would be hidden anyway:
  # local session, default user, and not forced visible via ALWAYS_SHOW_USER.
  if [[ $_POWERLEVEL9K_ALWAYS_SHOW_CONTEXT == 0 && -n $DEFAULT_USER && $P9K_SSH == 0 &&
        ${(%):-%n} == $DEFAULT_USER ]] && (( ! _POWERLEVEL9K_ALWAYS_SHOW_USER )); then
    return
  fi
  prompt_context
}
_p9k_prompt_context_init() {
  # Permanently hide the segment when it could never show anything useful:
  # local session as the default user with ALWAYS_SHOW_USER disabled.
  if [[ $_POWERLEVEL9K_ALWAYS_SHOW_CONTEXT == 0 && -n $DEFAULT_USER && $P9K_SSH == 0 ]]; then
    if [[ ${(%):-%n} == $DEFAULT_USER ]]; then
      if (( ! _POWERLEVEL9K_ALWAYS_SHOW_USER )); then
        typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
      fi
    fi
  fi
}
################################################################
# User: user (who am I)
prompt_user() {
  # Shows the current user. Exactly one variant is visible at render time:
  # ROOT when %# expands to '#', otherwise SUDO (under sudo) or DEFAULT.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "${0}_ROOT" "${_p9k_color1}" yellow ROOT_ICON 0 '${${(%):-%#}:#\%}' "$_POWERLEVEL9K_USER_TEMPLATE"
  if [[ -n "$SUDO_COMMAND" ]]; then
    _p9k_prompt_segment "${0}_SUDO" "${_p9k_color1}" yellow SUDO_ICON 0 '${${(%):-%#}:#\#}' "$_POWERLEVEL9K_USER_TEMPLATE"
  else
    # NOTE: DEFAULT shows plain %n rather than the user template.
    _p9k_prompt_segment "${0}_DEFAULT" "${_p9k_color1}" yellow USER_ICON 0 '${${(%):-%#}:#\#}' "%n"
  fi
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
instant_prompt_user() {
  # Skip the segment during instant prompt when running as the default
  # user and not forced visible; otherwise render it normally.
  [[ $_POWERLEVEL9K_ALWAYS_SHOW_USER == 0 && "${(%):-%n}" == $DEFAULT_USER ]] || prompt_user
}
_p9k_prompt_user_init() {
  # Permanently hide the segment when running as the default user
  # (unless forced visible via ALWAYS_SHOW_USER).
  if [[ $_POWERLEVEL9K_ALWAYS_SHOW_USER == 0 && "${(%):-%n}" == $DEFAULT_USER ]]; then
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
  fi
}
################################################################
# Host: machine (where am I)
prompt_host() {
  # Shows the machine name; REMOTE state with an SSH icon over SSH,
  # LOCAL state with a host icon otherwise.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  local state icon
  if (( P9K_SSH )); then
    state=REMOTE icon=SSH_ICON
  else
    state=LOCAL icon=HOST_ICON
  fi
  _p9k_prompt_segment "$0_$state" "${_p9k_color1}" yellow $icon 0 '' "$_POWERLEVEL9K_HOST_TEMPLATE"
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
instant_prompt_host() {
  # Instant prompt renders the host exactly like the regular prompt.
  prompt_host
}
################################################################
# The 'custom` prompt provides a way for users to invoke commands and display
# the output in a segment.
_p9k_custom_prompt() {
  # Renders a user-defined custom segment: evaluates the command stored in
  # _POWERLEVEL9K_CUSTOM_<NAME> and shows its output, if any.
  local segment_name=${1:u}
  local command=_POWERLEVEL9K_CUSTOM_${segment_name}
  command=${(P)command}
  local parts=("${(@z)command}")
  local cmd="${(Q)parts[1]}"
  # Skip silently when the first word is neither a function nor a command.
  (( $+functions[$cmd] || $+commands[$cmd] )) || return
  # NOTE: eval of user-configured text -- the command comes from the user's
  # own configuration, not from untrusted input.
  local content="$(eval $command)"
  [[ -n $content ]] || return
  _p9k_prompt_segment "prompt_custom_$1" $_p9k_color2 $_p9k_color1 "CUSTOM_${segment_name}_ICON" 0 '' "$content"
}
################################################################
# Display the duration the command needed to run.
prompt_command_execution_time() {
  # Shows how long the last command took once it exceeds the configured
  # threshold. Formats: plain seconds (optionally fractional), H:M:S, or
  # "Nd Nh Nm Ns" depending on configuration.
  (( $+P9K_COMMAND_DURATION_SECONDS )) || return
  (( P9K_COMMAND_DURATION_SECONDS >= _POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD )) || return
  if (( P9K_COMMAND_DURATION_SECONDS < 60 )); then
    if (( !_POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION )); then
      # Round to the nearest whole second.
      local -i sec=$((P9K_COMMAND_DURATION_SECONDS + 0.5))
    else
      local -F $_POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION sec=P9K_COMMAND_DURATION_SECONDS
    fi
    local text=${sec}s
  else
    local -i d=$((P9K_COMMAND_DURATION_SECONDS + 0.5))
    if [[ $_POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT == "H:M:S" ]]; then
      local text=${(l.2..0.)$((d % 60))}
      if (( d >= 60 )); then
        text=${(l.2..0.)$((d / 60 % 60))}:$text
        # Hours are zero-padded only below 10 hours (36000 seconds).
        if (( d >= 36000 )); then
          text=$((d / 3600)):$text
        elif (( d >= 3600 )); then
          text=0$((d / 3600)):$text
        fi
      fi
    else
      local text="$((d % 60))s"
      if (( d >= 60 )); then
        text="$((d / 60 % 60))m $text"
        if (( d >= 3600 )); then
          text="$((d / 3600 % 24))h $text"
          if (( d >= 86400 )); then
            text="$((d / 86400))d $text"
          fi
        fi
      fi
    fi
  fi
  _p9k_prompt_segment "$0" "red" "yellow1" 'EXECUTION_TIME_ICON' 0 '' $text
}
# Sets _p9k__ret to the display length of the truncation delimiter.
# $1 is the default delimiter to measure when no explicit length and no
# user-configured delimiter override are set.
function _p9k_shorten_delim_len() {
  local def=$1
  _p9k__ret=${_POWERLEVEL9K_SHORTEN_DELIMITER_LENGTH:--1}
  # Fix: measure the user-configured delimiter when present, falling back
  # to $def; previously $def was assigned but never used and the override
  # was ignored here.
  (( _p9k__ret >= 0 )) || _p9k_prompt_length ${_POWERLEVEL9K_SHORTEN_DELIMITER-$def}
}
################################################################
# Dir: current working directory
prompt_dir() {
  # Current working directory segment. Splits the (possibly ~-abbreviated)
  # path into components, applies the configured truncation strategy
  # (_POWERLEVEL9K_SHORTEN_STRATEGY), then styles anchors, separators and
  # shortened components and renders the result. Internal markers inside
  # $parts: \1 = truncated run, \2 = anchor component, \3 = boundary of a
  # dynamically shortened prefix.
  if (( _POWERLEVEL9K_DIR_PATH_ABSOLUTE )); then
    local p=$_p9k__cwd
    local -a parts=("${(s:/:)p}")
  elif [[ -o auto_name_dirs ]]; then
    local p=${_p9k__cwd/#(#b)$HOME(|\/*)/'~'$match[1]}
    local -a parts=("${(s:/:)p}")
  else
    local p=${(%):-%~}
    if [[ $p == '~['* ]]; then
      # If "${(%):-%~}" expands to "~[a]/]/b", is the first component "~[a]" or "~[a]/]"?
      # One would expect "${(%):-%-1~}" to give the right answer but alas it always simply
      # gives the segment before the first slash, which would be "~[a]" in this case. Worse,
      # for "~[a/b]" it'll give the nonsensical "~[a". To solve this problem we have to
      # repeat what "${(%):-%~}" does and hope that it produces the same result.
      local func=''
      local -a parts=()
      for func in zsh_directory_name $zsh_directory_name_functions; do
        local reply=()
        if (( $+functions[$func] )) && $func d $_p9k__cwd && [[ $p == '~['$reply[1]']'* ]]; then
          parts+='~['$reply[1]']'
          break
        fi
      done
      if (( $#parts )); then
        parts+=(${(s:/:)${p#$parts[1]}})
      else
        p=$_p9k__cwd
        parts=("${(s:/:)p}")
      fi
    else
      local -a parts=("${(s:/:)p}")
    fi
  fi
  local -i fake_first=0 expand=0 shortenlen=${_POWERLEVEL9K_SHORTEN_DIR_LENGTH:--1}
  if (( $+_POWERLEVEL9K_SHORTEN_DELIMITER )); then
    local delim=$_POWERLEVEL9K_SHORTEN_DELIMITER
  else
    # Default delimiter: ellipsis on UTF-8 terminals, ".." elsewhere.
    if [[ $langinfo[CODESET] == (utf|UTF)(-|)8 ]]; then
      local delim=$'\u2026'
    else
      local delim='..'
    fi
  fi
  case $_POWERLEVEL9K_SHORTEN_STRATEGY in
    truncate_absolute|truncate_absolute_chars)
      # Keep only the last $shortenlen characters of the whole path.
      if (( shortenlen > 0 && $#p > shortenlen )); then
        _p9k_shorten_delim_len $delim
        if (( $#p > shortenlen + $_p9k__ret )); then
          local -i n=shortenlen
          local -i i=$#parts
          while true; do
            local dir=$parts[i]
            local -i len=$(( $#dir + (i > 1) ))
            if (( len <= n )); then
              (( n -= len ))
              (( --i ))
            else
              parts[i]=$'\1'$dir[-n,-1]
              parts[1,i-1]=()
              break
            fi
          done
        fi
      fi
    ;;
    truncate_with_package_name|truncate_middle|truncate_from_right)
      # Optionally replace the project root with the package name from the
      # nearest package file (parsed with jq), then shorten components.
      () {
        [[ $_POWERLEVEL9K_SHORTEN_STRATEGY == truncate_with_package_name &&
           $+commands[jq] == 1 && $#_POWERLEVEL9K_DIR_PACKAGE_FILES > 0 ]] || return
        local pats="(${(j:|:)_POWERLEVEL9K_DIR_PACKAGE_FILES})"
        local -i i=$#parts
        local dir=$_p9k__cwd
        for (( ; i > 0; --i )); do
          local markers=($dir/${~pats}(N))
          if (( $#markers )); then
            local pat= pkg_file=
            for pat in $_POWERLEVEL9K_DIR_PACKAGE_FILES; do
              for pkg_file in $markers; do
                [[ $pkg_file == $dir/${~pat} ]] || continue
                if ! _p9k_cache_stat_get $0_pkg $pkg_file; then
                  local pkg_name=''
                  pkg_name="$(jq -j '.name | select(. != null)' <$pkg_file 2>/dev/null)" || pkg_name=''
                  _p9k_cache_stat_set "$pkg_name"
                fi
                [[ -n $_p9k__cache_val[1] ]] || continue
                parts[1,i]=($_p9k__cache_val[1])
                fake_first=1
                return 0
              done
            done
          fi
          dir=${dir:h}
        done
      }
      if (( shortenlen > 0 )); then
        _p9k_shorten_delim_len $delim
        local -i d=_p9k__ret pref=shortenlen suf=0 i=2
        [[ $_POWERLEVEL9K_SHORTEN_STRATEGY == truncate_middle ]] && suf=pref
        for (( ; i < $#parts; ++i )); do
          local dir=$parts[i]
          if (( $#dir > pref + suf + d )); then
            dir[pref+1,-suf-1]=$'\1'
            parts[i]=$dir
          fi
        done
      fi
    ;;
    truncate_to_last)
      # Keep only the last component.
      if [[ $#parts -gt 2 || $p[1] != / && $#parts -gt 1 ]]; then
        fake_first=1
        parts[1,-2]=()
      fi
    ;;
    truncate_to_first_and_last)
      # Keep the first and last $shortenlen components, truncate the middle.
      if (( shortenlen > 0 )); then
        local -i i=$(( shortenlen + 1 ))
        [[ $p == /* ]] && (( ++i ))
        for (( ; i <= $#parts - shortenlen; ++i )); do
          parts[i]=$'\1'
        done
      fi
    ;;
    truncate_to_unique)
      # Shorten each component to its shortest unique prefix; results are
      # cached per directory keyed on mtimes. $expand marks that parts now
      # contain prompt expansions that must be escaped accordingly.
      expand=1
      delim=${_POWERLEVEL9K_SHORTEN_DELIMITER-'*'}
      shortenlen=${_POWERLEVEL9K_SHORTEN_DIR_LENGTH:-1}
      (( shortenlen >= 0 )) || shortenlen=1
      local -i i=2 e=$(($#parts - shortenlen))
      if [[ -n $_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER ]]; then
        (( e += shortenlen ))
        local orig=("$parts[2]" "${(@)parts[$((shortenlen > $#parts ? -$#parts : -shortenlen)),-1]}")
      elif [[ $p[1] == / ]]; then
        (( ++i ))
      fi
      if (( i <= e )); then
        local mtimes=(${(Oa)_p9k__parent_mtimes:$(($#parts-e)):$((e-i+1))})
        local key="${(pj.:.)mtimes}"
      else
        local key=
      fi
      if ! _p9k_cache_ephemeral_get $0 $e $i $_p9k__cwd || [[ $key != $_p9k__cache_val[1] ]]; then
        local tail=${(j./.)parts[i,-1]}
        local parent=$_p9k__cwd[1,-2-$#tail]
        _p9k_prompt_length $delim
        local -i real_delim_len=_p9k__ret
        [[ -n $parts[i-1] ]] && parts[i-1]="\${(Q)\${:-${(qqq)${(q)parts[i-1]}}}}"$'\2'
        local -i d=${_POWERLEVEL9K_SHORTEN_DELIMITER_LENGTH:--1}
        (( d >= 0 )) || d=real_delim_len
        local -i m=1
        for (( ; i <= e; ++i, ++m )); do
          local sub=$parts[i]
          local dir=$parent/$sub mtime=$mtimes[m]
          local pair=$_p9k__dir_stat_cache[$dir]
          if [[ $pair == ${mtime:-x}:* ]]; then
            parts[i]=${pair#*:}
          else
            # q records whether the component needs (Q)-style quoting.
            [[ $sub != *["~!#\`\$^&*()\\\"'<>?{}[]"]* ]]
            local -i q=$?
            if [[ -n $_POWERLEVEL9K_SHORTEN_FOLDER_MARKER &&
                  -n $parent/$sub/${~_POWERLEVEL9K_SHORTEN_FOLDER_MARKER}(#qN) ]]; then
              (( q )) && parts[i]="\${(Q)\${:-${(qqq)${(q)sub}}}}"
              parts[i]+=$'\2'
            else
              # Find the shortest prefix that matches a unique sibling.
              local -i j=$sub[(i)[^.]]
              for (( ; j + d < $#sub; ++j )); do
                local -a matching=($parent/$sub[1,j]*/(N))
                (( $#matching == 1 )) && break
              done
              local -i saved=$(($#sub - j - d))
              if (( saved > 0 )); then
                if (( q )); then
                  parts[i]='${${${_p9k__d:#-*}:+${(Q)${:-'${(qqq)${(q)sub}}'}}}:-${(Q)${:-'
                  parts[i]+=$'\3'${(qqq)${(q)sub[1,j]}}$'}}\1\3''${$((_p9k__d+='$saved'))+}}'
                else
                  parts[i]='${${${_p9k__d:#-*}:+'$sub$'}:-\3'$sub[1,j]$'\1\3''${$((_p9k__d+='$saved'))+}}'
                fi
              else
                (( q )) && parts[i]="\${(Q)\${:-${(qqq)${(q)sub}}}}"
              fi
            fi
            [[ -n $mtime ]] && _p9k__dir_stat_cache[$dir]="$mtime:$parts[i]"
          fi
          parent+=/$sub
        done
        if [[ -n $_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER ]]; then
          local _2=$'\2'
          if [[ $_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER == last* ]]; then
            (( e = ${parts[(I)*$_2]} + ${_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER#*:} ))
          else
            (( e = ${parts[(ib:2:)*$_2]} + ${_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER#*:} ))
          fi
          if (( e > 1 && e <= $#parts )); then
            parts[1,e-1]=()
            fake_first=1
          elif [[ $p == /?* ]]; then
            parts[2]="\${(Q)\${:-${(qqq)${(q)orig[1]}}}}"$'\2'
          fi
          for ((i = $#parts < shortenlen ? $#parts : shortenlen; i > 0; --i)); do
            # Fix: compare the component itself against the \2 anchor marker.
            # The previous `$#parts[-i]` compared the component's *length*
            # (always digits) against the marker pattern, so the guard never
            # matched and already-anchored components were overwritten.
            [[ $parts[-i] == *$'\2' ]] && continue
            if [[ $orig[-i] == *["~!#\`\$^&*()\\\"'<>?{}[]"]* ]]; then
              parts[-i]='${(Q)${:-'${(qqq)${(q)orig[-i]}}'}}'$'\2'
            else
              parts[-i]=${orig[-i]}$'\2'
            fi
          done
        else
          for ((; i <= $#parts; ++i)); do
            [[ $parts[i] == *["~!#\`\$^&*()\\\"'<>?{}[]"]* ]] && parts[i]='${(Q)${:-'${(qqq)${(q)parts[i]}}'}}'
            parts[i]+=$'\2'
          done
        fi
        _p9k_cache_ephemeral_set "$key" "${parts[@]}"
      fi
      parts=("${(@)_p9k__cache_val[2,-1]}")
    ;;
    truncate_with_folder_marker)
      # Truncate runs of components that do not contain the folder marker.
      if [[ -n $_POWERLEVEL9K_SHORTEN_FOLDER_MARKER ]]; then
        local dir=$_p9k__cwd
        local -a m=()
        local -i i=$(($#parts - 1))
        for (( ; i > 1; --i )); do
          dir=${dir:h}
          [[ -n $dir/${~_POWERLEVEL9K_SHORTEN_FOLDER_MARKER}(#qN) ]] && m+=$i
        done
        m+=1
        for (( i=1; i < $#m; ++i )); do
          (( m[i] - m[i+1] > 2 )) && parts[m[i+1]+1,m[i]-1]=($'\1')
        done
      fi
    ;;
    *)
      # Default strategy: keep the last $shortenlen components.
      if (( shortenlen > 0 )); then
        local -i len=$#parts
        [[ -z $parts[1] ]] && (( --len ))
        if (( len > shortenlen )); then
          parts[1,-shortenlen-1]=($'\1')
        fi
      fi
    ;;
  esac
  # w == 0 means the directory should be rendered as not writable.
  [[ $_POWERLEVEL9K_DIR_SHOW_WRITABLE != 0 && ! -w $_p9k__cwd ]]
  local w=$?
  # Styling and assembly; cached per (cwd, abbreviated path, parts, ...).
  if ! _p9k_cache_ephemeral_get $0 $_p9k__cwd $p $w $fake_first "${parts[@]}"; then
    local state=$0
    local icon=''
    local a='' b='' c=''
    for a b c in "${_POWERLEVEL9K_DIR_CLASSES[@]}"; do
      if [[ $_p9k__cwd == ${~a} ]]; then
        [[ -n $b ]] && state+=_${${(U)b}//ฤฐ/I}
        icon=$'\1'$c
        break
      fi
    done
    if (( ! w )); then
      if (( _POWERLEVEL9K_DIR_SHOW_WRITABLE == 1 )); then
        state=${0}_NOT_WRITABLE
      else
        state+=_NOT_WRITABLE
      fi
      icon=LOCK_ICON
    fi
    local state_u=${${(U)state}//ฤฐ/I}
    local style=%b
    _p9k_color $state BACKGROUND blue
    _p9k_background $_p9k__ret
    style+=$_p9k__ret
    _p9k_color $state FOREGROUND "$_p9k_color1"
    _p9k_foreground $_p9k__ret
    style+=$_p9k__ret
    if (( expand )); then
      _p9k_escape_style $style
      style=$_p9k__ret
    fi
    parts=("${(@)parts//\%/%%}")
    if [[ $_POWERLEVEL9K_HOME_FOLDER_ABBREVIATION != '~' && $fake_first == 0 && $p == ('~'|'~/'*) ]]; then
      (( expand )) && _p9k_escape $_POWERLEVEL9K_HOME_FOLDER_ABBREVIATION || _p9k__ret=$_POWERLEVEL9K_HOME_FOLDER_ABBREVIATION
      parts[1]=$_p9k__ret
      [[ $_p9k__ret == *%* ]] && parts[1]+=$style
    elif [[ $_POWERLEVEL9K_DIR_OMIT_FIRST_CHARACTER == 1 && $fake_first == 0 && $#parts > 1 && -z $parts[1] && -n $parts[2] ]]; then
      parts[1]=()
    fi
    # Optional highlighting of the last path component.
    local last_style=
    _p9k_param $state PATH_HIGHLIGHT_BOLD ''
    [[ $_p9k__ret == true ]] && last_style+=%B
    if (( $+parameters[_POWERLEVEL9K_DIR_PATH_HIGHLIGHT_FOREGROUND] ||
          $+parameters[_POWERLEVEL9K_${state_u}_PATH_HIGHLIGHT_FOREGROUND] )); then
      _p9k_color $state PATH_HIGHLIGHT_FOREGROUND ''
      _p9k_foreground $_p9k__ret
      last_style+=$_p9k__ret
    fi
    if [[ -n $last_style ]]; then
      (( expand )) && _p9k_escape_style $last_style || _p9k__ret=$last_style
      parts[-1]=$_p9k__ret${parts[-1]//$'\1'/$'\1'$_p9k__ret}$style
    fi
    # Optional highlighting of anchor components (marked with \2).
    local anchor_style=
    _p9k_param $state ANCHOR_BOLD ''
    [[ $_p9k__ret == true ]] && anchor_style+=%B
    if (( $+parameters[_POWERLEVEL9K_DIR_ANCHOR_FOREGROUND] ||
          $+parameters[_POWERLEVEL9K_${state_u}_ANCHOR_FOREGROUND] )); then
      _p9k_color $state ANCHOR_FOREGROUND ''
      _p9k_foreground $_p9k__ret
      anchor_style+=$_p9k__ret
    fi
    if [[ -n $anchor_style ]]; then
      (( expand )) && _p9k_escape_style $anchor_style || _p9k__ret=$anchor_style
      if [[ -z $last_style ]]; then
        parts=("${(@)parts/%(#b)(*)$'\2'/$_p9k__ret$match[1]$style}")
      else
        (( $#parts > 1 )) && parts[1,-2]=("${(@)parts[1,-2]/%(#b)(*)$'\2'/$_p9k__ret$match[1]$style}")
        parts[-1]=${parts[-1]/$'\2'}
      fi
    else
      parts=("${(@)parts/$'\2'}")
    fi
    # Replace \1 truncation markers with the (optionally colored) delimiter.
    if (( $+parameters[_POWERLEVEL9K_DIR_SHORTENED_FOREGROUND] ||
          $+parameters[_POWERLEVEL9K_${state_u}_SHORTENED_FOREGROUND] )); then
      _p9k_color $state SHORTENED_FOREGROUND ''
      _p9k_foreground $_p9k__ret
      (( expand )) && _p9k_escape_style $_p9k__ret
      local shortened_fg=$_p9k__ret
      (( expand )) && _p9k_escape $delim || _p9k__ret=$delim
      [[ $_p9k__ret == *%* ]] && _p9k__ret+=$style$shortened_fg
      parts=("${(@)parts/(#b)$'\3'(*)$'\1'(*)$'\3'/$shortened_fg$match[1]$_p9k__ret$match[2]$style}")
      parts=("${(@)parts/(#b)(*)$'\1'(*)/$shortened_fg$match[1]$_p9k__ret$match[2]$style}")
    else
      (( expand )) && _p9k_escape $delim || _p9k__ret=$delim
      [[ $_p9k__ret == *%* ]] && _p9k__ret+=$style
      parts=("${(@)parts/$'\1'/$_p9k__ret}")
      parts=("${(@)parts//$'\3'}")
    fi
    # Join the components with the (optionally colored) separator.
    if [[ $_p9k__cwd == / && $_POWERLEVEL9K_DIR_OMIT_FIRST_CHARACTER == 1 ]]; then
      local sep='/'
    else
      local sep=''
      if (( $+parameters[_POWERLEVEL9K_DIR_PATH_SEPARATOR_FOREGROUND] ||
            $+parameters[_POWERLEVEL9K_${state_u}_PATH_SEPARATOR_FOREGROUND] )); then
        _p9k_color $state PATH_SEPARATOR_FOREGROUND ''
        _p9k_foreground $_p9k__ret
        (( expand )) && _p9k_escape_style $_p9k__ret
        sep=$_p9k__ret
      fi
      _p9k_param $state PATH_SEPARATOR /
      _p9k__ret=${(g::)_p9k__ret}
      (( expand )) && _p9k_escape $_p9k__ret
      sep+=$_p9k__ret
      [[ $sep == *%* ]] && sep+=$style
    fi
    local content="${(pj.$sep.)parts}"
    # Optionally wrap the path in an OSC 8 file:// hyperlink.
    if (( _POWERLEVEL9K_DIR_HYPERLINK && _p9k_term_has_href )) && [[ $_p9k__cwd == /* ]]; then
      local header=$'%{\e]8;;file://'${${_p9k__cwd//\%/%%25}//'#'/%%23}$'\a%}'
      local footer=$'%{\e]8;;\a%}'
      if (( expand )); then
        _p9k_escape $header
        header=$_p9k__ret
        _p9k_escape $footer
        footer=$_p9k__ret
      fi
      content=$header$content$footer
    fi
    (( expand )) && _p9k_prompt_length "${(e):-"\${\${_p9k__d::=0}+}$content"}" || _p9k__ret=
    _p9k_cache_ephemeral_set "$state" "$icon" "$expand" "$content" $_p9k__ret
  fi
  if (( _p9k__cache_val[3] )); then
    if (( $+_p9k__dir )); then
      _p9k__cache_val[4]='${${_p9k__d::=-1024}+}'$_p9k__cache_val[4]
    else
      _p9k__dir=$_p9k__cache_val[4]
      _p9k__dir_len=$_p9k__cache_val[5]
      _p9k__cache_val[4]='%{d%}'$_p9k__cache_val[4]'%{d%}'
    fi
  fi
  _p9k_prompt_segment "$_p9k__cache_val[1]" "blue" "$_p9k_color1" "$_p9k__cache_val[2]" "$_p9k__cache_val[3]" "" "$_p9k__cache_val[4]"
}
instant_prompt_dir() {
  # Instant prompt renders the directory exactly like the regular prompt.
  prompt_dir
}
################################################################
# Docker machine
prompt_docker_machine() {
  # Shows the active Docker machine, with % escaped for prompt expansion.
  local machine="${DOCKER_MACHINE_NAME//\%/%%}"
  _p9k_prompt_segment "$0" "magenta" "$_p9k_color1" 'SERVER_ICON' 0 '' "$machine"
}
_p9k_prompt_docker_machine_init() {
  # Show the segment only while DOCKER_MACHINE_NAME is set.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$DOCKER_MACHINE_NAME'
}
################################################################
# GO prompt
prompt_go_version() {
  # Shows the go toolchain version; with GO_VERSION_PROJECT_ONLY, only
  # inside GOPATH or a directory tree containing go.mod.
  _p9k_cached_cmd 0 go version || return
  [[ $_p9k__ret == (#b)*go([[:digit:].]##)* ]] || return
  local v=$match[1]
  if (( _POWERLEVEL9K_GO_VERSION_PROJECT_ONLY )); then
    local p=$GOPATH
    if [[ -z $p ]]; then
      # Resolve GOPATH: env var, then ~/go, then `go env GOPATH`.
      if [[ -d $HOME/go ]]; then
        p=$HOME/go
      else
        p="$(go env GOPATH 2>/dev/null)" && [[ -n $p ]] || return
      fi
    fi
    # Outside GOPATH: require go.mod in the current dir or an ancestor.
    if [[ $_p9k__cwd/ != $p/* && $_p9k__cwd_a/ != $p/* ]]; then
      _p9k_upglob go.mod && return
    fi
  fi
  _p9k_prompt_segment "$0" "green" "grey93" "GO_ICON" 0 '' "${v//\%/%%}"
}
_p9k_prompt_go_version_init() {
  # Show the segment only when the go CLI is installed.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[go]'
}
################################################################
# Command number (in local history)
prompt_history() {
  # Shows the current history event number via the %h prompt escape.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "$0" "grey50" "$_p9k_color1" '' 0 '' '%h'
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
prompt_package() {
  # Shows the version from the nearest ancestor package.json and exports
  # P9K_PACKAGE_NAME / P9K_PACKAGE_VERSION on success.
  unset P9K_PACKAGE_NAME P9K_PACKAGE_VERSION
  _p9k_upglob package.json && return
  local file=$_p9k__parent_dirs[$?]/package.json
  # Parsed name/version are cached keyed on the file's stat signature.
  if ! _p9k_cache_stat_get $0 $file; then
    () {
      local data field
      local -A found
      # Redneck json parsing. Yields correct results for any well-formed json document.
      # Produces random garbage for invalid json.
      { data="$(<$file)" || return } 2>/dev/null
      data=${${data//$'\r'}##[[:space:]]#}
      [[ $data == '{'* ]] || return
      data[1]=
      local -i depth=1
      while true; do
        data=${data##[[:space:]]#}
        [[ -n $data ]] || return
        case $data[1] in
          '{'|'[') data[1]=; (( ++depth ));;
          '}'|']') data[1]=; (( --depth > 0 )) || return;;
          ':') data[1]=;;
          ',') data[1]=; field=;;
          [[:alnum:].]) data=${data##[[:alnum:].]#};;
          '"')
            # Consume one string literal; only top-level (depth 1) strings
            # can be the name/version keys or their values.
            local tail=${data##\"([^\"\\]|\\?)#}
            [[ $tail == '"'* ]] || return
            local s=${data:1:-$#tail}
            data=${tail:1}
            (( depth == 1 )) || continue
            if [[ -z $field ]]; then
              field=${s:-x}
            elif [[ $field == (name|version) ]]; then
              (( ! $+found[$field] )) || return
              [[ -n $s ]] || return
              # Reject values containing newlines or backslash escapes.
              [[ $s != *($'\n'|'\')* ]] || return
              found[$field]=$s
              (( $#found == 2 )) && break
            fi
          ;;
          *) return 1;;
        esac
      done
      _p9k_cache_stat_set 1 $found[name] $found[version]
      return 0
    } || _p9k_cache_stat_set 0
  fi
  (( _p9k__cache_val[1] )) || return
  P9K_PACKAGE_NAME=$_p9k__cache_val[2]
  P9K_PACKAGE_VERSION=$_p9k__cache_val[3]
  _p9k_prompt_segment "$0" "cyan" "$_p9k_color1" PACKAGE_ICON 0 '' ${P9K_PACKAGE_VERSION//\%/%%}
}
################################################################
# Detection for virtualization (systemd based systems only)
prompt_detect_virt() {
  # Shows the virtualization type reported by systemd-detect-virt; when it
  # reports "none", additionally detect chroots by checking whether / has
  # the conventional root inode number 2.
  local virt="$(systemd-detect-virt 2>/dev/null)"
  if [[ $virt == none ]]; then
    local -a inode
    if zstat -A inode +inode / 2>/dev/null; then
      [[ $inode[1] == 2 ]] || virt=chroot
    fi
  fi
  if [[ -n $virt ]]; then
    _p9k_prompt_segment "$0" "$_p9k_color1" "yellow" '' 0 '' "${virt//\%/%%}"
  fi
}
_p9k_prompt_detect_virt_init() {
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[systemd-detect-virt]'
}
################################################################
# Segment to display the current IP address
# Shows the current IP address ($P9K_IP_IP, maintained elsewhere).
# The rendered prompt chunk is cached per side/index when it contains no
# upglob-dependent parts (_p9k__has_upglob stays 0).
prompt_ip() {
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "$0" "cyan" "$_p9k_color1" 'NETWORK_ICON' 1 '$P9K_IP_IP' '$P9K_IP_IP'
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
################################################################
# Segment to display if VPN is active
# VPN IP segment. Rendering is deferred: the prompt gets a placeholder
# parameter expansion that _p9k_vpn_ip_render fills in later, once the
# list of VPN IPs (_p9k__vpn_ip_ips) is known.
prompt_vpn_ip() {
  typeset -ga _p9k__vpn_ip_segments
  # Remember where this segment lives so the renderer can target it.
  _p9k__vpn_ip_segments+=($_p9k__prompt_side $_p9k__line_index $_p9k__segment_index)
  # (e) flag re-evaluates the named parameter at prompt display time.
  local p='${(e)_p9k__vpn_ip_'$_p9k__prompt_side$_p9k__segment_index'}'
  _p9k__prompt+=$p
  typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$p
}
# Renders one sub-segment per VPN IP into each placeholder registered above.
_p9k_vpn_ip_render() {
  local _p9k__segment_name=vpn_ip _p9k__prompt_side ip
  local -i _p9k__has_upglob _p9k__segment_index
  for _p9k__prompt_side _p9k__line_index _p9k__segment_index in $_p9k__vpn_ip_segments; do
    local _p9k__prompt=
    for ip in $_p9k__vpn_ip_ips; do
      _p9k_prompt_segment prompt_vpn_ip "cyan" "$_p9k_color1" 'VPN_ICON' 0 '' $ip
    done
    typeset -g _p9k__vpn_ip_$_p9k__prompt_side$_p9k__segment_index=$_p9k__prompt
  done
}
################################################################
# Segment to display laravel version
# Shows the Laravel framework version for projects that have an `artisan`
# script and a vendored laravel/framework. `php artisan --version` output is
# cached against the stat info of both artisan and Application.php.
prompt_laravel_version() {
  _p9k_upglob artisan && return
  local dir=$_p9k__parent_dirs[$?]
  local app=$dir/vendor/laravel/framework/src/Illuminate/Foundation/Application.php
  [[ -r $app ]] || return
  if ! _p9k_cache_stat_get $0 $dir/artisan $app; then
    local v="$(php $dir/artisan --version 2> /dev/null)"
    # Keep only output matching "Laravel Framework <version>".
    _p9k_cache_stat_set "${${(M)v:#Laravel Framework *}#Laravel Framework }"
  fi
  [[ -n $_p9k__cache_val[1] ]] || return
  _p9k_prompt_segment "$0" "maroon" "white" 'LARAVEL_ICON' 0 '' "${_p9k__cache_val[1]//\%/%%}"
}
# Display condition: requires php on $PATH.
_p9k_prompt_laravel_version_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[php]'
}
################################################################
# Segment to display load
# Shows system load average, colored by load as a percentage of CPU count:
# >70% critical (red), >50% warning (yellow), else normal (green).
# On OSX/BSD the value comes from an async worker (sysctl is slow-ish);
# on Linux it is read synchronously from /proc/loadavg.
prompt_load() {
  if [[ $_p9k_os == (OSX|BSD) ]]; then
    # Async path: emit all three state variants guarded by the worker-set
    # flags (_p9k__load_{critical,warning,normal}); only one is non-empty.
    local -i len=$#_p9k__prompt _p9k__has_upglob
    _p9k_prompt_segment $0_CRITICAL red "$_p9k_color1" LOAD_ICON 1 '$_p9k__load_critical' '$_p9k__load_value'
    _p9k_prompt_segment $0_WARNING yellow "$_p9k_color1" LOAD_ICON 1 '$_p9k__load_warning' '$_p9k__load_value'
    _p9k_prompt_segment $0_NORMAL green "$_p9k_color1" LOAD_ICON 1 '$_p9k__load_normal' '$_p9k__load_value'
    (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
    return
  fi
  [[ -r /proc/loadavg ]] || return
  _p9k_read_file /proc/loadavg || return
  # Pick the 1/5/15-minute field per _POWERLEVEL9K_LOAD_WHICH; normalize
  # decimal comma to dot for locales that use it.
  local load=${${(A)=_p9k__ret}[_POWERLEVEL9K_LOAD_WHICH]//,/.}
  local -F pct='100. * load / _p9k_num_cpus'
  if (( pct > 70 )); then
    _p9k_prompt_segment $0_CRITICAL red "$_p9k_color1" LOAD_ICON 0 '' $load
  elif (( pct > 50 )); then
    _p9k_prompt_segment $0_WARNING yellow "$_p9k_color1" LOAD_ICON 0 '' $load
  else
    _p9k_prompt_segment $0_NORMAL green "$_p9k_color1" LOAD_ICON 0 '' $load
  fi
}
# Registers the async computation on OSX/BSD; on Linux disables the segment
# outright when /proc/loadavg is unreadable.
_p9k_prompt_load_init() {
  if [[ $_p9k_os == (OSX|BSD) ]]; then
    typeset -g _p9k__load_value=
    typeset -g _p9k__load_normal=
    typeset -g _p9k__load_warning=
    typeset -g _p9k__load_critical=
    _p9k__async_segments_compute+='_p9k_worker_invoke load _p9k_prompt_load_compute'
  elif [[ ! -r /proc/loadavg ]]; then
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
  fi
}
_p9k_prompt_load_compute() {
  (( $+commands[sysctl] )) || return
  _p9k_worker_async _p9k_prompt_load_async _p9k_prompt_load_sync
}
# Runs in the worker: reads vm.loadavg, picks the configured field (+1 skips
# the leading "{" of sysctl output), classifies it, and prints parameter
# assignments for the sync half to apply.
_p9k_prompt_load_async() {
  local load="$(sysctl -n vm.loadavg 2>/dev/null)" || return
  load=${${(A)=load}[_POWERLEVEL9K_LOAD_WHICH+1]//,/.}
  # Bail if the value is malformed or unchanged (avoids useless redraws).
  [[ $load == <->(|.<->) && $load != $_p9k__load_value ]] || return
  _p9k__load_value=$load
  _p9k__load_normal=
  _p9k__load_warning=
  _p9k__load_critical=
  local -F pct='100. * _p9k__load_value / _p9k_num_cpus'
  if (( pct > 70 )); then
    _p9k__load_critical=1
  elif (( pct > 50 )); then
    _p9k__load_warning=1
  else
    _p9k__load_normal=1
  fi
  _p9k_print_params \
    _p9k__load_value \
    _p9k__load_normal \
    _p9k__load_warning \
    _p9k__load_critical
  echo -E - 'reset=1'
}
# Runs in the main shell: applies the worker's assignments and forwards them.
_p9k_prompt_load_sync() {
  eval $REPLY
  _p9k_worker_reply $REPLY
}
# Usage: _p9k_cached_cmd <0|1> <cmd> [args...]
#
# The first argument says whether to capture stderr (1) or ignore it (0).
# Usage: _p9k_cached_cmd <0|1> <cmd> [args...]
#
# The first argument says whether to capture stderr (1) or ignore it (0).
#
# Runs <cmd> with [args...] and leaves its output in _p9k__ret. The result
# is cached against the stat info of the command's binary, so the command
# is re-run only when the binary changes. Returns non-zero if the command
# is not installed or previously failed.
function _p9k_cached_cmd() {
  local cmd=$commands[$2]
  [[ -n $cmd ]] || return
  # Cache key includes all (quoted) arguments, value includes exit status.
  if ! _p9k_cache_stat_get $0" ${(q)*}" $cmd; then
    local out
    if (( $1 )); then
      out="$($cmd "${@:3}" 2>&1)"
    else
      out="$($cmd "${@:3}" 2>/dev/null)"
    fi
    _p9k_cache_stat_set $(( ! $? )) "$out"
  fi
  (( $_p9k__cache_val[1] )) || return
  _p9k__ret=$_p9k__cache_val[2]
}
################################################################
# Segment to diplay Node version
# Shows the installed Node.js version (`node --version`, cached). With
# PROJECT_ONLY set, only inside a directory tree containing package.json.
prompt_node_version() {
  if (( _POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY )); then
    _p9k_upglob package.json && return
  fi
  # Expect output like "v12.3.4"; strip the leading "v" for display.
  _p9k_cached_cmd 0 node --version && [[ $_p9k__ret == v?* ]] || return
  _p9k_prompt_segment "$0" "green" "white" 'NODE_ICON' 0 '' "${_p9k__ret#v}"
}
# Display condition: requires node on $PATH.
_p9k_prompt_node_version_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[node]'
}
# Almost the same as `nvm_version default` but faster. The differences shouldn't affect
# the observable behavior of Powerlevel10k.
# Almost the same as `nvm_version default` but faster. The differences shouldn't affect
# the observable behavior of Powerlevel10k.
#
# Resolves nvm's "default" alias to a concrete version and stores it in
# _p9k__ret. Returns non-zero if there is no usable default.
function _p9k_nvm_ls_default() {
  local v=default
  local -a seen=($v)
  # Follow alias files under $NVM_DIR/alias; `seen` guards against cycles.
  while [[ -r $NVM_DIR/alias/$v ]]; do
    local target=
    IFS='' read -r target <$NVM_DIR/alias/$v
    target=${target%$'\r'}
    [[ -z $target ]] && break
    (( $seen[(I)$target] )) && return
    seen+=$target
    v=$target
  done
  # Normalize the alias target into nvm's directory naming scheme.
  case $v in
    default|N/A)
      return 1
    ;;
    system|v)
      _p9k__ret=system
      return 0
    ;;
    iojs-[0-9]*)
      v=iojs-v${v#iojs-}
    ;;
    [0-9]*)
      v=v$v
    ;;
  esac
  # Fully qualified version: check the known install locations directly.
  if [[ $v == v*.*.* ]]; then
    if [[ -x $NVM_DIR/versions/node/$v/bin/node || -x $NVM_DIR/$v/bin/node ]]; then
      _p9k__ret=$v
      return 0
    elif [[ -x $NVM_DIR/versions/io.js/$v/bin/node ]]; then
      _p9k__ret=iojs-$v
      return 0
    else
      return 1
    fi
  fi
  # Partial version or named channel: turn it into a glob over the install
  # directories. "stable" = even 0.x minor versions, "unstable" = odd.
  local -a dirs=()
  case $v in
    node|node-|stable)
      dirs=($NVM_DIR/versions/node $NVM_DIR)
      v='(v[1-9]*|v0.*[02468].*)'
    ;;
    unstable)
      dirs=($NVM_DIR/versions/node $NVM_DIR)
      v='v0.*[13579].*'
    ;;
    iojs*)
      dirs=($NVM_DIR/versions/io.js)
      v=v${${${v#iojs}#-}#v}'*'
    ;;
    *)
      dirs=($NVM_DIR/versions/node $NVM_DIR $NVM_DIR/versions/io.js)
      v=v${v#v}'*'
    ;;
  esac
  local -a matches=(${^dirs}/${~v}(/N))
  (( $#matches )) || return
  # Pick the highest version: left-pad each numeric component to 6 digits so
  # plain string comparison orders versions correctly.
  local max path
  for path in ${(Oa)matches}; do
    [[ ${path:t} == (#b)v(*).(*).(*) ]] || continue
    v=${(j::)${(@l:6::0:)match}}
    [[ $v > $max ]] || continue
    max=$v
    _p9k__ret=${path:t}
    [[ ${path:h:t} != io.js ]] || _p9k__ret=iojs-$_p9k__ret
  done
  [[ -n $max ]]
}
# The same as `nvm_version current` but faster.
# The same as `nvm_version current` but faster.
# Determines the active node version by inspecting where the `node` binary
# resolves to: inside $NVM_DIR (node or io.js install) or elsewhere (system).
# Result goes into _p9k__ret.
_p9k_nvm_ls_current() {
  local node_path=${commands[node]:A}
  [[ -n $node_path ]] || return
  local nvm_dir=${NVM_DIR:A}
  if [[ -n $nvm_dir && $node_path == $nvm_dir/versions/io.js/* ]]; then
    _p9k_cached_cmd 0 iojs --version || return
    _p9k__ret=iojs-v${_p9k__ret#v}
  elif [[ -n $nvm_dir && $node_path == $nvm_dir/* ]]; then
    _p9k_cached_cmd 0 node --version || return
    _p9k__ret=v${_p9k__ret#v}
  else
    _p9k__ret=system
  fi
}
################################################################
# Segment to display Node version from NVM
# Only prints the segment if different than the default value
# Shows the nvm-selected Node version, but only when it differs from the
# nvm default (no point displaying the default).
prompt_nvm() {
  [[ -n $NVM_DIR ]] && _p9k_nvm_ls_current || return
  local current=$_p9k__ret
  # Hide the segment iff the default resolves and equals the current version.
  ! _p9k_nvm_ls_default || [[ $_p9k__ret != $current ]] || return
  _p9k_prompt_segment "$0" "magenta" "black" 'NODE_ICON' 0 '' "${${current#v}//\%/%%}"
}
# Display condition: nvm available as a command or a shell function.
_p9k_prompt_nvm_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[nvm]:-${${+functions[nvm]}:#0}}'
}
################################################################
# Segment to display NodeEnv
# Shows the active nodeenv virtual environment ($NODE_VIRTUAL_ENV basename),
# optionally prefixed with the node version, wrapped in configured delimiters.
prompt_nodeenv() {
  local msg
  if (( _POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION )) && _p9k_cached_cmd 0 node --version; then
    msg="${_p9k__ret//\%/%%} "
  fi
  msg+="$_POWERLEVEL9K_NODEENV_LEFT_DELIMITER${${NODE_VIRTUAL_ENV:t}//\%/%%}$_POWERLEVEL9K_NODEENV_RIGHT_DELIMITER"
  _p9k_prompt_segment "$0" "black" "green" 'NODE_ICON' 0 '' "$msg"
}
# Display condition: only when a nodeenv environment is activated.
_p9k_prompt_nodeenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$NODE_VIRTUAL_ENV'
}
# Maps a raw version string ($1) to the name of an installed nodenv version
# directory, trying common prefixes ("v", "node-", "node-v") in turn.
# Result in _p9k__ret; returns 1 if nothing matches.
function _p9k_nodeenv_version_transform() {
  local dir=${NODENV_ROOT:-$HOME/.nodenv}/versions
  [[ -z $1 || $1 == system ]] && _p9k__ret=$1 && return
  [[ -d $dir/$1 ]] && _p9k__ret=$1 && return
  [[ -d $dir/${1/v} ]] && _p9k__ret=${1/v} && return
  [[ -d $dir/${1#node-} ]] && _p9k__ret=${1#node-} && return
  [[ -d $dir/${1#node-v} ]] && _p9k__ret=${1#node-v} && return
  return 1
}
# Reads the nodenv global version file; falls back to "system".
function _p9k_nodenv_global_version() {
  _p9k_read_word ${NODENV_ROOT:-$HOME/.nodenv}/version || _p9k__ret=system
}
################################################################
# Segment to display nodenv information
# https://github.com/nodenv/nodenv
# Shows the node version selected by nodenv, resolved from the sources
# enabled in _POWERLEVEL9K_NODENV_SOURCES: shell ($NODENV_VERSION),
# local (.node-version up the directory tree), or global (~/.nodenv/version).
prompt_nodenv() {
  if [[ -n $NODENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_NODENV_SOURCES[(I)shell]} )) || return
    local v=$NODENV_VERSION
  else
    (( ${_POWERLEVEL9K_NODENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # nodenv resolves local versions relative to $NODENV_DIR when it is set
    # and differs from cwd: walk up from there first.
    if [[ $NODENV_DIR != (|.) ]]; then
      [[ $NODENV_DIR == /* ]] && local dir=$NODENV_DIR || local dir="$_p9k__cwd_a/$NODENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.node-version; then
            (( ${_POWERLEVEL9K_NODENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .node-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .node-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.node-version; then
        (( ${_POWERLEVEL9K_NODENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_NODENV_SOURCES[(I)global]} )) || return
      _p9k_nodenv_global_version
    fi
    _p9k_nodeenv_version_transform $_p9k__ret || return
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_nodenv_global_version
    _p9k_nodeenv_version_transform $_p9k__ret && [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_NODENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "black" "green" 'NODE_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: nodenv available as a command or a shell function.
_p9k_prompt_nodenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[nodenv]:-${${+functions[nodenv]}:#0}}'
}
# Shows the .NET SDK version (`dotnet --version`, cached). With PROJECT_ONLY
# set, only inside a directory tree that looks like a .NET project.
prompt_dotnet_version() {
  if (( _POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY )); then
    _p9k_upglob 'project.json|global.json|packet.dependencies|*.csproj|*.fsproj|*.xproj|*.sln' && return
  fi
  _p9k_cached_cmd 0 dotnet --version || return
  _p9k_prompt_segment "$0" "magenta" "white" 'DOTNET_ICON' 0 '' "$_p9k__ret"
}
# Display condition: requires dotnet on $PATH.
# BUG FIX: this init hook was named `_p9k_prompt_dotnet_init`, but the segment
# is `dotnet_version`, so the registration machinery (which looks up
# `_p9k_prompt_<segment>_init`, as every other init in this file demonstrates)
# never invoked it and the condition was never installed.
_p9k_prompt_dotnet_version_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[dotnet]'
}
################################################################
# Segment to print a little OS icon
# Shows a small OS logo (precomputed in $_p9k_os_icon).
prompt_os_icon() {
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "$0" "black" "white" '' 0 '' "$_p9k_os_icon"
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# Instant-prompt variant: content is static, so the regular renderer is fine.
instant_prompt_os_icon() { prompt_os_icon; }
################################################################
# Segment to display PHP version number
# Shows the PHP version parsed from `php --version` (cached). With
# PROJECT_ONLY set, only inside a directory tree containing PHP sources.
prompt_php_version() {
  if (( _POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY )); then
    _p9k_upglob 'composer.json|*.php' && return
  fi
  _p9k_cached_cmd 0 php --version || return
  # Match the "PHP X.Y.Z" line, skipping any preceding lines (e.g. warnings).
  [[ $_p9k__ret == (#b)(*$'\n')#'PHP '([[:digit:].]##)* ]] || return
  local v=$match[2]
  _p9k_prompt_segment "$0" "fuchsia" "grey93" 'PHP_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: requires php on $PATH.
_p9k_prompt_php_version_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[php]'
}
################################################################
# Segment to display free RAM and used Swap
# Shows free RAM. The value ($_p9k__ram_free) is produced by an async worker;
# the segment is displayed only when the worker has published a value.
prompt_ram() {
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0 yellow "$_p9k_color1" RAM_ICON 1 '$_p9k__ram_free' '$_p9k__ram_free'
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# Disables the segment when no data source exists for this OS; otherwise
# registers the async computation.
function _p9k_prompt_ram_init() {
  if [[ $_p9k_os == OSX && $+commands[vm_stat] == 0 ||
        $_p9k_os == BSD && ! -r /var/run/dmesg.boot ||
        $_p9k_os != (OSX|BSD) && ! -r /proc/meminfo ]]; then
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
    return
  fi
  typeset -g _p9k__ram_free=
  _p9k__async_segments_compute+='_p9k_worker_invoke ram _p9k_prompt_ram_compute'
}
_p9k_prompt_ram_compute() {
  _p9k_worker_async _p9k_prompt_ram_async _p9k_prompt_ram_sync
}
# Runs in the worker: computes free memory in bytes per OS, formats it
# human-readably, and publishes it when it changed.
_p9k_prompt_ram_async() {
  local -F free_bytes
  case $_p9k_os in
    OSX)
      # free + inactive pages; vm_stat reports in 4096-byte pages.
      (( $+commands[vm_stat] )) || return
      local stat && stat="$(vm_stat 2>/dev/null)" || return
      [[ $stat =~ 'Pages free:[[:space:]]+([0-9]+)' ]] || return
      (( free_bytes += match[1] ))
      [[ $stat =~ 'Pages inactive:[[:space:]]+([0-9]+)' ]] || return
      (( free_bytes += match[1] ))
      (( free_bytes *= 4096 ))
    ;;
    BSD)
      local stat && stat="$(grep -F 'avail memory' /var/run/dmesg.boot 2>/dev/null)" || return
      free_bytes=${${(A)=stat}[4]}
    ;;
    *)
      # Prefer MemAvailable; fall back to MemFree on older kernels.
      [[ -r /proc/meminfo ]] || return
      local stat && stat="$(</proc/meminfo)" || return
      [[ $stat == (#b)*(MemAvailable:|MemFree:)[[:space:]]#(<->)* ]] || return
      free_bytes=$(( $match[2] * 1024 ))
    ;;
  esac
  _p9k_human_readable_bytes $free_bytes
  [[ $_p9k__ret != $_p9k__ram_free ]] || return
  _p9k__ram_free=$_p9k__ret
  _p9k_print_params _p9k__ram_free
  echo -E - 'reset=1'
}
# Runs in the main shell: applies the worker's assignments and forwards them.
_p9k_prompt_ram_sync() {
  eval $REPLY
  _p9k_worker_reply $REPLY
}
# Reads the rbenv global version file; falls back to "system".
function _p9k_rbenv_global_version() {
  _p9k_read_word ${RBENV_ROOT:-$HOME/.rbenv}/version || _p9k__ret=system
}
################################################################
# Segment to display rbenv information
# https://github.com/rbenv/rbenv#choosing-the-ruby-version
# Shows the Ruby version selected by rbenv, resolved from the sources enabled
# in _POWERLEVEL9K_RBENV_SOURCES: shell ($RBENV_VERSION), local
# (.ruby-version up the directory tree), or global (~/.rbenv/version).
prompt_rbenv() {
  if [[ -n $RBENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_RBENV_SOURCES[(I)shell]} )) || return
    local v=$RBENV_VERSION
  else
    (( ${_POWERLEVEL9K_RBENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # rbenv resolves local versions relative to $RBENV_DIR when it is set
    # and differs from cwd: walk up from there first.
    if [[ $RBENV_DIR != (|.) ]]; then
      [[ $RBENV_DIR == /* ]] && local dir=$RBENV_DIR || local dir="$_p9k__cwd_a/$RBENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.ruby-version; then
            (( ${_POWERLEVEL9K_RBENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .ruby-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .ruby-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.ruby-version; then
        (( ${_POWERLEVEL9K_RBENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_RBENV_SOURCES[(I)global]} )) || return
      _p9k_rbenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_rbenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_RBENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "red" "$_p9k_color1" 'RUBY_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: rbenv available as a command or a shell function.
_p9k_prompt_rbenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[rbenv]:-${${+functions[rbenv]}:#0}}'
}
# Reads the phpenv global version file; falls back to "system".
# NOTE(review): this function is defined again, verbatim, further down in
# this file (right before prompt_phpenv); the later definition wins. One of
# the two copies could be removed.
function _p9k_phpenv_global_version() {
  _p9k_read_word ${PHPENV_ROOT:-$HOME/.phpenv}/version || _p9k__ret=system
}
# Reads the scalaenv global version file; falls back to "system".
function _p9k_scalaenv_global_version() {
  _p9k_read_word ${SCALAENV_ROOT:-$HOME/.scalaenv}/version || _p9k__ret=system
}
# https://github.com/scalaenv/scalaenv
# Shows the Scala version selected by scalaenv, resolved from the sources
# enabled in _POWERLEVEL9K_SCALAENV_SOURCES: shell ($SCALAENV_VERSION),
# local (.scala-version up the directory tree), or global version file.
prompt_scalaenv() {
  if [[ -n $SCALAENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_SCALAENV_SOURCES[(I)shell]} )) || return
    local v=$SCALAENV_VERSION
  else
    (( ${_POWERLEVEL9K_SCALAENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # Local versions resolve relative to $SCALAENV_DIR when set; walk up
    # from there first if it differs from cwd.
    if [[ $SCALAENV_DIR != (|.) ]]; then
      [[ $SCALAENV_DIR == /* ]] && local dir=$SCALAENV_DIR || local dir="$_p9k__cwd_a/$SCALAENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.scala-version; then
            (( ${_POWERLEVEL9K_SCALAENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .scala-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .scala-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.scala-version; then
        (( ${_POWERLEVEL9K_SCALAENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_SCALAENV_SOURCES[(I)global]} )) || return
      _p9k_scalaenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_scalaenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_SCALAENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "red" "$_p9k_color1" 'SCALA_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: scalaenv available as a command or a shell function.
_p9k_prompt_scalaenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[scalaenv]:-${${+functions[scalaenv]}:#0}}'
}
# Reads the phpenv global version file; falls back to "system".
# NOTE(review): an identical definition appears earlier in this file; this
# (later) one is the effective definition. The duplicate could be removed.
function _p9k_phpenv_global_version() {
  _p9k_read_word ${PHPENV_ROOT:-$HOME/.phpenv}/version || _p9k__ret=system
}
# Shows the PHP version selected by phpenv, resolved from the sources enabled
# in _POWERLEVEL9K_PHPENV_SOURCES: shell ($PHPENV_VERSION), local
# (.php-version up the directory tree), or global version file.
prompt_phpenv() {
  if [[ -n $PHPENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_PHPENV_SOURCES[(I)shell]} )) || return
    local v=$PHPENV_VERSION
  else
    (( ${_POWERLEVEL9K_PHPENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # Local versions resolve relative to $PHPENV_DIR when set; walk up from
    # there first if it differs from cwd.
    if [[ $PHPENV_DIR != (|.) ]]; then
      [[ $PHPENV_DIR == /* ]] && local dir=$PHPENV_DIR || local dir="$_p9k__cwd_a/$PHPENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.php-version; then
            (( ${_POWERLEVEL9K_PHPENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .php-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .php-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.php-version; then
        (( ${_POWERLEVEL9K_PHPENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_PHPENV_SOURCES[(I)global]} )) || return
      _p9k_phpenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_phpenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_PHPENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "magenta" "$_p9k_color1" 'PHP_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: phpenv available as a command or a shell function.
_p9k_prompt_phpenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[phpenv]:-${${+functions[phpenv]}:#0}}'
}
# Reads the luaenv global version file; falls back to "system".
function _p9k_luaenv_global_version() {
  _p9k_read_word ${LUAENV_ROOT:-$HOME/.luaenv}/version || _p9k__ret=system
}
################################################################
# Segment to display luaenv information
# https://github.com/cehoffman/luaenv
# Shows the Lua version selected by luaenv, resolved from the sources enabled
# in _POWERLEVEL9K_LUAENV_SOURCES: shell ($LUAENV_VERSION), local
# (.lua-version up the directory tree), or global version file.
prompt_luaenv() {
  if [[ -n $LUAENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_LUAENV_SOURCES[(I)shell]} )) || return
    local v=$LUAENV_VERSION
  else
    (( ${_POWERLEVEL9K_LUAENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # Local versions resolve relative to $LUAENV_DIR when set; walk up from
    # there first if it differs from cwd.
    if [[ $LUAENV_DIR != (|.) ]]; then
      [[ $LUAENV_DIR == /* ]] && local dir=$LUAENV_DIR || local dir="$_p9k__cwd_a/$LUAENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.lua-version; then
            (( ${_POWERLEVEL9K_LUAENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .lua-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .lua-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.lua-version; then
        (( ${_POWERLEVEL9K_LUAENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_LUAENV_SOURCES[(I)global]} )) || return
      _p9k_luaenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_luaenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_LUAENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" blue "$_p9k_color1" 'LUA_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: luaenv available as a command or a shell function.
_p9k_prompt_luaenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[luaenv]:-${${+functions[luaenv]}:#0}}'
}
# Reads the jenv global version file; falls back to "system".
function _p9k_jenv_global_version() {
  _p9k_read_word ${JENV_ROOT:-$HOME/.jenv}/version || _p9k__ret=system
}
################################################################
# Segment to display jenv information
# https://github.com/jenv/jenv
# Shows the Java version selected by jenv, resolved from the sources enabled
# in _POWERLEVEL9K_JENV_SOURCES: shell ($JENV_VERSION), local
# (.java-version up the directory tree), or global version file.
prompt_jenv() {
  if [[ -n $JENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_JENV_SOURCES[(I)shell]} )) || return
    local v=$JENV_VERSION
  else
    (( ${_POWERLEVEL9K_JENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # Local versions resolve relative to $JENV_DIR when set; walk up from
    # there first if it differs from cwd.
    if [[ $JENV_DIR != (|.) ]]; then
      [[ $JENV_DIR == /* ]] && local dir=$JENV_DIR || local dir="$_p9k__cwd_a/$JENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.java-version; then
            (( ${_POWERLEVEL9K_JENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .java-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .java-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.java-version; then
        (( ${_POWERLEVEL9K_JENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_JENV_SOURCES[(I)global]} )) || return
      _p9k_jenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_jenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_JENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" white red 'JAVA_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: jenv available as a command or a shell function.
_p9k_prompt_jenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[jenv]:-${${+functions[jenv]}:#0}}'
}
# Reads the plenv global version file; falls back to "system".
function _p9k_plenv_global_version() {
  _p9k_read_word ${PLENV_ROOT:-$HOME/.plenv}/version || _p9k__ret=system
}
################################################################
# Segment to display plenv information
# https://github.com/plenv/plenv#choosing-the-perl-version
# Shows the Perl version selected by plenv, resolved from the sources enabled
# in _POWERLEVEL9K_PLENV_SOURCES: shell ($PLENV_VERSION), local
# (.perl-version up the directory tree), or global version file.
prompt_plenv() {
  if [[ -n $PLENV_VERSION ]]; then
    (( ${_POWERLEVEL9K_PLENV_SOURCES[(I)shell]} )) || return
    local v=$PLENV_VERSION
  else
    (( ${_POWERLEVEL9K_PLENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # Local versions resolve relative to $PLENV_DIR when set; walk up from
    # there first if it differs from cwd.
    if [[ $PLENV_DIR != (|.) ]]; then
      [[ $PLENV_DIR == /* ]] && local dir=$PLENV_DIR || local dir="$_p9k__cwd_a/$PLENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        while true; do
          if _p9k_read_word $dir/.perl-version; then
            (( ${_POWERLEVEL9K_PLENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # Then search cwd and its ancestors for .perl-version.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .perl-version
      local -i idx=$?
      if (( idx )) && _p9k_read_word $_p9k__parent_dirs[idx]/.perl-version; then
        (( ${_POWERLEVEL9K_PLENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Finally fall back to the global version, if configured to show it.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_PLENV_SOURCES[(I)global]} )) || return
      _p9k_plenv_global_version
    fi
    local v=$_p9k__ret
  fi
  # Unless ALWAYS_SHOW is set, hide the segment when the effective version
  # equals the global one.
  if (( !_POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_plenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_PLENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PERL_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: plenv available as a command or a shell function.
_p9k_prompt_plenv_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[plenv]:-${${+functions[plenv]}:#0}}'
}
################################################################
# Segment to display chruby information
# see https://github.com/postmodern/chruby/issues/245 for chruby_auto issue with ZSH
# Shows the Ruby engine and/or version activated by chruby, depending on
# _POWERLEVEL9K_CHRUBY_SHOW_ENGINE and _POWERLEVEL9K_CHRUBY_SHOW_VERSION.
prompt_chruby() {
  local v
  (( _POWERLEVEL9K_CHRUBY_SHOW_ENGINE )) && v=$RUBY_ENGINE
  # BUG FIX: this was `if [[ ... ]] && v+=...` with no `then`/`fi` — a parse
  # error in plain zsh, and under SHORT_LOOPS it silently made the
  # _p9k_prompt_segment call below the if-body, hiding the segment whenever
  # the condition was false. Append the version and always render the segment.
  if [[ $_POWERLEVEL9K_CHRUBY_SHOW_VERSION == 1 && -n $RUBY_VERSION ]]; then
    v+=${v:+ }$RUBY_VERSION
  fi
  _p9k_prompt_segment "$0" "red" "$_p9k_color1" 'RUBY_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: only when chruby has exported a Ruby engine.
_p9k_prompt_chruby_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$RUBY_ENGINE'
}
################################################################
# Segment to print an icon if user is root.
# Shows an icon when the shell runs with root privileges. The condition
# '${${(%):-%#}:#\%}' is non-empty only when the %# prompt escape expands
# to '#' (i.e. privileged shell).
prompt_root_indicator() {
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "$0" "$_p9k_color1" "yellow" 'ROOT_ICON' 0 '${${(%):-%#}:#\%}' ''
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# Instant-prompt variant: same rendering, condition is evaluated at display.
instant_prompt_root_indicator() { prompt_root_indicator; }
################################################################
# Segment to display Rust version number
# Shows the Rust toolchain version, honoring rustup overrides and
# rust-toolchain files. Sets P9K_RUST_VERSION to the full `rustc --version`
# output. Multiple stat-keyed caches keep this fast.
prompt_rust_version() {
  unset P9K_RUST_VERSION
  if (( _POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY )); then
    _p9k_upglob Cargo.toml && return
  fi
  local rustc=$commands[rustc] toolchain deps=()
  if (( $+commands[ldd] )); then
    # rustc may be a thin wrapper around librustc_driver; track that shared
    # object as a cache dependency so version changes are noticed.
    if ! _p9k_cache_stat_get $0_so $rustc; then
      local line so
      for line in "${(@f)$(ldd $rustc 2>/dev/null)}"; do
        [[ $line == (#b)[[:space:]]#librustc_driver[^[:space:]]#.so' => '(*)' (0x'[[:xdigit:]]#')' ]] || continue
        so=$match[1]
        break
      done
      _p9k_cache_stat_set "$so"
    fi
    deps+=$_p9k__cache_val[1]
  fi
  if (( $+commands[rustup] )); then
    local rustup=$commands[rustup]
    local rustup_home=${RUSTUP_HOME:-~/.rustup}
    local cfg=($rustup_home/settings.toml(.N))
    deps+=($cfg $rustup_home/update-hashes/*(.N))
    # $RUSTUP_TOOLCHAIN wins outright; otherwise consult rustup overrides.
    if [[ -z ${toolchain::=$RUSTUP_TOOLCHAIN} ]]; then
      if ! _p9k_cache_stat_get $0_overrides $rustup $cfg; then
        local lines=(${(f)"$(rustup override list 2>/dev/null)"})
        if [[ $lines[1] == "no overrides" ]]; then
          _p9k_cache_stat_set
        else
          # Build a dir-pattern => "depth toolchain" map: keys are override
          # directories (pattern-quoted, trailing /), values record the
          # directory depth so the deepest applicable override wins.
          local MATCH
          local keys=(${(@)${lines%%[[:space:]]#[^[:space:]]#}/(#m)*/${(b)MATCH}/})
          local vals=(${(@)lines/(#m)*/$MATCH[(I)/] ${MATCH##*[[:space:]]}})
          _p9k_cache_stat_set ${keys:^vals}
        fi
      fi
      local -A overrides=($_p9k__cache_val)
      _p9k_upglob rust-toolchain
      local dir=$_p9k__parent_dirs[$?]
      local -i n m=${dir[(I)/]}
      local pair
      # Among overrides whose directory is a prefix of cwd, pick the deepest
      # one that is deeper than any rust-toolchain file found above.
      for pair in ${overrides[(K)$_p9k__cwd/]}; do
        n=${pair%% *}
        (( n <= m )) && continue
        m=n
        toolchain=${pair#* }
      done
      # No applicable override: fall back to the rust-toolchain file.
      if [[ -z $toolchain && -n $dir ]]; then
        _p9k_read_word $dir/rust-toolchain
        toolchain=$_p9k__ret
      fi
    fi
  fi
  # Cache `rustc --version` keyed by toolchain and all tracked files.
  if ! _p9k_cache_stat_get $0_v$toolchain $rustc $deps; then
    _p9k_cache_stat_set "$($rustc --version 2>/dev/null)"
  fi
  local v=${${_p9k__cache_val[1]#rustc }%% *}
  [[ -n $v ]] || return
  typeset -g P9K_RUST_VERSION=$_p9k__cache_val[1]
  _p9k_prompt_segment "$0" "darkorange" "$_p9k_color1" 'RUST_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: requires rustc on $PATH.
_p9k_prompt_rust_version_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[rustc]'
}
# RSpec test ratio
prompt_rspec_stats() {
if [[ -d app && -d spec ]]; then
local -a code=(app/**/*.rb(N))
(( $#code )) || return
local tests=(spec/**/*.rb(N))
_p9k_build_test_stats "$0" "$#code" "$#tests" "RSpec" 'TEST_ICON'
fi
}
################################################################
# Segment to display Ruby Version Manager information
# Shows the Ruby version (and optionally gemset/prefix) selected by RVM,
# derived from $GEM_HOME rather than the slow `rvm-prompt` command.
prompt_rvm() {
  [[ $GEM_HOME == *rvm* && $ruby_string != $rvm_path/bin/ruby ]] || return
  local v=${GEM_HOME:t}
  # Strip "@gemset" suffix and "ruby-" style prefix unless configured to show.
  (( _POWERLEVEL9K_RVM_SHOW_GEMSET )) || v=${v%%${rvm_gemset_separator:-@}*}
  (( _POWERLEVEL9K_RVM_SHOW_PREFIX )) || v=${v#*-}
  [[ -n $v ]] || return
  _p9k_prompt_segment "$0" "240" "$_p9k_color1" 'RUBY_ICON' 0 '' "${v//\%/%%}"
}
# Display condition: rvm-prompt available as a command or a shell function.
_p9k_prompt_rvm_init() {
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[rvm-prompt]:-${${+functions[rvm-prompt]}:#0}}'
}
################################################################
# Segment to display SSH icon when connected
# Shows an icon when the session is over SSH (P9K_SSH is set elsewhere).
prompt_ssh() {
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment "$0" "$_p9k_color1" "yellow" 'SSH_ICON' 0 '' ''
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# Disables the segment entirely for local sessions.
_p9k_prompt_ssh_init() {
  if (( ! P9K_SSH )); then
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
  fi
}
# Instant-prompt variant: render only when actually over SSH.
instant_prompt_ssh() {
  if (( ! P9K_SSH )); then
    return
  fi
  prompt_ssh
}
################################################################
# Status: When an error occur, return the error code, or a cross icon if option is set
# Display an ok icon when no error occur, or hide the segment if option is set to false
# Status: When an error occur, return the error code, or a cross icon if option is set
# Display an ok icon when no error occur, or hide the segment if option is set to false
prompt_status() {
  # Cache keyed by exit status and pipestatus; rebuilds segment args on miss.
  if ! _p9k_cache_get $0 $_p9k__status $_p9k__pipestatus; then
    (( _p9k__status )) && local state=ERROR || local state=OK
    # Extended states: distinguish pipe failures and signal deaths
    # (status > 128 conventionally means "killed by signal status-128").
    if (( _POWERLEVEL9K_STATUS_EXTENDED_STATES )); then
      if (( _p9k__status )); then
        if (( $#_p9k__pipestatus > 1 )); then
          state+=_PIPE
        elif (( _p9k__status > 128 )); then
          state+=_SIGNAL
        fi
      elif [[ "$_p9k__pipestatus" == *[1-9]* ]]; then
        # Overall success but some pipe element failed.
        state+=_PIPE
      fi
    fi
    # ':' placeholder means "render nothing" if the state is disabled.
    _p9k__cache_val=(:)
    if (( _POWERLEVEL9K_STATUS_$state )); then
      if (( _POWERLEVEL9K_STATUS_SHOW_PIPESTATUS )); then
        # Render every pipe element's status joined with '|'.
        local text=${(j:|:)${(@)_p9k__pipestatus:/(#b)(*)/$_p9k_exitcode2str[$match[1]+1]}}
      else
        local text=$_p9k_exitcode2str[_p9k__status+1]
      fi
      if (( _p9k__status )); then
        if (( !_POWERLEVEL9K_STATUS_CROSS && _POWERLEVEL9K_STATUS_VERBOSE )); then
          _p9k__cache_val=($0_$state red yellow1 CARRIAGE_RETURN_ICON 0 '' "$text")
        else
          _p9k__cache_val=($0_$state $_p9k_color1 red FAIL_ICON 0 '' '')
        fi
      elif (( _POWERLEVEL9K_STATUS_VERBOSE || _POWERLEVEL9K_STATUS_OK_IN_NON_VERBOSE )); then
        [[ $state == OK ]] && text=''
        _p9k__cache_val=($0_$state "$_p9k_color1" green OK_ICON 0 '' "$text")
      fi
    fi
    # Long pipelines are unlikely to repeat; don't pollute the cache.
    if (( $#_p9k__pipestatus < 3 )); then
      _p9k_cache_set "${(@)_p9k__cache_val}"
    fi
  fi
  _p9k_prompt_segment "${(@)_p9k__cache_val}"
}
# Instant-prompt variant: optimistically assume the last command succeeded.
instant_prompt_status() {
  if (( _POWERLEVEL9K_STATUS_OK )); then
    _p9k_prompt_segment prompt_status_OK "$_p9k_color1" green OK_ICON 0 '' ''
  fi
}
# Prompt character segment: shows a different glyph per zle state (insert /
# overwrite / vicmd / visual), colored by the last command's exit status.
# Each (side, index, status) combination renders to the same text every time,
# so the result is memoized in _p9k__prompt_char_saved.
# NOTE(review): the glyph string literals below appear to be mis-encoded in
# this copy of the file (and two of them are split across physical lines);
# they are preserved byte-for-byte here — fixing the encoding would change
# runtime output and should be done against the pristine source.
prompt_prompt_char() {
  local saved=$_p9k__prompt_char_saved[$_p9k__prompt_side$_p9k__segment_index$((!_p9k__status))]
  if [[ -n $saved ]]; then
    _p9k__prompt+=$saved
    return
  fi
  local -i len=$#_p9k__prompt _p9k__has_upglob
  # Two expansion dialects: with SH_GLOB the concise zsh patterns are not
  # available, so equivalent nested-${...:#...} forms are used instead.
  if (( __p9k_sh_glob )); then
    if (( _p9k__status )); then
      if (( _POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE )); then
        _p9k_prompt_segment $0_ERROR_VIINS "$_p9k_color1" 196 '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*overwrite*}}' 'โฏ'
        _p9k_prompt_segment $0_ERROR_VIOWR "$_p9k_color1" 196 '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*insert*}}' 'โถ'
      else
        _p9k_prompt_segment $0_ERROR_VIINS "$_p9k_color1" 196 '' 0 '${${${${_p9k__keymap:#vicmd}:#vivis}:#vivli}}' 'โฏ'
      fi
      _p9k_prompt_segment $0_ERROR_VICMD "$_p9k_color1" 196 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' 'โฎ'
      _p9k_prompt_segment $0_ERROR_VIVIS "$_p9k_color1" 196 '' 0 '${$((! ${#${${${${:-$_p9k__keymap$_p9k__region_active}:#vicmd1}:#vivis?}:#vivli?}})):#0}' 'โ
ค'
    else
      if (( _POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE )); then
        _p9k_prompt_segment $0_OK_VIINS "$_p9k_color1" 76 '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*overwrite*}}' 'โฏ'
        _p9k_prompt_segment $0_OK_VIOWR "$_p9k_color1" 76 '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*insert*}}' 'โถ'
      else
        _p9k_prompt_segment $0_OK_VIINS "$_p9k_color1" 76 '' 0 '${${${${_p9k__keymap:#vicmd}:#vivis}:#vivli}}' 'โฏ'
      fi
      _p9k_prompt_segment $0_OK_VICMD "$_p9k_color1" 76 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' 'โฎ'
      _p9k_prompt_segment $0_OK_VIVIS "$_p9k_color1" 76 '' 0 '${$((! ${#${${${${:-$_p9k__keymap$_p9k__region_active}:#vicmd1}:#vivis?}:#vivli?}})):#0}' 'โ
ค'
    fi
  else
    if (( _p9k__status )); then
      if (( _POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE )); then
        _p9k_prompt_segment $0_ERROR_VIINS "$_p9k_color1" 196 '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*overwrite*)}' 'โฏ'
        _p9k_prompt_segment $0_ERROR_VIOWR "$_p9k_color1" 196 '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*insert*)}' 'โถ'
      else
        _p9k_prompt_segment $0_ERROR_VIINS "$_p9k_color1" 196 '' 0 '${_p9k__keymap:#(vicmd|vivis|vivli)}' 'โฏ'
      fi
      _p9k_prompt_segment $0_ERROR_VICMD "$_p9k_color1" 196 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' 'โฎ'
      _p9k_prompt_segment $0_ERROR_VIVIS "$_p9k_color1" 196 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#(vicmd1|vivis?|vivli?)}' 'โ
ค'
    else
      if (( _POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE )); then
        _p9k_prompt_segment $0_OK_VIINS "$_p9k_color1" 76 '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*overwrite*)}' 'โฏ'
        _p9k_prompt_segment $0_OK_VIOWR "$_p9k_color1" 76 '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*insert*)}' 'โถ'
      else
        _p9k_prompt_segment $0_OK_VIINS "$_p9k_color1" 76 '' 0 '${_p9k__keymap:#(vicmd|vivis|vivli)}' 'โฏ'
      fi
      _p9k_prompt_segment $0_OK_VICMD "$_p9k_color1" 76 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' 'โฎ'
      _p9k_prompt_segment $0_OK_VIVIS "$_p9k_color1" 76 '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#(vicmd1|vivis?|vivli?)}' 'โ
ค'
    fi
  fi
  # Memoize unless the segment depends on upglob results.
  (( _p9k__has_upglob )) || _p9k__prompt_char_saved[$_p9k__prompt_side$_p9k__segment_index$((!_p9k__status))]=$_p9k__prompt[len+1,-1]
}
# Instant-prompt variant: assume success + insert mode.
instant_prompt_prompt_char() {
  _p9k_prompt_segment prompt_prompt_char_OK_VIINS "$_p9k_color1" 76 '' 0 '' 'โฏ'
}
################################################################
# Segment to display Swap information
prompt_swap() {
local -i len=$#_p9k__prompt _p9k__has_upglob
_p9k_prompt_segment $0 yellow "$_p9k_color1" SWAP_ICON 1 '$_p9k__swap_used' '$_p9k__swap_used'
(( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# One-time init for the swap segment: permanently hide it when no data source
# exists (sysctl on macOS, /proc/meminfo elsewhere); otherwise schedule the
# asynchronous computation on the worker.
function _p9k_prompt_swap_init() {
if [[ $_p9k_os == OSX && $+commands[sysctl] == 0 || $_p9k_os != OSX && ! -r /proc/meminfo ]]; then
# '${:-}' always expands to an empty string, so the segment never shows.
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
return
fi
typeset -g _p9k__swap_used=
_p9k__async_segments_compute+='_p9k_worker_invoke swap _p9k_prompt_swap_compute'
}
# Runs _p9k_prompt_swap_async in the worker; _p9k_prompt_swap_sync is invoked
# in the main shell with the worker's output.
_p9k_prompt_swap_compute() {
_p9k_worker_async _p9k_prompt_swap_async _p9k_prompt_swap_sync
}
# Worker-side: measure swap usage in bytes and, when the human-readable value
# changed, print parameter updates for the main shell to eval.
_p9k_prompt_swap_async() {
local -F used_bytes
if [[ "$_p9k_os" == "OSX" ]]; then
(( $+commands[sysctl] )) || return
# Expected output contains e.g. "used = 1234.56M"; some locales print ','
# as the decimal separator, hence the substitution below.
[[ "$(sysctl vm.swapusage 2>/dev/null)" =~ "used = ([0-9,.]+)([A-Z]+)" ]] || return
used_bytes=${match[1]//,/.}
# Scale by the reported unit suffix; bail (successfully) on anything else.
case ${match[2]} in
'K') (( used_bytes *= 1024 ));;
'M') (( used_bytes *= 1048576 ));;
'G') (( used_bytes *= 1073741824 ));;
'T') (( used_bytes *= 1099511627776 ));;
*) return 0;;
esac
else
# Linux: used = SwapTotal - SwapFree; /proc/meminfo reports KiB.
local meminfo && meminfo="$(grep -F 'Swap' /proc/meminfo 2>/dev/null)" || return
[[ $meminfo =~ 'SwapTotal:[[:space:]]+([0-9]+)' ]] || return
(( used_bytes+=match[1] ))
[[ $meminfo =~ 'SwapFree:[[:space:]]+([0-9]+)' ]] || return
(( used_bytes-=match[1] ))
(( used_bytes *= 1024 ))
fi
_p9k_human_readable_bytes $used_bytes
# Only push an update when the formatted value actually changed.
[[ $_p9k__ret != $_p9k__swap_used ]] || return
_p9k__swap_used=$_p9k__ret
_p9k_print_params _p9k__swap_used
echo -E - 'reset=1'
}
# Main-shell callback: $REPLY holds the worker's printed output (parameter
# assignments plus 'reset=1'). Apply it locally, then forward it via
# _p9k_worker_reply. NOTE(review): $REPLY is trusted worker output; it is
# eval'd verbatim.
_p9k_prompt_swap_sync() {
eval $REPLY
_p9k_worker_reply $REPLY
}
################################################################
# Symfony2-PHPUnit test ratio
prompt_symfony2_tests() {
if [[ -d src && -d app && -f app/AppKernel.php ]]; then
local -a all=(src/**/*.php(N))
local -a code=(${(@)all##*Tests*})
(( $#code )) || return
_p9k_build_test_stats "$0" "$#code" "$(($#all - $#code))" "SF2" 'TEST_ICON'
fi
}
################################################################
# Segment to display Symfony2-Version
# Symfony2 version segment, parsed from app/bootstrap.php.cache.
prompt_symfony2_version() {
  [[ -r app/bootstrap.php.cache ]] || return 0
  # Keep only digits and dots from the " VERSION " line.
  local version="${$(grep -F " VERSION " app/bootstrap.php.cache 2>/dev/null)//[![:digit:].]}"
  _p9k_prompt_segment "$0" "grey35" "$_p9k_color1" 'SYMFONY_ICON' 0 '' "${version//\%/%%}"
}
################################################################
# Show a ratio of tests vs code
# Render a tests-to-code ratio segment.
#   $1: segment name prefix   $2: code file count   $3: test file count
#   $4: headline label        $5: icon name
# The segment suffix/color encodes the coverage band: >=75% GOOD (cyan),
# >=50% AVG (yellow), otherwise BAD (red).
# Fix: the previous chain of `(( cond )) && segment` lines evaluated the ratio
# up to three times and left the function's exit status at 1 whenever the last
# guard was false (i.e. on every successful render with ratio >= 50); a single
# if/elif dispatch renders once and returns the segment's status.
_p9k_build_test_stats() {
  local code_amount="$2"
  local tests_amount="$3"
  local headline="$4"
  (( code_amount > 0 )) || return
  # Percentage of tests relative to code, formatted with two decimals.
  local -F 2 ratio=$(( 100. * tests_amount / code_amount ))
  local suffix color
  if (( ratio >= 75 )); then
    suffix=_GOOD color=cyan
  elif (( ratio >= 50 )); then
    suffix=_AVG color=yellow
  else
    suffix=_BAD color=red
  fi
  _p9k_prompt_segment "${1}${suffix}" "$color" "$_p9k_color1" "$5" 0 '' "$headline: $ratio%%"
}
################################################################
# System time
# Current-time segment. With EXPERIMENTAL_TIME_REALTIME the format string is
# re-expanded on every refresh driven by the worker; otherwise the time is
# captured once per precmd and optionally frozen until the command line is
# accepted (TIME_UPDATE_ON_COMMAND).
prompt_time() {
if (( _POWERLEVEL9K_EXPERIMENTAL_TIME_REALTIME )); then
_p9k_prompt_segment "$0" "$_p9k_color2" "$_p9k_color1" "TIME_ICON" 0 '' "$_POWERLEVEL9K_TIME_FORMAT"
else
if [[ $_p9k__refresh_reason == precmd ]]; then
# Reuse the timestamp captured by instant prompt when the format matches,
# so the clock doesn't visibly jump when the real prompt takes over.
if [[ $+__p9k_instant_prompt_active == 1 && $__p9k_instant_prompt_time_format == $_POWERLEVEL9K_TIME_FORMAT ]]; then
_p9k__time=${__p9k_instant_prompt_time//\%/%%}
else
_p9k__time=${${(%)_POWERLEVEL9K_TIME_FORMAT}//\%/%%}
fi
fi
if (( _POWERLEVEL9K_TIME_UPDATE_ON_COMMAND )); then
_p9k_escape $_p9k__time
local t=$_p9k__ret
_p9k_escape $_POWERLEVEL9K_TIME_FORMAT
# While the line is being edited (_p9k__line_finished unset) show the
# captured time; once it is accepted, re-expand the format string.
_p9k_prompt_segment "$0" "$_p9k_color2" "$_p9k_color1" "TIME_ICON" 1 '' \
"\${_p9k__line_finished-$t}\${_p9k__line_finished+$_p9k__ret}"
else
_p9k_prompt_segment "$0" "$_p9k_color2" "$_p9k_color1" "TIME_ICON" 0 '' $_p9k__time
fi
fi
}
# Instant-prompt version of prompt_time: renders the current time and, via the
# side-effecting expansion below, stashes both the value and the format in
# __p9k_instant_prompt_time{,_format} so prompt_time can reuse them.
instant_prompt_time() {
_p9k_escape $_POWERLEVEL9K_TIME_FORMAT
# Expands to nothing; only the ::= assignments matter.
local stash='${${__p9k_instant_prompt_time::=${(%)${__p9k_instant_prompt_time_format::='$_p9k__ret'}}}+}'
_p9k_escape $_POWERLEVEL9K_TIME_FORMAT
_p9k_prompt_segment prompt_time "$_p9k_color2" "$_p9k_color1" "TIME_ICON" 1 '' $stash$_p9k__ret
}
# Only the realtime clock needs the worker: schedule the 1-second refresh loop.
_p9k_prompt_time_init() {
(( _POWERLEVEL9K_EXPERIMENTAL_TIME_REALTIME )) || return
_p9k__async_segments_compute+='_p9k_worker_invoke time _p9k_prompt_time_compute'
}
# Runs the one-second tick in the worker; the sync callback reschedules it.
_p9k_prompt_time_compute() {
_p9k_worker_async _p9k_prompt_time_async _p9k_prompt_time_sync
}
# Worker-side tick for the realtime clock: wait one second and always report
# success so the sync callback keeps rescheduling the next tick.
_p9k_prompt_time_async() {
  sleep 1
  return 0
}
# Main-shell callback after each tick: schedule the next tick and force a
# prompt redraw (reset=1) so the clock advances.
_p9k_prompt_time_sync() {
_p9k_worker_reply '_p9k_worker_invoke _p9k_prompt_time_compute _p9k_prompt_time_compute; reset=1'
}
################################################################
# System date
# Current-date segment: captured once per precmd; reuses the instant-prompt
# value when its format matches so the date doesn't visibly change when the
# real prompt takes over.
prompt_date() {
if [[ $_p9k__refresh_reason == precmd ]]; then
if [[ $+__p9k_instant_prompt_active == 1 && $__p9k_instant_prompt_date_format == $_POWERLEVEL9K_DATE_FORMAT ]]; then
_p9k__date=${__p9k_instant_prompt_date//\%/%%}
else
_p9k__date=${${(%)_POWERLEVEL9K_DATE_FORMAT}//\%/%%}
fi
fi
_p9k_prompt_segment "$0" "$_p9k_color2" "$_p9k_color1" "DATE_ICON" 0 '' "$_p9k__date"
}
# Instant-prompt version of prompt_date: renders the date and, via the
# side-effecting expansion, stashes value and format in
# __p9k_instant_prompt_date{,_format} for prompt_date to reuse.
instant_prompt_date() {
_p9k_escape $_POWERLEVEL9K_DATE_FORMAT
# Expands to nothing; only the ::= assignments matter.
local stash='${${__p9k_instant_prompt_date::=${(%)${__p9k_instant_prompt_date_format::='$_p9k__ret'}}}+}'
_p9k_escape $_POWERLEVEL9K_DATE_FORMAT
_p9k_prompt_segment prompt_date "$_p9k_color2" "$_p9k_color1" "DATE_ICON" 1 '' $stash$_p9k__ret
}
################################################################
# todo.sh: shows the number of tasks in your todo.sh file
# todo.sh segment: shows the task count, or "filtered/total" when a filter
# hides some tasks. Counts are cached keyed on the todo file's stat signature.
prompt_todo() {
unset P9K_TODO_TOTAL_TASK_COUNT P9K_TODO_FILTERED_TASK_COUNT
[[ -r $_p9k__todo_file && -x $_p9k__todo_command ]] || return
if ! _p9k_cache_stat_get $0 $_p9k__todo_file; then
local count="$($_p9k__todo_command -p ls | command tail -1)"
# The summary line has the form "TODO: <filtered> of <total> ...".
if [[ $count == (#b)'TODO: '([[:digit:]]##)' of '([[:digit:]]##)' '* ]]; then
_p9k_cache_stat_set 1 $match[1] $match[2]
else
_p9k_cache_stat_set 0
fi
fi
(( $_p9k__cache_val[1] )) || return
typeset -gi P9K_TODO_FILTERED_TASK_COUNT=$_p9k__cache_val[2]
typeset -gi P9K_TODO_TOTAL_TASK_COUNT=$_p9k__cache_val[3]
# Respect the HIDE_ZERO_{TOTAL,FILTERED} options before rendering.
if (( (P9K_TODO_TOTAL_TASK_COUNT || !_POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL) &&
(P9K_TODO_FILTERED_TASK_COUNT || !_POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED) )); then
if (( P9K_TODO_TOTAL_TASK_COUNT == P9K_TODO_FILTERED_TASK_COUNT )); then
local text=$P9K_TODO_TOTAL_TASK_COUNT
else
local text="$P9K_TODO_FILTERED_TASK_COUNT/$P9K_TODO_TOTAL_TASK_COUNT"
fi
_p9k_prompt_segment "$0" "grey50" "$_p9k_color1" 'TODO_ICON' 0 '' "$text"
fi
}
# The todo segment is considered only when a todo file path is configured.
_p9k_prompt_todo_init() {
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$_p9k__todo_file'
}
################################################################
# VCS segment: shows the state of your repository, if you are in a folder under
# version control
# The vcs segment can have 4 different states - defaults to 'CLEAN'.
# Maps each vcs segment state to its numeric background color (presumably a
# 256-color index: 2=green, 3=yellow, 8=grey — TODO confirm against the
# segment renderer).
typeset -gA __p9k_vcs_states=(
'CLEAN' '2'
'MODIFIED' '3'
'UNTRACKED' '2'
'LOADING' '8'
'CONFLICTED' '3'
)
# vcs_info hook: append the untracked icon to the unstaged part when the repo
# (or, with VCS_SHOW_SUBMODULE_DIRTY, its submodules) has untracked files.
function +vi-git-untracked() {
  [[ -z "${vcs_comm[gitdir]}" || "${vcs_comm[gitdir]}" == "." ]] && return
  # Get the root for the current repo or submodule. Declare and assign
  # separately: with `local x="$(cmd)"`, $? reflects `local` (always 0), so
  # the failure check below could never fire.
  local repoTopLevel
  repoTopLevel="$(git rev-parse --show-toplevel 2> /dev/null)"
  # dump out if we're outside a git repository (which includes being in the .git folder)
  [[ $? != 0 || -z $repoTopLevel ]] && return
  local untrackedFiles="$(git ls-files --others --exclude-standard "${repoTopLevel}" 2> /dev/null)"
  if [[ -z $untrackedFiles && $_POWERLEVEL9K_VCS_SHOW_SUBMODULE_DIRTY == 1 ]]; then
    untrackedFiles+="$(git submodule foreach --quiet --recursive 'git ls-files --others --exclude-standard' 2> /dev/null)"
  fi
  [[ -z $untrackedFiles ]] && return
  hook_com[unstaged]+=" $(print_icon 'VCS_UNTRACKED_ICON')"
  VCS_WORKDIR_HALF_DIRTY=true
}
# vcs_info hook: append "ahead N"/"behind N" markers (relative to the
# upstream branch) to the misc portion of the prompt.
function +vi-git-aheadbehind() {
local ahead behind
local -a gitstatus
# for git prior to 1.7
# ahead=$(git rev-list origin/${hook_com[branch]}..HEAD | wc -l)
ahead="$(git rev-list --count "${hook_com[branch]}"@{upstream}..HEAD 2>/dev/null)"
(( ahead )) && gitstatus+=( " $(print_icon 'VCS_OUTGOING_CHANGES_ICON')${ahead// /}" )
# for git prior to 1.7
# behind=$(git rev-list HEAD..origin/${hook_com[branch]} | wc -l)
behind="$(git rev-list --count HEAD.."${hook_com[branch]}"@{upstream} 2>/dev/null)"
(( behind )) && gitstatus+=( " $(print_icon 'VCS_INCOMING_CHANGES_ICON')${behind// /}" )
hook_com[misc]+=${(j::)gitstatus}
}
# vcs_info hook: optionally shorten the branch name, prepend the branch icon,
# and append the remote-tracking branch when it differs from the local one.
function +vi-git-remotebranch() {
local remote
local branch_name="${hook_com[branch]}"
# Are we on a remote-tracking branch?
remote="$(git rev-parse --verify HEAD@{upstream} --symbolic-full-name 2>/dev/null)"
remote=${remote/refs\/(remotes|heads)\/}
if (( $+_POWERLEVEL9K_VCS_SHORTEN_LENGTH && $+_POWERLEVEL9K_VCS_SHORTEN_MIN_LENGTH )); then
if (( ${#hook_com[branch]} > _POWERLEVEL9K_VCS_SHORTEN_MIN_LENGTH && ${#hook_com[branch]} > _POWERLEVEL9K_VCS_SHORTEN_LENGTH )); then
case $_POWERLEVEL9K_VCS_SHORTEN_STRATEGY in
truncate_middle)
hook_com[branch]="${branch_name:0:$_POWERLEVEL9K_VCS_SHORTEN_LENGTH}${_POWERLEVEL9K_VCS_SHORTEN_DELIMITER}${branch_name: -$_POWERLEVEL9K_VCS_SHORTEN_LENGTH}"
;;
truncate_from_right)
hook_com[branch]="${branch_name:0:$_POWERLEVEL9K_VCS_SHORTEN_LENGTH}${_POWERLEVEL9K_VCS_SHORTEN_DELIMITER}"
;;
esac
fi
fi
# With HIDE_BRANCH_ICON the branch text is deliberately left as-is.
if (( _POWERLEVEL9K_HIDE_BRANCH_ICON )); then
hook_com[branch]="${hook_com[branch]}"
else
hook_com[branch]="$(print_icon 'VCS_BRANCH_ICON')${hook_com[branch]}"
fi
# Always show the remote
#if [[ -n ${remote} ]] ; then
# Only show the remote if it differs from the local
if [[ -n ${remote} ]] && [[ "${remote#*/}" != "${branch_name}" ]] ; then
hook_com[branch]+="$(print_icon 'VCS_REMOTE_BRANCH_ICON')${remote// /}"
fi
}
# vcs_info hook: when HEAD is exactly at a tag, append the tag name to the
# branch string (or, in detached-HEAD state, show "<hash> <tag>").
function +vi-git-tagname() {
if (( !_POWERLEVEL9K_VCS_HIDE_TAGS )); then
# If we are on a tag, append the tagname to the current branch string.
local tag
tag="$(git describe --tags --exact-match HEAD 2>/dev/null)"
if [[ -n "${tag}" ]] ; then
# There is a tag that points to our current commit. Need to determine if we
# are also on a branch, or are in a DETACHED_HEAD state.
if [[ -z "$(git symbolic-ref HEAD 2>/dev/null)" ]]; then
# DETACHED_HEAD state. We want to append the tag name to the commit hash
# and print it. Unfortunately, `vcs_info` blows away the hash when a tag
# exists, so we have to manually retrieve it and clobber the branch
# string.
local revision
revision="$(git rev-list -n 1 --abbrev-commit --abbrev=${_POWERLEVEL9K_CHANGESET_HASH_LENGTH} HEAD)"
if (( _POWERLEVEL9K_HIDE_BRANCH_ICON )); then
hook_com[branch]="${revision} $(print_icon 'VCS_TAG_ICON')${tag}"
else
hook_com[branch]="$(print_icon 'VCS_BRANCH_ICON')${revision} $(print_icon 'VCS_TAG_ICON')${tag}"
fi
else
# We are on both a tag and a branch; print both by appending the tag name.
hook_com[branch]+=" $(print_icon 'VCS_TAG_ICON')${tag}"
fi
fi
fi
}
# Show count of stashed changes
# Port from https://github.com/whiteinge/dotfiles/blob/5dfd08d30f7f2749cfc60bc55564c6ea239624d9/.zsh_shouse_prompt#L268
# vcs_info hook: show the number of stash entries by counting the lines of the
# stash reflog (one line per entry).
function +vi-git-stash() {
if [[ -s "${vcs_comm[gitdir]}/logs/refs/stash" ]] ; then
local -a stashes=( "${(@f)"$(<${vcs_comm[gitdir]}/logs/refs/stash)"}" )
hook_com[misc]+=" $(print_icon 'VCS_STASH_ICON')${#stashes}"
fi
}
# vcs_info hook: render Mercurial bookmarks, if any, into the bookmark string.
function +vi-hg-bookmarks() {
if [[ -n "${hgbmarks[@]}" ]]; then
hook_com[hg-bookmark-string]=" $(print_icon 'VCS_BOOKMARK_ICON')${hgbmarks[@]}"
# To signal that we want to use the string we just generated, set the special
# variable `ret' to something other than the default zero:
ret=1
return 0
fi
}
# vcs_info hook: choose the visual identifier icon for the backend in use
# (for git, based on the remote hosting provider) and record whether the
# worktree has staged/unstaged changes in VCS_WORKDIR_DIRTY.
function +vi-vcs-detect-changes() {
  case "${hook_com[vcs]}" in
    git)
      local remote="$(git ls-remote --get-url 2> /dev/null)"
      # Substring match on the remote URL, same precedence as before.
      case $remote in
        *github*)    vcs_visual_identifier='VCS_GIT_GITHUB_ICON';;
        *bitbucket*) vcs_visual_identifier='VCS_GIT_BITBUCKET_ICON';;
        *stash*)     vcs_visual_identifier='VCS_GIT_BITBUCKET_ICON';;
        *gitlab*)    vcs_visual_identifier='VCS_GIT_GITLAB_ICON';;
        *)           vcs_visual_identifier='VCS_GIT_ICON';;
      esac
      ;;
    hg)  vcs_visual_identifier='VCS_HG_ICON';;
    svn) vcs_visual_identifier='VCS_SVN_ICON';;
  esac
  if [[ -n "${hook_com[staged]}" || -n "${hook_com[unstaged]}" ]]; then
    VCS_WORKDIR_DIRTY=true
  else
    VCS_WORKDIR_DIRTY=false
  fi
}
# vcs_info hook: inspect `svn status` once and flag untracked (?), modified
# (M) and added (A) entries via the standard dirty markers.
function +vi-svn-detect-changes() {
  local svn_status="$(svn status)"
  if command grep -q '^?' <<< "$svn_status"; then
    hook_com[unstaged]+=" $(print_icon 'VCS_UNTRACKED_ICON')"
    VCS_WORKDIR_HALF_DIRTY=true
  fi
  if command grep -q '^M' <<< "$svn_status"; then
    hook_com[unstaged]+=" $(print_icon 'VCS_UNSTAGED_ICON')"
    VCS_WORKDIR_DIRTY=true
  fi
  if command grep -q '^A' <<< "$svn_status"; then
    hook_com[staged]+=" $(print_icon 'VCS_STAGED_ICON')"
    VCS_WORKDIR_DIRTY=true
  fi
}
# Configure zsh's vcs_info for the non-gitstatus fallback path: formats,
# staged/unstaged markers, per-backend hooks, and changeset display.
_p9k_vcs_info_init() {
autoload -Uz vcs_info
local prefix=''
if (( _POWERLEVEL9K_SHOW_CHANGESET )); then
_p9k_get_icon '' VCS_COMMIT_ICON
# %0.Ni truncates the revision hash to the configured length.
prefix="$_p9k__ret%0.${_POWERLEVEL9K_CHANGESET_HASH_LENGTH}i "
fi
zstyle ':vcs_info:*' check-for-changes true
zstyle ':vcs_info:*' formats "$prefix%b%c%u%m"
zstyle ':vcs_info:*' actionformats "%b %F{$_POWERLEVEL9K_VCS_ACTIONFORMAT_FOREGROUND}| %a%f"
_p9k_get_icon '' VCS_STAGED_ICON
zstyle ':vcs_info:*' stagedstr " $_p9k__ret"
_p9k_get_icon '' VCS_UNSTAGED_ICON
zstyle ':vcs_info:*' unstagedstr " $_p9k__ret"
# Hooks are the +vi-* functions defined in this file.
zstyle ':vcs_info:git*+set-message:*' hooks $_POWERLEVEL9K_VCS_GIT_HOOKS
zstyle ':vcs_info:hg*+set-message:*' hooks $_POWERLEVEL9K_VCS_HG_HOOKS
zstyle ':vcs_info:svn*+set-message:*' hooks $_POWERLEVEL9K_VCS_SVN_HOOKS
# For Hg, only show the branch name
if (( _POWERLEVEL9K_HIDE_BRANCH_ICON )); then
zstyle ':vcs_info:hg*:*' branchformat "%b"
else
_p9k_get_icon '' VCS_BRANCH_ICON
zstyle ':vcs_info:hg*:*' branchformat "$_p9k__ret%b"
fi
# The `get-revision` function must be turned on for dirty-check to work for Hg
zstyle ':vcs_info:hg*:*' get-revision true
zstyle ':vcs_info:hg*:*' get-bookmarks true
zstyle ':vcs_info:hg*+gen-hg-bookmark-string:*' hooks hg-bookmarks
# TODO: fix the %b (branch) format for svn. Using %b breaks color-encoding of the foreground
# for the rest of the powerline.
zstyle ':vcs_info:svn*:*' formats "$prefix%c%u"
zstyle ':vcs_info:svn*:*' actionformats "$prefix%c%u %F{$_POWERLEVEL9K_VCS_ACTIONFORMAT_FOREGROUND}| %a%f"
if (( _POWERLEVEL9K_SHOW_CHANGESET )); then
zstyle ':vcs_info:*' get-revision true
else
zstyle ':vcs_info:*' get-revision false
fi
}
# Snapshot all VCS_STATUS_* variables into _p9k__gitstatus_last as one
# NUL-joined record, keyed by "GIT_DIR:<dir>" when $_p9k__git_dir is set, else
# by the work tree. _p9k_vcs_status_restore unpacks the same layout — the
# field order here and there must stay in sync.
function _p9k_vcs_status_save() {
local z=$'\0'
_p9k__gitstatus_last[${${_p9k__git_dir:+GIT_DIR:$_p9k__git_dir}:-$VCS_STATUS_WORKDIR}]=\
$VCS_STATUS_COMMIT$z$VCS_STATUS_LOCAL_BRANCH$z$VCS_STATUS_REMOTE_BRANCH$z$VCS_STATUS_REMOTE_NAME$z\
$VCS_STATUS_REMOTE_URL$z$VCS_STATUS_ACTION$z$VCS_STATUS_INDEX_SIZE$z$VCS_STATUS_NUM_STAGED$z\
$VCS_STATUS_NUM_UNSTAGED$z$VCS_STATUS_NUM_CONFLICTED$z$VCS_STATUS_NUM_UNTRACKED$z\
$VCS_STATUS_HAS_STAGED$z$VCS_STATUS_HAS_UNSTAGED$z$VCS_STATUS_HAS_CONFLICTED$z\
$VCS_STATUS_HAS_UNTRACKED$z$VCS_STATUS_COMMITS_AHEAD$z$VCS_STATUS_COMMITS_BEHIND$z\
$VCS_STATUS_STASHES$z$VCS_STATUS_TAG$z$VCS_STATUS_NUM_UNSTAGED_DELETED$z\
$VCS_STATUS_NUM_STAGED_NEW$z$VCS_STATUS_NUM_STAGED_DELETED$z$VCS_STATUS_PUSH_REMOTE_NAME$z\
$VCS_STATUS_PUSH_REMOTE_URL$z$VCS_STATUS_PUSH_COMMITS_AHEAD$z$VCS_STATUS_PUSH_COMMITS_BEHIND$z\
$VCS_STATUS_NUM_SKIP_WORKTREE$z$VCS_STATUS_NUM_ASSUME_UNCHANGED
}
# Inverse of _p9k_vcs_status_save: split the NUL-joined record ($1) back into
# the VCS_STATUS_* variables. The multi-variable for-loop with an empty body
# binds the (@0)-split fields to the listed names in one pass; order must
# match _p9k_vcs_status_save.
function _p9k_vcs_status_restore() {
for VCS_STATUS_COMMIT VCS_STATUS_LOCAL_BRANCH VCS_STATUS_REMOTE_BRANCH VCS_STATUS_REMOTE_NAME \
VCS_STATUS_REMOTE_URL VCS_STATUS_ACTION VCS_STATUS_INDEX_SIZE VCS_STATUS_NUM_STAGED \
VCS_STATUS_NUM_UNSTAGED VCS_STATUS_NUM_CONFLICTED VCS_STATUS_NUM_UNTRACKED \
VCS_STATUS_HAS_STAGED VCS_STATUS_HAS_UNSTAGED VCS_STATUS_HAS_CONFLICTED \
VCS_STATUS_HAS_UNTRACKED VCS_STATUS_COMMITS_AHEAD VCS_STATUS_COMMITS_BEHIND \
VCS_STATUS_STASHES VCS_STATUS_TAG VCS_STATUS_NUM_UNSTAGED_DELETED VCS_STATUS_NUM_STAGED_NEW \
VCS_STATUS_NUM_STAGED_DELETED VCS_STATUS_PUSH_REMOTE_NAME VCS_STATUS_PUSH_REMOTE_URL \
VCS_STATUS_PUSH_COMMITS_AHEAD VCS_STATUS_PUSH_COMMITS_BEHIND VCS_STATUS_NUM_SKIP_WORKTREE \
VCS_STATUS_NUM_ASSUME_UNCHANGED
in "${(@0)1}"; do done
}
# Look up the last saved gitstatus record: by GIT_DIR when set, otherwise by
# walking up from the current directory. On success _p9k__ret holds the
# record and the return status is 0.
function _p9k_vcs_status_for_dir() {
  if [[ -n $GIT_DIR ]]; then
    _p9k__ret=$_p9k__gitstatus_last[GIT_DIR:$GIT_DIR]
    [[ -n $_p9k__ret ]]
    return
  fi
  local d=$_p9k__cwd_a
  while :; do
    _p9k__ret=$_p9k__gitstatus_last[$d]
    [[ -n $_p9k__ret ]] && return 0
    [[ $d == (/|.) ]] && return 1
    d=${d:h}
  done
}
# Drop cached gitstatus data: for GIT_DIR when set, otherwise for $1 and every
# ancestor directory.
function _p9k_vcs_status_purge() {
  if [[ -n $_p9k__git_dir ]]; then
    _p9k__gitstatus_last[GIT_DIR:$_p9k__git_dir]=""
    return
  fi
  local d=$1
  while :; do
    # Overwrite with "" rather than unset: unset is unreliable when the key
    # contains unusual characters.
    _p9k__gitstatus_last[$d]=""
    _p9k_git_slow[$d]=""
    [[ $d == (/|.) ]] && break
    d=${d:h}
  done
}
# Map $VCS_STATUS_REMOTE_URL to a hosting-provider icon name in _p9k__ret.
# Checks are ordered: github, then bitbucket/stash, then gitlab.
function _p9k_vcs_icon() {
  local url=$VCS_STATUS_REMOTE_URL
  if [[ $url == *github* ]]; then
    _p9k__ret=VCS_GIT_GITHUB_ICON
  elif [[ $url == *bitbucket* || $url == *stash* ]]; then
    _p9k__ret=VCS_GIT_BITBUCKET_ICON
  elif [[ $url == *gitlab* ]]; then
    _p9k__ret=VCS_GIT_GITLAB_ICON
  else
    _p9k__ret=VCS_GIT_ICON
  fi
}
# Render the vcs segment from VCS_STATUS_* (gitstatusd data). While an async
# query is in flight, stale data is shown in the LOADING state (or a LOADING
# placeholder when there is no stale data). Rendered segments are memoized in
# an ephemeral cache keyed by state plus the status fields that affect output.
function _p9k_vcs_render() {
local state
if (( $+_p9k__gitstatus_next_dir )); then
if _p9k_vcs_status_for_dir; then
_p9k_vcs_status_restore $_p9k__ret
state=LOADING
else
_p9k_prompt_segment prompt_vcs_LOADING "${__p9k_vcs_states[LOADING]}" "$_p9k_color1" VCS_LOADING_ICON 0 '' "$_POWERLEVEL9K_VCS_LOADING_TEXT"
return 0
fi
elif [[ $VCS_STATUS_RESULT != ok-* ]]; then
return 1
fi
# Minimal mode: only state color and provider icon, no content.
if (( _POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING )); then
if [[ -z $state ]]; then
if [[ $VCS_STATUS_HAS_CONFLICTED == 1 && $_POWERLEVEL9K_VCS_CONFLICTED_STATE == 1 ]]; then
state=CONFLICTED
elif [[ $VCS_STATUS_HAS_STAGED != 0 || $VCS_STATUS_HAS_UNSTAGED != 0 ]]; then
state=MODIFIED
elif [[ $VCS_STATUS_HAS_UNTRACKED != 0 ]]; then
state=UNTRACKED
else
state=CLEAN
fi
fi
_p9k_vcs_icon
_p9k_prompt_segment prompt_vcs_$state "${__p9k_vcs_states[$state]}" "$_p9k_color1" "$_p9k__ret" 0 '' ""
return 0
fi
# Zero out any status fields whose corresponding vcs_info hook is disabled,
# for compatibility with the vcs_info-based configuration.
(( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)git-untracked]} )) || VCS_STATUS_HAS_UNTRACKED=0
(( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)git-aheadbehind]} )) || { VCS_STATUS_COMMITS_AHEAD=0 && VCS_STATUS_COMMITS_BEHIND=0 }
(( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)git-stash]} )) || VCS_STATUS_STASHES=0
(( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)git-remotebranch]} )) || VCS_STATUS_REMOTE_BRANCH=""
(( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)git-tagname]} )) || VCS_STATUS_TAG=""
# Clamp ahead/behind counters to their configured maxima.
(( _POWERLEVEL9K_VCS_COMMITS_AHEAD_MAX_NUM >= 0 && VCS_STATUS_COMMITS_AHEAD > _POWERLEVEL9K_VCS_COMMITS_AHEAD_MAX_NUM )) &&
VCS_STATUS_COMMITS_AHEAD=$_POWERLEVEL9K_VCS_COMMITS_AHEAD_MAX_NUM
(( _POWERLEVEL9K_VCS_COMMITS_BEHIND_MAX_NUM >= 0 && VCS_STATUS_COMMITS_BEHIND > _POWERLEVEL9K_VCS_COMMITS_BEHIND_MAX_NUM )) &&
VCS_STATUS_COMMITS_BEHIND=$_POWERLEVEL9K_VCS_COMMITS_BEHIND_MAX_NUM
# Every field that can influence the rendered content takes part in the key.
local -a cache_key=(
"$VCS_STATUS_LOCAL_BRANCH"
"$VCS_STATUS_REMOTE_BRANCH"
"$VCS_STATUS_REMOTE_URL"
"$VCS_STATUS_ACTION"
"$VCS_STATUS_NUM_STAGED"
"$VCS_STATUS_NUM_UNSTAGED"
"$VCS_STATUS_NUM_UNTRACKED"
"$VCS_STATUS_HAS_CONFLICTED"
"$VCS_STATUS_HAS_STAGED"
"$VCS_STATUS_HAS_UNSTAGED"
"$VCS_STATUS_HAS_UNTRACKED"
"$VCS_STATUS_COMMITS_AHEAD"
"$VCS_STATUS_COMMITS_BEHIND"
"$VCS_STATUS_STASHES"
"$VCS_STATUS_TAG"
"$VCS_STATUS_NUM_UNSTAGED_DELETED"
)
if [[ $_POWERLEVEL9K_SHOW_CHANGESET == 1 || -z $VCS_STATUS_LOCAL_BRANCH ]]; then
cache_key+=$VCS_STATUS_COMMIT
fi
if ! _p9k_cache_ephemeral_get "$state" "${(@)cache_key}"; then
local icon
local content
if (( ${_POWERLEVEL9K_VCS_GIT_HOOKS[(I)vcs-detect-changes]} )); then
if [[ $VCS_STATUS_HAS_CONFLICTED == 1 && $_POWERLEVEL9K_VCS_CONFLICTED_STATE == 1 ]]; then
: ${state:=CONFLICTED}
elif [[ $VCS_STATUS_HAS_STAGED != 0 || $VCS_STATUS_HAS_UNSTAGED != 0 ]]; then
: ${state:=MODIFIED}
elif [[ $VCS_STATUS_HAS_UNTRACKED != 0 ]]; then
: ${state:=UNTRACKED}
fi
# It's weird that removing vcs-detect-changes from POWERLEVEL9K_VCS_GIT_HOOKS gets rid
# of the GIT icon. That's what vcs_info does, so we do the same in the name of compatibility.
_p9k_vcs_icon
icon=$_p9k__ret
fi
: ${state:=CLEAN}
# Helper: append a styled piece ($2) of kind $1 (COMMIT, BRANCH, ...) to
# the accumulated content.
function _$0_fmt() {
_p9k_vcs_style $state $1
content+="$_p9k__ret$2"
}
local ws
if [[ $_POWERLEVEL9K_SHOW_CHANGESET == 1 || -z $VCS_STATUS_LOCAL_BRANCH ]]; then
_p9k_get_icon prompt_vcs_$state VCS_COMMIT_ICON
_$0_fmt COMMIT "$_p9k__ret${${VCS_STATUS_COMMIT:0:$_POWERLEVEL9K_CHANGESET_HASH_LENGTH}:-HEAD}"
ws=' '
fi
if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
local branch=$ws
if (( !_POWERLEVEL9K_HIDE_BRANCH_ICON )); then
_p9k_get_icon prompt_vcs_$state VCS_BRANCH_ICON
branch+=$_p9k__ret
fi
# Optional branch-name shortening, mirroring +vi-git-remotebranch.
if (( $+_POWERLEVEL9K_VCS_SHORTEN_LENGTH && $+_POWERLEVEL9K_VCS_SHORTEN_MIN_LENGTH &&
$#VCS_STATUS_LOCAL_BRANCH > _POWERLEVEL9K_VCS_SHORTEN_MIN_LENGTH &&
$#VCS_STATUS_LOCAL_BRANCH > _POWERLEVEL9K_VCS_SHORTEN_LENGTH )) &&
[[ $_POWERLEVEL9K_VCS_SHORTEN_STRATEGY == (truncate_middle|truncate_from_right) ]]; then
branch+=${VCS_STATUS_LOCAL_BRANCH[1,_POWERLEVEL9K_VCS_SHORTEN_LENGTH]//\%/%%}${_POWERLEVEL9K_VCS_SHORTEN_DELIMITER}
if [[ $_POWERLEVEL9K_VCS_SHORTEN_STRATEGY == truncate_middle ]]; then
_p9k_vcs_style $state BRANCH
branch+=${_p9k__ret}${VCS_STATUS_LOCAL_BRANCH[-_POWERLEVEL9K_VCS_SHORTEN_LENGTH,-1]//\%/%%}
fi
else
branch+=${VCS_STATUS_LOCAL_BRANCH//\%/%%}
fi
_$0_fmt BRANCH $branch
fi
if [[ $_POWERLEVEL9K_VCS_HIDE_TAGS == 0 && -n $VCS_STATUS_TAG ]]; then
_p9k_get_icon prompt_vcs_$state VCS_TAG_ICON
_$0_fmt TAG " $_p9k__ret${VCS_STATUS_TAG//\%/%%}"
fi
# During an ongoing action (rebase, merge, ...) show only the action name;
# otherwise show the full dirty/ahead-behind/stash breakdown.
if [[ -n $VCS_STATUS_ACTION ]]; then
_$0_fmt ACTION " | ${VCS_STATUS_ACTION//\%/%%}"
else
if [[ -n $VCS_STATUS_REMOTE_BRANCH &&
$VCS_STATUS_LOCAL_BRANCH != $VCS_STATUS_REMOTE_BRANCH ]]; then
_p9k_get_icon prompt_vcs_$state VCS_REMOTE_BRANCH_ICON
_$0_fmt REMOTE_BRANCH " $_p9k__ret${VCS_STATUS_REMOTE_BRANCH//\%/%%}"
fi
if [[ $VCS_STATUS_HAS_STAGED == 1 || $VCS_STATUS_HAS_UNSTAGED == 1 || $VCS_STATUS_HAS_UNTRACKED == 1 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_DIRTY_ICON
_$0_fmt DIRTY "$_p9k__ret"
# A *_MAX_NUM of 1 means "icon only, no count".
if [[ $VCS_STATUS_HAS_STAGED == 1 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_STAGED_ICON
(( _POWERLEVEL9K_VCS_STAGED_MAX_NUM != 1 )) && _p9k__ret+=$VCS_STATUS_NUM_STAGED
_$0_fmt STAGED " $_p9k__ret"
fi
if [[ $VCS_STATUS_HAS_UNSTAGED == 1 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_UNSTAGED_ICON
(( _POWERLEVEL9K_VCS_UNSTAGED_MAX_NUM != 1 )) && _p9k__ret+=$VCS_STATUS_NUM_UNSTAGED
_$0_fmt UNSTAGED " $_p9k__ret"
fi
if [[ $VCS_STATUS_HAS_UNTRACKED == 1 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_UNTRACKED_ICON
(( _POWERLEVEL9K_VCS_UNTRACKED_MAX_NUM != 1 )) && _p9k__ret+=$VCS_STATUS_NUM_UNTRACKED
_$0_fmt UNTRACKED " $_p9k__ret"
fi
fi
if [[ $VCS_STATUS_COMMITS_BEHIND -gt 0 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_INCOMING_CHANGES_ICON
(( _POWERLEVEL9K_VCS_COMMITS_BEHIND_MAX_NUM != 1 )) && _p9k__ret+=$VCS_STATUS_COMMITS_BEHIND
_$0_fmt INCOMING_CHANGES " $_p9k__ret"
fi
if [[ $VCS_STATUS_COMMITS_AHEAD -gt 0 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_OUTGOING_CHANGES_ICON
(( _POWERLEVEL9K_VCS_COMMITS_AHEAD_MAX_NUM != 1 )) && _p9k__ret+=$VCS_STATUS_COMMITS_AHEAD
_$0_fmt OUTGOING_CHANGES " $_p9k__ret"
fi
if [[ $VCS_STATUS_STASHES -gt 0 ]]; then
_p9k_get_icon prompt_vcs_$state VCS_STASH_ICON
_$0_fmt STASH " $_p9k__ret$VCS_STATUS_STASHES"
fi
fi
_p9k_cache_ephemeral_set "prompt_vcs_$state" "${__p9k_vcs_states[$state]}" "$_p9k_color1" "$icon" 0 '' "$content"
fi
_p9k_prompt_segment "$_p9k__cache_val[@]"
return 0
}
# Treat repositories whose work dir matches
# _POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN as if they were not repositories
# at all (e.g. ok-sync -> norepo-sync).
function _p9k_maybe_ignore_git_repo() {
  [[ $VCS_STATUS_RESULT == ok-* && $VCS_STATUS_WORKDIR == $~_POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN ]] || return 0
  VCS_STATUS_RESULT=norepo${VCS_STATUS_RESULT#ok}
}
# Callback invoked by gitstatus when an async query completes. $1 is 1 when
# the result belongs to the directory we are still interested in. Updates the
# slow-repo bookkeeping, saves/purges cached status, optionally re-issues a
# query for the next directory, re-renders the vcs segment, and redraws.
function _p9k_vcs_resume() {
eval "$__p9k_intro"
_p9k_maybe_ignore_git_repo
if [[ $VCS_STATUS_RESULT == ok-async ]]; then
# Track per-repo latency so slow repos get async-only treatment later.
local latency=$((EPOCHREALTIME - _p9k__gitstatus_start_time))
if (( latency > _POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS )); then
_p9k_git_slow[${${_p9k__git_dir:+GIT_DIR:$_p9k__git_dir}:-$VCS_STATUS_WORKDIR}]=1
elif (( $1 && latency < 0.8 * _POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS )); then # 0.8 to avoid flip-flopping
_p9k_git_slow[${${_p9k__git_dir:+GIT_DIR:$_p9k__git_dir}:-$VCS_STATUS_WORKDIR}]=0
fi
_p9k_vcs_status_save
fi
# Set-but-empty _p9k__gitstatus_next_dir means "query the current directory".
if [[ -z $_p9k__gitstatus_next_dir ]]; then
unset _p9k__gitstatus_next_dir
case $VCS_STATUS_RESULT in
norepo-async) (( $1 )) && _p9k_vcs_status_purge $_p9k__cwd_a;;
ok-async) (( $1 )) || _p9k__gitstatus_next_dir=$_p9k__cwd_a;;
esac
fi
if [[ -n $_p9k__gitstatus_next_dir ]]; then
_p9k__git_dir=$GIT_DIR
if ! gitstatus_query_p9k_ -d $_p9k__gitstatus_next_dir -t 0 -c '_p9k_vcs_resume 1' POWERLEVEL9K; then
unset _p9k__gitstatus_next_dir
unset VCS_STATUS_RESULT
else
_p9k_maybe_ignore_git_repo
case $VCS_STATUS_RESULT in
tout) _p9k__gitstatus_next_dir=''; _p9k__gitstatus_start_time=$EPOCHREALTIME;;
norepo-sync) _p9k_vcs_status_purge $_p9k__gitstatus_next_dir; unset _p9k__gitstatus_next_dir;;
ok-sync) _p9k_vcs_status_save; unset _p9k__gitstatus_next_dir;;
esac
fi
fi
# Re-render just the vcs segment when gitstatus drives it; otherwise rebuild
# the whole prompt.
if (( _p9k_vcs_index && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
local _p9k__prompt _p9k__prompt_side=$_p9k_vcs_side _p9k__segment_name=vcs
local -i _p9k__has_upglob _p9k__segment_index=_p9k_vcs_index _p9k__line_index=_p9k_vcs_line_index
_p9k_vcs_render
typeset -g _p9k__vcs=$_p9k__prompt
else
_p9k__refresh_reason=gitstatus
_p9k_set_prompt
_p9k__refresh_reason=''
fi
_p9k_reset_prompt
}
# Issue the gitstatus query for the current prompt refresh. Fast repos are
# queried synchronously (with timeout), known-slow repos asynchronously;
# _p9k_vcs_resume handles async completions. Returns 1 when gitstatus failed
# and the caller should fall back to vcs_info.
function _p9k_vcs_gitstatus() {
if [[ $_p9k__refresh_reason == precmd ]]; then
if (( $+_p9k__gitstatus_next_dir )); then
# An async query is already pending; retarget it at the current directory.
_p9k__gitstatus_next_dir=$_p9k__cwd_a
else
local -F timeout=_POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS
if ! _p9k_vcs_status_for_dir; then
# No cached data for this directory: try a synchronous query first.
_p9k__git_dir=$GIT_DIR
gitstatus_query_p9k_ -d $_p9k__cwd_a -t $timeout -p -c '_p9k_vcs_resume 0' POWERLEVEL9K || return 1
_p9k_maybe_ignore_git_repo
case $VCS_STATUS_RESULT in
tout) _p9k__gitstatus_next_dir=''; _p9k__gitstatus_start_time=$EPOCHREALTIME; return 0;;
norepo-sync) return 0;;
ok-sync) _p9k_vcs_status_save;;
esac
else
# Cached data exists; use zero timeout (async-only) for known-slow repos.
if [[ -n $GIT_DIR ]]; then
[[ $_p9k_git_slow[GIT_DIR:$GIT_DIR] == 1 ]] && timeout=0
else
local dir=$_p9k__cwd_a
while true; do
case $_p9k_git_slow[$dir] in
"") [[ $dir == (/|.) ]] && break; dir=${dir:h};;
0) break;;
1) timeout=0; break;;
esac
done
fi
fi
# Never block the very first prompt.
(( _p9k__prompt_idx == 1 )) && timeout=0
_p9k__git_dir=$GIT_DIR
if (( _p9k_vcs_index && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
if ! gitstatus_query_p9k_ -d $_p9k__cwd_a -t 0 -c '_p9k_vcs_resume 1' POWERLEVEL9K; then
unset VCS_STATUS_RESULT
return 1
fi
typeset -gF _p9k__vcs_timeout=timeout
_p9k__gitstatus_next_dir=''
_p9k__gitstatus_start_time=$EPOCHREALTIME
return 0
fi
if ! gitstatus_query_p9k_ -d $_p9k__cwd_a -t $timeout -c '_p9k_vcs_resume 1' POWERLEVEL9K; then
unset VCS_STATUS_RESULT
return 1
fi
_p9k_maybe_ignore_git_repo
case $VCS_STATUS_RESULT in
tout) _p9k__gitstatus_next_dir=''; _p9k__gitstatus_start_time=$EPOCHREALTIME;;
norepo-sync) _p9k_vcs_status_purge $_p9k__cwd_a;;
ok-sync) _p9k_vcs_status_save;;
esac
fi
fi
return 0
}
################################################################
# Segment to show VCS information
# VCS segment entry point. Prefers the asynchronously-rendered gitstatus
# segment ($_p9k__vcs); otherwise queries gitstatus synchronously for git and
# falls back to zsh's vcs_info for the remaining backends.
# Fix: the //İ/I literal below had been mojibake'd ('ฤฐ'); restored the dotted
# capital I (U+0130) that (U) produces when upper-casing 'i' in Turkish
# locales, so segment names stay ASCII.
prompt_vcs() {
  if (( _p9k_vcs_index && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
    _p9k__prompt+='${(e)_p9k__vcs}'
    return
  fi
  local -a backends=($_POWERLEVEL9K_VCS_BACKENDS)
  if (( ${backends[(I)git]} && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )) && _p9k_vcs_gitstatus; then
    _p9k_vcs_render && return
    backends=(${backends:#git})
  fi
  if (( $#backends )); then
    VCS_WORKDIR_DIRTY=false
    VCS_WORKDIR_HALF_DIRTY=false
    local current_state=""
    # Actually invoke vcs_info manually to gather all information.
    zstyle ':vcs_info:*' enable ${backends}
    vcs_info
    local vcs_prompt="${vcs_info_msg_0_}"
    if [[ -n "$vcs_prompt" ]]; then
      if [[ "$VCS_WORKDIR_DIRTY" == true ]]; then
        # $vcs_visual_identifier gets set in +vi-vcs-detect-changes in functions/vcs.zsh,
        # as we have there access to vcs_info internal hooks.
        current_state='MODIFIED'
      else
        if [[ "$VCS_WORKDIR_HALF_DIRTY" == true ]]; then
          current_state='UNTRACKED'
        else
          current_state='CLEAN'
        fi
      fi
      _p9k_prompt_segment "${0}_${${(U)current_state}//İ/I}" "${__p9k_vcs_states[$current_state]}" "$_p9k_color1" "$vcs_visual_identifier" 0 '' "$vcs_prompt"
    fi
  fi
}
################################################################
# Vi Mode: show editing mode (NORMAL|INSERT|VISUAL)
# Vi-mode segment: INSERT / OVERWRITE / NORMAL / VISUAL, driven at expansion
# time by $_p9k__keymap, $_p9k__zle_state and $_p9k__region_active. The
# __p9k_sh_glob branch avoids '(a|b)' alternation (unavailable under SH_GLOB)
# by chaining ${...:#pattern} exclusions instead.
prompt_vi_mode() {
local -i len=$#_p9k__prompt _p9k__has_upglob
if (( __p9k_sh_glob )); then
if (( $+_POWERLEVEL9K_VI_OVERWRITE_MODE_STRING )); then
if [[ -n $_POWERLEVEL9K_VI_INSERT_MODE_STRING ]]; then
# Non-empty (=> segment shown) unless in vicmd/vivis/vivli or overwrite.
_p9k_prompt_segment $0_INSERT "$_p9k_color1" blue '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*overwrite*}}' "$_POWERLEVEL9K_VI_INSERT_MODE_STRING"
fi
_p9k_prompt_segment $0_OVERWRITE "$_p9k_color1" blue '' 0 '${${${${${${:-$_p9k__keymap.$_p9k__zle_state}:#vicmd.*}:#vivis.*}:#vivli.*}:#*.*insert*}}' "$_POWERLEVEL9K_VI_OVERWRITE_MODE_STRING"
else
if [[ -n $_POWERLEVEL9K_VI_INSERT_MODE_STRING ]]; then
_p9k_prompt_segment $0_INSERT "$_p9k_color1" blue '' 0 '${${${${_p9k__keymap:#vicmd}:#vivis}:#vivli}}' "$_POWERLEVEL9K_VI_INSERT_MODE_STRING"
fi
fi
if (( $+_POWERLEVEL9K_VI_VISUAL_MODE_STRING )); then
# NORMAL only when in vicmd with no active region; VISUAL otherwise.
_p9k_prompt_segment $0_NORMAL "$_p9k_color1" white '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' "$_POWERLEVEL9K_VI_COMMAND_MODE_STRING"
_p9k_prompt_segment $0_VISUAL "$_p9k_color1" white '' 0 '${$((! ${#${${${${:-$_p9k__keymap$_p9k__region_active}:#vicmd1}:#vivis?}:#vivli?}})):#0}' "$_POWERLEVEL9K_VI_VISUAL_MODE_STRING"
else
_p9k_prompt_segment $0_NORMAL "$_p9k_color1" white '' 0 '${$((! ${#${${${_p9k__keymap:#vicmd}:#vivis}:#vivli}})):#0}' "$_POWERLEVEL9K_VI_COMMAND_MODE_STRING"
fi
else
if (( $+_POWERLEVEL9K_VI_OVERWRITE_MODE_STRING )); then
if [[ -n $_POWERLEVEL9K_VI_INSERT_MODE_STRING ]]; then
_p9k_prompt_segment $0_INSERT "$_p9k_color1" blue '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*overwrite*)}' "$_POWERLEVEL9K_VI_INSERT_MODE_STRING"
fi
_p9k_prompt_segment $0_OVERWRITE "$_p9k_color1" blue '' 0 '${${:-$_p9k__keymap.$_p9k__zle_state}:#(vicmd.*|vivis.*|vivli.*|*.*insert*)}' "$_POWERLEVEL9K_VI_OVERWRITE_MODE_STRING"
else
if [[ -n $_POWERLEVEL9K_VI_INSERT_MODE_STRING ]]; then
_p9k_prompt_segment $0_INSERT "$_p9k_color1" blue '' 0 '${_p9k__keymap:#(vicmd|vivis|vivli)}' "$_POWERLEVEL9K_VI_INSERT_MODE_STRING"
fi
fi
if (( $+_POWERLEVEL9K_VI_VISUAL_MODE_STRING )); then
_p9k_prompt_segment $0_NORMAL "$_p9k_color1" white '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#vicmd0}' "$_POWERLEVEL9K_VI_COMMAND_MODE_STRING"
_p9k_prompt_segment $0_VISUAL "$_p9k_color1" white '' 0 '${(M)${:-$_p9k__keymap$_p9k__region_active}:#(vicmd1|vivis?|vivli?)}' "$_POWERLEVEL9K_VI_VISUAL_MODE_STRING"
else
_p9k_prompt_segment $0_NORMAL "$_p9k_color1" white '' 0 '${(M)_p9k__keymap:#(vicmd|vivis|vivli)}' "$_POWERLEVEL9K_VI_COMMAND_MODE_STRING"
fi
fi
# Cache the rendered segment text unless it contains an upward glob.
(( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
# Instant-prompt approximation of prompt_vi_mode: keymap state is unknown
# before zle starts, so assume insert mode.
instant_prompt_vi_mode() {
  [[ -n $_POWERLEVEL9K_VI_INSERT_MODE_STRING ]] || return 0
  _p9k_prompt_segment prompt_vi_mode_INSERT "$_p9k_color1" blue '' 0 '' "$_POWERLEVEL9K_VI_INSERT_MODE_STRING"
}
################################################################
# Virtualenv: current working virtualenv
# More information on virtualenv (Python):
# https://virtualenv.pypa.io/en/latest/
# Virtualenv segment: the active env's name (optionally preceded by the python
# version), with visibility rules relative to the pyenv segment.
prompt_virtualenv() {
local msg=''
if (( _POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION )) && _p9k_python_version; then
msg="${_p9k__ret//\%/%%} "
fi
local v=${VIRTUAL_ENV:t}
# Generic env directory names (per VIRTUALENV_GENERIC_NAMES) carry no
# information; show the parent directory's name instead.
[[ $v == $~_POWERLEVEL9K_VIRTUALENV_GENERIC_NAMES ]] && v=${VIRTUAL_ENV:h:t}
msg+="$_POWERLEVEL9K_VIRTUALENV_LEFT_DELIMITER${v//\%/%%}$_POWERLEVEL9K_VIRTUALENV_RIGHT_DELIMITER"
case $_POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV in
false)
# Hide whenever pyenv is showing a python version.
_p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PYTHON_ICON' 0 '${(M)${#P9K_PYENV_PYTHON_VERSION}:#0}' "$msg"
;;
if-different)
# Hide when the env name matches the active pyenv version.
_p9k_escape $v
_p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PYTHON_ICON' 0 '${${:-'$_p9k__ret'}:#$_p9k__pyenv_version}' "$msg"
;;
*)
_p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PYTHON_ICON' 0 '' "$msg"
;;
esac
}
# The virtualenv segment is considered only while $VIRTUAL_ENV is set.
_p9k_prompt_virtualenv_init() {
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$VIRTUAL_ENV'
}
# _p9k_read_pyenv_like_version_file <filepath> [prefix]
# Read a pyenv-style version file: the first word of each line, with the
# optional [prefix] stripped, joined with ':' into _p9k__ret. Results are
# cached per (file, prefix) and invalidated by mtime. Returns non-zero when
# nothing was read.
function _p9k_read_pyenv_like_version_file() {
local -a stat
zstat -A stat +mtime -- $1 2>/dev/null || stat=(-1)
local cached=$_p9k__read_pyenv_like_version_file_cache[$1:$2]
if [[ $cached == $stat[1]:* ]]; then
_p9k__ret=${cached#*:}
else
local fd content
{
# Read at most 1 KiB; the fd is closed in the always block even on error.
{ sysopen -r -u fd -- $1 && sysread -i $fd -s 1024 content } 2>/dev/null
} always {
[[ -n $fd ]] && exec {fd}>&-
}
local MATCH
# For each line, keep the first word and strip the optional prefix ($2).
local versions=(${(@)${(f)content}/(#m)*/${MATCH[(w)1]#$2}})
_p9k__ret=${(j.:.)versions}
_p9k__read_pyenv_like_version_file_cache[$1:$2]=$stat[1]:$_p9k__ret
fi
[[ -n $_p9k__ret ]]
}
function _p9k_pyenv_global_version() {
  # Resolve the global pyenv version into _p9k__ret; "system" is the
  # fallback when the version file is missing or empty.
  local root=${PYENV_ROOT:-$HOME/.pyenv}
  if ! _p9k_read_pyenv_like_version_file $root/version python-; then
    _p9k__ret=system
  fi
}
################################################################
# Segment to display pyenv information
# https://github.com/pyenv/pyenv#choosing-the-python-version
prompt_pyenv() {
  # Displays the effective pyenv Python version, honoring the configured
  # sources (shell/local/global) and visibility options.
  unset P9K_PYENV_PYTHON_VERSION _p9k__pyenv_version
  # Version from $PYENV_VERSION (`pyenv shell`), "python-" prefixes removed.
  local v=${(j.:.)${(@)${(s.:.)PYENV_VERSION}#python-}}
  if [[ -n $v ]]; then
    (( ${_POWERLEVEL9K_PYENV_SOURCES[(I)shell]} )) || return
  else
    (( ${_POWERLEVEL9K_PYENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # `pyenv local` lookup rooted at $PYENV_DIR when it points elsewhere.
    if [[ $PYENV_DIR != (|.) ]]; then
      [[ $PYENV_DIR == /* ]] && local dir=$PYENV_DIR || local dir="$_p9k__cwd_a/$PYENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        # Walk up from $PYENV_DIR looking for .python-version.
        while true; do
          if _p9k_read_pyenv_like_version_file $dir/.python-version python-; then
            (( ${_POWERLEVEL9K_PYENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # `pyenv local` lookup from the current directory upwards.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .python-version
      local -i idx=$?
      if (( idx )) && _p9k_read_pyenv_like_version_file $_p9k__parent_dirs[idx]/.python-version python-; then
        (( ${_POWERLEVEL9K_PYENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Fall back to the global version only when configured to always show.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_PYENV_SOURCES[(I)global]} )) || return
      _p9k_pyenv_global_version
    fi
    v=$_p9k__ret
  fi
  # Hide the segment when the effective version equals the global one.
  if (( !_POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_pyenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_PYENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  # Resolve symlinked/aliased version names to the installed version dir and
  # export the real Python version for content expansions.
  local versions=${PYENV_ROOT:-$HOME/.pyenv}/versions
  versions=${versions:A}
  local version=$versions/$v
  version=${version:A}
  if [[ $version == (#b)$versions/([^/]##)* ]]; then
    typeset -g P9K_PYENV_PYTHON_VERSION=$match[1]
  fi
  typeset -g _p9k__pyenv_version=$v
  _p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'PYTHON_ICON' 0 '' "${v//\%/%%}"
}
_p9k_prompt_pyenv_init() {
  # Show only when pyenv is available either as a command or as a function.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[pyenv]:-${${+functions[pyenv]}:#0}}'
}
function _p9k_goenv_global_version() {
  # Resolve the global goenv version into _p9k__ret; "system" is the
  # fallback when the version file is missing or empty.
  local root=${GOENV_ROOT:-$HOME/.goenv}
  if ! _p9k_read_pyenv_like_version_file $root/version go-; then
    _p9k__ret=system
  fi
}
################################################################
# Segment to display goenv information: https://github.com/syndbg/goenv
prompt_goenv() {
  # Displays the effective goenv Go version; mirrors prompt_pyenv's logic
  # for sources (shell/local/global) and visibility options.
  # Version from $GOENV_VERSION (`goenv shell`), "go-" prefixes removed.
  local v=${(j.:.)${(@)${(s.:.)GOENV_VERSION}#go-}}
  if [[ -n $v ]]; then
    (( ${_POWERLEVEL9K_GOENV_SOURCES[(I)shell]} )) || return
  else
    (( ${_POWERLEVEL9K_GOENV_SOURCES[(I)local|global]} )) || return
    _p9k__ret=
    # `goenv local` lookup rooted at $GOENV_DIR when it points elsewhere.
    if [[ $GOENV_DIR != (|.) ]]; then
      [[ $GOENV_DIR == /* ]] && local dir=$GOENV_DIR || local dir="$_p9k__cwd_a/$GOENV_DIR"
      dir=${dir:A}
      if [[ $dir != $_p9k__cwd_a ]]; then
        # Walk up from $GOENV_DIR looking for .go-version.
        while true; do
          if _p9k_read_pyenv_like_version_file $dir/.go-version go-; then
            (( ${_POWERLEVEL9K_GOENV_SOURCES[(I)local]} )) || return
            break
          fi
          [[ $dir == (/|.) ]] && break
          dir=${dir:h}
        done
      fi
    fi
    # `goenv local` lookup from the current directory upwards.
    if [[ -z $_p9k__ret ]]; then
      _p9k_upglob .go-version
      local -i idx=$?
      if (( idx )) && _p9k_read_pyenv_like_version_file $_p9k__parent_dirs[idx]/.go-version go-; then
        (( ${_POWERLEVEL9K_GOENV_SOURCES[(I)local]} )) || return
      else
        _p9k__ret=
      fi
    fi
    # Fall back to the global version only when configured to always show.
    if [[ -z $_p9k__ret ]]; then
      (( _POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_GOENV_SOURCES[(I)global]} )) || return
      _p9k_goenv_global_version
    fi
    v=$_p9k__ret
  fi
  # Hide the segment when the effective version equals the global one.
  if (( !_POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW )); then
    _p9k_goenv_global_version
    [[ $v == $_p9k__ret ]] && return
  fi
  if (( !_POWERLEVEL9K_GOENV_SHOW_SYSTEM )); then
    [[ $v == system ]] && return
  fi
  _p9k_prompt_segment "$0" "blue" "$_p9k_color1" 'GO_ICON' 0 '' "${v//\%/%%}"
}
_p9k_prompt_goenv_init() {
  # Show only when goenv is available either as a command or as a function.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[goenv]:-${${+functions[goenv]}:#0}}'
}
################################################################
# Display openfoam information
prompt_openfoam() {
  # "F-X" marks the foam-extend fork (WM_FORK set); plain OpenFOAM is "OF".
  local label
  if [[ -n "$WM_FORK" ]]; then
    label='F-X'
  else
    label='OF'
  fi
  _p9k_prompt_segment "$0" "yellow" "$_p9k_color1" '' 0 '' "$label: ${${WM_PROJECT_VERSION:t}//\%/%%}"
}
_p9k_prompt_openfoam_init() {
  # Show only when an OpenFOAM environment is sourced.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$WM_PROJECT_VERSION'
}
################################################################
# Segment to display Swift version
prompt_swift_version() {
  # Cached output of `swift --version`; bail out if the command fails.
  _p9k_cached_cmd 0 swift --version || return
  # Extract the first dotted numeric token (e.g. "5.3.2") from the output.
  [[ $_p9k__ret == (#b)[^[:digit:]]#([[:digit:].]##)* ]] || return
  _p9k_prompt_segment "$0" "magenta" "white" 'SWIFT_ICON' 0 '' "${match[1]//\%/%%}"
}
_p9k_prompt_swift_version_init() {
  # Show only when the swift binary is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[swift]'
}
################################################################
# dir_writable: Display information about the user's permission to write in the current directory
prompt_dir_writable() {
  # Display a lock only when the current directory is not user-writable.
  [[ -w "$_p9k__cwd_a" ]] && return
  _p9k_prompt_segment "$0_FORBIDDEN" "red" "yellow1" 'LOCK_ICON' 0 '' ''
}
function instant_prompt_dir_writable() {
  # Instant prompt renders exactly what the regular segment would.
  prompt_dir_writable
}
################################################################
# Kubernetes Current Context/Namespace
prompt_kubecontext() {
  # Displays the current kubectl context/namespace, with optional shortening
  # of GKE/EKS cluster names and user-defined classes. Results are cached
  # keyed by the mtimes of all files listed in $KUBECONFIG.
  if ! _p9k_cache_stat_get $0 ${(s.:.)${KUBECONFIG:-$HOME/.kube/config}}; then
    local name namespace cluster user cloud_name cloud_account cloud_zone cloud_cluster text state
    # Anonymous function: parses `kubectl config view` output to find the
    # current context and its cluster/namespace/user fields.
    () {
      local cfg && cfg=(${(f)"$(kubectl config view -o=yaml 2>/dev/null)"}) || return
      local ctx=(${(@M)cfg:#current-context: [^\"\'\|\>]*})
      (( $#ctx == 1 )) || return
      name=${ctx[1]#current-context: }
      local -i pos=${cfg[(i)contexts:]}
      (( pos <= $#cfg )) || return
      shift $pos cfg
      pos=${cfg[(i) name: $name]}
      (( pos <= $#cfg )) || return
      (( --pos ))
      # Scan backwards from the "name:" line to the enclosing "- context:".
      for ((; pos > 0; --pos)); do
        local line=$cfg[pos]
        if [[ $line == '- context:' ]]; then
          return 0
        elif [[ $line == (#b)'    cluster: '([^\"\'\|\>]*) ]]; then
          cluster=$match[1]
        elif [[ $line == (#b)'    namespace: '([^\"\'\|\>]*) ]]; then
          namespace=$match[1]
        elif [[ $line == (#b)'    user: '([^\"\'\|\>]*) ]]; then
          user=$match[1]
        fi
      done
    }
    if [[ -n $name ]]; then
      : ${namespace:=default}
      # gke_my-account_us-east1-a_cluster-01
      # gke_my-account_us-east1_cluster-01
      if [[ $cluster == (#b)gke_(?*)_(asia|australia|europe|northamerica|southamerica|us)-([a-z]##<->)(-[a-z]|)_(?*) ]]; then
        cloud_name=gke
        cloud_account=$match[1]
        cloud_zone=$match[2]-$match[3]$match[4]
        cloud_cluster=$match[5]
        if (( ${_POWERLEVEL9K_KUBECONTEXT_SHORTEN[(I)gke]} )); then
          text=$cloud_cluster
        fi
      # arn:aws:eks:us-east-1:123456789012:cluster/cluster-01
      elif [[ $cluster == (#b)arn:aws:eks:([[:alnum:]-]##):([[:digit:]]##):cluster/(?*) ]]; then
        cloud_name=eks
        cloud_zone=$match[1]
        cloud_account=$match[2]
        cloud_cluster=$match[3]
        if (( ${_POWERLEVEL9K_KUBECONTEXT_SHORTEN[(I)eks]} )); then
          text=$cloud_cluster
        fi
      fi
      # Default display: "context" or "context/namespace".
      if [[ -z $text ]]; then
        text=$name
        if [[ $_POWERLEVEL9K_KUBECONTEXT_SHOW_DEFAULT_NAMESPACE == 1 || $namespace != (default|$name) ]]; then
          text+="/$namespace"
        fi
      fi
      # Apply the first matching user-defined class as a state suffix.
      local pat class
      for pat class in "${_POWERLEVEL9K_KUBECONTEXT_CLASSES[@]}"; do
        if [[ $text == ${~pat} ]]; then
          [[ -n $class ]] && state=_${${(U)class}//ฤฐ/I}
          break
        fi
      done
    fi
    _p9k_cache_stat_set "$name" "$namespace" "$cluster" "$user" "$cloud_name" "$cloud_account" "$cloud_zone" "$cloud_cluster" "$text" "$state"
  fi
  # Export all parsed fields for use in content/icon expansions.
  typeset -g P9K_KUBECONTEXT_NAME=$_p9k__cache_val[1]
  typeset -g P9K_KUBECONTEXT_NAMESPACE=$_p9k__cache_val[2]
  typeset -g P9K_KUBECONTEXT_CLUSTER=$_p9k__cache_val[3]
  typeset -g P9K_KUBECONTEXT_USER=$_p9k__cache_val[4]
  typeset -g P9K_KUBECONTEXT_CLOUD_NAME=$_p9k__cache_val[5]
  typeset -g P9K_KUBECONTEXT_CLOUD_ACCOUNT=$_p9k__cache_val[6]
  typeset -g P9K_KUBECONTEXT_CLOUD_ZONE=$_p9k__cache_val[7]
  typeset -g P9K_KUBECONTEXT_CLOUD_CLUSTER=$_p9k__cache_val[8]
  [[ -n $_p9k__cache_val[9] ]] || return
  _p9k_prompt_segment $0$_p9k__cache_val[10] magenta white KUBERNETES_ICON 0 '' "${_p9k__cache_val[9]//\%/%%}"
}
_p9k_prompt_kubecontext_init() {
  # Show only when kubectl is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[kubectl]'
}
################################################################
# Dropbox status
prompt_dropbox() {
  # The first column of `dropbox-cli filestatus .` is the directory path;
  # keep only the status (everything from the second field on). The space
  # delimiter must reach cut as its own quoted argument: the previous
  # `-d\ -f2-` collapsed into the single word `-d -f2-`, giving cut a
  # multi-character delimiter and making it fail.
  local dropbox_status="$(dropbox-cli filestatus . | cut -d' ' -f2-)"
  # Only show if the folder is tracked and dropbox is running
  if [[ "$dropbox_status" != 'unwatched' && "$dropbox_status" != "isn't running!" ]]; then
    # If "up to date", only show the icon
    if [[ "$dropbox_status" =~ 'up to date' ]]; then
      dropbox_status=""
    fi
    _p9k_prompt_segment "$0" "white" "blue" "DROPBOX_ICON" 0 '' "${dropbox_status//\%/%%}"
  fi
}
_p9k_prompt_dropbox_init() {
  # Show only when dropbox-cli is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[dropbox-cli]'
}
# print Java version number
prompt_java_version() {
  # Displays the Java version; optionally only inside Java project
  # directories, and optionally the full version string.
  if (( _POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY )); then
    # _p9k_upglob returns 0 when no project marker is found anywhere up the
    # directory tree, in which case the segment is hidden.
    _p9k_upglob 'pom.xml|build.gradle.kts|build.sbt|deps.edn|project.clj|build.boot|*.(java|class|jar|gradle|clj|cljc)' && return
  fi
  local java=$commands[java]
  # Cache keyed by mtimes of the java binary and $JAVA_HOME/release.
  if ! _p9k_cache_stat_get $0 $java ${JAVA_HOME:+$JAVA_HOME/release}; then
    local v
    # `java -fullversion` prints to stderr, e.g.: java full version "1.8.0_212-b03"
    v="$(java -fullversion 2>&1)" || v=
    v=${${v#*\"}%\"*}
    # Unless FULL is requested, trim the build suffix (everything after '-').
    (( _POWERLEVEL9K_JAVA_VERSION_FULL )) || v=${v%%-*}
    _p9k_cache_stat_set "${v//\%/%%}"
  fi
  [[ -n $_p9k__cache_val[1] ]] || return
  _p9k_prompt_segment "$0" "red" "white" "JAVA_ICON" 0 '' $_p9k__cache_val[1]
}
_p9k_prompt_java_version_init() {
  # Show only when java is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[java]'
}
prompt_azure() {
  # Displays the name of the default Azure subscription, cached by the
  # mtime of azureProfile.json.
  local cfg=${AZURE_CONFIG_DIR:-$HOME/.azure}/azureProfile.json
  if ! _p9k_cache_stat_get $0 $cfg; then
    local name
    # Prefer parsing the profile with jq (fast); fall back to `az` (slow).
    if (( $+commands[jq] )) && name="$(jq -r '[.subscriptions[]|select(.isDefault==true)|.name][]|strings' $cfg 2>/dev/null)"; then
      name=${name%%$'\n'*}
    elif ! name="$(az account show --query name --output tsv 2>/dev/null)"; then
      name=
    fi
    _p9k_cache_stat_set "$name"
  fi
  [[ -n $_p9k__cache_val[1] ]] || return
  _p9k_prompt_segment "$0" "blue" "white" "AZURE_ICON" 0 '' "${_p9k__cache_val[1]//\%/%%}"
}
_p9k_prompt_azure_init() {
  # Show only when the az CLI is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[az]'
}
prompt_gcloud() {
  # Emits two mutually exclusive variants of the gcloud segment: PARTIAL
  # while the project name is still being fetched asynchronously, and
  # COMPLETE once P9K_GCLOUD_PROJECT_NAME is known. The rendered text is
  # cached in _p9k__segment_val_* unless an upglob was involved.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment \
    $0_PARTIAL blue white GCLOUD_ICON 1 \
    '${${(M)${#P9K_GCLOUD_PROJECT_NAME}:#0}:+$P9K_GCLOUD_ACCOUNT$P9K_GCLOUD_PROJECT_ID}' \
    '${P9K_GCLOUD_ACCOUNT//\%/%%}:${P9K_GCLOUD_PROJECT_ID//\%/%%}'
  _p9k_prompt_segment \
    $0_COMPLETE blue white GCLOUD_ICON 1 \
    '$P9K_GCLOUD_PROJECT_NAME' \
    '${P9K_GCLOUD_ACCOUNT//\%/%%}:${P9K_GCLOUD_PROJECT_ID//\%/%%}'
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_gcloud_prefetch() {
  # Reads the active gcloud configuration, account and project id (cached by
  # config file mtime), then asynchronously resolves the project's display
  # name via the worker.
  # P9K_GCLOUD_PROJECT is deprecated; it's always equal to P9K_GCLOUD_PROJECT_ID
  unset P9K_GCLOUD_CONFIGURATION P9K_GCLOUD_ACCOUNT P9K_GCLOUD_PROJECT P9K_GCLOUD_PROJECT_ID P9K_GCLOUD_PROJECT_NAME
  (( $+commands[gcloud] )) || return
  _p9k_read_word ~/.config/gcloud/active_config || return
  P9K_GCLOUD_CONFIGURATION=$_p9k__ret
  if ! _p9k_cache_stat_get $0 ~/.config/gcloud/configurations/config_$P9K_GCLOUD_CONFIGURATION; then
    local pair account project_id
    # Fields are separated by \1 so that values may contain spaces.
    pair="$(gcloud config configurations describe $P9K_GCLOUD_CONFIGURATION \
      --format=$'value[separator="\1"](properties.core.account,properties.core.project)')"
    (( ! $? )) && IFS=$'\1' read account project_id <<<$pair
    _p9k_cache_stat_set "$account" "$project_id"
  fi
  if [[ -n $_p9k__cache_val[1] ]]; then
    P9K_GCLOUD_ACCOUNT=$_p9k__cache_val[1]
  fi
  if [[ -n $_p9k__cache_val[2] ]]; then
    P9K_GCLOUD_PROJECT_ID=$_p9k__cache_val[2]
    P9K_GCLOUD_PROJECT=$P9K_GCLOUD_PROJECT_ID  # deprecated parameter; set for backward compatibility
  fi
  if [[ $P9K_GCLOUD_CONFIGURATION == $_p9k_gcloud_configuration &&
        $P9K_GCLOUD_ACCOUNT == $_p9k_gcloud_account &&
        $P9K_GCLOUD_PROJECT_ID == $_p9k_gcloud_project_id ]]; then
    # Same config/account/project as last time: reuse the cached project
    # name and skip refetching unless the refresh interval has elapsed.
    [[ -n $_p9k_gcloud_project_name ]] && P9K_GCLOUD_PROJECT_NAME=$_p9k_gcloud_project_name
    if (( _POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS < 0 ||
          _p9k__gcloud_last_fetch_ts + _POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS > EPOCHREALTIME )); then
      return
    fi
  else
    _p9k_gcloud_configuration=$P9K_GCLOUD_CONFIGURATION
    _p9k_gcloud_account=$P9K_GCLOUD_ACCOUNT
    _p9k_gcloud_project_id=$P9K_GCLOUD_PROJECT_ID
    _p9k_gcloud_project_name=
    _p9k__state_dump_scheduled=1
  fi
  [[ -n $P9K_GCLOUD_CONFIGURATION && -n $P9K_GCLOUD_ACCOUNT && -n $P9K_GCLOUD_PROJECT_ID ]] || return
  # Record the *value* of the current time. The previous code assigned the
  # literal string EPOCHREALTIME; in the arithmetic refresh check above that
  # name re-expands to the current time on every evaluation, so the refresh
  # interval could never elapse and the project name was never refetched.
  _p9k__gcloud_last_fetch_ts=$EPOCHREALTIME
  _p9k_worker_invoke gcloud "_p9k_prompt_gcloud_compute ${(q)commands[gcloud]} ${(q)P9K_GCLOUD_CONFIGURATION} ${(q)P9K_GCLOUD_ACCOUNT} ${(q)P9K_GCLOUD_PROJECT_ID}"
}
_p9k_prompt_gcloud_init() {
  # Register the async prefetch and show only when gcloud is on $PATH.
  _p9k__async_segments_compute+=_p9k_gcloud_prefetch
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[gcloud]'
}
_p9k_prompt_gcloud_compute() {
  # Runs inside the worker: records the parameters and schedules the slow
  # `gcloud projects describe` in the async part.
  local gcloud=$1
  P9K_GCLOUD_CONFIGURATION=$2
  P9K_GCLOUD_ACCOUNT=$3
  P9K_GCLOUD_PROJECT_ID=$4
  _p9k_worker_async "_p9k_prompt_gcloud_async ${(q)gcloud}" _p9k_prompt_gcloud_sync
}
_p9k_prompt_gcloud_async() {
  # Slow part, runs in a background process: prints the project's display
  # name; its stdout becomes $REPLY in _p9k_prompt_gcloud_sync.
  local gcloud=$1
  $gcloud projects describe $P9K_GCLOUD_PROJECT_ID --configuration=$P9K_GCLOUD_CONFIGURATION \
    --account=$P9K_GCLOUD_ACCOUNT --format='value(name)'
}
_p9k_prompt_gcloud_sync() {
  # Forwards the fetched project name (trailing newline stripped) together
  # with the request parameters back to the main shell.
  _p9k_worker_reply "_p9k_prompt_gcloud_update ${(q)P9K_GCLOUD_CONFIGURATION} ${(q)P9K_GCLOUD_ACCOUNT} ${(q)P9K_GCLOUD_PROJECT_ID} ${(q)REPLY%$'\n'}"
}
_p9k_prompt_gcloud_update() {
  # Runs in the main shell. Applies the fetched project name ($4) only if
  # config/account/project ($1-$3) still match the current state and the
  # name actually changed; then triggers a prompt refresh.
  [[ $1 == $P9K_GCLOUD_CONFIGURATION &&
     $2 == $P9K_GCLOUD_ACCOUNT &&
     $3 == $P9K_GCLOUD_PROJECT_ID &&
     $4 != $P9K_GCLOUD_PROJECT_NAME ]] || return
  [[ -n $4 ]] && P9K_GCLOUD_PROJECT_NAME=$4 || unset P9K_GCLOUD_PROJECT_NAME
  _p9k_gcloud_project_name=$P9K_GCLOUD_PROJECT_NAME
  _p9k__state_dump_scheduled=1
  # Tell the worker-reply handler to redraw the prompt.
  reset=1
}
prompt_google_app_cred() {
  # Displays type/project/email from the service account key file pointed to
  # by $GOOGLE_APPLICATION_CREDENTIALS; cached by the file's mtime.
  unset P9K_GOOGLE_APP_CRED_{TYPE,PROJECT_ID,CLIENT_EMAIL}
  if ! _p9k_cache_stat_get $0 $GOOGLE_APPLICATION_CREDENTIALS; then
    local -a lines
    # The trailing 0 guarantees a fixed line count so parsing can be validated.
    local q='[.type//"", .project_id//"", .client_email//"", 0][]'
    if lines=("${(@f)$(jq -r $q <$GOOGLE_APPLICATION_CREDENTIALS 2>/dev/null)}") && (( $#lines == 4 )); then
      local text="${(j.:.)lines[1,-2]}"
      # Apply the first matching user-defined class as a state suffix.
      local pat class state
      for pat class in "${_POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES[@]}"; do
        if [[ $text == ${~pat} ]]; then
          [[ -n $class ]] && state=_${${(U)class}//ฤฐ/I}
          break
        fi
      done
      _p9k_cache_stat_set 1 "${(@)lines[1,-2]}" "$text" "$state"
    else
      # Cache negative results too, so jq isn't rerun on every prompt.
      _p9k_cache_stat_set 0
    fi
  fi
  (( _p9k__cache_val[1] )) || return
  P9K_GOOGLE_APP_CRED_TYPE=$_p9k__cache_val[2]
  P9K_GOOGLE_APP_CRED_PROJECT_ID=$_p9k__cache_val[3]
  P9K_GOOGLE_APP_CRED_CLIENT_EMAIL=$_p9k__cache_val[4]
  _p9k_prompt_segment "$0$_p9k__cache_val[6]" "blue" "white" "GCLOUD_ICON" 0 '' "$_p9k__cache_val[5]"
}
_p9k_prompt_google_app_cred_init() {
  # Show only when credentials are configured and jq is available to parse them.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${GOOGLE_APPLICATION_CREDENTIALS:+$commands[jq]}'
}
# Maps protobuf field numbers (array index) of the nordvpnd status response
# to the P9K_NORDVPN_* parameter that _p9k_fetch_nordvpn_status fills in.
typeset -gra __p9k_nordvpn_tag=(
  P9K_NORDVPN_STATUS
  P9K_NORDVPN_TECHNOLOGY
  P9K_NORDVPN_PROTOCOL
  P9K_NORDVPN_IP_ADDRESS
  P9K_NORDVPN_SERVER
  P9K_NORDVPN_COUNTRY
  P9K_NORDVPN_CITY
)
function _p9k_fetch_nordvpn_status() {
  # Speaks raw HTTP/2 + gRPC to the nordvpnd unix socket and decodes the
  # length-delimited protobuf status response into P9K_NORDVPN_* parameters
  # (see __p9k_nordvpn_tag for the field-number -> name mapping).
  setopt err_return
  local REPLY
  zsocket /run/nordvpnd.sock
  local -i fd=$REPLY
  {
    # Pre-built HTTP/2 connection preface + gRPC status request frames.
    >&$fd echo -nE - $'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n\0\0\0\4\1\0\0\0\0\0\0N\1\4\0\0\0\1\203\206E\221bA\226\223\325\\k\337\31i=LnH\323j?A\223\266\243y\270\303\fYmLT{$\357]R.\203\223\257_\213\35u\320b\r&=LMedz\212\232\312\310\264\307`+\210K\203@\2te\206M\2035\5\261\37\0\0\5\0\1\0\0\0\1\0\0\0\0\0'
    local tag len val
    local -i n
    {
      # Consume the response headers up to and including the first field's
      # tag byte; that tag is the last character of the line just read.
      # Previously this assigned the literal $'\n' (field 1 after the >>3
      # shift below), discarding the real tag. TODO: verify against a live
      # nordvpnd response.
      IFS='' read -t 0.25 -r tag
      tag=$tag[-1]
      while true; do
        # Protobuf tag byte: field number is in the upper 5 bits.
        tag=$((#tag))
        (( (tag >>= 3) && tag <= $#__p9k_nordvpn_tag )) || break
        tag=$__p9k_nordvpn_tag[tag]
        [[ -t $fd ]] || true  # https://www.zsh.org/mla/workers/2020/msg00207.html
        # One-byte length prefix, then the value itself.
        sysread -s 1 -t 0.25 len
        len=$((#len))
        val=
        while true; do
          (( len )) || break
          [[ -t $fd ]] || true  # https://www.zsh.org/mla/workers/2020/msg00207.html
          sysread -c n -s $len -t 0.25 'val[$#val+1]'
          len+=-n
        done
        typeset -g $tag=$val
        [[ -t $fd ]] || true  # https://www.zsh.org/mla/workers/2020/msg00207.html
        # Next field's tag byte.
        sysread -s 1 -t 0.25 tag
      done
    } <&$fd
  } always {
    exec {fd}>&-
  }
}
# Shows the state of NordVPN connection. Works only on Linux. Can be in the following 5 states.
#
# MISSING: NordVPN is not installed or nordvpnd is not running. By default the segment is not
# shown in this state. To make it visible, override POWERLEVEL9K_NORDVPN_MISSING_CONTENT_EXPANSION
# and/or POWERLEVEL9K_NORDVPN_MISSING_VISUAL_IDENTIFIER_EXPANSION.
#
# # Display this icon when NordVPN is not installed or nordvpnd is not running
# POWERLEVEL9K_NORDVPN_MISSING_VISUAL_IDENTIFIER_EXPANSION='โญ'
#
# CONNECTED: NordVPN is connected. By default shows NORDVPN_ICON as icon and country code as
# content. In addition, the following variables are set for the use by
# POWERLEVEL9K_NORDVPN_CONNECTED_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_NORDVPN_CONNECTED_CONTENT_EXPANSION:
#
# - P9K_NORDVPN_STATUS
# - P9K_NORDVPN_PROTOCOL
# - P9K_NORDVPN_TECHNOLOGY
# - P9K_NORDVPN_IP_ADDRESS
# - P9K_NORDVPN_SERVER
# - P9K_NORDVPN_COUNTRY
# - P9K_NORDVPN_CITY
# - P9K_NORDVPN_COUNTRY_CODE
#
# The last variable is trivially derived from P9K_NORDVPN_SERVER. The rest correspond to the output
# lines of `nordvpn status` command. Example of using these variables:
#
# # Display the name of the city where VPN servers are located when connected to NordVPN.
# POWERLEVEL9K_NORDVPN_CONNECTED_CONTENT_EXPANSION='${P9K_NORDVPN_CITY}'
#
# DISCONNECTED, CONNECTING, DISCONNECTING: NordVPN is disconnected/connecting/disconnecting. By
# default shows NORDVPN_ICON as icon and FAIL_ICON as content. In state CONNECTING the same
# P9K_NORDVPN_* variables are set as in CONNECTED. In states DISCONNECTED and DISCONNECTING only
# P9K_NORDVPN_STATUS is set. Example customizations:
#
# # Hide NordVPN segment when disconnected (segments with no icon and no content are not shown).
# POWERLEVEL9K_NORDVPN_DISCONNECTED_CONTENT_EXPANSION=
# POWERLEVEL9K_NORDVPN_DISCONNECTED_VISUAL_IDENTIFIER_EXPANSION=
#
# # When NordVPN is connecting, show country code on cyan background.
# POWERLEVEL9K_NORDVPN_CONNECTING_CONTENT_EXPANSION='${P9K_NORDVPN_COUNTRY_CODE}'
# POWERLEVEL9K_NORDVPN_CONNECTING_BACKGROUND=cyan
function prompt_nordvpn() {
  # Renders one of the five NordVPN states documented above: CONNECTED,
  # DISCONNECTED, CONNECTING, DISCONNECTING or MISSING.
  unset $__p9k_nordvpn_tag P9K_NORDVPN_COUNTRY_CODE
  if [[ -e /run/nordvpnd.sock ]]; then
    _p9k_fetch_nordvpn_status 2>/dev/null
    # Derive the country code from the server host name (e.g. us1234.nordvpn.com).
    if [[ $P9K_NORDVPN_SERVER == (#b)([[:alpha:]]##)[[:digit:]]##.nordvpn.com ]]; then
      typeset -g P9K_NORDVPN_COUNTRY_CODE=${${(U)match[1]}//ฤฐ/I}
    fi
  fi
  case $P9K_NORDVPN_STATUS in
    Connected)
      _p9k_prompt_segment $0_CONNECTED blue white NORDVPN_ICON 0 '' "$P9K_NORDVPN_COUNTRY_CODE";;
    Disconnected|Connecting|Disconnecting)
      local state=${${(U)P9K_NORDVPN_STATUS}//ฤฐ/I}
      _p9k_get_icon $0_$state FAIL_ICON
      _p9k_prompt_segment $0_$state yellow white NORDVPN_ICON 0 '' "$_p9k__ret";;
    *)
      # Daemon missing or unreachable; segment hidden unless user overrides.
      _p9k_prompt_segment $0_MISSING blue white '' 0 '' '';;
  esac
}
_p9k_prompt_nordvpn_init() {
  # Show only when the nordvpn CLI is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[nordvpn]'
}
function prompt_ranger() {
  # Shows the ranger shell nesting level ($RANGER_LEVEL).
  _p9k_prompt_segment $0 $_p9k_color1 yellow RANGER_ICON 0 '' $RANGER_LEVEL
}
_p9k_prompt_ranger_init() {
  # Show only inside a shell spawned from ranger.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$RANGER_LEVEL'
}
function instant_prompt_ranger() {
  # Instant-prompt variant: condition and content are deferred expansions.
  _p9k_prompt_segment prompt_ranger $_p9k_color1 yellow RANGER_ICON 1 '$RANGER_LEVEL' '$RANGER_LEVEL'
}
function prompt_midnight_commander() {
  # Icon-only segment shown inside a Midnight Commander subshell. The
  # rendered text is cached unless an upglob was involved.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0 $_p9k_color1 yellow MIDNIGHT_COMMANDER_ICON 0 '' ''
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_midnight_commander_init() {
  # Show only inside a Midnight Commander subshell ($MC_TMPDIR set).
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$MC_TMPDIR'
}
function instant_prompt_midnight_commander() {
  # Instant-prompt variant: condition is a deferred expansion of $MC_TMPDIR.
  _p9k_prompt_segment prompt_midnight_commander $_p9k_color1 yellow MIDNIGHT_COMMANDER_ICON 0 '$MC_TMPDIR' ''
}
function prompt_nnn() {
  # Shows the nnn shell nesting level ($NNNLVL).
  _p9k_prompt_segment $0 6 $_p9k_color1 NNN_ICON 0 '' $NNNLVL
}
_p9k_prompt_nnn_init() {
  # Show only inside a shell spawned from nnn (NNNLVL set and non-zero).
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${NNNLVL:#0}'
}
function instant_prompt_nnn() {
  # Instant-prompt variant: condition and content are deferred expansions.
  _p9k_prompt_segment prompt_nnn 6 $_p9k_color1 NNN_ICON 1 '${NNNLVL:#0}' '$NNNLVL'
}
function prompt_vim_shell() {
  # Icon-only segment shown inside a Vim :shell. The rendered text is
  # cached unless an upglob was involved.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0 green $_p9k_color1 VIM_ICON 0 '' ''
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_vim_shell_init() {
  # Show only inside a Vim subshell ($VIMRUNTIME set).
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$VIMRUNTIME'
}
function instant_prompt_vim_shell() {
  # Instant-prompt variant: condition is a deferred expansion of $VIMRUNTIME.
  _p9k_prompt_segment prompt_vim_shell green $_p9k_color1 VIM_ICON 0 '$VIMRUNTIME' ''
}
function prompt_nix_shell() {
  # Shows "pure" or "impure" inside a nix-shell; anything else renders empty.
  _p9k_prompt_segment $0 4 $_p9k_color1 NIX_SHELL_ICON 0 '' "${(M)IN_NIX_SHELL:#(pure|impure)}"
}
_p9k_prompt_nix_shell_init() {
  # Show only inside a nix-shell (IN_NIX_SHELL set and not "0").
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${IN_NIX_SHELL:#0}'
}
function instant_prompt_nix_shell() {
  # Instant-prompt variant: condition and content are deferred expansions.
  _p9k_prompt_segment prompt_nix_shell 4 $_p9k_color1 NIX_SHELL_ICON 1 '${IN_NIX_SHELL:#0}' '${(M)IN_NIX_SHELL:#(pure|impure)}'
}
function prompt_terraform() {
  # Shows the current Terraform workspace, taken from $TF_WORKSPACE or from
  # the environment file inside the data dir, with an optional user-defined
  # class suffix.
  local ws=$TF_WORKSPACE
  if [[ -z $TF_WORKSPACE ]]; then
    _p9k_read_word ${${TF_DATA_DIR:-.terraform}:A}/environment && ws=$_p9k__ret
  fi
  [[ -z $ws || $ws == default && $_POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT == 0 ]] && return
  # `state` must be declared local (as in the parallel kubecontext and
  # google_app_cred segments): without it the value leaked into the global
  # scope, so a class suffix from a previous prompt could be reused when no
  # class matches the current workspace.
  local pat class state
  for pat class in "${_POWERLEVEL9K_TERRAFORM_CLASSES[@]}"; do
    if [[ $ws == ${~pat} ]]; then
      [[ -n $class ]] && state=_${${(U)class}//ฤฐ/I}
      break
    fi
  done
  _p9k_prompt_segment "$0$state" $_p9k_color1 blue TERRAFORM_ICON 0 '' $ws
}
_p9k_prompt_terraform_init() {
  # Show only when terraform is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[terraform]'
}
function prompt_proxy() {
  # Shows the proxy host when all configured *_proxy variables agree.
  # -U deduplicates, so identical settings collapse to one element.
  local -U p=(
    $all_proxy $http_proxy $https_proxy $ftp_proxy
    $ALL_PROXY $HTTP_PROXY $HTTPS_PROXY $FTP_PROXY)
  # Strip scheme, credentials and path, leaving host[:port].
  p=(${(@)${(@)${(@)p#*://}##*@}%%/*})
  # Multiple distinct proxies: show the icon with no host.
  (( $#p == 1 )) || p=("")
  _p9k_prompt_segment $0 $_p9k_color1 blue PROXY_ICON 0 '' "$p[1]"
}
_p9k_prompt_proxy_init() {
  # Show only when at least one proxy environment variable is set.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$all_proxy$http_proxy$https_proxy$ftp_proxy$ALL_PROXY$HTTP_PROXY$HTTPS_PROXY$FTP_PROXY'
}
function prompt_direnv() {
  # Icon-only segment shown while direnv has an environment loaded; the
  # $DIRENV_DIR condition is deferred to prompt expansion time.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0 $_p9k_color1 yellow DIRENV_ICON 0 '$DIRENV_DIR' ''
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_direnv_init() {
  # DIRENV_DIR is set in a precmd hook. If our hook isn't the last, DIRENV_DIR might
  # still get set before prompt is expanded, hence the extra precmd_functions check.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${DIRENV_DIR:-${precmd_functions[-1]:#_p9k_precmd}}'
}
function instant_prompt_direnv() {
  # Render only when direnv is active and our precmd hook runs last, so
  # DIRENV_DIR cannot change after the instant prompt is generated.
  [[ -n $DIRENV_DIR && $precmd_functions[-1] == _p9k_precmd ]] || return 0
  _p9k_prompt_segment prompt_direnv $_p9k_color1 yellow DIRENV_ICON 0 '' ''
}
function _p9k_timewarrior_clear() {
  # Reset all cached timewarrior state; no-op when nothing is cached.
  [[ -n $_p9k_timewarrior_dir ]] || return 0
  _p9k_timewarrior_dir=
  _p9k_timewarrior_file_name=
  _p9k_timewarrior_file_mtime=0
  _p9k_timewarrior_dir_mtime=0
  unset _p9k_timewarrior_tags
  # Persist the cleared state on the next state dump.
  _p9k__state_dump_scheduled=1
}
function prompt_timewarrior() {
  # Shows the tags of the currently tracked timewarrior interval. State is
  # cached and invalidated by the mtimes of the data dir and newest data file.
  local -a stat
  local dir=${TIMEWARRIORDB:-~/.timewarrior}/data
  [[ $dir == $_p9k_timewarrior_dir ]] || _p9k_timewarrior_clear
  if [[ -n $_p9k_timewarrior_file_name ]]; then
    zstat -A stat +mtime -- $dir $_p9k_timewarrior_file_name 2>/dev/null || stat=()
    # Nothing changed since last time: reuse the cached tags (if tracking).
    if [[ $stat[1] == $_p9k_timewarrior_dir_mtime && $stat[2] == $_p9k_timewarrior_file_mtime ]]; then
      if (( $+_p9k_timewarrior_tags )); then
        _p9k_prompt_segment $0 grey 255 TIMEWARRIOR_ICON 0 '' "${_p9k_timewarrior_tags//\%/%%}"
      fi
      return
    fi
  fi
  if [[ ! -d $dir ]]; then
    _p9k_timewarrior_clear
    return
  fi
  _p9k_timewarrior_dir=$dir
  if [[ $stat[1] != $_p9k_timewarrior_dir_mtime ]]; then
    # Monthly data files are named YYYY-MM.data.
    local -a files=($dir/<->-<->.data(.N))
    if (( ! $#files )); then
      if (( $#stat )) || zstat -A stat +mtime -- $dir 2>/dev/null; then
        _p9k_timewarrior_dir_mtime=$stat[1]
        _p9k_timewarrior_file_mtime=$stat[1]
        _p9k_timewarrior_file_name=$dir  # sic
        unset _p9k_timewarrior_tags
        _p9k__state_dump_scheduled=1
      else
        _p9k_timewarrior_clear
      fi
      return
    fi
    # Track the lexicographically newest (i.e. most recent) data file.
    _p9k_timewarrior_file_name=${${(AO)files}[1]}
  fi
  if ! zstat -A stat +mtime -- $dir $_p9k_timewarrior_file_name 2>/dev/null; then
    _p9k_timewarrior_clear
    return
  fi
  _p9k_timewarrior_dir_mtime=$stat[1]
  _p9k_timewarrior_file_mtime=$stat[2]
  # An open interval is a trailing "inc <timestamp>" line, optionally
  # followed by "# tag1 tag2 ...".
  { local tail=${${(Af)"$(<$_p9k_timewarrior_file_name)"}[-1]} } 2>/dev/null
  if [[ $tail == (#b)'inc '[^\ ]##(|\ #\#(*)) ]]; then
    _p9k_timewarrior_tags=${${match[2]## #}%% #}
    _p9k_prompt_segment $0 grey 255 TIMEWARRIOR_ICON 0 '' "${_p9k_timewarrior_tags//\%/%%}"
  else
    unset _p9k_timewarrior_tags
  fi
  _p9k__state_dump_scheduled=1
}
function _p9k_prompt_timewarrior_init() {
  # Show only when the timew CLI is on $PATH.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[timew]'
}
function _p9k_taskwarrior_check_meta() {
  # Succeeds iff the cached taskwarrior configuration metadata is still
  # valid: the signature exists, no previously-missing file has appeared,
  # and the tracked files' mtimes plus $TASKRC/$TASKDATA are unchanged.
  [[ -n $_p9k_taskwarrior_meta_sig ]] || return
  # Any file that was absent when the cache was built must still be absent.
  [[ -z $^_p9k_taskwarrior_meta_non_files(#qN) ]] || return
  local -a stat
  if (( $#_p9k_taskwarrior_meta_files )); then
    zstat -A stat +mtime -- $_p9k_taskwarrior_meta_files 2>/dev/null || return
  fi
  [[ $_p9k_taskwarrior_meta_sig == ${(pj:\0:)stat}$'\0'$TASKRC$'\0'$TASKDATA ]] || return
}
function _p9k_taskwarrior_init_meta() {
  # Rebuilds the cached taskwarrior configuration metadata: the data
  # directory location and a signature over the config files' mtimes.
  # On any failure all cached state is wiped.
  local last_sig=$_p9k_taskwarrior_meta_sig
  {
    local cfg
    cfg="$(command task show data.location </dev/null 2>/dev/null)" || return
    local lines=(${(@M)${(f)cfg}:#data.location[[:space:]]##[^[:space:]]*})
    (( $#lines == 1 )) || return
    local dir=${lines[1]##data.location[[:space:]]#}
    : ${dir::=$~dir} # `task` can give us a path with `~` in it; expand it
    local -a stat files=(${TASKRC:-~/.taskrc})
    _p9k_taskwarrior_meta_files=($^files(N))
    # Files that don't currently exist; their appearance invalidates the cache.
    _p9k_taskwarrior_meta_non_files=(${files:|_p9k_taskwarrior_meta_files})
    if (( $#_p9k_taskwarrior_meta_files )); then
      zstat -A stat +mtime -- $_p9k_taskwarrior_meta_files 2>/dev/null || stat=(-1)
    fi
    _p9k_taskwarrior_meta_sig=${(pj:\0:)stat}$'\0'$TASKRC$'\0'$TASKDATA
    _p9k_taskwarrior_data_dir=$dir
  } always {
    if (( $? == 0 )); then
      _p9k__state_dump_scheduled=1
      return
    fi
    # Failure path: clear everything and mark taskwarrior non-functional.
    [[ -n $last_sig ]] && _p9k__state_dump_scheduled=1
    _p9k_taskwarrior_meta_files=()
    _p9k_taskwarrior_meta_non_files=()
    _p9k_taskwarrior_meta_sig=
    _p9k_taskwarrior_data_dir=
    _p9k__taskwarrior_functional=
  }
}
function _p9k_taskwarrior_check_data() {
  # Succeeds iff the cached task counters are still valid: data-file mtimes
  # and $TASKRC/$TASKDATA unchanged, and the next due deadline not reached.
  [[ -n $_p9k_taskwarrior_data_sig ]] || return
  # Any file that was absent when the cache was built must still be absent.
  [[ -z $^_p9k_taskwarrior_data_non_files(#qN) ]] || return
  local -a stat
  if (( $#_p9k_taskwarrior_data_files )); then
    zstat -A stat +mtime -- $_p9k_taskwarrior_data_files 2>/dev/null || return
  fi
  [[ $_p9k_taskwarrior_data_sig == ${(pj:\0:)stat}$'\0'$TASKRC$'\0'$TASKDATA ]] || return
  # A pending task may have become overdue; recompute once its due time passes.
  (( _p9k_taskwarrior_next_due == 0 || _p9k_taskwarrior_next_due > EPOCHSECONDS )) || return
}
function _p9k_taskwarrior_init_data() {
  # Rebuilds the cached PENDING/OVERDUE counters and computes when the next
  # pending task becomes overdue (so the cache can expire at that moment).
  local -a stat files=($_p9k_taskwarrior_data_dir/{pending,completed}.data)
  _p9k_taskwarrior_data_files=($^files(N))
  _p9k_taskwarrior_data_non_files=(${files:|_p9k_taskwarrior_data_files})
  if (( $#_p9k_taskwarrior_data_files )); then
    zstat -A stat +mtime -- $_p9k_taskwarrior_data_files 2>/dev/null || stat=(-1)
    _p9k_taskwarrior_data_sig=${(pj:\0:)stat}$'\0'
  else
    _p9k_taskwarrior_data_sig=
  fi
  # The data signature also covers the config metadata.
  _p9k_taskwarrior_data_files+=($_p9k_taskwarrior_meta_files)
  _p9k_taskwarrior_data_non_files+=($_p9k_taskwarrior_meta_non_files)
  _p9k_taskwarrior_data_sig+=$_p9k_taskwarrior_meta_sig
  local name val
  for name in PENDING OVERDUE; do
    val="$(command task +$name count </dev/null 2>/dev/null)" || continue
    [[ $val == <1-> ]] || continue
    _p9k_taskwarrior_counters[$name]=$val
  done
  _p9k_taskwarrior_next_due=0
  # Only tasks that are pending but not yet overdue can change the counters
  # by the mere passage of time.
  if (( _p9k_taskwarrior_counters[PENDING] > _p9k_taskwarrior_counters[OVERDUE] )); then
    local -a ts
    ts=($(command task +PENDING -OVERDUE list rc.verbose=nothing \
      rc.report.list.labels= rc.report.list.columns=due.epoch </dev/null 2>/dev/null)) || ts=()
    if (( $#ts )); then
      # Earliest due time; clamp to at least a minute in the future.
      _p9k_taskwarrior_next_due=${${(on)ts}[1]}
      (( _p9k_taskwarrior_next_due > EPOCHSECONDS )) || _p9k_taskwarrior_next_due=$((EPOCHSECONDS+60))
    fi
  fi
  _p9k__state_dump_scheduled=1
}
function prompt_taskwarrior() {
  # Shows taskwarrior counters as "!<overdue>/<pending>", refreshing the
  # cached counts only when the underlying data files change.
  unset P9K_TASKWARRIOR_PENDING_COUNT P9K_TASKWARRIOR_OVERDUE_COUNT
  if ! _p9k_taskwarrior_check_data; then
    _p9k_taskwarrior_data_files=()
    _p9k_taskwarrior_data_non_files=()
    _p9k_taskwarrior_data_sig=
    _p9k_taskwarrior_counters=()
    _p9k_taskwarrior_next_due=0
    _p9k_taskwarrior_check_meta || _p9k_taskwarrior_init_meta || return
    _p9k_taskwarrior_init_data
  fi
  (( $#_p9k_taskwarrior_counters )) || return
  local text c=$_p9k_taskwarrior_counters[OVERDUE]
  if [[ -n $c ]]; then
    typeset -g P9K_TASKWARRIOR_OVERDUE_COUNT=$c
    text+="!$c"
  fi
  c=$_p9k_taskwarrior_counters[PENDING]
  if [[ -n $c ]]; then
    typeset -g P9K_TASKWARRIOR_PENDING_COUNT=$c
    [[ -n $text ]] && text+='/'
    text+=$c
  fi
  [[ -n $text ]] || return
  _p9k_prompt_segment $0 6 $_p9k_color1 TASKWARRIOR_ICON 0 '' $text
}
function _p9k_prompt_taskwarrior_init() {
  # Show only when `task` is installed and was not marked non-functional.
  typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[task]:+$_p9k__taskwarrior_functional}'
}
prompt_wifi() {
  # Shows the last WiFi TX rate. All P9K_WIFI_* values are produced by the
  # async worker (_p9k_prompt_wifi_async); the condition and content are
  # deferred expansions, and the rendered text is cached unless an upglob
  # was involved.
  local -i len=$#_p9k__prompt _p9k__has_upglob
  _p9k_prompt_segment $0 green $_p9k_color1 WIFI_ICON 1 '$_p9k__wifi_on' '$P9K_WIFI_LAST_TX_RATE Mbps'
  (( _p9k__has_upglob )) || typeset -g "_p9k__segment_val_${_p9k__prompt_side}[_p9k__segment_index]"=$_p9k__prompt[len+1,-1]
}
_p9k_prompt_wifi_init() {
  # Supported backends: macOS `airport` or Linux /proc/net/wireless + `iw`.
  # When neither is available the segment is permanently hidden.
  if [[ -x /System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport ||
        -r /proc/net/wireless && -n $commands[iw] ]]; then
    typeset -g _p9k__wifi_on=
    typeset -g P9K_WIFI_LAST_TX_RATE=
    typeset -g P9K_WIFI_SSID=
    typeset -g P9K_WIFI_LINK_AUTH=
    typeset -g P9K_WIFI_RSSI=
    typeset -g P9K_WIFI_NOISE=
    typeset -g P9K_WIFI_BARS=
    _p9k__async_segments_compute+='_p9k_worker_invoke wifi _p9k_prompt_wifi_compute'
  else
    # '${:-}' always expands to empty, i.e. the segment never shows.
    typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${:-}'
  fi
}
_p9k_prompt_wifi_compute() {
  # Worker entry point: schedule the async probe and its sync callback.
  _p9k_worker_async _p9k_prompt_wifi_async _p9k_prompt_wifi_sync
}
_p9k_prompt_wifi_async() {
  # Probes the WiFi state (macOS airport or Linux /proc/net/wireless + iw)
  # and, when anything changed, prints parameter assignments for the main
  # shell to apply plus a prompt reset request.
  local airport=/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport
  local last_tx_rate ssid link_auth rssi noise bars on out line v state iface
  {
    if [[ -x $airport ]]; then
      # macOS: parse `airport -I` "key: value" lines.
      out="$($airport -I)" || return 0
      for line in ${${${(f)out}##[[:space:]]#}%%[[:space:]]#}; do
        v=${line#*: }
        case $line[1,-$#v-3] in
          agrCtlRSSI) rssi=$v;;
          agrCtlNoise) noise=$v;;
          state) state=$v;;
          lastTxRate) last_tx_rate=$v;;
          link\ auth) link_auth=$v;;
          SSID) ssid=$v;;
        esac
      done
      [[ $state == running && $rssi == (0|-<->) && $noise == (0|-<->) ]] || return 0
    elif [[ -r /proc/net/wireless && -n $commands[iw] ]]; then
      # Content example (https://github.com/romkatv/powerlevel10k/pull/973#issuecomment-680251804):
      #
      #   Inter-| sta-|   Quality        |   Discarded packets               | Missed | WE
      #    face | tus | link level noise |  nwid  crypt   frag  retry   misc | beacon | 22
      #   wlp3s0: 0000   58.  -52.  -256        0      0      0      0     76        0
      local -a lines
      lines=(${${(f)"$(</proc/net/wireless)"}:#*\|*}) || return 0
      # Exactly one wireless interface is supported.
      (( $#lines == 1 )) || return 0
      local parts=(${=lines[1]})
      iface=${parts[1]%:}
      state=${parts[2]}
      rssi=${parts[4]%.*}
      noise=${parts[5]%.*}
      [[ -n $iface && $state == 0## && $rssi == (0|-<->) && $noise == (0|-<->) ]] || return 0
      # Output example (https://github.com/romkatv/powerlevel10k/pull/973#issuecomment-680251804):
      #
      #   Connected to 74:83:c2:be:76:da (on wlp3s0)
      #         SSID: DailyGrindGuest1
      #         freq: 5745
      #         RX: 35192066 bytes (27041 packets)
      #         TX: 4090471 bytes (24287 packets)
      #         signal: -52 dBm
      #         rx bitrate: 243.0 MBit/s VHT-MCS 6 40MHz VHT-NSS 2
      #         tx bitrate: 240.0 MBit/s VHT-MCS 5 40MHz short GI VHT-NSS 2
      #
      #         bss flags:      short-slot-time
      #         dtim period:    1
      #         beacon int:     100
      lines=(${(f)"$(command iw dev $iface link)"}) || return 0
      local -a match mbegin mend
      for line in $lines; do
        if [[ $line == (#b)[[:space:]]#SSID:[[:space:]]##([^[:space:]]##) ]]; then
          ssid=$match[1]
        elif [[ $line == (#b)[[:space:]]#'tx bitrate:'[[:space:]]##([^[:space:]]##)' MBit/s'* ]]; then
          last_tx_rate=$match[1]
          # Trim trailing zeros and a dangling decimal point (e.g. 240.0 -> 240).
          [[ $last_tx_rate == <->.<-> ]] && last_tx_rate=${${last_tx_rate%%0#}%.}
        fi
      done
      [[ -n $ssid && -n $last_tx_rate ]] || return 0
    else
      return 0
    fi
    # https://www.speedguide.net/faq/how-to-read-rssisignal-and-snrnoise-ratings-440
    # http://www.wireless-nets.com/resources/tutorials/define_SNR_values.html
    local -i snr_margin='rssi - noise'
    if (( snr_margin >= 40 )); then
      bars=4
    elif (( snr_margin >= 25 )); then
      bars=3
    elif (( snr_margin >= 15 )); then
      bars=2
    elif (( snr_margin >= 10 )); then
      bars=1
    else
      bars=0
    fi
    on=1
  } always {
    # On any failure path, report the radio as off with empty stats.
    if (( ! on )); then
      rssi=
      noise=
      ssid=
      last_tx_rate=
      bars=
      link_auth=
    fi
    # Emit updates only when something actually changed, to avoid
    # unnecessary prompt redraws.
    if [[ $_p9k__wifi_on != $on ||
          $P9K_WIFI_LAST_TX_RATE != $last_tx_rate ||
          $P9K_WIFI_SSID != $ssid ||
          $P9K_WIFI_LINK_AUTH != $link_auth ||
          $P9K_WIFI_RSSI != $rssi ||
          $P9K_WIFI_NOISE != $noise ||
          $P9K_WIFI_BARS != $bars ]]; then
      _p9k__wifi_on=$on
      P9K_WIFI_LAST_TX_RATE=$last_tx_rate
      P9K_WIFI_SSID=$ssid
      P9K_WIFI_LINK_AUTH=$link_auth
      P9K_WIFI_RSSI=$rssi
      P9K_WIFI_NOISE=$noise
      P9K_WIFI_BARS=$bars
      _p9k_print_params \
        _p9k__wifi_on \
        P9K_WIFI_LAST_TX_RATE \
        P9K_WIFI_SSID \
        P9K_WIFI_LINK_AUTH \
        P9K_WIFI_RSSI \
        P9K_WIFI_NOISE \
        P9K_WIFI_BARS
      echo -E - 'reset=1'
    fi
  }
}
_p9k_prompt_wifi_sync() {
  # Applies the async probe's parameter assignments in the worker and
  # forwards them verbatim to the main shell.
  if [[ -n $REPLY ]]; then
    eval $REPLY
    _p9k_worker_reply $REPLY
  fi
}
# Returns 0 iff the cached asdf metadata (see _p9k_asdf_init_meta) is still
# valid: a signature exists, no path that was missing at scan time has since
# appeared, and the mtimes of all watched files still match the recorded
# signature. Non-zero return tells the caller to rescan.
function _p9k_asdf_check_meta() {
  [[ -n $_p9k_asdf_meta_sig ]] || return
  # (#qN) drops nonexistent paths from the expansion; if anything survives,
  # a previously missing path now exists and the cache is stale.
  [[ -z $^_p9k_asdf_meta_non_files(#qN) ]] || return
  local -a stat
  if (( $#_p9k_asdf_meta_files )); then
    zstat -A stat +mtime -- $_p9k_asdf_meta_files 2>/dev/null || return
  fi
  # Signature layout: config file path, data dir, then all mtimes, NUL-joined.
  [[ $_p9k_asdf_meta_sig == $ASDF_CONFIG_FILE$'\0'$ASDF_DATA_DIR$'\0'${(pj:\0:)stat} ]] || return
}
# Rescans asdf configuration and plugins, rebuilding the metadata caches:
#   _p9k_asdf_plugins   — plugin name => '|'-joined pattern of installed versions
#   _p9k_asdf_file_info — legacy file name => "plugin has_parse " pairs
#   _p9k_asdf_meta_*    — watched files / missing paths / mtime signature,
#                         consumed by _p9k_asdf_check_meta.
# On any failure the always-block wipes all caches so the next call rescans.
function _p9k_asdf_init_meta() {
  local last_sig=$_p9k_asdf_meta_sig
  {
    local -a files
    local -i legacy_enabled
    _p9k_asdf_plugins=()
    _p9k_asdf_file_info=()
    local cfg=${ASDF_CONFIG_FILE:-~/.asdfrc}
    files+=$cfg
    if [[ -f $cfg && -r $cfg ]]; then
      # Config parser in asdf is very strange.
      #
      # This gives "yes":
      #
      #   legacy_version_file = yes = no
      #
      # This gives "no":
      #
      #   legacy_version_file = yes
      #   legacy_version_file = yes
      #
      # We do the same.
      local lines=(${(@M)${(@)${(f)"$(<$cfg)"}%$'\r'}:#[[:space:]]#legacy_version_file[[:space:]]#=*})
      if [[ $#lines == 1 && ${${(s:=:)lines[1]}[2]} == [[:space:]]#yes[[:space:]]# ]]; then
        legacy_enabled=1
      fi
    fi
    local root=${ASDF_DATA_DIR:-~/.asdf}
    files+=$root/plugins
    if [[ -d $root/plugins ]]; then
      local plugin
      for plugin in $root/plugins/[^[:space:]]##(/N); do
        files+=$root/installs/${plugin:t}
        # Installed versions plus the implicit "system" version.
        local -aU installed=($root/installs/${plugin:t}/[^[:space:]]##(/N:t) system)
        # (b) quotes glob characters so version names are matched literally.
        _p9k_asdf_plugins[${plugin:t}]=${(j:|:)${(@b)installed}}
        (( legacy_enabled )) || continue
        if [[ ! -e $plugin/bin ]]; then
          files+=$plugin/bin
        else
          local list_names=$plugin/bin/list-legacy-filenames
          files+=$list_names
          if [[ -x $list_names ]]; then
            local parse=$plugin/bin/parse-legacy-file
            local -i has_parse=0
            files+=$parse
            [[ -x $parse ]] && has_parse=1
            local name
            for name in $($list_names 2>/dev/null); do
              # Skip names with slashes and the standard .tool-versions file.
              [[ $name == (*/*|.tool-versions) ]] && continue
              _p9k_asdf_file_info[$name]+="${plugin:t} $has_parse "
            done
          fi
        fi
      done
    fi
    # Split watched paths into those that exist (mtime-checked) and those that
    # do not (existence-checked by _p9k_asdf_check_meta).
    _p9k_asdf_meta_files=($^files(N))
    _p9k_asdf_meta_non_files=(${files:|_p9k_asdf_meta_files})
    local -a stat
    if (( $#_p9k_asdf_meta_files )); then
      zstat -A stat +mtime -- $_p9k_asdf_meta_files 2>/dev/null || return
    fi
    _p9k_asdf_meta_sig=$ASDF_CONFIG_FILE$'\0'$ASDF_DATA_DIR$'\0'${(pj:\0:)stat}
    _p9k__asdf_dir2files=()
    _p9k_asdf_file2versions=()
  } always {
    if (( $? == 0 )); then
      _p9k__state_dump_scheduled=1
      return
    fi
    # Failure: schedule a state dump only if there was something to invalidate,
    # then clear every cache so the next prompt triggers a full rescan.
    [[ -n $last_sig ]] && _p9k__state_dump_scheduled=1
    _p9k_asdf_meta_files=()
    _p9k_asdf_meta_non_files=()
    _p9k_asdf_meta_sig=
    _p9k_asdf_plugins=()
    _p9k_asdf_file_info=()
    _p9k__asdf_dir2files=()
    _p9k_asdf_file2versions=()
  }
}
# Usage: _p9k_asdf_parse_version_file <file> <is_legacy>
#
# Mutates `versions` on success.
#
# Parses a version file and records plugin=>version into the caller's
# `versions` map (without overwriting entries already present). Results are
# cached in _p9k_asdf_file2versions keyed by plugin and file, with the file's
# mtime as a cache-invalidation stamp. Returns non-zero only if the file
# cannot be stat'ed.
function _p9k_asdf_parse_version_file() {
  local file=$1
  local is_legacy=$2
  local -a stat
  zstat -A stat +mtime $file 2>/dev/null || return
  if (( is_legacy )); then
    # Legacy file (e.g. .ruby-version): may belong to several plugins, each
    # with or without a parse-legacy-file helper (has_parse flag).
    local plugin has_parse
    for plugin has_parse in $=_p9k_asdf_file_info[$file:t]; do
      local cached=$_p9k_asdf_file2versions[$plugin:$file]
      if [[ $cached == $stat[1]:* ]]; then
        # Cache hit: stored mtime matches the file's current mtime.
        local v=${cached#*:}
      else
        if (( has_parse )); then
          local v=($(${ASDF_DATA_DIR:-~/.asdf}/plugins/$plugin/bin/parse-legacy-file $file 2>/dev/null))
        else
          { local v=($(<$file)) } 2>/dev/null
          v=(${v%$'\r'})
        fi
        # Prefer a version that is actually installed; else take the first.
        v=${v[(r)$_p9k_asdf_plugins[$plugin]]:-$v[1]}
        _p9k_asdf_file2versions[$plugin:$file]=$stat[1]:"$v"
        _p9k__state_dump_scheduled=1
      fi
      [[ -n $v ]] && : ${versions[$plugin]="$v"}
    done
  else
    # .tool-versions format: "plugin version [fallback...]" per line; '#'
    # starts a comment. Cache stores NUL-joined (plugin, version) pairs.
    local cached=$_p9k_asdf_file2versions[:$file]
    if [[ $cached == $stat[1]:* ]]; then
      local file_versions=(${(0)${cached#*:}})
    else
      local file_versions=()
      { local lines=(${(@)${(@)${(f)"$(<$file)"}%$'\r'}/\#*}) } 2>/dev/null
      local line
      for line in $lines; do
        local words=($=line)
        (( $#words > 1 )) || continue
        local installed=$_p9k_asdf_plugins[$words[1]]
        [[ -n $installed ]] || continue
        # Prefer the first listed version that is installed, else the first.
        file_versions+=($words[1] ${${words:1}[(r)$installed]:-$words[2]})
      done
      _p9k_asdf_file2versions[:$file]=$stat[1]:${(pj:\0:)file_versions}
      _p9k__state_dump_scheduled=1
    fi
    local plugin version
    for plugin version in $file_versions; do
      : ${versions[$plugin]=$version}
    done
  fi
  return 0
}
# Renders one prompt segment per asdf plugin whose version should be shown,
# honoring per-plugin *_SOURCES / *_PROMPT_ALWAYS_SHOW / *_SHOW_SYSTEM
# overrides. Walks version files from the current directory up to (and
# including) ~, caching per-directory file lists keyed by directory mtime.
function prompt_asdf() {
  _p9k_asdf_check_meta || _p9k_asdf_init_meta || return
  local -A versions
  local -a stat
  zstat -A stat +mtime ~ 2>/dev/null || return
  local dirs=($_p9k__parent_dirs ~)
  local mtimes=($_p9k__parent_mtimes $stat[1])
  local -i has_global
  local elem
  # For each dir index, yield either "idx:..." (cache miss/stale: the stored
  # "mtime:files" entry did not match the current mtime) or the NUL-joined
  # cached file list. NOTE(review): relies on _p9k__parent_dirs/_p9k__parent_mtimes
  # being parallel arrays — maintained elsewhere; confirm before changing.
  for elem in ${(@)${:-{1..$#dirs}}/(#m)*/${${:-$MATCH:$_p9k__asdf_dir2files[$dirs[MATCH]]}#$MATCH:$mtimes[MATCH]:}}; do
    if [[ $elem == *:* ]]; then
      # Cache miss: rescan the directory and refresh the cache entry.
      local dir=$dirs[${elem%%:*}]
      zstat -A stat +mtime $dir 2>/dev/null || return
      local files=($dir/.tool-versions(N) $dir/${(k)^_p9k_asdf_file_info}(N))
      _p9k__asdf_dir2files[$dir]=$stat[1]:${(pj:\0:)files}
    else
      local files=(${(0)elem})
    fi
    # Entering ~: everything collected so far came from local (project) files.
    if [[ ${files[1]:h} == ~ ]]; then
      has_global=1
      local -A local_versions=(${(kv)versions})
      versions=()
    fi
    local file
    for file in $files; do
      # $? of this test doubles as the is_legacy argument (1 for legacy files).
      [[ $file == */.tool-versions ]]
      _p9k_asdf_parse_version_file $file $? || return
    done
  done
  if (( ! has_global )); then
    has_global=1
    local -A local_versions=(${(kv)versions})
    versions=()
  fi
  if [[ -r $ASDF_DEFAULT_TOOL_VERSIONS_FILENAME ]]; then
    _p9k_asdf_parse_version_file $ASDF_DEFAULT_TOOL_VERSIONS_FILENAME 0 || return
  fi
  local plugin
  for plugin in ${(k)_p9k_asdf_plugins}; do
    # Uppercase plugin name for option lookup; the ı->I replacement avoids
    # Turkish-locale casing surprises.
    local upper=${${(U)plugin//-/_}//ı/I}
    if (( $+parameters[_POWERLEVEL9K_ASDF_${upper}_SOURCES] )); then
      local sources=(${(P)${:-_POWERLEVEL9K_ASDF_${upper}_SOURCES}})
    else
      local sources=($_POWERLEVEL9K_ASDF_SOURCES)
    fi
    # Precedence: shell (ASDF_<TOOL>_VERSION) > local file > global file.
    local version="${(P)${:-ASDF_${upper}_VERSION}}"
    if [[ -n $version ]]; then
      (( $sources[(I)shell] )) || continue
    else
      version=$local_versions[$plugin]
      if [[ -n $version ]]; then
        (( $sources[(I)local] )) || continue
      else
        version=$versions[$plugin]
        [[ -n $version ]] || continue
        (( $sources[(I)global] )) || continue
      fi
    fi
    # Hide versions equal to the global default unless ALWAYS_SHOW is set.
    if [[ $version == $versions[$plugin] ]]; then
      if (( $+parameters[_POWERLEVEL9K_ASDF_${upper}_PROMPT_ALWAYS_SHOW] )); then
        (( _POWERLEVEL9K_ASDF_${upper}_PROMPT_ALWAYS_SHOW )) || continue
      else
        (( _POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW )) || continue
      fi
    fi
    if [[ $version == system ]]; then
      if (( $+parameters[_POWERLEVEL9K_ASDF_${upper}_SHOW_SYSTEM] )); then
        (( _POWERLEVEL9K_ASDF_${upper}_SHOW_SYSTEM )) || continue
      else
        (( _POWERLEVEL9K_ASDF_SHOW_SYSTEM )) || continue
      fi
    fi
    _p9k_get_icon $0_$upper ${upper}_ICON $plugin
    _p9k_prompt_segment $0_$upper green $_p9k_color1 $'\1'$_p9k__ret 0 '' ${version//\%/%%}
  done
}
_p9k_prompt_asdf_init() {
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='${commands[asdf]:-${${+functions[asdf]}:#0}}'
}
# Resolves the GHC compiler version for the given stack.yaml ($1) and leaves
# it in _p9k__ret. Results are cached via _p9k_cache_stat_get/_set keyed on
# the stack databases, so `stack` (which is slow) runs only when they change.
_p9k_haskell_stack_version() {
  if ! _p9k_cache_stat_get $0 $1 ${STACK_ROOT:-~/.stack}/{pantry/pantry.sqlite3,stack.sqlite3}; then
    local v
    # All the flags keep stack from touching the network or mutating state.
    v="$(STACK_YAML=$1 stack \
      --silent               \
      --no-install-ghc       \
      --skip-ghc-check       \
      --no-terminal          \
      --color=never          \
      --lock-file=read-only  \
      query compiler actual)" || v=
    _p9k_cache_stat_set "$v"
  fi
  _p9k__ret=$_p9k__cache_val[1]
}
# Renders the Haskell Stack GHC version segment. Source precedence:
# shell ($STACK_YAML), then a stack.yaml found in a parent directory (local),
# then the global project. Hidden when the version equals the global one,
# unless PROMPT_ALWAYS_SHOW is set.
prompt_haskell_stack() {
  if [[ -n $STACK_YAML ]]; then
    (( ${_POWERLEVEL9K_HASKELL_STACK_SOURCES[(I)shell]} )) || return
    _p9k_haskell_stack_version $STACK_YAML
  else
    (( ${_POWERLEVEL9K_HASKELL_STACK_SOURCES[(I)local|global]} )) || return
    if _p9k_upglob stack.yaml; then
      # No stack.yaml in any parent dir: fall back to the global project.
      (( _POWERLEVEL9K_HASKELL_STACK_PROMPT_ALWAYS_SHOW )) || return
      (( ${_POWERLEVEL9K_HASKELL_STACK_SOURCES[(I)global]} )) || return
      _p9k_haskell_stack_version ${STACK_ROOT:-~/.stack}/global-project/stack.yaml
    else
      # presumably _p9k_upglob returns (as $?) the 1-based index of the parent
      # dir containing stack.yaml — TODO confirm against its definition.
      local -i idx=$?
      (( ${_POWERLEVEL9K_HASKELL_STACK_SOURCES[(I)local]} )) || return
      _p9k_haskell_stack_version $_p9k__parent_dirs[idx]/stack.yaml
    fi
  fi
  [[ -n $_p9k__ret ]] || return
  local v=$_p9k__ret
  if (( !_POWERLEVEL9K_HASKELL_STACK_PROMPT_ALWAYS_SHOW )); then
    # Hide the segment when the local version matches the global one.
    _p9k_haskell_stack_version ${STACK_ROOT:-~/.stack}/global-project/stack.yaml
    [[ $v == $_p9k__ret ]] && return
  fi
  _p9k_prompt_segment "$0" "yellow" "$_p9k_color1" 'HASKELL_ICON' 0 '' "${v//\%/%%}"
}
_p9k_prompt_haskell_stack_init() {
typeset -g "_p9k__segment_cond_${_p9k__prompt_side}[_p9k__segment_index]"='$commands[stack]'
}
# Use two preexec hooks to survive https://github.com/MichaelAquilina/zsh-you-should-use with
# YSU_HARDCORE=1. See https://github.com/romkatv/powerlevel10k/issues/427.
#
# First preexec hook: undo prompt-time state before the user command runs —
# restore special parameters and remove the INT trap installed elsewhere.
_p9k_preexec1() {
  _p9k_restore_special_params
  unset __p9k_trapint
  trap - INT
}
# Second preexec hook: remember the command line being executed and start the
# execution timer.
_p9k_preexec2() {
  typeset -g _p9k__preexec_cmd=$2
  # NOTE(review): bare `EPOCHREALTIME` on the RHS is evaluated arithmetically,
  # which presumes _p9k__timer_start is declared as a numeric (typeset -F)
  # elsewhere — TODO confirm; otherwise this would store the literal string.
  _p9k__timer_start=EPOCHREALTIME
  P9K_TTY=old
}
# Initializes the state shared by the public_ip / ip / vpn_ip segments and
# schedules the async network-interface poller.
function _p9k_prompt_net_iface_init() {
  typeset -g _p9k__public_ip_vpn=
  typeset -g _p9k__public_ip_not_vpn=
  typeset -g P9K_IP_IP=
  typeset -g P9K_IP_INTERFACE=
  typeset -g P9K_IP_TX_BYTES=
  typeset -g P9K_IP_RX_BYTES=
  typeset -g P9K_IP_TX_RATE=
  typeset -g P9K_IP_RX_RATE=
  # NOTE(review): the name is missing a "k" (_p9__ vs _p9k__) but is used
  # consistently everywhere in this file — do not rename in isolation.
  typeset -g _p9__ip_timestamp=
  typeset -g _p9k__vpn_ip_ips=()
  # With no VPN interface pattern configured, "not on VPN" is always true.
  [[ -z $_POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE ]] && _p9k__public_ip_not_vpn=1
  _p9k__async_segments_compute+='_p9k_worker_invoke net_iface _p9k_prompt_net_iface_compute'
}
# reads `iface2ip` and sets `ifaces` and `ips`
function _p9k_prompt_net_iface_match() {
local iface_regex="^($1)\$" iface ip
ips=()
ifaces=()
for iface ip in "${(@)iface2ip}"; do
[[ $iface =~ $iface_regex ]] || continue
ifaces+=$iface
ips+=$ip
done
return $(($#ips == 0))
}
# Worker entry point: run the interface scan asynchronously and deliver the
# result to _p9k_prompt_net_iface_sync in the main shell.
function _p9k_prompt_net_iface_compute() {
  _p9k_worker_async _p9k_prompt_net_iface_async _p9k_prompt_net_iface_sync
}
# Async job: enumerates network interfaces and their IPv4 addresses, computes
# the public_ip / ip / vpn_ip segment state, and — if anything changed —
# prints a 0/1 flag (did the VPN IP list change?) followed by parameter
# assignments for _p9k_prompt_net_iface_sync to eval. Returns 1 when nothing
# changed so no output is sent.
function _p9k_prompt_net_iface_async() {
  # netstat -inbI en0
  local iface ip line var
  typeset -a iface2ip ips ifaces
  # Build iface2ip as flattened (interface, IPv4) pairs from `ifconfig`
  # output, or `ip -4 a show` when ifconfig is unavailable. Only interfaces
  # that are UP are recorded.
  if (( $+commands[ifconfig] )); then
    for line in ${(f)"$(command ifconfig 2>/dev/null)"}; do
      if [[ $line == (#b)([^[:space:]]##):[[:space:]]##flags=([[:xdigit:]]##)'<'* ]]; then
        # Odd flags value => IFF_UP bit set.
        [[ $match[2] == *[13579bdfBDF] ]] && iface=$match[1] || iface=
      elif [[ -n $iface && $line == (#b)[[:space:]]##inet[[:space:]]##([0-9.]##)* ]]; then
        iface2ip+=($iface $match[1])
        iface=
      fi
    done
  elif (( $+commands[ip] )); then
    for line in ${(f)"$(command ip -4 a show 2>/dev/null)"}; do
      if [[ $line == (#b)<->:[[:space:]]##([^:]##):[[:space:]]##\<([^\>]#)\>* ]]; then
        [[ ,$match[2], == *,UP,* ]] && iface=$match[1] || iface=
      elif [[ -n $iface && $line == (#b)[[:space:]]##inet[[:space:]]##([0-9.]##)* ]]; then
        iface2ip+=($iface $match[1])
        iface=
      fi
    done
  fi
  if _p9k_prompt_net_iface_match $_POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE; then
    local public_ip_vpn=1
    local public_ip_not_vpn=
  else
    local public_ip_vpn=
    local public_ip_not_vpn=1
  fi
  if _p9k_prompt_net_iface_match $_POWERLEVEL9K_IP_INTERFACE; then
    local ip_ip=$ips[1] ip_interface=$ifaces[1] ip_timestamp=$EPOCHREALTIME
    local ip_tx_bytes ip_rx_bytes ip_tx_rate ip_rx_rate
    # Gather cumulative TX/RX byte counters: sysfs on Linux/Android, netstat
    # on BSD/macOS. Left empty when unavailable.
    if [[ $_p9k_os == (Linux|Android) ]]; then
      if [[ -r /sys/class/net/$ifaces[1]/statistics/tx_bytes &&
            -r /sys/class/net/$ifaces[1]/statistics/rx_bytes ]]; then
        _p9k_read_file /sys/class/net/$ifaces[1]/statistics/tx_bytes &&
          [[ $_p9k__ret == <-> ]] && ip_tx_bytes=$_p9k__ret &&
          _p9k_read_file /sys/class/net/$ifaces[1]/statistics/rx_bytes &&
          [[ $_p9k__ret == <-> ]] && ip_rx_bytes=$_p9k__ret || { ip_tx_bytes=; ip_rx_bytes=; }
      fi
    elif [[ $_p9k_os == (BSD|OSX) && $+commands[netstat] == 1 ]]; then
      local -a lines
      if lines=(${(f)"$(netstat -inbI $ifaces[1])"}); then
        # Locate the Ibytes/Obytes columns by header name and sum all rows.
        local header=($=lines[1])
        local -i rx_idx=$header[(Ie)Ibytes]
        local -i tx_idx=$header[(Ie)Obytes]
        if (( rx_idx && tx_idx )); then
          ip_tx_bytes=0
          ip_rx_bytes=0
          for line in ${lines:1}; do
            (( ip_rx_bytes += ${line[(w)rx_idx]} ))
            (( ip_tx_bytes += ${line[(w)tx_idx]} ))
          done
        fi
      fi
    fi
    if [[ -n $ip_rx_bytes ]]; then
      if [[ $ip_ip == $P9K_IP_IP && $ifaces[1] == $P9K_IP_INTERFACE ]]; then
        # Same interface and IP as last sample: derive rates from the byte
        # deltas over the elapsed time.
        local -F t='ip_timestamp - _p9__ip_timestamp'
        if (( t <= 0 )); then
          ip_tx_rate=${P9K_IP_TX_RATE:-0 B/s}
          ip_rx_rate=${P9K_IP_RX_RATE:-0 B/s}
        else
          _p9k_human_readable_bytes $(((ip_tx_bytes - P9K_IP_TX_BYTES) / t))
          [[ $_p9k__ret == *B ]] && ip_tx_rate="$_p9k__ret[1,-2] B/s" || ip_tx_rate="$_p9k__ret[1,-2] $_p9k__ret[-1]iB/s"
          _p9k_human_readable_bytes $(((ip_rx_bytes - P9K_IP_RX_BYTES) / t))
          [[ $_p9k__ret == *B ]] && ip_rx_rate="$_p9k__ret[1,-2] B/s" || ip_rx_rate="$_p9k__ret[1,-2] $_p9k__ret[-1]iB/s"
        fi
      else
        ip_tx_rate='0 B/s'
        ip_rx_rate='0 B/s'
      fi
    fi
  else
    local ip_ip= ip_interface= ip_tx_bytes= ip_rx_bytes= ip_tx_rate= ip_rx_rate= ip_timestamp=
  fi
  if _p9k_prompt_net_iface_match $_POWERLEVEL9K_VPN_IP_INTERFACE; then
    if (( _POWERLEVEL9K_VPN_IP_SHOW_ALL )); then
      local vpn_ip_ips=($ips)
    else
      local vpn_ip_ips=($ips[1])
    fi
  else
    local vpn_ip_ips=()
  fi
  # Nothing changed since the last sample: produce no output.
  [[ $_p9k__public_ip_vpn == $public_ip_vpn &&
     $_p9k__public_ip_not_vpn == $public_ip_not_vpn &&
     $P9K_IP_IP == $ip_ip &&
     $P9K_IP_INTERFACE == $ip_interface &&
     $P9K_IP_TX_BYTES == $ip_tx_bytes &&
     $P9K_IP_RX_BYTES == $ip_rx_bytes &&
     $P9K_IP_TX_RATE == $ip_tx_rate &&
     $P9K_IP_RX_RATE == $ip_rx_rate &&
     "$_p9k__vpn_ip_ips" == "$vpn_ip_ips" ]] && return 1
  # First output character: 0/1 flag read by _p9k_prompt_net_iface_sync as
  # REPLY[1] — whether the VPN IP list changed (triggers a vpn_ip re-render).
  if [[ "$_p9k__vpn_ip_ips" == "$vpn_ip_ips" ]]; then
    echo -n 0
  else
    echo -n 1
  fi
  _p9k__public_ip_vpn=$public_ip_vpn
  _p9k__public_ip_not_vpn=$public_ip_not_vpn
  P9K_IP_IP=$ip_ip
  P9K_IP_INTERFACE=$ip_interface
  P9K_IP_TX_BYTES=$ip_tx_bytes
  P9K_IP_RX_BYTES=$ip_rx_bytes
  P9K_IP_TX_RATE=$ip_tx_rate
  P9K_IP_RX_RATE=$ip_rx_rate
  _p9__ip_timestamp=$ip_timestamp
  _p9k__vpn_ip_ips=($vpn_ip_ips)
  _p9k_print_params        \
    _p9k__public_ip_vpn     \
    _p9k__public_ip_not_vpn \
    P9K_IP_IP               \
    P9K_IP_INTERFACE        \
    P9K_IP_TX_BYTES         \
    P9K_IP_RX_BYTES         \
    P9K_IP_TX_RATE          \
    P9K_IP_RX_RATE          \
    _p9__ip_timestamp       \
    _p9k__vpn_ip_ips
  echo -E - 'reset=1'
}
# Runs in the main shell with the async job's output in $REPLY. The first
# character is a 0/1 flag (did the VPN IP list change?) emitted by
# _p9k_prompt_net_iface_async; the rest is a snippet of parameter assignments.
# Apply the snippet here, append a vpn_ip re-render request when the flag is
# set, and forward the result to the worker.
_p9k_prompt_net_iface_sync() {
  local -i render_vpn=$REPLY[1]
  REPLY[1]=''
  eval $REPLY
  if (( render_vpn )); then
    REPLY+='; _p9k_vpn_ip_render'
  fi
  _p9k_worker_reply $REPLY
}
# Assembles PROMPT and RPROMPT from the configured per-line segment lists.
# With $1 == "instant_" it builds the instant prompt variant instead, calling
# instant_prompt_* functions and discarding any output they flag as
# non-hermetic. For each line it renders the right side first (so its width
# is known), then the left side, then splices in the gap/truncation
# machinery as deferred prompt expansions.
function _p9k_set_prompt() {
  PROMPT=
  RPROMPT=
  [[ $1 == instant_ ]] || PROMPT+='${$((_p9k_on_expand()))+}'
  PROMPT+=$_p9k_prompt_prefix_left
  local -i _p9k__has_upglob
  local -i left_idx=1 right_idx=1 num_lines=$#_p9k_line_segments_left
  for _p9k__line_index in {1..$num_lines}; do
    local right=
    if (( !_POWERLEVEL9K_DISABLE_RPROMPT )); then
      _p9k__dir=
      _p9k__prompt=
      _p9k__segment_index=right_idx
      _p9k__prompt_side=right
      if [[ $1 == instant_ ]]; then
        # Instant prompt: only segments with an instant_prompt_* variant are
        # rendered; non-hermetic output is rolled back.
        for _p9k__segment_name in ${${(0)_p9k_line_segments_right[_p9k__line_index]}%_joined}; do
          if (( $+functions[instant_prompt_$_p9k__segment_name] )); then
            local disabled=_POWERLEVEL9K_${${(U)_p9k__segment_name}//ı/I}_DISABLED_DIR_PATTERN
            if [[ $_p9k__cwd != ${(P)~disabled} ]]; then
              local -i len=$#_p9k__prompt
              _p9k__non_hermetic_expansion=0
              instant_prompt_$_p9k__segment_name
              if (( _p9k__non_hermetic_expansion )); then
                _p9k__prompt[len+1,-1]=
              fi
            fi
          fi
          ((++_p9k__segment_index))
        done
      else
        for _p9k__segment_name in ${${(0)_p9k_line_segments_right[_p9k__line_index]}%_joined}; do
          # Skip the segment when its registered display condition is false
          # or the cwd matches the segment's DISABLED_DIR_PATTERN.
          local cond=$_p9k__segment_cond_right[_p9k__segment_index]
          if [[ -z $cond || -n ${(e)cond} ]]; then
            local disabled=_POWERLEVEL9K_${${(U)_p9k__segment_name}//ı/I}_DISABLED_DIR_PATTERN
            if [[ $_p9k__cwd != ${(P)~disabled} ]]; then
              # Precomputed segment value takes precedence over rendering.
              local val=$_p9k__segment_val_right[_p9k__segment_index]
              if [[ -n $val ]]; then
                _p9k__prompt+=$val
              else
                if [[ $_p9k__segment_name == custom_* ]]; then
                  _p9k_custom_prompt $_p9k__segment_name[8,-1]
                elif (( $+functions[prompt_$_p9k__segment_name] )); then
                  prompt_$_p9k__segment_name
                fi
              fi
            fi
          fi
          ((++_p9k__segment_index))
        done
      fi
      _p9k__prompt=${${_p9k__prompt//$' %{\b'/'%{%G'}//$' \b'}
      right_idx=_p9k__segment_index
      if [[ -n $_p9k__prompt || $_p9k_line_never_empty_right[_p9k__line_index] == 1 ]]; then
        right=$_p9k_line_prefix_right[_p9k__line_index]$_p9k__prompt$_p9k_line_suffix_right[_p9k__line_index]
      fi
    fi
    unset _p9k__dir
    # Left side: same structure as the right side above.
    _p9k__prompt=$_p9k_line_prefix_left[_p9k__line_index]
    _p9k__segment_index=left_idx
    _p9k__prompt_side=left
    if [[ $1 == instant_ ]]; then
      for _p9k__segment_name in ${${(0)_p9k_line_segments_left[_p9k__line_index]}%_joined}; do
        if (( $+functions[instant_prompt_$_p9k__segment_name] )); then
          local disabled=_POWERLEVEL9K_${${(U)_p9k__segment_name}//ı/I}_DISABLED_DIR_PATTERN
          if [[ $_p9k__cwd != ${(P)~disabled} ]]; then
            local -i len=$#_p9k__prompt
            _p9k__non_hermetic_expansion=0
            instant_prompt_$_p9k__segment_name
            if (( _p9k__non_hermetic_expansion )); then
              _p9k__prompt[len+1,-1]=
            fi
          fi
        fi
        ((++_p9k__segment_index))
      done
    else
      for _p9k__segment_name in ${${(0)_p9k_line_segments_left[_p9k__line_index]}%_joined}; do
        local cond=$_p9k__segment_cond_left[_p9k__segment_index]
        if [[ -z $cond || -n ${(e)cond} ]]; then
          local disabled=_POWERLEVEL9K_${${(U)_p9k__segment_name}//ı/I}_DISABLED_DIR_PATTERN
          if [[ $_p9k__cwd != ${(P)~disabled} ]]; then
            local val=$_p9k__segment_val_left[_p9k__segment_index]
            if [[ -n $val ]]; then
              _p9k__prompt+=$val
            else
              if [[ $_p9k__segment_name == custom_* ]]; then
                _p9k_custom_prompt $_p9k__segment_name[8,-1]
              elif (( $+functions[prompt_$_p9k__segment_name] )); then
                prompt_$_p9k__segment_name
              fi
            fi
          fi
        fi
        ((++_p9k__segment_index))
      done
    fi
    _p9k__prompt=${${_p9k__prompt//$' %{\b'/'%{%G'}//$' \b'}
    left_idx=_p9k__segment_index
    _p9k__prompt+=$_p9k_line_suffix_left[_p9k__line_index]
    if (( $+_p9k__dir || (_p9k__line_index != num_lines && $#right) )); then
      # Emit deferred (prompt-expansion-time) code that measures the left and
      # right parts and computes how much the dir segment must shrink
      # (_p9k__h) to honor the gap and the dir length limits.
      _p9k__prompt='${${:-${_p9k__d::=0}${_p9k__rprompt::='$right'}${_p9k__lprompt::='$_p9k__prompt'}}+}'
      _p9k__prompt+=$_p9k_gap_pre
      if (( $+_p9k__dir )); then
        if (( _p9k__line_index == num_lines && (_POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS > 0 || _POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT > 0) )); then
          local a=$_POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS
          local f=$((0.01*_POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT))'*_p9k__clm'
          _p9k__prompt+="\${\${_p9k__h::=$((($a<$f)*$f+($a>=$f)*$a))}+}"
        else
          _p9k__prompt+='${${_p9k__h::=0}+}'
        fi
        if [[ $_POWERLEVEL9K_DIR_MAX_LENGTH == <->('%'|) ]]; then
          local lim=
          if [[ $_POWERLEVEL9K_DIR_MAX_LENGTH[-1] == '%' ]]; then
            lim="$_p9k__dir_len-$((0.01*$_POWERLEVEL9K_DIR_MAX_LENGTH[1,-2]))*_p9k__clm"
          else
            lim=$((_p9k__dir_len-_POWERLEVEL9K_DIR_MAX_LENGTH))
            ((lim <= 0)) && lim=
          fi
          if [[ -n $lim ]]; then
            _p9k__prompt+='${${${$((_p9k__h<_p9k__m+'$lim')):#1}:-${_p9k__h::=$((_p9k__m+'$lim'))}}+}'
          fi
        fi
        _p9k__prompt+='${${_p9k__d::=$((_p9k__m-_p9k__h))}+}'
        _p9k__prompt+='${_p9k__lprompt/\%\{d\%\}*\%\{d\%\}/${_p9k__'$_p9k__line_index'ldir-'$_p9k__dir'}}'
        _p9k__prompt+='${${_p9k__m::=$((_p9k__d+_p9k__h))}+}'
      else
        _p9k__prompt+='${_p9k__lprompt}'
      fi
      ((_p9k__line_index != num_lines && $#right)) && _p9k__prompt+=$_p9k_line_gap_post[_p9k__line_index]
    fi
    if (( _p9k__line_index == num_lines )); then
      # Last line: the right side becomes the real RPROMPT; earlier lines
      # embed their right side inside PROMPT.
      [[ -n $right ]] && RPROMPT=$_p9k_prompt_prefix_right$right$_p9k_prompt_suffix_right
      _p9k__prompt='${_p9k__'$_p9k__line_index'-'$_p9k__prompt'}'$_p9k_prompt_suffix_left
      [[ $1 == instant_ ]] || PROMPT+=$_p9k__prompt
    else
      [[ -n $right ]] || _p9k__prompt+=$'\n'
      PROMPT+='${_p9k__'$_p9k__line_index'-'$_p9k__prompt'}'
    fi
  done
  _p9k__prompt_side=
  # Bound the caches so a long-lived shell cannot grow without limit.
  (( $#_p9k_cache < _POWERLEVEL9K_MAX_CACHE_SIZE )) || _p9k_cache=()
  (( $#_p9k__cache_ephemeral < _POWERLEVEL9K_MAX_CACHE_SIZE )) || _p9k__cache_ephemeral=()
  [[ -n $RPROMPT ]] || unset RPROMPT
}
# Generates the instant prompt and stores it in _p9k__instant_prompt as three
# \x1f-separated parts (PROMPT, last-line prompt, RPROMPT), while leaving the
# user's regular PROMPT/RPROMPT untouched.
_p9k_set_instant_prompt() {
  local orig_prompt=$PROMPT
  local orig_rprompt=$RPROMPT
  _p9k_set_prompt instant_
  typeset -g _p9k__instant_prompt=$PROMPT$'\x1f'$_p9k__prompt$'\x1f'$RPROMPT
  PROMPT=$orig_prompt
  RPROMPT=$orig_rprompt
  # Keep RPROMPT unset (rather than empty) when there was none to restore.
  [[ -n $RPROMPT ]] || unset RPROMPT
}
# Format version recorded into the generated instant prompt loader (it sets
# __p9k_instant_prompt_sourced to this value) — presumably bumped whenever
# the loader format changes so stale loaders can be detected.
typeset -gri __p9k_instant_prompt_version=30
# Writes the instant prompt machinery to disk:
#   $root_dir/p10k-instant-prompt-$user.zsh — a generated, zcompiled loader
#     script (sourced from the user's .zshrc) that prints a cached prompt
#     before the real initialization runs;
#   $prompt_dir/prompt-<len> — the cached prompt content for the current cwd,
#     keyed by pwd/ssh/privilege.
# The loader script body is built from the quoted strings below: the
# double-quoted part is interpolated now; single-quoted parts are emitted
# verbatim and run when the loader is sourced.
_p9k_dump_instant_prompt() {
  local user=${(%):-%n}
  local root_dir=${__p9k_dump_file:h}
  local prompt_dir=${root_dir}/p10k-$user
  local root_file=$root_dir/p10k-instant-prompt-$user.zsh
  local prompt_file=$prompt_dir/prompt-${#_p9k__cwd}
  [[ -d $prompt_dir ]] || mkdir -p $prompt_dir || return
  [[ -w $root_dir && -w $prompt_dir ]] || return
  # The loader only needs to be (re)generated when absent; prompt content is
  # refreshed below regardless.
  if [[ ! -e $root_file ]]; then
    local tmp=$root_file.tmp.$$
    local -i fd
    sysopen -a -m 600 -o creat,trunc -u fd -- $tmp || return
    {
      [[ $TERM_PROGRAM == Hyper ]] && local hyper='==' || local hyper='!='
      local -a display_v=("${_p9k__display_v[@]}")
      local -i i
      for ((i = 6; i <= $#display_v; i+=2)); do display_v[i]=show; done
      display_v[2]=hide
      display_v[4]=hide
      local gitstatus_dir=${${_POWERLEVEL9K_GITSTATUS_DIR:A}:-${__p9k_root_dir}/gitstatus}
      local gitstatus_header
      if [[ -r $gitstatus_dir/install.info ]]; then
        IFS= read -r gitstatus_header <$gitstatus_dir/install.info || return
      fi
      # Interpolated header of the loader: bails out unless the environment
      # matches the one the loader was generated in.
      >&$fd print -r -- "() {
  $__p9k_intro_no_locale
  (( ! \$+__p9k_instant_prompt_disabled )) || return
  typeset -gi __p9k_instant_prompt_disabled=1 __p9k_instant_prompt_sourced=$__p9k_instant_prompt_version
  [[ \$ZSH_VERSION == ${(q)ZSH_VERSION} && \$ZSH_PATCHLEVEL == ${(q)ZSH_PATCHLEVEL} &&
     \$TERM_PROGRAM $hyper 'Hyper' && \$+VTE_VERSION == $+VTE_VERSION &&
     \$POWERLEVEL9K_DISABLE_INSTANT_PROMPT != 'true' &&
     \$POWERLEVEL9K_INSTANT_PROMPT != 'off' ]] || { __p9k_instant_prompt_sourced=0; return 1; }
  typeset -g __p9k_instant_prompt_param_sig=${(q+)_p9k__param_sig}
  local gitstatus_dir=${(q)gitstatus_dir}
  local gitstatus_header=${(q)gitstatus_header}
  local -i ZLE_RPROMPT_INDENT=${ZLE_RPROMPT_INDENT:-1}
  local PROMPT_EOL_MARK=${(q)PROMPT_EOL_MARK-%B%S%#%s%b}
  [[ -n \$SSH_CLIENT || -n \$SSH_TTY || -n \$SSH_CONNECTION ]] && local ssh=1 || local ssh=0
  local cr=\$'\r' lf=\$'\n' esc=\$'\e[' rs=$'\x1e' us=$'\x1f'
  local -i height=$_POWERLEVEL9K_INSTANT_PROMPT_COMMAND_LINES
  local prompt_dir=${(q)prompt_dir}"
      # Verbatim part: sanity checks, locale fixup, prompt-file lookup.
      >&$fd print -r -- '
  local real_gitstatus_header
  if [[ -r $gitstatus_dir/install.info ]]; then
    IFS= read -r real_gitstatus_header <$gitstatus_dir/install.info || real_gitstatus_header=borked
  fi
  if [[ $real_gitstatus_header != $gitstatus_header ]]; then
    __p9k_instant_prompt_sourced=0
    return 1
  fi
  [[ $ZSH_SUBSHELL == 0 && -z $ZSH_SCRIPT && -z $ZSH_EXECUTION_STRING &&
     -t 0 && -t 1 && -t 2 && -o interactive && -o zle && -o no_xtrace ]] || return
  zmodload zsh/langinfo zsh/terminfo zsh/system || return
  if [[ $langinfo[CODESET] != (utf|UTF)(-|)8 ]]; then
    local loc_cmd=$commands[locale]
    [[ -z $loc_cmd ]] && loc_cmd='${(q)commands[locale]}'
    if [[ -x $loc_cmd ]]; then
      local -a locs
      if locs=(${(@M)$(locale -a 2>/dev/null):#*.(utf|UTF)(-|)8}) && (( $#locs )); then
        local loc=${locs[(r)(#i)C.UTF(-|)8]:-${locs[(r)(#i)en_US.UTF(-|)8]:-$locs[1]}}
        [[ -n $LC_ALL ]] && local LC_ALL=$loc || local LC_CTYPE=$loc
      fi
    fi
  fi
  (( terminfo[colors] == '${terminfo[colors]:-0}' )) || return
  (( $+terminfo[cuu] && $+terminfo[cuf] && $+terminfo[ed] && $+terminfo[sc] && $+terminfo[rc] )) || return
  local pwd=${(%):-%/}
  [[ $pwd == /* ]] || return
  local prompt_file=$prompt_dir/prompt-${#pwd}
  local key=$pwd:$ssh:${(%):-%#}
  local content
  { content="$(<$prompt_file)" } 2>/dev/null || return
  local tail=${content##*$rs$key$us}
  [[ ${#tail} != ${#content} ]] || return
  local P9K_PROMPT=instant
  if [[ $P9K_TTY != old ]]; then'
      # TTY age detection: mark the TTY as "new" (fresh terminal window) if
      # it was created recently enough.
      if (( _POWERLEVEL9K_NEW_TTY_MAX_AGE_SECONDS < 0 )); then
        >&$fd print -r -- '    typeset -gx P9K_TTY=new'
      else
        >&$fd print -r -- '
    typeset -gx P9K_TTY=old
    zmodload -F zsh/stat b:zstat || return
    zmodload zsh/datetime || return
    local -a stat
    if zstat -A stat +ctime -- $TTY 2>/dev/null &&
      (( EPOCHREALTIME - stat[1] < '$_POWERLEVEL9K_NEW_TTY_MAX_AGE_SECONDS' )); then
      P9K_TTY=new
    fi'
      fi
      # Embed a minimal p10k() stub so `p10k display` works before init.
      >&$fd print -r -- '  fi
  local -i _p9k__empty_line_i=3 _p9k__ruler_i=3
  local -A _p9k_display_k=('${(j: :)${(@q)${(kv)_p9k_display_k}}}')
  local -a _p9k__display_v=('${(j: :)${(@q)display_v}}')
  function p10k() {
    '$__p9k_intro'
    [[ $1 == display ]] || return
    shift
    local -i k dump
    local opt prev new pair list name var
    while getopts ":ha" opt; do
      case $opt in
        a) dump=1;;
        h) return 0;;
        ?) return 1;;
      esac
    done
    if (( dump )); then
      reply=()
      shift $((OPTIND-1))
      (( ARGC )) || set -- "*"
      for opt; do
        for k in ${(u@)_p9k_display_k[(I)$opt]:/(#m)*/$_p9k_display_k[$MATCH]}; do
          reply+=($_p9k__display_v[k,k+1])
        done
      done
      return 0
    fi
    for opt in "${@:$OPTIND}"; do
      pair=(${(s:=:)opt})
      list=(${(s:,:)${pair[2]}})
      if [[ ${(b)pair[1]} == $pair[1] ]]; then
        local ks=($_p9k_display_k[$pair[1]])
      else
        local ks=(${(u@)_p9k_display_k[(I)$pair[1]]:/(#m)*/$_p9k_display_k[$MATCH]})
      fi
      for k in $ks; do
        if (( $#list == 1 )); then
          [[ $_p9k__display_v[k+1] == $list[1] ]] && continue
          new=$list[1]
        else
          new=${list[list[(I)$_p9k__display_v[k+1]]+1]:-$list[1]}
          [[ $_p9k__display_v[k+1] == $new ]] && continue
        fi
        _p9k__display_v[k+1]=$new
        name=$_p9k__display_v[k]
        if [[ $name == (empty_line|ruler) ]]; then
          var=_p9k__${name}_i
          [[ $new == hide ]] && typeset -gi $var=3 || unset $var
        elif [[ $name == (#b)(<->)(*) ]]; then
          var=_p9k__${match[1]}${${${${match[2]//\/}/#left/l}/#right/r}/#gap/g}
          [[ $new == hide ]] && typeset -g $var= || unset $var
        fi
      done
    done
  }'
      if (( _POWERLEVEL9K_PROMPT_ADD_NEWLINE )); then
        >&$fd print -r -- '  [[ $P9K_TTY == old ]] && { unset _p9k__empty_line_i; _p9k__display_v[2]=print }'
      fi
      if (( _POWERLEVEL9K_SHOW_RULER )); then
        >&$fd print -r -- '[[ $P9K_TTY == old ]] && { unset _p9k__ruler_i; _p9k__display_v[4]=print }'
      fi
      # Inline the user's p10k-on-* hooks into the loader so they fire for
      # the instant prompt too, then undefine them after use.
      if (( $+functions[p10k-on-init] )); then
        >&$fd print -r -- '
  p10k-on-init() { '$functions[p10k-on-init]' }'
      fi
      if (( $+functions[p10k-on-pre-prompt] )); then
        >&$fd print -r -- '
  p10k-on-pre-prompt() { '$functions[p10k-on-pre-prompt]' }'
      fi
      if (( $+functions[p10k-on-post-prompt] )); then
        >&$fd print -r -- '
  p10k-on-post-prompt() { '$functions[p10k-on-post-prompt]' }'
      fi
      if (( $+functions[p10k-on-post-widget] )); then
        >&$fd print -r -- '
  p10k-on-post-widget() { '$functions[p10k-on-post-widget]' }'
      fi
      if (( $+functions[p10k-on-init] )); then
        >&$fd print -r -- '
  p10k-on-init'
      fi
      local pat idx var
      for pat idx var in $_p9k_show_on_command; do
        >&$fd print -r -- "
  local $var=
  _p9k__display_v[$idx]=hide"
      done
      if (( $+functions[p10k-on-pre-prompt] )); then
        >&$fd print -r -- '
  p10k-on-pre-prompt'
      fi
      if (( $+functions[p10k-on-init] )); then
        >&$fd print -r -- '
  unfunction p10k-on-init'
      fi
      if (( $+functions[p10k-on-pre-prompt] )); then
        >&$fd print -r -- '
  unfunction p10k-on-pre-prompt'
      fi
      if (( $+functions[p10k-on-post-prompt] )); then
        >&$fd print -r -- '
  unfunction p10k-on-post-prompt'
      fi
      if (( $+functions[p10k-on-post-widget] )); then
        >&$fd print -r -- '
  unfunction p10k-on-post-widget'
      fi
      >&$fd print -r -- '
  trap "unset -m _p9k__\*; unfunction p10k" EXIT
  local -a _p9k_t=("${(@ps:$us:)${tail%%$rs*}}")'
      # VTE/Hyper report a bogus 24x80 (40x100 for Hyper) size right after
      # startup; probe the real size with stty before trusting it.
      if [[ $+VTE_VERSION == 1 || $TERM_PROGRAM == Hyper ]]; then
        if [[ $TERM_PROGRAM == Hyper ]]; then
          local bad_lines=40 bad_columns=100
        else
          local bad_lines=24 bad_columns=80
        fi
        >&$fd print -r -- '
  if (( LINES == '$bad_lines' && COLUMNS == '$bad_columns' )); then
    zmodload -F zsh/stat b:zstat || return
    zmodload zsh/datetime || return
    local -a tty_ctime
    if ! zstat -A tty_ctime +ctime -- $TTY 2>/dev/null || (( tty_ctime[1] + 2 > EPOCHREALTIME )); then
      local -F deadline=$((EPOCHREALTIME+0.025))
      local tty_size
      while true; do
        if (( EPOCHREALTIME > deadline )) || ! tty_size="$(/bin/stty size 2>/dev/null)" || [[ $tty_size != <->" "<-> ]]; then
          (( $+_p9k__ruler_i )) || local -i _p9k__ruler_i=1
          local _p9k__g= _p9k__'$#_p9k_line_segments_right'r= _p9k__'$#_p9k_line_segments_right'r_frame=
          break
        fi
        if [[ $tty_size != "'$bad_lines' '$bad_columns'" ]]; then
          local lines_columns=(${=tty_size})
          local LINES=$lines_columns[1]
          local COLUMNS=$lines_columns[2]
          break
        fi
      done
    fi
  fi'
      fi
      # Replay the user's glob/array options around the (e) expansion.
      (( __p9k_ksh_arrays )) && >&$fd print -r -- '  setopt ksh_arrays'
      (( __p9k_sh_glob )) && >&$fd print -r -- '  setopt sh_glob'
      >&$fd print -r -- '  typeset -ga __p9k_used_instant_prompt=("${(@e)_p9k_t[-3,-1]}")'
      (( __p9k_ksh_arrays )) && >&$fd print -r -- '  unsetopt ksh_arrays'
      (( __p9k_sh_glob )) && >&$fd print -r -- '  unsetopt sh_glob'
      >&$fd print -r -- '
  (( height += ${#${__p9k_used_instant_prompt[1]//[^$lf]}} ))
  local _p9k__ret
  function _p9k_prompt_length() {
    local COLUMNS=1024
    local -i x y=$#1 m
    if (( y )); then
      while (( ${${(%):-$1%$y(l.1.0)}[-1]} )); do
        x=y
        (( y *= 2 ));
      done
      local xy
      while (( y > x + 1 )); do
        m=$(( x + (y - x) / 2 ))
        typeset ${${(%):-$1%$m(l.x.y)}[-1]}=$m
      done
    fi
    _p9k__ret=$x
  }
  local out'
      [[ $+VTE_VERSION == 1 || $TERM_PROGRAM == Hyper ]] && >&$fd print -r -- '  if (( ! $+_p9k__g )); then'
      >&$fd print -r -- '
    local mark=${(e)PROMPT_EOL_MARK}
    [[ $mark == "%B%S%#%s%b" ]] && _p9k__ret=1 || _p9k_prompt_length $mark
    local -i fill=$((COLUMNS > _p9k__ret ? COLUMNS - _p9k__ret : 0))
    out+="${(%):-%b%k%f%s%u$mark${(pl.$fill.. .)}$cr%b%k%f%s%u%E}"'
      [[ $+VTE_VERSION == 1 || $TERM_PROGRAM == Hyper ]] && >&$fd print -r -- '  fi'
      # Final verbatim part: print the cached prompt, redirect stdout/stderr
      # to a capture file, and install cleanup hooks that restore the fds and
      # replay the captured output when initialization finishes.
      >&$fd print -r -- '
  out+="${(pl.$height..$lf.)}$esc${height}A$terminfo[sc]"
  out+=${(%):-"$__p9k_used_instant_prompt[1]$__p9k_used_instant_prompt[2]"}
  if [[ -n $__p9k_used_instant_prompt[3] ]]; then
    _p9k_prompt_length "$__p9k_used_instant_prompt[2]"
    local -i left_len=_p9k__ret
    _p9k_prompt_length "$__p9k_used_instant_prompt[3]"
    local -i gap=$((COLUMNS - left_len - _p9k__ret - ZLE_RPROMPT_INDENT))
    if (( gap >= 40 )); then
      out+="${(pl.$gap.. .)}${(%):-${__p9k_used_instant_prompt[3]}%b%k%f%s%u}$cr$esc${left_len}C"
    fi
  fi
  typeset -g __p9k_instant_prompt_output=${TMPDIR:-/tmp}/p10k-instant-prompt-output-${(%):-%n}-$$
  { echo -n > $__p9k_instant_prompt_output } || return
  print -rn -- "$out" || return
  local fd_null
  sysopen -ru fd_null /dev/null || return
  exec {__p9k_fd_0}<&0 {__p9k_fd_1}>&1 {__p9k_fd_2}>&2 0<&$fd_null 1>$__p9k_instant_prompt_output
  exec 2>&1 {fd_null}>&-
  typeset -gi __p9k_instant_prompt_active=1
  typeset -g __p9k_instant_prompt_dump_file=${XDG_CACHE_HOME:-~/.cache}/p10k-dump-${(%):-%n}.zsh
  if builtin source $__p9k_instant_prompt_dump_file 2>/dev/null && (( $+functions[_p9k_preinit] )); then
    _p9k_preinit
  fi
  function _p9k_instant_prompt_cleanup() {
    (( ZSH_SUBSHELL == 0 && ${+__p9k_instant_prompt_active} )) || return 0
    '$__p9k_intro_no_locale'
    unset __p9k_instant_prompt_active
    exec 0<&$__p9k_fd_0 1>&$__p9k_fd_1 2>&$__p9k_fd_2 {__p9k_fd_0}>&- {__p9k_fd_1}>&- {__p9k_fd_2}>&-
    unset __p9k_fd_0 __p9k_fd_1 __p9k_fd_2
    typeset -gi __p9k_instant_prompt_erased=1
    print -rn -- $terminfo[rc]${(%):-%b%k%f%s%u}$terminfo[ed]
    if [[ -s $__p9k_instant_prompt_output ]]; then
      command cat $__p9k_instant_prompt_output 2>/dev/null
      if (( $1 )); then
        local _p9k__ret mark="${(e)${PROMPT_EOL_MARK-%B%S%#%s%b}}"
        _p9k_prompt_length $mark
        local -i fill=$((COLUMNS > _p9k__ret ? COLUMNS - _p9k__ret : 0))
        echo -nE - "${(%):-%b%k%f%s%u$mark${(pl.$fill.. .)}$cr%b%k%f%s%u%E}"
      fi
    fi
    zshexit_functions=(${zshexit_functions:#_p9k_instant_prompt_cleanup})
    zmodload -F zsh/files b:zf_rm || return
    local user=${(%):-%n}
    local root_dir=${__p9k_instant_prompt_dump_file:h}
    zf_rm -f -- $__p9k_instant_prompt_output $__p9k_instant_prompt_dump_file{,.zwc} $root_dir/p10k-instant-prompt-$user.zsh{,.zwc} $root_dir/p10k-$user/prompt-*(N) 2>/dev/null
  }
  function _p9k_instant_prompt_precmd_first() {
    '$__p9k_intro'
    function _p9k_instant_prompt_sched_last() {
      (( ${+__p9k_instant_prompt_active} )) || return 0
      _p9k_instant_prompt_cleanup 1
      setopt no_local_options prompt_cr prompt_sp
    }
    zmodload zsh/sched
    sched +0 _p9k_instant_prompt_sched_last
    precmd_functions=(${(@)precmd_functions:#_p9k_instant_prompt_precmd_first})
  }
  zshexit_functions=(_p9k_instant_prompt_cleanup $zshexit_functions)
  precmd_functions=(_p9k_instant_prompt_precmd_first $precmd_functions)
  DISABLE_UPDATE_PROMPT=true
} && unsetopt prompt_cr prompt_sp || true'
    } always {
      exec {fd}>&-
    }
    # Atomically install the loader: write tmp, compile, move into place.
    {
      (( ! $? )) || return
      # `zf_mv -f src dst` fails on NTFS if `dst` is not writable, hence `zf_rm`.
      zf_rm -f -- $root_file.zwc || return
      zf_mv -f -- $tmp $root_file || return
      zcompile -R -- $tmp.zwc $root_file || return
      zf_mv -f -- $tmp.zwc $root_file.zwc || return
    } always {
      (( $? )) && zf_rm -f -- $tmp $tmp.zwc 2>/dev/null
    }
  fi
  # Append the current prompt record to the prompt file, truncating it first
  # if a record with the same signature is already present.
  local tmp=$prompt_file.tmp.$$
  zf_mv -f -- $prompt_file $tmp 2>/dev/null
  if [[ "$(<$tmp)" == *$'\x1e'$_p9k__instant_prompt_sig$'\x1f'* ]] 2>/dev/null; then
    echo -n >$tmp || return
  fi
  local -i fd
  sysopen -a -m 600 -o creat -u fd -- $tmp || return
  {
    {
      print -rnu $fd -- $'\x1e'$_p9k__instant_prompt_sig$'\x1f'${(pj:\x1f:)_p9k_t}$'\x1f'$_p9k__instant_prompt || return
    } always {
      exec {fd}>&-
    }
    zf_mv -f -- $tmp $prompt_file || return
  } always {
    (( $? )) && zf_rm -f -- $tmp 2>/dev/null
  }
}
# Snapshots of the user's sh_glob/ksh_arrays options; the generated instant
# prompt loader replays them around its (e) expansion (see
# _p9k_dump_instant_prompt).
typeset -gi __p9k_sh_glob
typeset -gi __p9k_ksh_arrays
# Status and pipestatus of the last command, recorded by the prompt hooks and
# consumed by _p9k_save_status below.
typeset -gi __p9k_new_status
typeset -ga __p9k_new_pipestatus
# Copies __p9k_new_status / __p9k_new_pipestatus into _p9k__status /
# _p9k__pipestatus. Pipestatus is only trusted (pipe=1) when the last command
# line actually looked like a pipeline; otherwise the single exit status is
# used, because zsh reports a stale pipestatus for non-pipeline commands.
_p9k_save_status() {
  local -i pipe
  if (( !$+_p9k__line_finished )); then
    : # SIGINT
  elif (( !$+_p9k__preexec_cmd )); then
    # Empty line, comment or parse error.
    #
    # This case is handled incorrectly:
    #
    #   true | false
    #   |
    #
    # Here status=1 and pipestatus=(0 1). Ideally we should ignore pipestatus but we won't.
    #
    # This works though (unless pipefail is set):
    #
    #   false | true
    #   |
    #
    # We get status=1 and pipestatus=(1 0) and correctly ignore pipestatus.
    (( _p9k__status == __p9k_new_status )) && return
  elif (( $__p9k_new_pipestatus[(I)$__p9k_new_status] )); then # just in case
    # Scan the tokenized command line for pipe operators to decide whether
    # pipestatus is meaningful.
    local cmd=(${(z)_p9k__preexec_cmd})
    if [[ $#cmd != 0 && $cmd[1] != '!' && ${(Q)cmd[1]} != coproc ]]; then
      local arg
      for arg in ${(z)_p9k__preexec_cmd}; do
        # '()' is for functions, *';' is for complex commands.
        if [[ $arg == ('()'|'&&'|'||'|'&'|'&|'|'&!'|*';') ]]; then
          pipe=0
          break
        elif [[ $arg == *('|'|'|&')* ]]; then
          pipe=1
        fi
      done
    fi
  fi
  _p9k__status=$__p9k_new_status
  if (( pipe )); then
    _p9k__pipestatus=($__p9k_new_pipestatus)
  else
    _p9k__pipestatus=($_p9k__status)
  fi
}
# Serializes the current Powerlevel10k configuration-derived state to
# $__p9k_dump_file and compiles it to word code (.zwc) so that future shells
# can restore it without re-running the expensive initialization. Writes go
# through a temporary file that is renamed into place.
function _p9k_dump_state() {
  local dir=${__p9k_dump_file:h}
  [[ -d $dir ]] || mkdir -p -- $dir || return
  [[ -w $dir ]] || return
  local tmp=$__p9k_dump_file.tmp.$$
  local -i fd
  sysopen -a -m 600 -o creat,trunc -u fd -- $tmp || return
  {
    {
      # The cached pattern/signature let _p9k_restore_state detect whether the
      # dump matches the current configuration.
      typeset -g __p9k_cached_param_pat=$_p9k__param_pat
      typeset -g __p9k_cached_param_sig=$_p9k__param_sig
      typeset -pm __p9k_cached_param_pat __p9k_cached_param_sig >&$fd || return
      unset __p9k_cached_param_pat __p9k_cached_param_sig
      (( $+_p9k_preinit )) && { print -r -- $_p9k_preinit >&$fd || return }
      print -r -- '_p9k_restore_state_impl() {' >&$fd || return
      typeset -pm '_POWERLEVEL9K_*|_p9k_[^_]*|icons|OS|DEFAULT_COLOR|DEFAULT_COLOR_INVERTED' >&$fd || return
      print -r -- '}' >&$fd || return
    } always {
      exec {fd}>&-
    }
    # `zf_mv -f src dst` fails on NTFS if `dst` is not writable, hence `zf_rm`.
    zf_rm -f -- $__p9k_dump_file.zwc || return
    zf_mv -f -- $tmp $__p9k_dump_file || return
    zcompile -R -- $tmp.zwc $__p9k_dump_file || return
    zf_mv -f -- $tmp.zwc $__p9k_dump_file.zwc || return
  } always {
    # On any failure, clean up the temporary files.
    (( $? )) && zf_rm -f -- $tmp $tmp.zwc 2>/dev/null
  }
}
# Removes the cached instant-prompt files belonging to the current user,
# including the compiled .zwc copy and all per-prompt cache entries.
function _p9k_delete_instant_prompt() {
  local me=${(%):-%n}
  local dir=${__p9k_dump_file:h}
  zf_rm -f -- $dir/p10k-instant-prompt-$me.zsh{,.zwc} ${dir}/p10k-$me/prompt-*(N) 2>/dev/null
}
# Restores previously dumped state if the dump's parameter signature matches
# the current configuration; on mismatch or failure, deletes every artifact
# derived from the stale dump (state file, instant prompt, gitstatus daemon).
function _p9k_restore_state() {
  {
    [[ $__p9k_cached_param_pat == $_p9k__param_pat && $__p9k_cached_param_sig == $_p9k__param_sig ]] || return
    (( $+functions[_p9k_restore_state_impl] )) || return
    _p9k_restore_state_impl
    return 0
  } always {
    if (( $? )); then
      # Stale or incompatible dump: drop everything derived from it.
      if (( $+functions[_p9k_preinit] )); then
        unfunction _p9k_preinit
        (( $+functions[gitstatus_stop_p9k_] )) && gitstatus_stop_p9k_ POWERLEVEL9K
      fi
      _p9k_delete_instant_prompt
      zf_rm -f -- $__p9k_dump_file{,.zwc} 2>/dev/null
    elif [[ $__p9k_instant_prompt_param_sig != $_p9k__param_sig ]]; then
      # State is fine but the instant prompt was built from different
      # parameters: invalidate only the instant prompt cache.
      _p9k_delete_instant_prompt
      _p9k_dumped_instant_prompt_sigs=()
    fi
    unset __p9k_cached_param_sig
  }
}
# Tears down the instant prompt: restores the real stdin/stdout/stderr that
# were stashed while the instant prompt was active, replays any console output
# captured during initialization, and warns the user when that output (or a
# lingering prompt_cr) indicates a misconfigured ~/.zshrc.
#
# Fixes two user-facing message defects: "option gets sets" -> "gets set" and
# "%Bquckly%b" -> "%Bquickly%b".
function _p9k_clear_instant_prompt() {
  if (( $+__p9k_fd_0 )); then
    exec 0<&$__p9k_fd_0 {__p9k_fd_0}>&-
    unset __p9k_fd_0
  fi
  exec 1>&$__p9k_fd_1 2>&$__p9k_fd_2 {__p9k_fd_1}>&- {__p9k_fd_2}>&-
  unset __p9k_fd_1 __p9k_fd_2
  zshexit_functions=(${zshexit_functions:#_p9k_instant_prompt_cleanup})
  if (( _p9k__can_hide_cursor )); then
    echoti civis
    _p9k__cursor_hidden=1
  fi
  if [[ -s $__p9k_instant_prompt_output ]]; then
    {
      local content
      [[ $_POWERLEVEL9K_INSTANT_PROMPT == verbose ]] && content="$(<$__p9k_instant_prompt_output)"
      local mark="${(e)${PROMPT_EOL_MARK-%B%S%#%s%b}}"
      _p9k_prompt_length $mark
      local -i fill=$((COLUMNS > _p9k__ret ? COLUMNS - _p9k__ret : 0))
      local cr=$'\r'
      local sp="${(%):-%b%k%f%s%u$mark${(pl.$fill.. .)}$cr%b%k%f%s%u%E}"
      print -rn -- $terminfo[rc]${(%):-%b%k%f%s%u}$terminfo[ed]
      # Strip escape sequences that terminals are expected to emit; whatever
      # remains is unexpected console output produced during initialization.
      local unexpected=${${${(S)content//$'\e[?'<->'c'}//$'\e['<->' q'}//$'\e'[^$'\a\e']#($'\a'|$'\e\\')}
      if [[ -n $unexpected ]]; then
        local omz1='[Oh My Zsh] Would you like to update? [Y/n]: '
        local omz2='Updating Oh My Zsh'
        local omz3='https://shop.planetargon.com/collections/oh-my-zsh'
        local omz4='There was an error updating. Try again later?'
        # Known-benign Oh My Zsh updater output does not trigger the warning.
        if [[ $unexpected != ($omz1|)$omz2*($omz3|$omz4)[^$'\n']#($'\n'|) ]]; then
          echo -E - ""
          echo -E - "${(%):-[%3FWARNING%f]: Console output during zsh initialization detected.}"
          echo -E - ""
          echo -E - "${(%):-When using Powerlevel10k with instant prompt, console output during zsh}"
          echo -E - "${(%):-initialization may indicate issues.}"
          echo -E - ""
          echo -E - "${(%):-You can:}"
          echo -E - ""
          echo -E - "${(%):- - %BRecommended%b: Change %B$__p9k_zshrc_u%b so that it does not perform console I/O}"
          echo -E - "${(%):- after the instant prompt preamble. See the link below for details.}"
          echo -E - ""
          echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
          echo -E - "${(%):- * Zsh will start %Bquickly%b and prompt will update %Bsmoothly%b.}"
          echo -E - ""
          echo -E - "${(%):- - Suppress this warning either by running %Bp10k configure%b or by manually}"
          echo -E - "${(%):- defining the following parameter:}"
          echo -E - ""
          echo -E - "${(%):- %3Ftypeset%f -g POWERLEVEL9K_INSTANT_PROMPT=quiet}"
          echo -E - ""
          echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
          echo -E - "${(%):- * Zsh will start %Bquickly%b but prompt will %Bjump down%b after initialization.}"
          echo -E - ""
          echo -E - "${(%):- - Disable instant prompt either by running %Bp10k configure%b or by manually}"
          echo -E - "${(%):- defining the following parameter:}"
          echo -E - ""
          echo -E - "${(%):- %3Ftypeset%f -g POWERLEVEL9K_INSTANT_PROMPT=off}"
          echo -E - ""
          echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
          echo -E - "${(%):- * Zsh will start %Bslowly%b.}"
          echo -E - ""
          echo -E - "${(%):- - Do nothing.}"
          echo -E - ""
          echo -E - "${(%):- * You %Bwill%b see this error message every time you start zsh.}"
          echo -E - "${(%):- * Zsh will start %Bquickly%b but prompt will %Bjump down%b after initialization.}"
          echo -E - ""
          echo -E - "${(%):-For details, see:}"
          if (( _p9k_term_has_href )); then
            echo - "${(%):-\e]8;;https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt\ahttps://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt\e]8;;\a}"
          else
            echo - "${(%):-https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt}"
          fi
          echo -E - ""
          echo - "${(%):-%3F-- console output produced during zsh initialization follows --%f}"
          echo -E - ""
        fi
      fi
      command cat -- $__p9k_instant_prompt_output
      echo -nE - $sp
      zf_rm -f -- $__p9k_instant_prompt_output
    } 2>/dev/null
  else
    zf_rm -f -- $__p9k_instant_prompt_output 2>/dev/null
    print -rn -- $terminfo[rc]${(%):-%b%k%f%s%u}$terminfo[ed]
  fi
  prompt_opts=(percent subst sp cr)
  if [[ $_POWERLEVEL9K_DISABLE_INSTANT_PROMPT == 0 && $__p9k_instant_prompt_active == 2 ]]; then
    >&2 echo -E - ""
    >&2 echo -E - "${(%):-[%1FERROR%f]: When using Powerlevel10k with instant prompt, %Bprompt_cr%b must be unset.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):-You can:}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- - %BRecommended%b: call %Bp10k finalize%b at the end of %B$__p9k_zshrc_u%b.}"
    >&2 echo -E - "${(%):- You can do this by running the following command:}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- %2Fecho%f %3F'(( ! \${+functions[p10k]\} )) || p10k finalize'%f >>! $__p9k_zshrc_u}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
    >&2 echo -E - "${(%):- * Zsh will start %Bquickly%b and %Bwithout%b prompt flickering.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- - Find where %Bprompt_cr%b option gets set in your zsh configs and stop setting it.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
    >&2 echo -E - "${(%):- * Zsh will start %Bquickly%b and %Bwithout%b prompt flickering.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- - Disable instant prompt either by running %Bp10k configure%b or by manually}"
    >&2 echo -E - "${(%):- defining the following parameter:}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- %3Ftypeset%f -g POWERLEVEL9K_INSTANT_PROMPT=off}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
    >&2 echo -E - "${(%):- * Zsh will start %Bslowly%b.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- - Do nothing.}"
    >&2 echo -E - ""
    >&2 echo -E - "${(%):- * You %Bwill%b see this error message every time you start zsh.}"
    >&2 echo -E - "${(%):- * Zsh will start %Bquickly%b but %Bwith%b prompt flickering.}"
    >&2 echo -E - ""
  fi
}
# zle -F callback that performs the actual dump: writes the instant prompt
# (unless disabled) and the serialized state to disk, then clears the
# scheduling flags. $1 is the file descriptor the callback was registered on.
function _p9k_do_dump() {
  eval "$__p9k_intro"
  zle -F $1
  # NOTE(review): presumably closes the fd passed in $1 — confirm that
  # `exec {1}>&-` behaves as intended here rather than `exec {fd}>&-`.
  exec {1}>&-
  (( _p9k__state_dump_fd )) || return
  if (( ! _p9k__instant_prompt_disabled )); then
    # Signature keyed by cwd, SSH-ness and privilege marker.
    _p9k__instant_prompt_sig=$_p9k__cwd:$P9K_SSH:${(%):-%#}
    _p9k_set_instant_prompt
    _p9k_dump_instant_prompt
    _p9k_dumped_instant_prompt_sigs[$_p9k__instant_prompt_sig]=1
  fi
  _p9k_dump_state
  _p9k__state_dump_scheduled=0
  _p9k__state_dump_fd=0
}
# Returns 0 (true) when a state/instant-prompt dump should be scheduled for
# the current prompt: dumps are enabled, none is in flight, and the instant
# prompt for the current signature has not been dumped yet.
function _p9k_should_dump() {
  (( __p9k_dumps_enabled && ! _p9k__state_dump_fd )) || return
  (( _p9k__state_dump_scheduled || _p9k__prompt_idx == 1 )) && return
  _p9k__instant_prompt_sig=$_p9k__cwd:$P9K_SSH:${(%):-%#}
  (( ! $+_p9k_dumped_instant_prompt_sigs[$_p9k__instant_prompt_sig] ))
}
# Must not run under `eval "$__p9k_intro_locale"`. Safe to run with any options.
# Restores ZLE_RPROMPT_INDENT, LC_CTYPE and LC_ALL to the values they had
# before Powerlevel10k overrode them, then forgets the saved values.
function _p9k_restore_special_params() {
  (( ! ${+_p9k__real_zle_rprompt_indent} )) || {
    # An empty saved value means the parameter was originally unset.
    [[ -n "$_p9k__real_zle_rprompt_indent" ]] &&
      ZLE_RPROMPT_INDENT="$_p9k__real_zle_rprompt_indent" ||
      unset ZLE_RPROMPT_INDENT
    unset _p9k__real_zle_rprompt_indent
  }
  (( ! ${+_p9k__real_lc_ctype} )) || {
    LC_CTYPE="$_p9k__real_lc_ctype"
    unset _p9k__real_lc_ctype
  }
  (( ! ${+_p9k__real_lc_all} )) || {
    LC_ALL="$_p9k__real_lc_all"
    unset _p9k__real_lc_all
  }
}
# Invoked (as a registered math function) during prompt expansion. Performs
# per-prompt bookkeeping on the first expansion of each prompt: locale fixup,
# dump scheduling, TTY freshness detection, empty-line/ruler display state,
# instant prompt teardown, and show-on-command segment visibility.
function _p9k_on_expand() {
  # Fast path: already expanded, no instant prompt active, locale is UTF-8.
  (( _p9k__expanded && ! ${+__p9k_instant_prompt_active} )) && [[ "${langinfo[CODESET]}" == (utf|UTF)(-|)8 ]] && return
  eval "$__p9k_intro_no_locale"
  if [[ $langinfo[CODESET] != (utf|UTF)(-|)8 ]]; then
    # Force a UTF-8 locale for the duration of prompt rendering, saving the
    # user's original LC_ALL / LC_CTYPE for later restoration.
    _p9k_restore_special_params
    if [[ $langinfo[CODESET] != (utf|UTF)(-|)8 ]] && _p9k_init_locale; then
      if [[ -n $LC_ALL ]]; then
        _p9k__real_lc_all=$LC_ALL
        LC_ALL=$__p9k_locale
      else
        _p9k__real_lc_ctype=$LC_CTYPE
        LC_CTYPE=$__p9k_locale
      fi
    fi
  fi
  (( _p9k__expanded && ! $+__p9k_instant_prompt_active )) && return
  eval "$__p9k_intro_locale"
  if (( ! _p9k__expanded )); then
    if _p9k_should_dump; then
      # Schedule the dump to run from the event loop via a dummy fd watcher.
      sysopen -o cloexec -ru _p9k__state_dump_fd /dev/null
      zle -F $_p9k__state_dump_fd _p9k_do_dump
    fi
    if (( ! $+P9K_TTY )); then
      # Classify the TTY as new (freshly allocated) or old based on its ctime.
      typeset -gx P9K_TTY=old
      if (( _POWERLEVEL9K_NEW_TTY_MAX_AGE_SECONDS < 0 )); then
        P9K_TTY=new
      else
        local -a stat
        if zstat -A stat +ctime -- $TTY 2>/dev/null &&
          (( EPOCHREALTIME - stat[1] < _POWERLEVEL9K_NEW_TTY_MAX_AGE_SECONDS )); then
          P9K_TTY=new
        fi
      fi
    fi
    __p9k_reset_state=1
    # Display slot 2 is the empty line above the prompt; slot 4 is the ruler.
    if (( _POWERLEVEL9K_PROMPT_ADD_NEWLINE )); then
      if [[ $P9K_TTY == new ]]; then
        _p9k__empty_line_i=3
        _p9k__display_v[2]=hide
      elif [[ -z $_p9k_transient_prompt && $+functions[p10k-on-post-prompt] == 0 ]]; then
        _p9k__empty_line_i=3
        _p9k__display_v[2]=print
      else
        unset _p9k__empty_line_i
        _p9k__display_v[2]=show
      fi
    fi
    if (( _POWERLEVEL9K_SHOW_RULER )); then
      if [[ $P9K_TTY == new ]]; then
        _p9k__ruler_i=3
        _p9k__display_v[4]=hide
      elif [[ -z $_p9k_transient_prompt && $+functions[p10k-on-post-prompt] == 0 ]]; then
        _p9k__ruler_i=3
        _p9k__display_v[4]=print
      else
        unset _p9k__ruler_i
        _p9k__display_v[4]=show
      fi
    fi
    (( _p9k__fully_initialized )) || _p9k_wrap_widgets
  fi
  if (( $+__p9k_instant_prompt_active )); then
    _p9k_clear_instant_prompt
    unset __p9k_instant_prompt_active
  fi
  if (( ! _p9k__expanded )); then
    _p9k__expanded=1
    (( _p9k__fully_initialized || ! $+functions[p10k-on-init] )) || p10k-on-init
    local pat idx var
    for pat idx var in $_p9k_show_on_command; do
      _p9k_display_segment $idx $var hide
    done
    (( $+functions[p10k-on-pre-prompt] )) && p10k-on-pre-prompt
    if zle; then
      # Interactive: show segments whose trigger command is being typed.
      local -a P9K_COMMANDS=($_p9k__last_commands)
      local pat idx var
      for pat idx var in $_p9k_show_on_command; do
        if (( $P9K_COMMANDS[(I)$pat] )); then
          _p9k_display_segment $idx $var show
        else
          _p9k_display_segment $idx $var hide
        fi
      done
      if (( $+functions[p10k-on-post-widget] )); then
        local -h WIDGET
        unset WIDGET
        p10k-on-post-widget
      fi
    else
      # Non-interactive rendering: print the empty line and/or ruler directly.
      if [[ $_p9k__display_v[2] == print && -n $_p9k_t[_p9k_empty_line_idx] ]]; then
        print -rnP -- '%b%k%f%E'$_p9k_t[_p9k_empty_line_idx]
      fi
      if [[ $_p9k__display_v[4] == print ]]; then
        () {
          local ruler=$_p9k_t[_p9k_ruler_idx]
          local -i _p9k__clm=COLUMNS _p9k__ind=${ZLE_RPROMPT_INDENT:-1}
          (( __p9k_ksh_arrays )) && setopt ksh_arrays
          (( __p9k_sh_glob )) && setopt sh_glob
          setopt prompt_subst
          print -rnP -- '%b%k%f%E'$ruler
        }
      fi
    fi
    __p9k_reset_state=0
    _p9k__fully_initialized=1
  fi
}
# Register _p9k_on_expand as a math function so it can be invoked from an
# arithmetic context during prompt expansion.
functions -M _p9k_on_expand
# The body of the precmd hook. Handles first-run configuration (including
# launching the configuration wizard), command duration and status capture,
# TTY freshness, prompt construction, hook ordering, and gitstatus results.
_p9k_precmd_impl() {
  eval "$__p9k_intro"
  (( __p9k_enabled )) || return
  if ! zle || [[ -z $_p9k__param_sig ]]; then
    if zle; then
      __p9k_new_status=0
      __p9k_new_pipestatus=(0)
    else
      _p9k__must_restore_prompt=0
    fi
    if _p9k_must_init; then
      local -i instant_prompt_disabled
      if (( !__p9k_configured )); then
        __p9k_configured=1
        # Offer the configuration wizard only when the user has no
        # POWERLEVEL9K_* parameters set (beyond the meta ones).
        if [[ -z "${parameters[(I)POWERLEVEL9K_*~POWERLEVEL9K_(MODE|CONFIG_FILE|GITSTATUS_DIR)]}" ]]; then
          _p9k_can_configure -q
          local -i ret=$?
          if (( ret == 2 && $+__p9k_instant_prompt_active )); then
            _p9k_clear_instant_prompt
            unset __p9k_instant_prompt_active
            _p9k_delete_instant_prompt
            zf_rm -f -- $__p9k_dump_file{,.zwc} 2>/dev/null
            # Drain pending keyboard input before retrying.
            () {
              local key
              while true; do
                [[ -t 2 ]]
                read -t0 -k key || break
              done 2>/dev/null
            }
            _p9k_can_configure -q
            ret=$?
          fi
          if (( ret == 0 )); then
            if (( $+commands[git] )); then
              # Prefetch the gitstatus binary in the background while the
              # wizard runs; kill the installer when the wizard exits.
              (
                local -i pid
                {
                  { /bin/sh "$__p9k_root_dir"/gitstatus/install </dev/null &>/dev/null & } && pid=$!
                  ( builtin source "$__p9k_root_dir"/internal/wizard.zsh )
                } always {
                  if (( pid )); then
                    kill -- $pid 2>/dev/null
                    wait -- $pid 2>/dev/null
                  fi
                }
              )
            else
              ( builtin source "$__p9k_root_dir"/internal/wizard.zsh )
            fi
            if (( $? )); then
              instant_prompt_disabled=1
            else
              builtin source "$__p9k_cfg_path"
              _p9k__force_must_init=1
              _p9k_must_init
            fi
          fi
        fi
      fi
      typeset -gi _p9k__instant_prompt_disabled=instant_prompt_disabled
      _p9k_init
    fi
    if (( _p9k__timer_start )); then
      typeset -gF P9K_COMMAND_DURATION_SECONDS=$((EPOCHREALTIME - _p9k__timer_start))
    else
      unset P9K_COMMAND_DURATION_SECONDS
    fi
    _p9k_save_status
    # A successful `clear` or `reset` makes the TTY look freshly allocated.
    if [[ $_p9k__preexec_cmd == [[:space:]]#(clear([[:space:]]##-(|x)(|T[a-zA-Z0-9-_\'\"]#))#|reset)[[:space:]]# &&
      $_p9k__status == 0 ]]; then
      P9K_TTY=new
    elif [[ $P9K_TTY == new && $_p9k__fully_initialized == 1 ]] && ! zle; then
      P9K_TTY=old
    fi
    _p9k__timer_start=0
    _p9k__region_active=0
    unset _p9k__line_finished _p9k__preexec_cmd
    _p9k__keymap=main
    _p9k__zle_state=insert
    (( ++_p9k__prompt_idx ))
  fi
  _p9k_fetch_cwd
  _p9k__refresh_reason=precmd
  __p9k_reset_state=1
  local -i fast_vcs
  if (( _p9k_vcs_index && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
    if [[ $_p9k__cwd != $~_POWERLEVEL9K_VCS_DISABLED_DIR_PATTERN ]]; then
      # Kick off an async gitstatus query; results are collected below.
      local -F start_time=EPOCHREALTIME
      unset _p9k__vcs
      unset _p9k__vcs_timeout
      _p9k_vcs_gitstatus
      local -i fast_vcs=1
    fi
  fi
  (( $+functions[_p9k_async_segments_compute] )) && _p9k_async_segments_compute
  _p9k__expanded=0
  _p9k_set_prompt
  _p9k__refresh_reason=''
  # Keep our hooks pinned first/last in the precmd/preexec hook arrays.
  if [[ $precmd_functions[1] != _p9k_do_nothing && $precmd_functions[(I)_p9k_do_nothing] != 0 ]]; then
    precmd_functions=(_p9k_do_nothing ${(@)precmd_functions:#_p9k_do_nothing})
  fi
  if [[ $precmd_functions[-1] != _p9k_precmd && $precmd_functions[(I)_p9k_precmd] != 0 ]]; then
    precmd_functions=(${(@)precmd_functions:#_p9k_precmd} _p9k_precmd)
  fi
  if [[ $preexec_functions[1] != _p9k_preexec1 && $preexec_functions[(I)_p9k_preexec1] != 0 ]]; then
    preexec_functions=(_p9k_preexec1 ${(@)preexec_functions:#_p9k_preexec1})
  fi
  if [[ $preexec_functions[-1] != _p9k_preexec2 && $preexec_functions[(I)_p9k_preexec2] != 0 ]]; then
    preexec_functions=(${(@)preexec_functions:#_p9k_preexec2} _p9k_preexec2)
  fi
  if (( fast_vcs && _p9k_vcs_index && $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
    if (( $+_p9k__vcs_timeout )); then
      # Wait for gitstatus results, but never longer than the remaining
      # share of the configured max sync latency.
      (( _p9k__vcs_timeout = _POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS + start_time - EPOCHREALTIME ))
      (( _p9k__vcs_timeout >= 0 )) || (( _p9k__vcs_timeout = 0 ))
      gitstatus_process_results_p9k_ -t $_p9k__vcs_timeout POWERLEVEL9K
    fi
    if (( ! $+_p9k__vcs )); then
      local _p9k__prompt _p9k__prompt_side=$_p9k_vcs_side _p9k__segment_name=vcs
      local -i _p9k__has_upglob _p9k__segment_index=_p9k_vcs_index _p9k__line_index=_p9k_vcs_line_index
      _p9k_vcs_render
      typeset -g _p9k__vcs=$_p9k__prompt
    fi
  fi
  _p9k_worker_receive
  __p9k_reset_state=0
}
# INT trap handler: lets Powerlevel10k finalize the current prompt line
# before the interrupt propagates. Always reports success.
_p9k_trapint() {
  (( __p9k_enabled )) || return 0
  eval "$__p9k_intro"
  _p9k_deschedule_redraw
  zle && _p9k_on_widget_zle-line-finish int
  return 0
}
# Registered as a precmd hook. The first two lines must stay first: they
# capture $? and $pipestatus before anything else can clobber them.
_p9k_precmd() {
  __p9k_new_status=$?
  __p9k_new_pipestatus=($pipestatus)
  # Temporarily neuter INT while the prompt machinery runs.
  trap ":" INT
  [[ -o ksh_arrays ]] && __p9k_ksh_arrays=1 || __p9k_ksh_arrays=0
  [[ -o sh_glob ]] && __p9k_sh_glob=1 || __p9k_sh_glob=0
  _p9k_restore_special_params
  _p9k_precmd_impl
  [[ ${+__p9k_instant_prompt_active} == 0 || -o no_prompt_cr ]] || __p9k_instant_prompt_active=2
  setopt no_local_options no_prompt_bang prompt_percent prompt_subst prompt_cr prompt_sp
  # See https://www.zsh.org/mla/workers/2020/msg00612.html for the reason behind __p9k_trapint.
  typeset -g __p9k_trapint='_p9k_trapint; return 130'
  trap "$__p9k_trapint" INT
}
# Redraws the current prompt via `zle .reset-prompt`, hiding the cursor while
# redrawing to avoid flicker. No-op during expansion (__p9k_reset_state == 1),
# outside zle, or after the line has finished.
function _p9k_reset_prompt() {
  if (( __p9k_reset_state != 1 )) && zle && [[ -z $_p9k__line_finished ]]; then
    __p9k_reset_state=0
    # Re-apply the user's original option state for prompt expansion.
    setopt prompt_subst
    (( __p9k_ksh_arrays )) && setopt ksh_arrays
    (( __p9k_sh_glob )) && setopt sh_glob
    {
      (( _p9k__can_hide_cursor )) && echoti civis
      zle .reset-prompt
      (( ${+functions[z4h]} )) || zle -R
    } always {
      # Restore the cursor even if reset-prompt throws.
      (( _p9k__can_hide_cursor )) && echoti cnorm
      _p9k__cursor_hidden=0
    }
  fi
}
# Does ZSH have a certain off-by-one bug that triggers when PROMPT overflows to a new line?
#
# Bug: https://github.com/zsh-users/zsh/commit/d8d9fee137a5aa2cf9bf8314b06895bfc2a05518.
# ZSH_PATCHLEVEL=zsh-5.4.2-159-gd8d9fee13. Released in 5.5.
#
# Fix: https://github.com/zsh-users/zsh/commit/64d13738357c9b9c212adbe17f271716abbcf6ea.
# ZSH_PATCHLEVEL=zsh-5.7.1-50-g64d137383.
#
# Test: PROMPT="${(pl:$((COLUMNS))::-:)}<%1(l.%2(l.FAIL.PASS).FAIL)> " zsh -dfis <<<exit
# Workaround: PROMPT="${(pl:$((COLUMNS))::-:)}%{%G%}<%1(l.%2(l.FAIL.PASS).FAIL)> " zsh -dfis <<<exit
function _p9k_prompt_overflow_bug() {
  # Shell true (return 0) means the bug is present. Note the inversion in
  # `return $(( expr ))`: an arithmetic value of 1 maps to shell false.
  [[ $ZSH_PATCHLEVEL =~ '^zsh-5\.4\.2-([0-9]+)-' ]] && return $(( match[1] < 159 ))
  [[ $ZSH_PATCHLEVEL =~ '^zsh-5\.7\.1-([0-9]+)-' ]] && return $(( match[1] >= 50 ))
  # Otherwise fall back to version comparison: affected range is [5.5, 5.7.2).
  is-at-least 5.5 && ! is-at-least 5.7.2
}
# Pattern and signature of the POWERLEVEL9K_* parameters the current state was
# derived from; compared against the dump to detect configuration changes.
typeset -g _p9k__param_pat
typeset -g _p9k__param_sig
# Declares (without configuring) every global variable used by the prompt
# engine. Naming convention: _p9k__* holds per-session mutable state; _p9k_*
# holds configuration-derived state; P9K_* is part of the public API.
_p9k_init_vars() {
  typeset -gF _p9k__gcloud_last_fetch_ts
  typeset -g _p9k_gcloud_configuration
  typeset -g _p9k_gcloud_account
  typeset -g _p9k_gcloud_project_id
  typeset -g _p9k_gcloud_project_name
  typeset -gi _p9k_term_has_href
  typeset -gi _p9k_vcs_index
  typeset -gi _p9k_vcs_line_index
  typeset -g _p9k_vcs_side
  typeset -ga _p9k_taskwarrior_meta_files
  typeset -ga _p9k_taskwarrior_meta_non_files
  typeset -g _p9k_taskwarrior_meta_sig
  typeset -g _p9k_taskwarrior_data_dir
  typeset -g _p9k__taskwarrior_functional=1
  typeset -ga _p9k_taskwarrior_data_files
  typeset -ga _p9k_taskwarrior_data_non_files
  typeset -g _p9k_taskwarrior_data_sig
  typeset -gA _p9k_taskwarrior_counters
  typeset -gF _p9k_taskwarrior_next_due
  typeset -ga _p9k_asdf_meta_files
  typeset -ga _p9k_asdf_meta_non_files
  typeset -g _p9k_asdf_meta_sig
  # plugin => installed_version_pattern
  # example: (ruby '2.7.0|2.6.3|system' lua 'system' chubaka '1.0.0|system')
  typeset -gA _p9k_asdf_plugins
  # example: (.ruby-version "ruby 1 chubaka 0")
  #
  # - "1" means parse-legacy-file is present
  # - "chubaka" is another plugin that claims to be able to parse .ruby-version
  typeset -gA _p9k_asdf_file_info
  # dir => mtime ':' ${(pj:\0:)files}
  typeset -gA _p9k__asdf_dir2files
  # :file => mtime ':' ${(pj:\0:)tool_versions}
  # plugin:file => mtime ':' version
  typeset -gA _p9k_asdf_file2versions
  # filepath => mtime ':' word
  typeset -gA _p9k__read_word_cache
  # filepath:prefix => mtime ':' versions
  typeset -gA _p9k__read_pyenv_like_version_file_cache
  # _p9k__parent_dirs and _p9k__parent_mtimes are parallel arrays. They are updated
  # together with _p9k__cwd. _p9k__parent_mtimes[i] is mtime for _p9k__parent_dirs[i].
  #
  # When _p9k__cwd is / or ~, both arrays are empty. When _p9k__cwd is ~/foo/bar,
  # _p9k__parent_dirs is (/home/user/foo/bar /home/user/foo). When _p9k__cwd is
  # /foo/bar, it's (/foo/bar /foo).
  #
  # $_p9k__parent_mtimes_i[i] == "$i:$_p9k__parent_mtimes[i]"
  # $_p9k__parent_mtimes_s == "$_p9k__parent_mtimes_i".
  typeset -ga _p9k__parent_dirs
  typeset -ga _p9k__parent_mtimes
  typeset -ga _p9k__parent_mtimes_i
  typeset -g _p9k__parent_mtimes_s
  typeset -g _p9k__cwd
  typeset -g _p9k__cwd_a
  # dir/pattern => dir mtime ':' num_matches
  typeset -gA _p9k__glob_cache
  # dir/pattern => space-separated parent dir mtimes ' :' the first matching parent dir
  # Note: ' :' is indeed the delimiter.
  typeset -gA _p9k__upsearch_cache
  typeset -g _p9k_timewarrior_dir
  typeset -gi _p9k_timewarrior_dir_mtime
  typeset -gi _p9k_timewarrior_file_mtime
  typeset -g _p9k_timewarrior_file_name
  typeset -gA _p9k__prompt_char_saved
  # Async worker process state.
  typeset -g _p9k__worker_pid
  typeset -g _p9k__worker_req_fd
  typeset -g _p9k__worker_resp_fd
  typeset -g _p9k__worker_shell_pid
  typeset -g _p9k__worker_file_prefix
  typeset -gA _p9k__worker_request_map
  typeset -ga _p9k__segment_cond_left
  typeset -ga _p9k__segment_cond_right
  typeset -ga _p9k__segment_val_left
  typeset -ga _p9k__segment_val_right
  typeset -ga _p9k_show_on_command
  typeset -g _p9k__last_buffer
  typeset -ga _p9k__last_commands
  typeset -gi _p9k__fully_initialized
  typeset -gi _p9k__must_restore_prompt
  typeset -gi _p9k__restore_prompt_fd
  typeset -gi _p9k__redraw_fd
  typeset -gi _p9k__can_hide_cursor=$(( $+terminfo[civis] && $+terminfo[cnorm] ))
  typeset -gi _p9k__cursor_hidden
  typeset -gi _p9k__non_hermetic_expansion
  typeset -g _p9k__time
  typeset -g _p9k__date
  typeset -gA _p9k_dumped_instant_prompt_sigs
  typeset -g _p9k__instant_prompt_sig
  typeset -g _p9k__instant_prompt
  typeset -gi _p9k__state_dump_scheduled
  typeset -gi _p9k__state_dump_fd
  typeset -gi _p9k__prompt_idx
  typeset -gi _p9k_reset_on_line_finish
  typeset -gF _p9k__timer_start
  typeset -gi _p9k__status
  typeset -ga _p9k__pipestatus
  typeset -g _p9k__ret
  typeset -g _p9k__cache_key
  typeset -ga _p9k__cache_val
  typeset -g _p9k__cache_stat_meta
  typeset -g _p9k__cache_stat_fprint
  typeset -g _p9k__cache_fprint_key
  typeset -gA _p9k_cache
  typeset -gA _p9k__cache_ephemeral
  typeset -ga _p9k_t
  typeset -g _p9k__n
  typeset -gi _p9k__i
  typeset -g _p9k__bg
  typeset -ga _p9k_left_join
  typeset -ga _p9k_right_join
  typeset -g _p9k__public_ip
  typeset -g _p9k__todo_command
  typeset -g _p9k__todo_file
  typeset -g _p9k__git_dir
  # git workdir => 1 if gitstatus is slow on it, 0 if it's fast.
  typeset -gA _p9k_git_slow
  # git workdir => the last state we've seen for it
  typeset -gA _p9k__gitstatus_last
  typeset -gF _p9k__gitstatus_start_time
  typeset -g _p9k__prompt
  typeset -g _p9k__rprompt
  typeset -g _p9k__lprompt
  typeset -g _p9k__prompt_side
  typeset -g _p9k__segment_name
  typeset -gi _p9k__segment_index
  typeset -gi _p9k__line_index
  typeset -g _p9k__refresh_reason
  typeset -gi _p9k__region_active
  typeset -ga _p9k_line_segments_left
  typeset -ga _p9k_line_segments_right
  typeset -ga _p9k_line_prefix_left
  typeset -ga _p9k_line_prefix_right
  typeset -ga _p9k_line_suffix_left
  typeset -ga _p9k_line_suffix_right
  typeset -ga _p9k_line_never_empty_right
  typeset -ga _p9k_line_gap_post
  typeset -g _p9k__xy
  typeset -g _p9k__clm
  typeset -g _p9k__p
  typeset -gi _p9k__x
  typeset -gi _p9k__y
  typeset -gi _p9k__m
  typeset -gi _p9k__d
  typeset -gi _p9k__h
  typeset -gi _p9k__ind
  typeset -g _p9k_gap_pre
  typeset -gi _p9k__ruler_i=3
  typeset -gi _p9k_ruler_idx
  typeset -gi _p9k__empty_line_i=3
  typeset -gi _p9k_empty_line_idx
  typeset -g _p9k_prompt_prefix_left
  typeset -g _p9k_prompt_prefix_right
  typeset -g _p9k_prompt_suffix_left
  typeset -g _p9k_prompt_suffix_right
  typeset -gi _p9k_emulate_zero_rprompt_indent
  typeset -gA _p9k_battery_states
  typeset -g _p9k_os
  typeset -g _p9k_os_icon
  typeset -g _p9k_color1
  typeset -g _p9k_color2
  typeset -g _p9k__s
  typeset -g _p9k__ss
  typeset -g _p9k__sss
  typeset -g _p9k__v
  typeset -g _p9k__c
  typeset -g _p9k__e
  typeset -g _p9k__w
  typeset -gi _p9k__dir_len
  typeset -gi _p9k_num_cpus
  typeset -g _p9k__keymap
  typeset -g _p9k__zle_state
  typeset -g _p9k_uname
  typeset -g _p9k_uname_o
  typeset -g _p9k_uname_m
  typeset -g _p9k_transient_prompt
  typeset -g _p9k__last_prompt_pwd
  typeset -gA _p9k_display_k
  typeset -ga _p9k__display_v
  typeset -gA _p9k__dotnet_stat_cache
  typeset -gA _p9k__dir_stat_cache
  typeset -gi _p9k__expanded
  typeset -gi _p9k__force_must_init
  # Public API parameters available to segment expansions.
  typeset -g P9K_VISUAL_IDENTIFIER
  typeset -g P9K_CONTENT
  typeset -g P9K_GAP
  typeset -g P9K_PROMPT=regular
}
_p9k_init_params() {
_p9k_declare -F POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS 60
# invariant: _POWERLEVEL9K_INSTANT_PROMPT == (verbose|quiet|off)
# invariant: [[ ($_POWERLEVEL9K_INSTANT_PROMPT == off) == $_POWERLEVEL9K_DISABLE_INSTANT_PROMPT ]]
_p9k_declare -s POWERLEVEL9K_INSTANT_PROMPT # verbose, quiet, off
if [[ $_POWERLEVEL9K_INSTANT_PROMPT == off ]]; then
typeset -gi _POWERLEVEL9K_DISABLE_INSTANT_PROMPT=1
else
_p9k_declare -b POWERLEVEL9K_DISABLE_INSTANT_PROMPT 0
if (( _POWERLEVEL9K_DISABLE_INSTANT_PROMPT )); then
_POWERLEVEL9K_INSTANT_PROMPT=off
elif [[ $_POWERLEVEL9K_INSTANT_PROMPT != quiet ]]; then
_POWERLEVEL9K_INSTANT_PROMPT=verbose
fi
fi
(( _POWERLEVEL9K_DISABLE_INSTANT_PROMPT )) && _p9k__instant_prompt_disabled=1
_p9k_declare -s POWERLEVEL9K_TRANSIENT_PROMPT off
[[ $_POWERLEVEL9K_TRANSIENT_PROMPT == (off|always|same-dir) ]] || _POWERLEVEL9K_TRANSIENT_PROMPT=off
_p9k_declare -s POWERLEVEL9K_WORKER_LOG_LEVEL
_p9k_declare -i POWERLEVEL9K_COMMANDS_MAX_TOKEN_COUNT 64
_p9k_declare -a POWERLEVEL9K_HOOK_WIDGETS --
_p9k_declare -b POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL 0
_p9k_declare -b POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED 0
_p9k_declare -b POWERLEVEL9K_DISABLE_HOT_RELOAD 0
_p9k_declare -F POWERLEVEL9K_NEW_TTY_MAX_AGE_SECONDS 5
_p9k_declare -i POWERLEVEL9K_INSTANT_PROMPT_COMMAND_LINES 1
_p9k_declare -a POWERLEVEL9K_LEFT_PROMPT_ELEMENTS -- context dir vcs
_p9k_declare -a POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS -- status root_indicator background_jobs history time
_p9k_declare -b POWERLEVEL9K_DISABLE_RPROMPT 0
_p9k_declare -b POWERLEVEL9K_PROMPT_ADD_NEWLINE 0
_p9k_declare -b POWERLEVEL9K_PROMPT_ON_NEWLINE 0
_p9k_declare -b POWERLEVEL9K_RPROMPT_ON_NEWLINE 0
_p9k_declare -b POWERLEVEL9K_SHOW_RULER 0
_p9k_declare -i POWERLEVEL9K_PROMPT_ADD_NEWLINE_COUNT 1
_p9k_declare -s POWERLEVEL9K_COLOR_SCHEME dark
_p9k_declare -s POWERLEVEL9K_GITSTATUS_DIR ""
_p9k_declare -s POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN
_p9k_declare -b POWERLEVEL9K_VCS_SHOW_SUBMODULE_DIRTY 0
_p9k_declare -i POWERLEVEL9K_VCS_SHORTEN_LENGTH
_p9k_declare -i POWERLEVEL9K_VCS_SHORTEN_MIN_LENGTH
_p9k_declare -s POWERLEVEL9K_VCS_SHORTEN_STRATEGY
if [[ $langinfo[CODESET] == (utf|UTF)(-|)8 ]]; then
_p9k_declare -e POWERLEVEL9K_VCS_SHORTEN_DELIMITER '\u2026'
else
_p9k_declare -e POWERLEVEL9K_VCS_SHORTEN_DELIMITER '..'
fi
_p9k_declare -b POWERLEVEL9K_VCS_CONFLICTED_STATE 0
_p9k_declare -b POWERLEVEL9K_HIDE_BRANCH_ICON 0
_p9k_declare -b POWERLEVEL9K_VCS_HIDE_TAGS 0
_p9k_declare -i POWERLEVEL9K_CHANGESET_HASH_LENGTH 8
# Specifies the maximum number of elements in the cache. When the cache grows over this limit,
# it gets cleared. This is meant to avoid memory leaks when a rogue prompt is filling the cache
# with data.
_p9k_declare -i POWERLEVEL9K_MAX_CACHE_SIZE 10000
_p9k_declare -e POWERLEVEL9K_ANACONDA_LEFT_DELIMITER "("
_p9k_declare -e POWERLEVEL9K_ANACONDA_RIGHT_DELIMITER ")"
_p9k_declare -b POWERLEVEL9K_ANACONDA_SHOW_PYTHON_VERSION 1
_p9k_declare -b POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE 1
_p9k_declare -b POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE_ALWAYS 0
_p9k_declare -b POWERLEVEL9K_DISK_USAGE_ONLY_WARNING 0
_p9k_declare -i POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL 90
_p9k_declare -i POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL 95
_p9k_declare -i POWERLEVEL9K_BATTERY_LOW_THRESHOLD 10
_p9k_declare -i POWERLEVEL9K_BATTERY_HIDE_ABOVE_THRESHOLD 999
_p9k_declare -b POWERLEVEL9K_BATTERY_VERBOSE 1
_p9k_declare -a POWERLEVEL9K_BATTERY_LEVEL_BACKGROUND --
_p9k_declare -a POWERLEVEL9K_BATTERY_LEVEL_FOREGROUND --
case $parameters[POWERLEVEL9K_BATTERY_STAGES] in
scalar*) typeset -ga _POWERLEVEL9K_BATTERY_STAGES=("${(@s::)${(g::)POWERLEVEL9K_BATTERY_STAGES}}");;
array*) typeset -ga _POWERLEVEL9K_BATTERY_STAGES=("${(@g::)POWERLEVEL9K_BATTERY_STAGES}");;
esac
local state
for state in CHARGED CHARGING LOW DISCONNECTED; do
_p9k_declare -i POWERLEVEL9K_BATTERY_${state}_HIDE_ABOVE_THRESHOLD $_POWERLEVEL9K_BATTERY_HIDE_ABOVE_THRESHOLD
local var=POWERLEVEL9K_BATTERY_${state}_STAGES
case $parameters[$var] in
scalar*) eval "typeset -ga _$var=(${(@qq)${(@s::)${(g::)${(P)var}}}})";;
array*) eval "typeset -ga _$var=(${(@qq)${(@g::)${(@P)var}}})";;
*) eval "typeset -ga _$var=(${(@qq)_POWERLEVEL9K_BATTERY_STAGES})";;
esac
local var=POWERLEVEL9K_BATTERY_${state}_LEVEL_BACKGROUND
case $parameters[$var] in
array*) eval "typeset -ga _$var=(${(@qq)${(@P)var}})";;
*) eval "typeset -ga _$var=(${(@qq)_POWERLEVEL9K_BATTERY_LEVEL_BACKGROUND})";;
esac
local var=POWERLEVEL9K_BATTERY_${state}_LEVEL_FOREGROUND
case $parameters[$var] in
array*) eval "typeset -ga _$var=(${(@qq)${(@P)var}})";;
*) eval "typeset -ga _$var=(${(@qq)_POWERLEVEL9K_BATTERY_LEVEL_FOREGROUND})";;
esac
done
_p9k_declare -F POWERLEVEL9K_PUBLIC_IP_TIMEOUT 300
_p9k_declare -a POWERLEVEL9K_PUBLIC_IP_METHODS -- dig curl wget
_p9k_declare -e POWERLEVEL9K_PUBLIC_IP_NONE ""
_p9k_declare -s POWERLEVEL9K_PUBLIC_IP_HOST "https://v4.ident.me/"
_p9k_declare -s POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE ""
_p9k_segment_in_use public_ip || _POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE=
_p9k_declare -b POWERLEVEL9K_ALWAYS_SHOW_CONTEXT 0
_p9k_declare -b POWERLEVEL9K_ALWAYS_SHOW_USER 0
_p9k_declare -e POWERLEVEL9K_CONTEXT_TEMPLATE "%n@%m"
_p9k_declare -e POWERLEVEL9K_USER_TEMPLATE "%n"
_p9k_declare -e POWERLEVEL9K_HOST_TEMPLATE "%m"
_p9k_declare -F POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD 3
_p9k_declare -i POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION 2
# Other options: "d h m s".
_p9k_declare -s POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT "H:M:S"
_p9k_declare -e POWERLEVEL9K_HOME_FOLDER_ABBREVIATION "~"
_p9k_declare -b POWERLEVEL9K_DIR_PATH_ABSOLUTE 0
_p9k_declare -s POWERLEVEL9K_DIR_SHOW_WRITABLE ''
case $_POWERLEVEL9K_DIR_SHOW_WRITABLE in
true) _POWERLEVEL9K_DIR_SHOW_WRITABLE=1;;
v2) _POWERLEVEL9K_DIR_SHOW_WRITABLE=2;;
*) _POWERLEVEL9K_DIR_SHOW_WRITABLE=0;;
esac
typeset -gi _POWERLEVEL9K_DIR_SHOW_WRITABLE
_p9k_declare -b POWERLEVEL9K_DIR_OMIT_FIRST_CHARACTER 0
_p9k_declare -b POWERLEVEL9K_DIR_HYPERLINK 0
_p9k_declare -s POWERLEVEL9K_SHORTEN_STRATEGY ""
local markers=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.ruby-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
)
_p9k_declare -s POWERLEVEL9K_SHORTEN_FOLDER_MARKER "(${(j:|:)markers})"
# Shorten directory if it's longer than this even if there is space for it.
# The value can be either absolute (e.g., '80') or a percentage of terminal
# width (e.g., '50%'). If empty, directory will be shortened only when prompt
# doesn't fit. Applies only when POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique.
_p9k_declare -s POWERLEVEL9K_DIR_MAX_LENGTH 0
# Individual elements are patterns. They are expanded with the options set
# by `emulate zsh && setopt extended_glob`.
_p9k_declare -a POWERLEVEL9K_DIR_PACKAGE_FILES -- package.json composer.json
# When dir is on the last prompt line, try to shorten it enough to leave at least this many
# columns for typing commands. Applies only when POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique.
_p9k_declare -i POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS 40
# When dir is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands. Applies
# only when POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique.
_p9k_declare -F POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT 50
# POWERLEVEL9K_DIR_CLASSES allow you to specify custom styling and icons for different
# directories.
#
# POWERLEVEL9K_DIR_CLASSES must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. Icon.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins. If there are no
# matches, there will be no icon and the styling is done according to POWERLEVEL9K_DIR_BACKGROUND,
# POWERLEVEL9K_DIR_FOREGROUND, etc.
#
# Example:
#
# POWERLEVEL9K_DIR_CLASSES=(
# '~/work(/*)#' WORK '(โฏยฐโกยฐ๏ผโฏ๏ธต โปโโป'
# '~(/*)#' HOME 'โ'
# '*' DEFAULT '')
#
# POWERLEVEL9K_DIR_WORK_BACKGROUND=red
# POWERLEVEL9K_DIR_HOME_BACKGROUND=blue
# POWERLEVEL9K_DIR_DEFAULT_BACKGROUND=yellow
#
# With these settings, the current directory in the prompt may look like this:
#
# (โฏยฐโกยฐ๏ผโฏ๏ธต โปโโป ~/work/projects/important/urgent
#
# โ ~/best/powerlevel10k
_p9k_declare -a POWERLEVEL9K_DIR_CLASSES
_p9k_declare -i POWERLEVEL9K_SHORTEN_DELIMITER_LENGTH
_p9k_declare -e POWERLEVEL9K_SHORTEN_DELIMITER
_p9k_declare -s POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER ''
case $_POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER in
first|last) _POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER+=:0;;
(first|last):(|-)<->);;
*) _POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=;;
esac
[[ -z $_POWERLEVEL9K_SHORTEN_FOLDER_MARKER ]] && _POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=
_p9k_declare -i POWERLEVEL9K_SHORTEN_DIR_LENGTH
_p9k_declare -s POWERLEVEL9K_IP_INTERFACE ""
: ${_POWERLEVEL9K_IP_INTERFACE:='.*'}
_p9k_segment_in_use ip || _POWERLEVEL9K_IP_INTERFACE=
_p9k_declare -s POWERLEVEL9K_VPN_IP_INTERFACE "(gpd|wg|(.*tun))[0-9]*"
: ${_POWERLEVEL9K_VPN_IP_INTERFACE:='.*'}
_p9k_segment_in_use vpn_ip || _POWERLEVEL9K_VPN_IP_INTERFACE=
_p9k_declare -b POWERLEVEL9K_VPN_IP_SHOW_ALL 0
_p9k_declare -i POWERLEVEL9K_LOAD_WHICH 5
case $_POWERLEVEL9K_LOAD_WHICH in
1) _POWERLEVEL9K_LOAD_WHICH=1;;
15) _POWERLEVEL9K_LOAD_WHICH=3;;
*) _POWERLEVEL9K_LOAD_WHICH=2;;
esac
_p9k_declare -b POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY 0
_p9k_declare -b POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY 0
_p9k_declare -b POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY 1
_p9k_declare -b POWERLEVEL9K_GO_VERSION_PROJECT_ONLY 1
_p9k_declare -b POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY 1
_p9k_declare -b POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY 0
_p9k_declare -b POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_NODENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_NODENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_RBENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_RBENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_SCALAENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_SCALAENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_PHPENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_PHPENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_LUAENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_LUAENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_JENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_JENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_PLENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_PLENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -b POWERLEVEL9K_PYENV_SHOW_SYSTEM 1
_p9k_declare -a POWERLEVEL9K_PYENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW 0
_p9k_declare -a POWERLEVEL9K_GOENV_SOURCES -- shell local global
_p9k_declare -b POWERLEVEL9K_GOENV_SHOW_SYSTEM 1
_p9k_declare -b POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW 0
_p9k_declare -b POWERLEVEL9K_ASDF_SHOW_SYSTEM 1
_p9k_declare -a POWERLEVEL9K_ASDF_SOURCES -- shell local global
local var
for var in ${parameters[(I)POWERLEVEL9K_ASDF_*_PROMPT_ALWAYS_SHOW]}; do
_p9k_declare -b $var $_POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW
done
for var in ${parameters[(I)POWERLEVEL9K_ASDF_*_SHOW_SYSTEM]}; do
_p9k_declare -b $var $_POWERLEVEL9K_ASDF_SHOW_SYSTEM
done
for var in ${parameters[(I)POWERLEVEL9K_ASDF_*_SOURCES]}; do
_p9k_declare -a $var -- $_POWERLEVEL9K_ASDF_SOURCES
done
_p9k_declare -b POWERLEVEL9K_HASKELL_STACK_PROMPT_ALWAYS_SHOW 1
_p9k_declare -a POWERLEVEL9K_HASKELL_STACK_SOURCES -- shell local
_p9k_declare -b POWERLEVEL9K_RVM_SHOW_GEMSET 0
_p9k_declare -b POWERLEVEL9K_RVM_SHOW_PREFIX 0
_p9k_declare -b POWERLEVEL9K_CHRUBY_SHOW_VERSION 1
_p9k_declare -b POWERLEVEL9K_CHRUBY_SHOW_ENGINE 1
_p9k_declare -b POWERLEVEL9K_STATUS_CROSS 0
_p9k_declare -b POWERLEVEL9K_STATUS_OK 1
_p9k_declare -b POWERLEVEL9K_STATUS_OK_PIPE 1
_p9k_declare -b POWERLEVEL9K_STATUS_ERROR 1
_p9k_declare -b POWERLEVEL9K_STATUS_ERROR_PIPE 1
_p9k_declare -b POWERLEVEL9K_STATUS_ERROR_SIGNAL 1
_p9k_declare -b POWERLEVEL9K_STATUS_SHOW_PIPESTATUS 1
_p9k_declare -b POWERLEVEL9K_STATUS_HIDE_SIGNAME 0
_p9k_declare -b POWERLEVEL9K_STATUS_VERBOSE_SIGNAME 1
_p9k_declare -b POWERLEVEL9K_STATUS_EXTENDED_STATES 0
_p9k_declare -b POWERLEVEL9K_STATUS_VERBOSE 1
_p9k_declare -b POWERLEVEL9K_STATUS_OK_IN_NON_VERBOSE 0
_p9k_declare -e POWERLEVEL9K_DATE_FORMAT "%D{%d.%m.%y}"
_p9k_declare -s POWERLEVEL9K_VCS_ACTIONFORMAT_FOREGROUND 1
_p9k_declare -b POWERLEVEL9K_SHOW_CHANGESET 0
_p9k_declare -e POWERLEVEL9K_VCS_LOADING_TEXT loading
_p9k_declare -a POWERLEVEL9K_VCS_GIT_HOOKS -- vcs-detect-changes git-untracked git-aheadbehind git-stash git-remotebranch git-tagname
_p9k_declare -a POWERLEVEL9K_VCS_HG_HOOKS -- vcs-detect-changes
_p9k_declare -a POWERLEVEL9K_VCS_SVN_HOOKS -- vcs-detect-changes svn-detect-changes
# If it takes longer than this to fetch git repo status, display the prompt with a greyed out
# vcs segment and fix it asynchronously when the results come in.
_p9k_declare -F POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS 0.01
(( POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS >= 0 )) || (( POWERLEVEL9K_VCS_MAX_SYNC_LATENCY_SECONDS = 0 ))
_p9k_declare -a POWERLEVEL9K_VCS_BACKENDS -- git
(( $+commands[git] )) || _POWERLEVEL9K_VCS_BACKENDS=(${_POWERLEVEL9K_VCS_BACKENDS:#git})
_p9k_declare -b POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING 0
_p9k_declare -i POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY -1
_p9k_declare -i POWERLEVEL9K_VCS_STAGED_MAX_NUM 1
_p9k_declare -i POWERLEVEL9K_VCS_UNSTAGED_MAX_NUM 1
_p9k_declare -i POWERLEVEL9K_VCS_UNTRACKED_MAX_NUM 1
_p9k_declare -i POWERLEVEL9K_VCS_CONFLICTED_MAX_NUM 1
_p9k_declare -i POWERLEVEL9K_VCS_COMMITS_AHEAD_MAX_NUM -1
_p9k_declare -i POWERLEVEL9K_VCS_COMMITS_BEHIND_MAX_NUM -1
_p9k_declare -b POWERLEVEL9K_VCS_RECURSE_UNTRACKED_DIRS 0
_p9k_declare -b POWERLEVEL9K_DISABLE_GITSTATUS 0
_p9k_declare -e POWERLEVEL9K_VI_INSERT_MODE_STRING "INSERT"
_p9k_declare -e POWERLEVEL9K_VI_COMMAND_MODE_STRING "NORMAL"
# VISUAL mode is shown as NORMAL unless POWERLEVEL9K_VI_VISUAL_MODE_STRING is explicitly set.
_p9k_declare -e POWERLEVEL9K_VI_VISUAL_MODE_STRING
# OVERWRITE mode is shown as INSERT unless POWERLEVEL9K_VI_OVERWRITE_MODE_STRING is explicitly set.
_p9k_declare -e POWERLEVEL9K_VI_OVERWRITE_MODE_STRING
_p9k_declare -s POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV true
_p9k_declare -b POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION 1
_p9k_declare -e POWERLEVEL9K_VIRTUALENV_LEFT_DELIMITER "("
_p9k_declare -e POWERLEVEL9K_VIRTUALENV_RIGHT_DELIMITER ")"
_p9k_declare -a POWERLEVEL9K_VIRTUALENV_GENERIC_NAMES -- virtualenv venv .venv env
_POWERLEVEL9K_VIRTUALENV_GENERIC_NAMES="${(j.|.)_POWERLEVEL9K_VIRTUALENV_GENERIC_NAMES}"
_p9k_declare -b POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION 1
_p9k_declare -e POWERLEVEL9K_NODEENV_LEFT_DELIMITER "["
_p9k_declare -e POWERLEVEL9K_NODEENV_RIGHT_DELIMITER "]"
_p9k_declare -b POWERLEVEL9K_KUBECONTEXT_SHOW_DEFAULT_NAMESPACE 1
_p9k_declare -a POWERLEVEL9K_KUBECONTEXT_SHORTEN --
# Defines context classes for the purpose of applying different styling to different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES must be an array with even number of elements. The first
# element in each pair defines a pattern against which the current context (in the format it is
# displayed in the prompt) gets matched. The second element defines context class. Patterns are
# tried in order. The first match wins.
#
# If a non-empty class <C> is assigned to a context, the segment is styled with
# POWERLEVEL9K_KUBECONTEXT_<U>_BACKGROUND and POWERLEVEL9K_KUBECONTEXT_<U>_FOREGROUND where <U> is
# uppercased <C>. Otherwise with POWERLEVEL9K_KUBECONTEXT_BACKGROUND and
# POWERLEVEL9K_KUBECONTEXT_FOREGROUND.
#
# Example: Use red background for contexts containing "prod", green for "testing" and yellow for
# everything else.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' prod
# '*testing*' testing
# '*' other)
#
# POWERLEVEL9K_KUBECONTEXT_PROD_BACKGROUND=red
# POWERLEVEL9K_KUBECONTEXT_TESTING_BACKGROUND=green
# POWERLEVEL9K_KUBECONTEXT_OTHER_BACKGROUND=yellow
_p9k_declare -a POWERLEVEL9K_KUBECONTEXT_CLASSES --
_p9k_declare -a POWERLEVEL9K_AWS_CLASSES --
_p9k_declare -a POWERLEVEL9K_TERRAFORM_CLASSES --
_p9k_declare -b POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT 0
_p9k_declare -a POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES -- 'service_account:*' SERVICE_ACCOUNT
# Specifies the format of java version.
#
# POWERLEVEL9K_JAVA_VERSION_FULL=true => 1.8.0_212-8u212-b03-0ubuntu1.18.04.1-b03
# POWERLEVEL9K_JAVA_VERSION_FULL=false => 1.8.0_212
#
# These correspond to `java -fullversion` and `java -version` respectively.
_p9k_declare -b POWERLEVEL9K_JAVA_VERSION_FULL 1
_p9k_declare -b POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE 0
# Format for the current time: 09:51:02. See `man 3 strftime`.
_p9k_declare -e POWERLEVEL9K_TIME_FORMAT "%D{%H:%M:%S}"
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
_p9k_declare -b POWERLEVEL9K_TIME_UPDATE_ON_COMMAND 0
# If set to true, time will update every second.
_p9k_declare -b POWERLEVEL9K_EXPERIMENTAL_TIME_REALTIME 0
local -i i=1
while (( i <= $#_POWERLEVEL9K_LEFT_PROMPT_ELEMENTS )); do
local segment=${${(U)_POWERLEVEL9K_LEFT_PROMPT_ELEMENTS[i]}//ฤฐ/I}
local var=POWERLEVEL9K_${segment}_LEFT_DISABLED
(( $+parameters[$var] )) || var=POWERLEVEL9K_${segment}_DISABLED
if [[ ${(P)var} == true ]]; then
_POWERLEVEL9K_LEFT_PROMPT_ELEMENTS[i,i]=()
else
(( ++i ))
fi
done
local -i i=1
while (( i <= $#_POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS )); do
local segment=${${(U)_POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS[i]}//ฤฐ/I}
local var=POWERLEVEL9K_${segment}_RIGHT_DISABLED
(( $+parameters[$var] )) || var=POWERLEVEL9K_${segment}_DISABLED
if [[ ${(P)var} == true ]]; then
_POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS[i,i]=()
else
(( ++i ))
fi
done
local var
for var in ${(@)${parameters[(I)POWERLEVEL9K_*]}/(#m)*/${(M)${parameters[_$MATCH]-$MATCH}:#$MATCH}}; do
case $parameters[$var] in
(scalar|integer|float)*) typeset -g _$var=${(P)var};;
array*) eval 'typeset -ga '_$var'=("${'$var'[@]}")';;
esac
done
}
# Keymap-select, overwrite-mode and vi-replace transitions: refresh the
# visual-mode tracking state and request a full prompt reset (state 2) on
# the next hook pass.
function _p9k_on_widget_zle-keymap-select() {
  _p9k_check_visual_mode
  __p9k_reset_state=2
}
function _p9k_on_widget_overwrite-mode() {
  _p9k_check_visual_mode
  __p9k_reset_state=2
}
function _p9k_on_widget_vi-replace() {
  _p9k_check_visual_mode
  __p9k_reset_state=2
}
# Tracks whether a visual selection is active in vicmd keymap. REGION_ACTIVE
# requires zsh >= 5.3; on older shells the check is a no-op.
if is-at-least 5.3; then
  function _p9k_check_visual_mode() {
    if [[ ${KEYMAP:-} != vicmd ]]; then
      return 0
    fi
    # REGION_ACTIVE is 2 for line-wise selection; fold it into 1 (active).
    local active=${${REGION_ACTIVE:-0}/2/1}
    if [[ $active == $_p9k__region_active ]]; then
      return 0
    fi
    _p9k__region_active=$active
    __p9k_reset_state=2
  }
else
  function _p9k_check_visual_mode() {}
fi
# Visual-mode widgets only need the region state refreshed; they do not
# force a prompt reset themselves (_p9k_check_visual_mode sets it if needed).
function _p9k_on_widget_visual-mode() {
  _p9k_check_visual_mode
}
function _p9k_on_widget_visual-line-mode() {
  _p9k_check_visual_mode
}
function _p9k_on_widget_deactivate-region() {
  _p9k_check_visual_mode
}
# When a new command line starts, un-hide the cursor if a previous prompt
# reset had hidden it.
function _p9k_on_widget_zle-line-init() {
  if (( _p9k__cursor_hidden )); then
    _p9k__cursor_hidden=0
    echoti cnorm
  fi
}
# Runs when ZLE finishes reading a command line (also invoked with $1 == int
# on ^C via send-break). Applies the transient prompt, fires the user's
# p10k-on-post-prompt hook, and redraws the finished prompt if needed.
function _p9k_on_widget_zle-line-finish() {
  # Guard: skip if this line was already finalized.
  (( $+_p9k__line_finished )) && return
  _p9k__line_finished=
  (( _p9k_reset_on_line_finish )) && __p9k_reset_state=2
  # Invoke the user-defined post-prompt hook if one exists.
  (( $+functions[p10k-on-post-prompt] )) && p10k-on-post-prompt
  if [[ -n $_p9k_transient_prompt ]]; then
    # Transient prompt replaces the finished prompt either always, or only
    # when the current directory hasn't changed since the last prompt.
    if [[ $_POWERLEVEL9K_TRANSIENT_PROMPT == always || $_p9k__cwd == $_p9k__last_prompt_pwd ]]; then
      RPROMPT=
      PROMPT=$_p9k_transient_prompt
      __p9k_reset_state=2
    else
      _p9k__last_prompt_pwd=$_p9k__cwd
    fi
  fi
  if (( __p9k_reset_state == 2 )); then
    if [[ $1 == int ]]; then
      # Interrupted (^C): schedule prompt restoration via a watched dummy fd
      # so _p9k_restore_prompt runs on the next event-loop iteration.
      _p9k__must_restore_prompt=1
      if (( !_p9k__restore_prompt_fd )); then
        sysopen -o cloexec -ru _p9k__restore_prompt_fd /dev/null
        zle -F $_p9k__restore_prompt_fd _p9k_restore_prompt
      fi
    fi
    if (( $+termcap[up] )); then
      # Move the cursor up one line (optionally hiding it first via civis) so
      # the reset prompt overwrites the old one instead of printing below it.
      (( _p9k__can_hide_cursor )) && local hide=$terminfo[civis] || local hide=
      echo -nE - $hide$'\n'$termcap[up]
    fi
    _p9k_reset_prompt
  fi
  # Non-empty sentinel marking this line as finished until the next prompt.
  _p9k__line_finished='%{%}'
}
# ^C aborts the current line; treat it as an interrupted ('int')
# line-finish so the prompt is restored correctly afterwards.
function _p9k_on_widget_send-break() {
  _p9k_on_widget_zle-line-finish int
}
# Usage example: _p9k_display_segment 58 _p9k__1rkubecontext hide
function _p9k_display_segment() {
[[ $_p9k__display_v[$1] == $3 ]] && return
_p9k__display_v[$1]=$3
[[ $3 == hide ]] && typeset -g $2= || unset $2
__p9k_reset_state=2
}
# Callback for the dummy fd registered by _p9k_widget: fires once the event
# loop is idle again after a redraw was deferred. $1 is the watched fd.
function _p9k_redraw() {
  zle -F $1    # stop watching the fd
  # NOTE(review): `{1}>&-` uses a positional rather than an identifier in the
  # {varid}>&- close syntax (same pattern as _p9k_restore_prompt) — confirm
  # intended zsh semantics before touching.
  exec {1}>&-
  _p9k__redraw_fd=0
  # Re-run the widget hook inside an anonymous function so WIDGET reports
  # zle-line-pre-redraw (-h hides the outer WIDGET special).
  () {
    local -h WIDGET=zle-line-pre-redraw
    _p9k_widget_hook ''
  }
}
# Cancels a pending deferred redraw, if any: unregisters the watched fd from
# zle and closes it.
function _p9k_deschedule_redraw() {
  if (( ! _p9k__redraw_fd )); then
    return 1
  fi
  zle -F $_p9k__redraw_fd           # stop watching
  exec {_p9k__redraw_fd}>&-         # close the descriptor
  _p9k__redraw_fd=0
}
# Central hook that runs after every wrapped ZLE widget. $1 is the logical
# widget name ('' for deferred redraws); the remaining args are forwarded to
# the user's p10k-on-post-widget hook.
function _p9k_widget_hook() {
  _p9k_deschedule_redraw
  if (( ${+functions[p10k-on-post-widget]} || ${#_p9k_show_on_command} )); then
    # Recompute the list of commands present in the edit buffer, reusing the
    # cached result when the buffer hasn't changed since last time.
    local -a P9K_COMMANDS
    if [[ "$_p9k__last_buffer" == "$PREBUFFER$BUFFER" ]]; then
      P9K_COMMANDS=(${_p9k__last_commands[@]})
    else
      _p9k__last_buffer="$PREBUFFER$BUFFER"
      if [[ -n "$_p9k__last_buffer" ]]; then
        # this must run with user options
        _p9k_parse_buffer "$_p9k__last_buffer" $_POWERLEVEL9K_COMMANDS_MAX_TOKEN_COUNT
      fi
      _p9k__last_commands=(${P9K_COMMANDS[@]})
    fi
  fi
  # Switch to p10k's standard function preamble/options (see __p9k_intro).
  eval "$__p9k_intro"
  (( _p9k__restore_prompt_fd )) && _p9k_restore_prompt $_p9k__restore_prompt_fd
  if [[ $1 == clear-screen ]]; then
    # Treat the terminal as fresh and force full re-expansion.
    P9K_TTY=new
    _p9k__expanded=0
    _p9k_reset_prompt
  fi
  __p9k_reset_state=1
  _p9k_check_visual_mode
  # Toggle segments configured with *_SHOW_ON_COMMAND based on the commands
  # currently typed into the buffer.
  local pat idx var
  for pat idx var in $_p9k_show_on_command; do
    if (( $P9K_COMMANDS[(I)$pat] )); then
      _p9k_display_segment $idx $var show
    else
      _p9k_display_segment $idx $var hide
    fi
  done
  (( $+functions[p10k-on-post-widget] )) && p10k-on-post-widget "${@:2}"
  # Widget-specific handler, e.g. _p9k_on_widget_zle-line-finish.
  (( $+functions[_p9k_on_widget_$1] )) && _p9k_on_widget_$1
  # State 2 means some handler above requested a prompt reset.
  (( __p9k_reset_state == 2 )) && _p9k_reset_prompt
  __p9k_reset_state=0
}
# Generic wrapper installed over every hooked ZLE widget. $1 is the widget
# name. The original widget (saved as ._p9k_orig_$1) runs first; then the
# p10k hook runs if the prompt machinery is active. Returns the original
# widget's exit status.
function _p9k_widget() {
  local f=${widgets[._p9k_orig_$1]:-}
  local -i res
  [[ -z $f ]] || {
    # Widgets backed by a -z4h-* function are called directly as functions;
    # everything else goes through `zle`.
    [[ $f == user:-z4h-* ]] && {
      "${f#user:}" "${@:2}"
      res=$?
    } || {
      zle ._p9k_orig_$1 -- "${@:2}"
      res=$?
    }
  }
  (( ! __p9k_enabled )) || [[ $CONTEXT != start ]] || {
    # A pre-redraw while input is still pending is deferred: watch a dummy fd
    # so the hook runs once the event loop becomes idle.
    [[ $1 == zle-line-pre-redraw ]] && (( PENDING || KEYS_QUEUED_COUNT )) && {
      (( _p9k__redraw_fd )) || {
        sysopen -o cloexec -ru _p9k__redraw_fd /dev/null
        zle -F $_p9k__redraw_fd _p9k_redraw
      }
      return res
    }
    _p9k_widget_hook "$@"
  }
  return res
}
# send-break (^C) wrapper: unlike _p9k_widget, the hook must run BEFORE the
# original widget, because send-break aborts the current line.
function _p9k_widget_send-break() {
  if (( __p9k_enabled )) && [[ $CONTEXT == start ]]; then
    _p9k_widget_hook send-break "$@"
  fi
  if [[ -n ${widgets[._p9k_orig_send-break]:-} ]]; then
    zle ._p9k_orig_send-break -- "$@"
  fi
}
# Guard: widgets are wrapped at most once per shell.
typeset -gi __p9k_widgets_wrapped=0
# Wraps ZLE widgets so that powerlevel10k's hook runs around user
# interaction. Each wrapped widget is saved under ._p9k_orig_<name> and
# replaced with _p9k_widget_<name>.
function _p9k_wrap_widgets() {
  (( __p9k_widgets_wrapped )) && return
  typeset -gir __p9k_widgets_wrapped=1
  local -a widget_list
  if is-at-least 5.3; then
    # zsh >= 5.3 has zle-line-pre-redraw, so wrapping this fixed set of
    # special widgets (plus user-requested hooks) is sufficient.
    local -aU widget_list=(
      zle-line-pre-redraw
      zle-line-init
      zle-line-finish
      zle-keymap-select
      overwrite-mode
      vi-replace
      visual-mode
      visual-line-mode
      deactivate-region
      clear-screen
      send-break
      $_POWERLEVEL9K_HOOK_WIDGETS
    )
  else
    # There is no zle-line-pre-redraw in zsh < 5.3, so we have to wrap all widgets
    # with key bindings. This costs extra 3ms: 1.5ms to fetch the list of widgets and
    # another 1.5ms to wrap them.
    local keymap tmp=${TMPDIR:-/tmp}/p10k.bindings.$sysparams[pid]
    {
      for keymap in $keymaps; do bindkey -M $keymap; done >$tmp
      # The last element takes the final word of each `bindkey` line (the
      # widget name) and filters out quoted string bindings (ending in ")
      # and dot-prefixed internal widgets.
      local -aU widget_list=(
        zle-isearch-exit
        zle-isearch-update
        zle-line-init
        zle-line-finish
        zle-history-line-set
        zle-keymap-select
        send-break
        $_POWERLEVEL9K_HOOK_WIDGETS
        ${${${(f)"$(<$tmp)"}##* }:#(*\"|.*)}
      )
    } always {
      zf_rm -f -- $tmp
    }
  fi
  local widget
  for widget in $widget_list; do
    if (( ! $+functions[_p9k_widget_$widget] )); then
      functions[_p9k_widget_$widget]='_p9k_widget '${(q)widget}' "$@"'
    fi
    # The leading dot is to work around bugs in zsh-syntax-highlighting.
    zle -A $widget ._p9k_orig_$widget
    zle -N $widget _p9k_widget_$widget
  done 2>/dev/null # `zle -A` fails for inexisting widgets and complains to stderr
}
# Callback for the fd registered in _p9k_on_widget_zle-line-finish: restores
# the regular prompt after a transient/interrupted prompt. $1 is the fd.
function _p9k_restore_prompt() {
  eval "$__p9k_intro"
  zle -F $1    # stop watching the fd
  # NOTE(review): `{1}>&-` uses a positional in the {varid}>&- close syntax
  # (same pattern as _p9k_redraw) — confirm intended zsh semantics.
  exec {1}>&-
  _p9k__restore_prompt_fd=0
  (( _p9k__must_restore_prompt )) || return 0
  _p9k__must_restore_prompt=0
  # Clear the line-finished sentinel and rebuild the prompt from scratch.
  unset _p9k__line_finished
  _p9k__refresh_reason=restore
  _p9k_set_prompt
  _p9k__refresh_reason=
  _p9k__expanded=0
  _p9k_reset_prompt
}
# Internal pseudo-segment that renders nothing; it only emits the expansion
# that clears the segment-separator accumulator (_p9k__sss).
prompt__p9k_internal_nothing() {
  _p9k__prompt+='${_p9k__sss::=}'
}
# The instant-prompt variant produces exactly the same (empty) content.
instant_prompt__p9k_internal_nothing() {
  prompt__p9k_internal_nothing
}
# _p9k_build_gap_post line_number
#
# Builds (into _p9k__ret) the prompt-expansion snippet rendered after the
# left prompt on the given line: the styled gap filler between the left and
# right prompts, followed by the right prompt. $1 == 1 means the first line.
_p9k_build_gap_post() {
  if [[ $1 == 1 ]]; then
    local kind_l=first kind_u=FIRST
  else
    local kind_l=newline kind_u=NEWLINE
  fi
  _p9k_get_icon '' MULTILINE_${kind_u}_PROMPT_GAP_CHAR
  # Gap filler character; defaults to a space.
  local char=${_p9k__ret:- }
  _p9k_prompt_length $char
  # The filler must be a single character that renders one cell wide.
  if (( _p9k__ret != 1 || $#char != 1 )); then
    >&2 print -rP -- "%F{red}WARNING!%f %BMULTILINE_${kind_u}_PROMPT_GAP_CHAR%b is not one character long. Will use ' '."
    >&2 print -rP -- "Either change the value of %BPOWERLEVEL9K_MULTILINE_${kind_u}_PROMPT_GAP_CHAR%b or remove it."
    char=' '
  fi
  # Accumulate background + foreground styling for the gap.
  local style
  _p9k_color prompt_multiline_${kind_l}_prompt_gap BACKGROUND ""
  [[ -n $_p9k__ret ]] && _p9k_background $_p9k__ret
  style+=$_p9k__ret
  _p9k_color prompt_multiline_${kind_l}_prompt_gap FOREGROUND ""
  [[ -n $_p9k__ret ]] && _p9k_foreground $_p9k__ret
  style+=$_p9k__ret
  _p9k_escape_style $style
  style=$_p9k__ret
  # Optional user-defined expansion for the gap content; default is the
  # plain filler, ${P9K_GAP}.
  local exp=_POWERLEVEL9K_MULTILINE_${kind_u}_PROMPT_GAP_EXPANSION
  (( $+parameters[$exp] )) && exp=${(P)exp} || exp='${P9K_GAP}'
  # Pick a (pl...) pad delimiter that cannot collide with the filler char.
  [[ $char == '.' ]] && local s=',' || local s='.'
  # The snippet below is evaluated at prompt time; _p9k__m holds the gap
  # width computed by _p9k_gap_pre (negative means no room).
  _p9k__ret=$'${${_p9k__g+\n}:-'$style'${${${_p9k__m:#-*}:+'
  _p9k__ret+='${${_p9k__'$1'g+${(pl.$((_p9k__m+1)).. .)}}:-'
  if [[ $exp == '${P9K_GAP}' ]]; then
    _p9k__ret+='${(pl'$s'$((_p9k__m+1))'$s$s$char$s')}'
  else
    _p9k__ret+='${${P9K_GAP::=${(pl'$s'$((_p9k__m+1))'$s$s$char$s')}}+}'
    _p9k__ret+='${:-"'$exp'"}'
    style=1
  fi
  _p9k__ret+='}'
  # ksh arrays are 0-based; adjust the _p9k_t index accordingly.
  if (( __p9k_ksh_arrays )); then
    _p9k__ret+=$'$_p9k__rprompt${_p9k_t[$((!_p9k__ind))]}}:-\n}'
  else
    _p9k__ret+=$'$_p9k__rprompt${_p9k_t[$((1+!_p9k__ind))]}}:-\n}'
  fi
  [[ -n $style ]] && _p9k__ret+='%b%k%f'
  _p9k__ret+='}'
}
# Splits the configured left/right prompt elements into per-line lists
# (_p9k_line_segments_{left,right}) and builds the static per-line prefix,
# suffix, gap and frame expansions.
#
# Defect fixed: six conditions read `[[ _p9k__ret == *%* ]]`, which compares
# the LITERAL string "_p9k__ret" against the pattern (always false) instead
# of the variable's value, so `%b%k%f` was never appended to multiline frame
# prefixes/suffixes containing prompt escapes. The missing `$` was added.
_p9k_init_lines() {
  local -a left_segments=($_POWERLEVEL9K_LEFT_PROMPT_ELEMENTS)
  local -a right_segments=($_POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS)
  if (( _POWERLEVEL9K_PROMPT_ON_NEWLINE )); then
    left_segments+=(newline _p9k_internal_nothing)
  fi
  # Line counts: 1 + number of `newline` separators on each side.
  local -i num_left_lines=$((1 + ${#${(@M)left_segments:#newline}}))
  local -i num_right_lines=$((1 + ${#${(@M)right_segments:#newline}}))
  if (( num_right_lines > num_left_lines )); then
    repeat $((num_right_lines - num_left_lines)) left_segments=(newline $left_segments)
    local -i num_lines=num_right_lines
  else
    if (( _POWERLEVEL9K_RPROMPT_ON_NEWLINE )); then
      # Push the right prompt down so it ends on the last line.
      repeat $((num_left_lines - num_right_lines)) right_segments=(newline $right_segments)
    else
      repeat $((num_left_lines - num_right_lines)) right_segments+=newline
    fi
    local -i num_lines=num_left_lines
  fi
  local -i i
  for i in {1..$num_lines}; do
    # Everything up to the next `newline` marker belongs to line i.
    local -i left_end=${left_segments[(i)newline]}
    local -i right_end=${right_segments[(i)newline]}
    _p9k_line_segments_left+="${(pj:\0:)left_segments[1,left_end-1]}"
    _p9k_line_segments_right+="${(pj:\0:)right_segments[1,right_end-1]}"
    (( left_end > $#left_segments )) && left_segments=() || shift left_end left_segments
    (( right_end > $#right_segments )) && right_segments=() || shift right_end right_segments
    _p9k_get_icon '' LEFT_SEGMENT_SEPARATOR
    _p9k_get_icon 'prompt_empty_line' LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL $_p9k__ret
    _p9k_escape $_p9k__ret
    _p9k_line_prefix_left+='${_p9k__'$i'l-${${:-${_p9k__bg::=NONE}${_p9k__i::=0}${_p9k__sss::=%f'$_p9k__ret'}}+}'
    _p9k_line_suffix_left+='%b%k$_p9k__sss%b%k%f'
    _p9k_escape ${(g::)_POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL}
    [[ -n $_p9k__ret ]] && _p9k_line_never_empty_right+=1 || _p9k_line_never_empty_right+=0
    _p9k_line_prefix_right+='${_p9k__'$i'r-${${:-${_p9k__bg::=NONE}${_p9k__i::=0}${_p9k__sss::='$_p9k__ret'}}+}'
    _p9k_line_suffix_right+='$_p9k__sss%b%k%f}' # gets overridden for _p9k_emulate_zero_rprompt_indent
    if (( i == num_lines )); then
      # it's safe to use _p9k_prompt_length on the last line because it cannot have prompt connection
      _p9k_prompt_length ${(e)_p9k__ret}
      (( _p9k__ret )) || _p9k_line_never_empty_right[-1]=0
    fi
  done
  _p9k_get_icon '' LEFT_SEGMENT_END_SEPARATOR
  if [[ -n $_p9k__ret ]]; then
    _p9k__ret+=%b%k%f
    # Not escaped for historical reasons.
    _p9k__ret='${:-"'$_p9k__ret'"}'
    if (( _POWERLEVEL9K_PROMPT_ON_NEWLINE )); then
      _p9k_line_suffix_left[-2]+=$_p9k__ret
    else
      _p9k_line_suffix_left[-1]+=$_p9k__ret
    fi
  fi
  for i in {1..$num_lines}; do _p9k_line_suffix_left[i]+='}'; done
  if (( num_lines > 1 )); then
    # Gap + right prompt for every line except the last.
    for i in {1..$((num_lines-1))}; do
      _p9k_build_gap_post $i
      _p9k_line_gap_post+=$_p9k__ret
    done
    if [[ $+_POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX == 1 || $_POWERLEVEL9K_PROMPT_ON_NEWLINE == 1 ]]; then
      _p9k_get_icon '' MULTILINE_FIRST_PROMPT_PREFIX
      if [[ -n $_p9k__ret ]]; then
        [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
        # Not escaped for historical reasons.
        _p9k__ret='${_p9k__1l_frame-"'$_p9k__ret'"}'
        _p9k_line_prefix_left[1]=$_p9k__ret$_p9k_line_prefix_left[1]
      fi
    fi
    if [[ $+_POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX == 1 || $_POWERLEVEL9K_PROMPT_ON_NEWLINE == 1 ]]; then
      _p9k_get_icon '' MULTILINE_LAST_PROMPT_PREFIX
      if [[ -n $_p9k__ret ]]; then
        [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
        # Not escaped for historical reasons.
        _p9k__ret='${_p9k__'$num_lines'l_frame-"'$_p9k__ret'"}'
        _p9k_line_prefix_left[-1]=$_p9k__ret$_p9k_line_prefix_left[-1]
      fi
    fi
    _p9k_get_icon '' MULTILINE_FIRST_PROMPT_SUFFIX
    if [[ -n $_p9k__ret ]]; then
      [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
      _p9k_line_suffix_right[1]+='${_p9k__1r_frame-'${(qqq)_p9k__ret}'}'
      _p9k_line_never_empty_right[1]=1
    fi
    _p9k_get_icon '' MULTILINE_LAST_PROMPT_SUFFIX
    if [[ -n $_p9k__ret ]]; then
      [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
      _p9k_line_suffix_right[-1]+='${_p9k__'$num_lines'r_frame-'${(qqq)_p9k__ret}'}'
      # it's safe to use _p9k_prompt_length on the last line because it cannot have prompt connection
      _p9k_prompt_length $_p9k__ret
      (( _p9k__ret )) && _p9k_line_never_empty_right[-1]=1
    fi
    if (( num_lines > 2 )); then
      if [[ $+_POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX == 1 || $_POWERLEVEL9K_PROMPT_ON_NEWLINE == 1 ]]; then
        _p9k_get_icon '' MULTILINE_NEWLINE_PROMPT_PREFIX
        if [[ -n $_p9k__ret ]]; then
          [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
          for i in {2..$((num_lines-1))}; do
            # Not escaped for historical reasons.
            _p9k_line_prefix_left[i]='${_p9k__'$i'l_frame-"'$_p9k__ret'"}'$_p9k_line_prefix_left[i]
          done
        fi
      fi
      _p9k_get_icon '' MULTILINE_NEWLINE_PROMPT_SUFFIX
      if [[ -n $_p9k__ret ]]; then
        [[ $_p9k__ret == *%* ]] && _p9k__ret+=%b%k%f  # fix: added missing $
        for i in {2..$((num_lines-1))}; do
          _p9k_line_suffix_right[i]+='${_p9k__'$i'r_frame-'${(qqq)_p9k__ret}'}'
        done
        _p9k_line_never_empty_right[2,-2]=${(@)_p9k_line_never_empty_right[2,-2]/0/1}
      fi
    fi
  fi
}
_p9k_all_params_eq() {
local key
for key in ${parameters[(I)${~1}]}; do
[[ ${(P)key} == $2 ]] || return
done
}
# Builds _p9k_display_k: a map from human-readable display keys (e.g.
# '2/left/dir') to indices in the display state array. Every key is
# registered twice: with a positive (from the top) and a negative (from the
# bottom) line number. Note: the $((n+=2)) side effects rely on zsh's
# left-to-right evaluation inside the array literal — do not reorder.
_p9k_init_display() {
  _p9k_display_k=(empty_line 1 ruler 3)
  local -i n=3 i
  local name
  for i in {1..$#_p9k_line_segments_left}; do
    # j is the same line counted from the bottom (negative index).
    local -i j=$((-$#_p9k_line_segments_left+i-1))
    _p9k_display_k+=(
      $i $((n+=2)) $j $n
      $i/left_frame $((n+=2)) $j/left_frame $n
      $i/right_frame $((n+=2)) $j/right_frame $n
      $i/left $((n+=2)) $j/left $n
      $i/right $((n+=2)) $j/right $n
      $i/gap $((n+=2)) $j/gap $n)
    # Per-segment keys; the _joined suffix is stripped from segment names.
    for name in ${${(@0)_p9k_line_segments_left[i]}%_joined}; do
      _p9k_display_k+=($i/left/$name $((n+=2)) $j/left/$name $n)
    done
    for name in ${${(@0)_p9k_line_segments_right[i]}%_joined}; do
      _p9k_display_k+=($i/right/$name $((n+=2)) $j/right/$name $n)
    done
  done
}
# Precomputes the static pieces of the prompt: the _p9k_t string table, the
# gap-measurement machinery, prompt prefixes/suffixes, and the empty-line
# and ruler entries.
_p9k_init_prompt() {
  _p9k_t=($'\n' $'%{\n%}' '')
  _p9k_prompt_overflow_bug && _p9k_t[2]=$'%{%G\n%}'
  _p9k_init_lines
  # _p9k_gap_pre measures the rendered width of left+right prompt at prompt
  # time: 10 probes with zsh's %N(l.true.false) conditional narrow the range
  # [_p9k__x, _p9k__y); the remaining gap width ends up in _p9k__m.
  _p9k_gap_pre='${${:-${_p9k__x::=0}${_p9k__y::=1024}${_p9k__p::=$_p9k__lprompt$_p9k__rprompt}'
  repeat 10; do
    _p9k_gap_pre+='${_p9k__m::=$(((_p9k__x+_p9k__y)/2))}'
    _p9k_gap_pre+='${_p9k__xy::=${${(%):-$_p9k__p%$_p9k__m(l./$_p9k__m;$_p9k__y./$_p9k__x;$_p9k__m)}##*/}}'
    _p9k_gap_pre+='${_p9k__x::=${_p9k__xy%;*}}'
    _p9k_gap_pre+='${_p9k__y::=${_p9k__xy#*;}}'
  done
  _p9k_gap_pre+='${_p9k__m::=$((_p9k__clm-_p9k__x-_p9k__ind-1))}'
  _p9k_gap_pre+='}+}'
  # Temporarily widen COLUMNS to 1024 while the prompt expands; the suffixes
  # restore the saved value (_p9k__clm).
  _p9k_prompt_prefix_left='${${_p9k__clm::=$COLUMNS}+}${${COLUMNS::=1024}+}'
  _p9k_prompt_prefix_right='${_p9k__'$#_p9k_line_segments_left'-${${_p9k__clm::=$COLUMNS}+}${${COLUMNS::=1024}+}'
  _p9k_prompt_suffix_left='${${COLUMNS::=$_p9k__clm}+}'
  _p9k_prompt_suffix_right='${${COLUMNS::=$_p9k__clm}+}}'
  # Capture KEYMAP / ZLE_STATE at expansion time for segments that need them.
  if _p9k_segment_in_use vi_mode || _p9k_segment_in_use prompt_char; then
    _p9k_prompt_prefix_left+='${${_p9k__keymap::=${KEYMAP:-$_p9k__keymap}}+}'
  fi
  if { _p9k_segment_in_use vi_mode && (( $+_POWERLEVEL9K_VI_OVERWRITE_MODE_STRING )) } ||
    { _p9k_segment_in_use prompt_char && (( _POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE )) }; then
    _p9k_prompt_prefix_left+='${${_p9k__zle_state::=${ZLE_STATE:-$_p9k__zle_state}}+}'
  fi
  _p9k_prompt_prefix_left+='%b%k%f'
  # Bug fixed in: https://github.com/zsh-users/zsh/commit/3eea35d0853bddae13fa6f122669935a01618bf9.
  # It affects most terminals when RPROMPT is non-empty and ZLE_RPROMPT_INDENT is zero.
  # We can work around it as long as RPROMPT ends with a space.
  if [[ -n $_p9k_line_segments_right[-1] && $_p9k_line_never_empty_right[-1] == 0 &&
        $ZLE_RPROMPT_INDENT == 0 ]] &&
     _p9k_all_params_eq '_POWERLEVEL9K_*WHITESPACE_BETWEEN_RIGHT_SEGMENTS' ' ' &&
     _p9k_all_params_eq '_POWERLEVEL9K_*RIGHT_RIGHT_WHITESPACE' ' ' &&
     _p9k_all_params_eq '_POWERLEVEL9K_*RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL' '' &&
     ! is-at-least 5.7.2; then
    _p9k_emulate_zero_rprompt_indent=1
    _p9k_prompt_prefix_left+='${${:-${_p9k__real_zle_rprompt_indent:=$ZLE_RPROMPT_INDENT}${ZLE_RPROMPT_INDENT::=1}${_p9k__ind::=0}}+}'
    _p9k_line_suffix_right[-1]='${_p9k__sss:+${_p9k__sss% }%E}}'
  else
    _p9k_emulate_zero_rprompt_indent=0
    _p9k_prompt_prefix_left+='${${_p9k__ind::=${${ZLE_RPROMPT_INDENT:-1}/#-*/0}}+}'
  fi
  # iTerm2 shell-integration prompt marks.
  if [[ $ITERM_SHELL_INTEGRATION_INSTALLED == Yes ]]; then
    _p9k_prompt_prefix_left+=$'%{\e]133;A\a%}'
    _p9k_prompt_suffix_left+=$'%{\e]133;B\a%}'
  fi
  # _p9k_t entry for the newline(s) added before the prompt (may be empty).
  if (( _POWERLEVEL9K_PROMPT_ADD_NEWLINE_COUNT > 0 )); then
    _p9k_t+=${(pl.$_POWERLEVEL9K_PROMPT_ADD_NEWLINE_COUNT..\n.)}
  else
    _p9k_t+=''
  fi
  _p9k_empty_line_idx=$#_p9k_t
  if (( __p9k_ksh_arrays )); then
    _p9k_prompt_prefix_left+='${_p9k_t[${_p9k__empty_line_i:-'$#_p9k_t'}-1]}'
  else
    _p9k_prompt_prefix_left+='${_p9k_t[${_p9k__empty_line_i:-'$#_p9k_t'}]}'
  fi
  # Build the ruler line and append it to _p9k_t.
  _p9k_get_icon '' RULER_CHAR
  local ruler_char=$_p9k__ret
  _p9k_prompt_length $ruler_char
  # Ruler char must be one character rendering one cell; otherwise a space.
  (( _p9k__ret == 1 && $#ruler_char == 1 )) || ruler_char=' '
  _p9k_color prompt_ruler BACKGROUND ""
  if [[ -z $_p9k__ret && $ruler_char == ' ' ]]; then
    local ruler=$'\n'
  else
    _p9k_background $_p9k__ret
    local ruler=%b$_p9k__ret
    _p9k_color prompt_ruler FOREGROUND ""
    _p9k_foreground $_p9k__ret
    ruler+=$_p9k__ret
    # Pad-expression delimiter must differ from the ruler char.
    [[ $ruler_char == '.' ]] && local sep=',' || local sep='.'
    ruler+='${(pl'$sep'${$((_p9k__clm-_p9k__ind))/#-*/0}'$sep$sep$ruler_char$sep')}%k%f'
    if (( __p9k_ksh_arrays )); then
      ruler+='${_p9k_t[$((!_p9k__ind))]}'
    else
      ruler+='${_p9k_t[$((1+!_p9k__ind))]}'
    fi
  fi
  _p9k_t+=$ruler
  _p9k_ruler_idx=$#_p9k_t
  if (( __p9k_ksh_arrays )); then
    _p9k_prompt_prefix_left+='${(e)_p9k_t[${_p9k__ruler_i:-'$#_p9k_t'}-1]}'
  else
    _p9k_prompt_prefix_left+='${(e)_p9k_t[${_p9k__ruler_i:-'$#_p9k_t'}]}'
  fi
  # Reset on line finish iff the time segment should reflect command start
  # time (subshell isolates the && status; $? is inverted into 0/1).
  ( _p9k_segment_in_use time && (( _POWERLEVEL9K_TIME_UPDATE_ON_COMMAND )) )
  _p9k_reset_on_line_finish=$((!$?))
  # Store the big expansion strings in _p9k_t and refer to them by index.
  _p9k_t+=$_p9k_gap_pre
  _p9k_gap_pre='${(e)_p9k_t['$(($#_p9k_t - __p9k_ksh_arrays))']}'
  _p9k_t+=$_p9k_prompt_prefix_left
  _p9k_prompt_prefix_left='${(e)_p9k_t['$(($#_p9k_t - __p9k_ksh_arrays))']}'
}
# Detects whether the shell is running over SSH and exports P9K_SSH=0/1.
# Idempotent: does nothing if P9K_SSH is already set.
_p9k_init_ssh() {
  # The following code is based on Pure:
  # https://github.com/sindresorhus/pure/blob/e8abf9d37185ec9b7b4398ca9c5eba555a1028eb/pure.zsh.
  #
  # License: https://github.com/sindresorhus/pure/blob/e8abf9d37185ec9b7b4398ca9c5eba555a1028eb/license.
  if [[ -n $P9K_SSH ]]; then
    return
  fi
  typeset -gix P9K_SSH=0
  if [[ -n $SSH_CLIENT || -n $SSH_TTY || -n $SSH_CONNECTION ]]; then
    P9K_SSH=1
    return 0
  fi
  # When changing user on a remote system, the $SSH_CONNECTION environment
  # variable can be lost. Attempt detection via `who`.
  (( $+commands[who] )) || return
  local ip6_pat='(([0-9a-fA-F]+:)|:){2,}[0-9a-fA-F]+'  # simplified, partial ipv6 only
  local ip4_pat='([0-9]{1,3}\.){3}[0-9]+'              # simplified, allows invalid ranges
  # Two non-consecutive periods are assumed to be a hostname: matches
  # `x.y.z` but not `x.y`.
  local host_pat='([.][^. ]+){2}'
  local line
  line="$(who -m 2>/dev/null)" ||
    line=${(@M)${(f)"$(who 2>/dev/null)"}:#*[[:space:]]${TTY#/dev/}[[:space:]]*}
  # The remote address is usually parenthesized, but not on all systems
  # (e.g. Busybox), so the parentheses are optional.
  [[ $line =~ "\(?($ip4_pat|$ip6_pat|$host_pat)\)?\$" ]] && P9K_SSH=1
}
# Decides whether powerlevel10k must (re)initialize. Returns 0 when any
# configuration input changed since the last init, 1 when nothing changed.
_p9k_must_init() {
  (( _POWERLEVEL9K_DISABLE_HOT_RELOAD && !_p9k__force_must_init )) && return 1
  _p9k__force_must_init=0
  local IFS sig
  if [[ -n $_p9k__param_sig ]]; then
    # Re-evaluate the cached signature pattern; if the result still matches
    # the stored signature, nothing relevant changed.
    IFS=$'\2' sig="${(e)_p9k__param_pat}"
    [[ $sig == $_p9k__param_sig ]] && return 1
    _p9k_deinit
  fi
  # Rebuild the signature pattern: version marker plus every input that
  # affects prompt construction, joined with \1 separators. The pattern is
  # evaluated lazily with ${(e)...} on subsequent calls.
  _p9k__param_pat=$'v108\1'${(q)ZSH_VERSION}$'\1'${(q)ZSH_PATCHLEVEL}$'\1'
  _p9k__param_pat+=$'${#parameters[(I)POWERLEVEL9K_*]}\1${(%):-%n%#}\1$GITSTATUS_LOG_LEVEL\1'
  _p9k__param_pat+=$'$GITSTATUS_ENABLE_LOGGING\1$GITSTATUS_DAEMON\1$GITSTATUS_NUM_THREADS\1'
  _p9k__param_pat+=$'$GITSTATUS_CACHE_DIR\1$GITSTATUS_AUTO_INSTALL\1${ZLE_RPROMPT_INDENT:-1}\1'
  _p9k__param_pat+=$'$__p9k_sh_glob\1$__p9k_ksh_arrays\1$ITERM_SHELL_INTEGRATION_INSTALLED\1'
  _p9k__param_pat+=$'${PROMPT_EOL_MARK-%B%S%#%s%b}\1$commands[locale]\1$langinfo[CODESET]\1'
  _p9k__param_pat+=$'$VTE_VERSION\1$TERM_PROGRAM\1$DEFAULT_USER\1$P9K_SSH\1$commands[uname]\1'
  _p9k__param_pat+=$'$__p9k_root_dir\1$functions[p10k-on-init]\1$functions[p10k-on-pre-prompt]\1'
  _p9k__param_pat+=$'$functions[p10k-on-post-widget]\1$functions[p10k-on-post-prompt]\1'
  _p9k__param_pat+=$'$+commands[git]\1$terminfo[colors]'
  # Append a reference to every POWERLEVEL9K_* parameter (sorted) so any
  # user-config change invalidates the signature.
  local MATCH
  IFS=$'\1' _p9k__param_pat+="${(@)${(@o)parameters[(I)POWERLEVEL9K_*]}:/(#m)*/\${${(q)MATCH}-$IFS\}}"
  IFS=$'\2' _p9k__param_sig="${(e)_p9k__param_pat}"
}
# Records the detected OS.
#
#   $1 -- OS name, stored in _p9k_os
#   $2 -- icon name, resolved via _p9k_get_icon into _p9k_os_icon
function _p9k_set_os() {
  _p9k_get_icon prompt_os_icon $2
  _p9k_os_icon=$_p9k__ret
  _p9k_os=$1
}
# One-time initialization whose results can be cached to disk and restored by
# _p9k_restore_state: icons, params, prompt layout, OS detection, transient
# prompt, joined-segment maps, dir classes, exit-code names and vcs indexes.
function _p9k_init_cacheable() {
_p9k_init_icons
_p9k_init_params
_p9k_init_prompt
_p9k_init_display
# Terminals other than old VTE builds are assumed to support OSC 8 hyperlinks.
# https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda#backward-compatibility
if [[ $VTE_VERSION != (<1-4602>|4801) ]]; then
_p9k_term_has_href=1
fi
local elem func
local -i i=0
# Collect SHOW_ON_COMMAND rules: for each segment with a
# POWERLEVEL9K_<SEGMENT>_SHOW_ON_COMMAND parameter, store a glob matching the
# trigger commands, the segment's display index, and its visibility variable.
for i in {1..$#_p9k_line_segments_left}; do
for elem in ${${${(@0)_p9k_line_segments_left[i]}%_joined}//-/_}; do
# NOTE(review): `ฤฐ` here looks like mojibake of the Turkish dotted capital I
# (U+0130) used to make (U) locale-safe — confirm against upstream source.
local var=POWERLEVEL9K_${${(U)elem}//ฤฐ/I}_SHOW_ON_COMMAND
(( $+parameters[$var] )) || continue
_p9k_show_on_command+=(
$'(|*[/\0])('${(j.|.)${(P)var}}')'
$((1+_p9k_display_k[$i/left/$elem]))
_p9k__${i}l$elem)
done
for elem in ${${${(@0)_p9k_line_segments_right[i]}%_joined}//-/_}; do
local var=POWERLEVEL9K_${${(U)elem}//ฤฐ/I}_SHOW_ON_COMMAND
(( $+parameters[$var] )) || continue
# NOTE(review): `cmds` is assigned but never used in this loop.
local cmds=(${(P)var})
_p9k_show_on_command+=(
$'(|*[/\0])('${(j.|.)${(P)var}}')'
$((1+$_p9k_display_k[$i/right/$elem]))
_p9k__${i}r$elem)
done
done
# Precompute the transient prompt string: a %(?..) conditional that renders
# the OK prompt char on success and the ERROR one on failure.
if [[ $_POWERLEVEL9K_TRANSIENT_PROMPT != off ]]; then
local sep=$'\1'
_p9k_transient_prompt='%b%k%s%u%(?'$sep
_p9k_color prompt_prompt_char_OK_VIINS FOREGROUND 76
_p9k_foreground $_p9k__ret
_p9k_transient_prompt+=$_p9k__ret
# NOTE(review): "โฏ" appears to be mojibake of the ❯ prompt character.
_p9k_transient_prompt+='${${P9K_CONTENT::="โฏ"}+}'
_p9k_param prompt_prompt_char_OK_VIINS CONTENT_EXPANSION '${P9K_CONTENT}'
_p9k_transient_prompt+='${:-"'$_p9k__ret'"}'
_p9k_transient_prompt+=$sep
_p9k_color prompt_prompt_char_ERROR_VIINS FOREGROUND 196
_p9k_foreground $_p9k__ret
_p9k_transient_prompt+=$_p9k__ret
_p9k_transient_prompt+='${${P9K_CONTENT::="โฏ"}+}'
_p9k_param prompt_prompt_char_ERROR_VIINS CONTENT_EXPANSION '${P9K_CONTENT}'
_p9k_transient_prompt+='${:-"'$_p9k__ret'"}'
_p9k_transient_prompt+=')%b%k%f%s%u '
# Wrap with iTerm2 shell-integration prompt markers when installed.
if [[ $ITERM_SHELL_INTEGRATION_INSTALLED == Yes ]]; then
_p9k_transient_prompt=$'%{\e]133;A\a%}'$_p9k_transient_prompt$'%{\e]133;B\a%}'
fi
fi
# OS detection via uname; on Linux, refine with the distro ID from
# /etc/os-release to pick a distro-specific icon.
_p9k_uname="$(uname)"
[[ $_p9k_uname == Linux ]] && _p9k_uname_o="$(uname -o 2>/dev/null)"
_p9k_uname_m="$(uname -m)"
if [[ $_p9k_uname == Linux && $_p9k_uname_o == Android ]]; then
_p9k_set_os Android ANDROID_ICON
else
case $_p9k_uname in
SunOS) _p9k_set_os Solaris SUNOS_ICON;;
Darwin) _p9k_set_os OSX APPLE_ICON;;
CYGWIN_NT-* | MSYS_NT-*) _p9k_set_os Windows WINDOWS_ICON;;
FreeBSD|OpenBSD|DragonFly) _p9k_set_os BSD FREEBSD_ICON;;
Linux)
_p9k_os='Linux'
local os_release_id
if [[ -r /etc/os-release ]]; then
local lines=(${(f)"$(</etc/os-release)"})
lines=(${(@M)lines:#ID=*})
(( $#lines == 1 )) && os_release_id=${lines[1]#ID=}
elif [[ -e /etc/artix-release ]]; then
os_release_id=artix
fi
case $os_release_id in
*arch*) _p9k_set_os Linux LINUX_ARCH_ICON;;
*debian*) _p9k_set_os Linux LINUX_DEBIAN_ICON;;
*raspbian*) _p9k_set_os Linux LINUX_RASPBIAN_ICON;;
*ubuntu*) _p9k_set_os Linux LINUX_UBUNTU_ICON;;
*elementary*) _p9k_set_os Linux LINUX_ELEMENTARY_ICON;;
*fedora*) _p9k_set_os Linux LINUX_FEDORA_ICON;;
*coreos*) _p9k_set_os Linux LINUX_COREOS_ICON;;
*gentoo*) _p9k_set_os Linux LINUX_GENTOO_ICON;;
*mageia*) _p9k_set_os Linux LINUX_MAGEIA_ICON;;
*centos*) _p9k_set_os Linux LINUX_CENTOS_ICON;;
*opensuse*|*tumbleweed*) _p9k_set_os Linux LINUX_OPENSUSE_ICON;;
*sabayon*) _p9k_set_os Linux LINUX_SABAYON_ICON;;
*slackware*) _p9k_set_os Linux LINUX_SLACKWARE_ICON;;
*linuxmint*) _p9k_set_os Linux LINUX_MINT_ICON;;
*alpine*) _p9k_set_os Linux LINUX_ALPINE_ICON;;
*aosc*) _p9k_set_os Linux LINUX_AOSC_ICON;;
*nixos*) _p9k_set_os Linux LINUX_NIXOS_ICON;;
*devuan*) _p9k_set_os Linux LINUX_DEVUAN_ICON;;
*manjaro*) _p9k_set_os Linux LINUX_MANJARO_ICON;;
*void*) _p9k_set_os Linux LINUX_VOID_ICON;;
*artix*) _p9k_set_os Linux LINUX_ARTIX_ICON;;
*) _p9k_set_os Linux LINUX_ICON;;
esac
;;
esac
fi
# Base foreground/background pair depends on the chosen color scheme.
if [[ $_POWERLEVEL9K_COLOR_SCHEME == light ]]; then
_p9k_color1=7
_p9k_color2=0
else
_p9k_color1=0
_p9k_color2=7
fi
# Someone might be using these.
typeset -g OS=$_p9k_os
typeset -g DEFAULT_COLOR=$_p9k_color1
typeset -g DEFAULT_COLOR_INVERTED=$_p9k_color2
_p9k_battery_states=(
'LOW' 'red'
'CHARGING' 'yellow'
'CHARGED' 'green'
'DISCONNECTED' "$_p9k_color2"
)
# Build join tables: _p9k_left_join[i] is the index of the first segment of
# the joined group containing segment i (same for the right side below).
# This simpler construct doesn't work on zsh-5.1 with multi-line prompt:
#
# ${(@0)_p9k_line_segments_left[@]}
local -a left_segments=(${(@0)${(pj:\0:)_p9k_line_segments_left}})
_p9k_left_join=(1)
for ((i = 2; i <= $#left_segments; ++i)); do
elem=$left_segments[i]
if [[ $elem == *_joined ]]; then
_p9k_left_join+=$_p9k_left_join[((i-1))]
else
_p9k_left_join+=$i
fi
done
local -a right_segments=(${(@0)${(pj:\0:)_p9k_line_segments_right}})
_p9k_right_join=(1)
for ((i = 2; i <= $#right_segments; ++i)); do
elem=$right_segments[i]
if [[ $elem == *_joined ]]; then
_p9k_right_join+=$_p9k_right_join[((i-1))]
else
_p9k_right_join+=$i
fi
done
# CPU count for load-style segments; defaults to 1 when undetectable.
case $_p9k_os in
OSX) (( $+commands[sysctl] )) && _p9k_num_cpus="$(sysctl -n hw.logicalcpu 2>/dev/null)";;
BSD) (( $+commands[sysctl] )) && _p9k_num_cpus="$(sysctl -n hw.ncpu 2>/dev/null)";;
*) (( $+commands[nproc] )) && _p9k_num_cpus="$(nproc 2>/dev/null)";;
esac
(( _p9k_num_cpus )) || _p9k_num_cpus=1
# dir segment: normalize user-defined dir classes, or install the defaults
# (pattern, class name, icon) triples.
if _p9k_segment_in_use dir; then
if (( $+_POWERLEVEL9K_DIR_CLASSES )); then
local -i i=3
for ((; i <= $#_POWERLEVEL9K_DIR_CLASSES; i+=3)); do
_POWERLEVEL9K_DIR_CLASSES[i]=${(g::)_POWERLEVEL9K_DIR_CLASSES[i]}
done
else
typeset -ga _POWERLEVEL9K_DIR_CLASSES=()
_p9k_get_icon prompt_dir_ETC ETC_ICON
_POWERLEVEL9K_DIR_CLASSES+=('/etc|/etc/*' ETC "$_p9k__ret")
_p9k_get_icon prompt_dir_HOME HOME_ICON
_POWERLEVEL9K_DIR_CLASSES+=('~' HOME "$_p9k__ret")
_p9k_get_icon prompt_dir_HOME_SUBFOLDER HOME_SUB_ICON
_POWERLEVEL9K_DIR_CLASSES+=('~/*' HOME_SUBFOLDER "$_p9k__ret")
_p9k_get_icon prompt_dir_DEFAULT FOLDER_ICON
_POWERLEVEL9K_DIR_CLASSES+=('*' DEFAULT "$_p9k__ret")
fi
fi
# status segment: map exit codes 0..255 to strings, replacing 128+N with the
# corresponding signal name unless disabled.
if _p9k_segment_in_use status; then
typeset -g _p9k_exitcode2str=({0..255})
local -i i=2
if (( !_POWERLEVEL9K_STATUS_HIDE_SIGNAME )); then
for ((; i <= $#signals; ++i)); do
local sig=$signals[i]
(( _POWERLEVEL9K_STATUS_VERBOSE_SIGNAME )) && sig="SIG${sig}($((i-1)))"
_p9k_exitcode2str[$((128+i))]=$sig
done
fi
fi
# When git is the only vcs backend, locate the single vcs segment (if exactly
# one exists) so it can be updated in place; -1 marks "more than one / not
# eligible" and is normalized to 0 at the end.
if [[ $#_POWERLEVEL9K_VCS_BACKENDS == 1 && $_POWERLEVEL9K_VCS_BACKENDS[1] == git ]]; then
local elem line
local -i i=0 line_idx=0
for line in $_p9k_line_segments_left; do
(( ++line_idx ))
for elem in ${${(0)line}%_joined}; do
(( ++i ))
if [[ $elem == vcs ]]; then
if (( _p9k_vcs_index )); then
_p9k_vcs_index=-1
else
_p9k_vcs_index=i
_p9k_vcs_line_index=line_idx
_p9k_vcs_side=left
fi
fi
done
done
i=0
line_idx=0
for line in $_p9k_line_segments_right; do
(( ++line_idx ))
for elem in ${${(0)line}%_joined}; do
(( ++i ))
if [[ $elem == vcs ]]; then
if (( _p9k_vcs_index )); then
_p9k_vcs_index=-1
else
_p9k_vcs_index=i
_p9k_vcs_line_index=line_idx
_p9k_vcs_side=right
fi
fi
done
done
# A custom CONTENT_EXPANSION on any vcs state also disqualifies in-place
# updates.
if (( _p9k_vcs_index > 0 )); then
local state
for state in ${(k)__p9k_vcs_states}; do
_p9k_param prompt_vcs_$state CONTENT_EXPANSION x
if [[ -z $_p9k__ret ]]; then
_p9k_vcs_index=-1
break
fi
done
fi
if (( _p9k_vcs_index == -1 )); then
_p9k_vcs_index=0
_p9k_vcs_line_index=0
_p9k_vcs_side=
fi
fi
}
# Initialize the vcs segment backends. Starts the gitstatus daemon (either
# immediately or lazily via the generated _p9k_preinit function) when the git
# backend is enabled; stops it when the segment is unused.
_p9k_init_vcs() {
if ! _p9k_segment_in_use vcs || (( ! $#_POWERLEVEL9K_VCS_BACKENDS )); then
(( $+functions[gitstatus_stop_p9k_] )) && gitstatus_stop_p9k_ POWERLEVEL9K
unset _p9k_preinit
return
fi
_p9k_vcs_info_init
# A cached _p9k_preinit exists (restored state): just (re)start the daemon.
if (( $+functions[_p9k_preinit] )); then
if (( $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )); then
# Anonymous function scopes the INT trap so Ctrl-C during startup
# aborts cleanly without killing the whole init.
() {
trap 'return 130' INT
{
gitstatus_start_p9k_ POWERLEVEL9K
} always {
trap ':' INT
}
}
fi
(( $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )) || _p9k__instant_prompt_disabled=1
return 0
fi
(( _POWERLEVEL9K_DISABLE_GITSTATUS )) && return
(( $_POWERLEVEL9K_VCS_BACKENDS[(I)git] )) || return
local gitstatus_dir=${_POWERLEVEL9K_GITSTATUS_DIR:-${__p9k_root_dir}/gitstatus}
# Generate _p9k_preinit for the instant-prompt dump; it re-sources the plugin
# and restarts the daemon with the currently effective settings baked in.
typeset -g _p9k_preinit="function _p9k_preinit() {
(( $+commands[git] )) || { unfunction _p9k_preinit; return 1 }
[[ \$ZSH_VERSION == ${(q)ZSH_VERSION} ]] || return
[[ -r ${(q)gitstatus_dir}/gitstatus.plugin.zsh ]] || return
builtin source ${(q)gitstatus_dir}/gitstatus.plugin.zsh _p9k_ || return
GITSTATUS_AUTO_INSTALL=${(q)GITSTATUS_AUTO_INSTALL} \
GITSTATUS_DAEMON=${(q)GITSTATUS_DAEMON} \
GITSTATUS_CACHE_DIR=${(q)GITSTATUS_CACHE_DIR} \
GITSTATUS_NUM_THREADS=${(q)GITSTATUS_NUM_THREADS} \
GITSTATUS_LOG_LEVEL=${(q)GITSTATUS_LOG_LEVEL} \
GITSTATUS_ENABLE_LOGGING=${(q)GITSTATUS_ENABLE_LOGGING} \
gitstatus_start_p9k_ \
-s $_POWERLEVEL9K_VCS_STAGED_MAX_NUM \
-u $_POWERLEVEL9K_VCS_UNSTAGED_MAX_NUM \
-d $_POWERLEVEL9K_VCS_UNTRACKED_MAX_NUM \
-c $_POWERLEVEL9K_VCS_CONFLICTED_MAX_NUM \
-m $_POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY \
${${_POWERLEVEL9K_VCS_RECURSE_UNTRACKED_DIRS:#0}:+-e} \
-a POWERLEVEL9K
}"
builtin source $gitstatus_dir/gitstatus.plugin.zsh _p9k_ || return
# Same INT-scoping trick as above for the first daemon start.
() {
trap 'return 130' INT
{
gitstatus_start_p9k_ \
-s $_POWERLEVEL9K_VCS_STAGED_MAX_NUM \
-u $_POWERLEVEL9K_VCS_UNSTAGED_MAX_NUM \
-d $_POWERLEVEL9K_VCS_UNTRACKED_MAX_NUM \
-c $_POWERLEVEL9K_VCS_CONFLICTED_MAX_NUM \
-m $_POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY \
${${_POWERLEVEL9K_VCS_RECURSE_UNTRACKED_DIRS:#0}:+-e} \
POWERLEVEL9K
} always {
trap ':' INT
}
}
(( $+GITSTATUS_DAEMON_PID_POWERLEVEL9K )) || _p9k__instant_prompt_disabled=1
}
# Main (non-cacheable) initialization: restore or rebuild cached state, run
# per-segment init hooks, start the async worker, wire up iTerm2 integration,
# locate the todo.txt file, start vcs backends, and report instant-prompt
# misconfiguration.
_p9k_init() {
_p9k_init_vars
_p9k_restore_state || _p9k_init_cacheable
typeset -g P9K_OS_ICON=$_p9k_os_icon
local -a _p9k__async_segments_compute
local -i i
local elem
# Run the optional _p9k_prompt_<segment>_init hook for every configured
# segment, left side first, tracking the global segment index.
_p9k__prompt_side=left
_p9k__segment_index=1
for i in {1..$#_p9k_line_segments_left}; do
for elem in ${${(@0)_p9k_line_segments_left[i]}%_joined}; do
local f_init=_p9k_prompt_${elem}_init
(( $+functions[$f_init] )) && $f_init
(( ++_p9k__segment_index ))
done
done
_p9k__prompt_side=right
_p9k__segment_index=1
for i in {1..$#_p9k_line_segments_right}; do
for elem in ${${(@0)_p9k_line_segments_right[i]}%_joined}; do
local f_init=_p9k_prompt_${elem}_init
(( $+functions[$f_init] )) && $f_init
(( ++_p9k__segment_index ))
done
done
# Network-interface polling is needed by several IP-related options.
if [[ -n $_POWERLEVEL9K_PUBLIC_IP_VPN_INTERFACE ||
-n $_POWERLEVEL9K_IP_INTERFACE ||
-n $_POWERLEVEL9K_VPN_IP_INTERFACE ]]; then
_p9k_prompt_net_iface_init
fi
# Segments that registered async compute snippets get a worker process.
if [[ -n $_p9k__async_segments_compute ]]; then
functions[_p9k_async_segments_compute]=${(pj:\n:)_p9k__async_segments_compute}
_p9k_worker_start
fi
# Invert _p9k_display_k into _p9k__display_v: (index -> name, state) pairs.
local k v
for k v in ${(kv)_p9k_display_k}; do
[[ $k == -* ]] && continue
_p9k__display_v[v]=$k
_p9k__display_v[v+1]=show
done
_p9k__display_v[2]=hide
_p9k__display_v[4]=hide
# iTerm2 shell integration: neutralize its prompt decoration (we decorate
# ourselves) while preserving the original functions for _p9k_deinit.
if (( $+functions[iterm2_decorate_prompt] )); then
_p9k__iterm2_decorate_prompt=$functions[iterm2_decorate_prompt]
function iterm2_decorate_prompt() {
typeset -g ITERM2_PRECMD_PS1=$PROMPT
typeset -g ITERM2_SHOULD_DECORATE_PROMPT=
}
fi
if (( $+functions[iterm2_precmd] )); then
_p9k__iterm2_precmd=$functions[iterm2_precmd]
functions[iterm2_precmd]='local _p9k_status=$?; zle && return; () { return $_p9k_status; }; '$_p9k__iterm2_precmd
fi
# todo segment: find the todo.sh/todo-txt binary, then replay its config
# lookup order in a sandboxed bash to learn the TODO_FILE location.
if _p9k_segment_in_use todo; then
if [[ -n ${_p9k__todo_command::=${commands[todo.sh]}} ]]; then
local todo_global=/etc/todo/config
elif [[ -n ${_p9k__todo_command::=${commands[todo-txt]}} ]]; then
local todo_global=/etc/todo-txt/config
fi
if [[ -n $_p9k__todo_command ]]; then
_p9k__todo_file="$(exec -a $_p9k__todo_command ${commands[bash]:-:} 3>&1 &>/dev/null -c "
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=\$HOME/.todo/config
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=\$HOME/todo.cfg
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=\$HOME/.todo.cfg
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=\${XDG_CONFIG_HOME:-\$HOME/.config}/todo/config
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=${(qqq)_p9k__todo_command:h}/todo.cfg
[ -e \"\$TODOTXT_CFG_FILE\" ] || TODOTXT_CFG_FILE=\${TODOTXT_GLOBAL_CFG_FILE:-${(qqq)todo_global}}
[ -r \"\$TODOTXT_CFG_FILE\" ] || exit
source \"\$TODOTXT_CFG_FILE\"
printf "%s" \"\$TODO_FILE\" >&3")"
fi
fi
if _p9k_segment_in_use dir &&
[[ $_POWERLEVEL9K_SHORTEN_STRATEGY == truncate_with_package_name && $+commands[jq] == 0 ]]; then
print -rP -- '%F{yellow}WARNING!%f %BPOWERLEVEL9K_SHORTEN_STRATEGY=truncate_with_package_name%b requires %F{green}jq%f.'
print -rP -- 'Either install %F{green}jq%f or change the value of %BPOWERLEVEL9K_SHORTEN_STRATEGY%b.'
fi
_p9k_init_vcs
if (( _p9k__instant_prompt_disabled )); then
(( _POWERLEVEL9K_DISABLE_INSTANT_PROMPT )) && unset __p9k_instant_prompt_erased
_p9k_delete_instant_prompt
_p9k_dumped_instant_prompt_sigs=()
fi
# The instant prompt was shown and then erased because p10k loaded too late:
# print a one-time, detailed explanation with remediation options.
if (( $+__p9k_instant_prompt_erased )); then
unset __p9k_instant_prompt_erased
{
>&2 echo -E - ""
>&2 echo -E - "${(%):-[%1FERROR%f]: When using instant prompt, Powerlevel10k must be loaded before the first prompt.}"
>&2 echo -E - ""
>&2 echo -E - "${(%):-You can:}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- - %BRecommended%b: Change the way Powerlevel10k is loaded from %B$__p9k_zshrc_u%b.}"
if (( _p9k_term_has_href )); then
>&2 echo - "${(%):- See \e]8;;https://github.com/romkatv/powerlevel10k/blob/master/README.md#installation\ahttps://github.com/romkatv/powerlevel10k/blob/master/README.md#installation\e]8;;\a.}"
else
>&2 echo - "${(%):- See https://github.com/romkatv/powerlevel10k/blob/master/README.md#installation.}"
fi
if (( $+zsh_defer_options )); then
>&2 echo -E - ""
>&2 echo -E - "${(%):- NOTE: Do not use %1Fzsh-defer%f to load %Upowerlevel10k.zsh-theme%u.}"
# Bug fix: was `$+functins[zinit]` (and `functins[zplugin]` below) — a typo
# that made these plugin-manager hints unreachable.
elif (( $+functions[zinit] )); then
>&2 echo -E - ""
>&2 echo -E - "${(%):- NOTE: If using %2Fzinit%f to load %3F'romkatv/powerlevel10k'%f, %Bdo not apply%b %1Fice wait%f.}"
elif (( $+functions[zplugin] )); then
>&2 echo -E - ""
>&2 echo -E - "${(%):- NOTE: If using %2Fzplugin%f to load %3F'romkatv/powerlevel10k'%f, %Bdo not apply%b %1Fice wait%f.}"
fi
>&2 echo -E - ""
>&2 echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
>&2 echo -E - "${(%):- * Zsh will start %Bquickly%b.}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- - Disable instant prompt either by running %Bp10k configure%b or by manually}"
>&2 echo -E - "${(%):- defining the following parameter:}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- %3Ftypeset%f -g POWERLEVEL9K_INSTANT_PROMPT=off}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- * You %Bwill not%b see this error message again.}"
>&2 echo -E - "${(%):- * Zsh will start %Bslowly%b.}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- - Do nothing.}"
>&2 echo -E - ""
>&2 echo -E - "${(%):- * You %Bwill%b see this error message every time you start zsh.}"
>&2 echo -E - "${(%):- * Zsh will start %Bslowly%b.}"
>&2 echo -E - ""
} 2>>$TTY
fi
}
# Undo everything _p9k_init set up: stop daemons and workers, close fds,
# restore the original iTerm2 hooks, and wipe p10k globals (keeping P9K_SSH
# and P9K_TTY, which survive reloads).
_p9k_deinit() {
(( $+functions[_p9k_preinit] )) && unfunction _p9k_preinit
(( $+functions[gitstatus_stop_p9k_] )) && gitstatus_stop_p9k_ POWERLEVEL9K
_p9k_worker_stop
# Unregister and close each fd-based zle watcher, if it was opened.
if (( _p9k__state_dump_fd )); then
zle -F $_p9k__state_dump_fd
exec {_p9k__state_dump_fd}>&-
fi
if (( _p9k__restore_prompt_fd )); then
zle -F $_p9k__restore_prompt_fd
exec {_p9k__restore_prompt_fd}>&-
fi
if (( _p9k__redraw_fd )); then
zle -F $_p9k__redraw_fd
exec {_p9k__redraw_fd}>&-
fi
(( $+_p9k__iterm2_precmd )) && functions[iterm2_precmd]=$_p9k__iterm2_precmd
(( $+_p9k__iterm2_decorate_prompt )) && functions[iterm2_decorate_prompt]=$_p9k__iterm2_decorate_prompt
unset -m '(_POWERLEVEL9K_|P9K_|_p9k_)*~(P9K_SSH|P9K_TTY)'
# NOTE(review): this unsets __p9k_locale only when it is EMPTY; verify the
# condition is not meant to be `&&` (unset when non-empty) upstream.
[[ -n $__p9k_locale ]] || unset __p9k_locale
}
# Global state flags: whether the theme is active and whether the user ran the
# configuration wizard. __p9k_instant_prompt_disabled is readonly; it starts
# pessimistically at 1 and is consulted during init.
typeset -gi __p9k_enabled=0
typeset -gi __p9k_configured=0
typeset -gri __p9k_instant_prompt_disabled=1
# `typeset -g` doesn't roundtrip in zsh prior to 5.4.
if is-at-least 5.4; then
typeset -gri __p9k_dumps_enabled=1
else
typeset -gri __p9k_dumps_enabled=0
fi
# No-op hook, registered first in precmd_functions by _p9k_setup.
_p9k_do_nothing() { true; }
# Activate powerlevel9k: record prompt options, tear down any previous
# installation, and register the pre-exec / pre-cmd hooks. Idempotent.
_p9k_setup() {
(( __p9k_enabled )) && return
prompt_opts=(percent subst)
# sp/cr are managed by the instant prompt while it is active.
if (( ! $+__p9k_instant_prompt_active )); then
prompt_opts+=sp
prompt_opts+=cr
fi
prompt_powerlevel9k_teardown
__p9k_enabled=1
# _p9k_preexec1 must run before, and _p9k_preexec2/_p9k_precmd after, all
# other registered hooks.
typeset -ga preexec_functions=(_p9k_preexec1 $preexec_functions _p9k_preexec2)
typeset -ga precmd_functions=(_p9k_do_nothing $precmd_functions _p9k_precmd)
}
# Entry point used by zsh's promptinit (`prompt powerlevel9k`).
prompt_powerlevel9k_setup() {
_p9k_restore_special_params
eval "$__p9k_intro"
_p9k_setup
}
# Disable the theme: remove all p9k hooks, restore a minimal default prompt,
# and deinitialize if currently enabled.
prompt_powerlevel9k_teardown() {
_p9k_restore_special_params
eval "$__p9k_intro"
add-zsh-hook -D precmd '(_p9k_|powerlevel9k_)*'
add-zsh-hook -D preexec '(_p9k_|powerlevel9k_)*'
PROMPT='%m%# '
RPROMPT=
if (( __p9k_enabled )); then
_p9k_deinit
__p9k_enabled=0
fi
}
# Readonly help texts printed by the `p10k` command. The %-sequences are zsh
# prompt escapes, rendered by `print -rP`.
# Top-level usage.
typeset -gr __p9k_p10k_usage="Usage: %2Fp10k%f %Bcommand%b [options]
Commands:
%Bconfigure%b run interactive configuration wizard
%Breload%b reload configuration
%Bsegment%b print a user-defined prompt segment
%Bdisplay%b show, hide or toggle prompt parts
%Bhelp%b print this help message
Print help for a specific command:
%2Fp10k%f %Bhelp%b command"
# `p10k segment` usage. NOTE(review): "โญ"/"โ" appear to be mojibake of the
# original emoji examples; the string is preserved byte-for-byte.
typeset -gr __p9k_p10k_segment_usage="Usage: %2Fp10k%f %Bsegment%b [-h] [{+|-}re] [-s state] [-b bg] [-f fg] [-i icon] [-c cond] [-t text]
Print a user-defined prompt segment. Can be called only during prompt rendering.
Options:
-t text segment's main content; will undergo prompt expansion: '%%F{blue}%%*%%f' will
show as %F{blue}%*%f; default is empty
-i icon segment's icon; default is empty
-r icon is a symbolic reference that needs to be resolved; for example, 'LOCK_ICON'
+r icon is already resolved and should be printed literally; for example, 'โญ';
this is the default; you can also use \$'\u2B50' if you don't want to have
non-ascii characters in source code
-b bg background color; for example, 'blue', '4', or '#0000ff'; empty value means
transparent background, as in '%%k'; default is black
-f fg foreground color; for example, 'blue', '4', or '#0000ff'; empty value means
default foreground color, as in '%%f'; default is empty
-s state segment's state for the purpose of applying styling options; if you want to
to be able to use POWERLEVEL9K parameters to specify different colors or icons
depending on some property, use different states for different values of that
property
-c condition; if empty after parameter expansion and process substitution, the
segment is hidden; this is an advanced feature, use with caution; default is '1'
-e segment's main content will undergo parameter expansion and process
substitution; the content will be surrounded with double quotes and thus
should quote its own double quotes; this is an advanced feature, use with
caution
+e segment's main content should not undergo parameter expansion and process
substitution; this is the default
-h print this help message
Example: 'core' segment tells you if there is a file name 'core' in the current directory.
- Segment's icon is 'โญ'.
- Segment's text is the file's size in bytes.
- If you have permissions to delete the file, state is DELETABLE. If not, it's PROTECTED.
zmodload -F zsh/stat b:zstat
function prompt_core() {
local size=()
if ! zstat -A size +size core 2>/dev/null; then
# No 'core' file in the current directory.
return
fi
if [[ -w . ]]; then
local state=DELETABLE
else
local state=PROTECTED
fi
p10k segment -s \$state -i 'โญ' -f blue -t \${size[1]}b
}
To enable this segment, add 'core' to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS.
Example customizations:
# Override default foreground.
POWERLEVEL9K_CORE_FOREGROUND=red
# Override foreground when DELETABLE.
POWERLEVEL9K_CORE_DELETABLE_BACKGROUND=green
# Override icon when PROTECTED.
POWERLEVEL9K_CORE_PROTECTED_VISUAL_IDENTIFIER_EXPANSION='โ'
# Don't show file size when PROTECTED.
POWERLEVEL9K_CORE_PROTECTED_CONTENT_EXPANSION=''"
# `p10k configure` usage.
typeset -gr __p9k_p10k_configure_usage="Usage: %2Fp10k%f %Bconfigure%b
Run interactive configuration wizard."
# `p10k reload` usage.
typeset -gr __p9k_p10k_reload_usage="Usage: %2Fp10k%f %Breload%b
Reload configuration."
# `p10k finalize` usage (printed only on misuse; the real call is intercepted).
typeset -gr __p9k_p10k_finalize_usage="Usage: %2Fp10k%f %Bfinalize%b
Perform the final stage of initialization. Must be called at the very end of zshrc."
# `p10k display` usage.
typeset -gr __p9k_p10k_display_usage="Usage: %2Fp10k%f %Bdisplay%b part-pattern=state-list...
Show, hide or toggle prompt parts. If called from zle, the current
prompt is refreshed.
Usage: %2Fp10k%f %Bdisplay%b -a [part-pattern]...
Populate array \`reply\` with states of prompt parts matching the patterns.
If no patterns are supplied, assume \`*\`.
Usage: %2Fp10k%f %Bdisplay%b -r
Redisplay prompt.
Parts:
empty_line empty line (duh)
ruler ruler; if POWERLEVEL9K_RULER_CHAR=' ', it's essentially another
new_line
N prompt line number N, 1-based; counting from the top if positive,
from the bottom if negative
N/left_frame left frame on the Nth line
N/left left prompt on the Nth line
N/gap gap between left and right prompts on the Nth line
N/right right prompt on the Nth line
N/right_frame right frame on the Nth line
N/left/S segment S within N/left (dir, time, etc.)
N/right/S segment S within N/right (dir, time, etc.)
Part States:
show the part is displayed
hide the part is not displayed
print the part is printed in precmd; only applicable to empty_line and
ruler; unlike show, the effects of print cannot be undone with hide;
print used to look better after \`clear\` but this is no longer the
case; it's best to avoid it unless you know what you are doing
part-pattern is a glob pattern for parts. Examples:
*/kubecontext all kubecontext prompt segments, regardless of where
they are
1/(right|right_frame) all prompt segments and frame from the right side of
the first line
state-list is a comma-separated list of states. Must have at least one element.
If more than one, states will rotate.
Example: Bind Ctrl+P to toggle right prompt.
function toggle-right-prompt() { p10k display '*/right'=hide,show; }
zle -N toggle-right-prompt
bindkey '^P' toggle-right-prompt
Example: Print the current state of all prompt parts:
typeset -A reply
p10k display -a '*'
printf '%%-32s = %%q\n' \${(@kv)reply} | sort
"
# Public `p10k` command: dispatches to segment / display / configure / reload /
# help / finalize / clear-instant-prompt subcommands.
# __p9k_reset_state (declared above) encodes whether a prompt reset is
# blocked and/or pending across display operations.
function p10k() {
# `p10k finalize` bypasses the usual intro (it may run before init completes).
[[ $# != 1 || $1 != finalize ]] || { p10k-instant-prompt-finalize; return 0 }
eval "$__p9k_intro_no_reply"
if (( !ARGC )); then
print -rP -- $__p9k_p10k_usage >&2
return 1
fi
case $1 in
segment)
local REPLY
local -a reply
shift
local -i OPTIND
local OPTARG opt state bg=0 fg icon cond text ref=0 expand=0
# -r/+e style toggles are handled by getopts' +-option support.
while getopts ':s:b:f:i:c:t:reh' opt; do
case $opt in
s) state=$OPTARG;;
b) bg=$OPTARG;;
f) fg=$OPTARG;;
i) icon=$OPTARG;;
c) cond=${OPTARG:-'${:-}'};;
t) text=$OPTARG;;
r) ref=1;;
e) expand=1;;
+r) ref=0;;
+e) expand=0;;
h) print -rP -- $__p9k_p10k_segment_usage; return 0;;
?) print -rP -- $__p9k_p10k_segment_usage >&2; return 1;;
esac
done
# Positional arguments are not accepted.
if (( OPTIND <= ARGC )); then
print -rP -- $__p9k_p10k_segment_usage >&2
return 1
fi
# _p9k__prompt_side is only set while a prompt is being rendered.
if [[ -z $_p9k__prompt_side ]]; then
print -rP -- "%1F[ERROR]%f %Bp10k segment%b: can be called only during prompt rendering." >&2
if (( !ARGC )); then
print -rP -- ""
print -rP -- "For help, type:" >&2
print -rP -- ""
print -rP -- " %2Fp10k%f %Bhelp%b %Bsegment%b" >&2
fi
return 1
fi
# A leading \1 marks the icon as literal (already resolved).
(( ref )) || icon=$'\1'$icon
typeset -i _p9k__has_upglob
"_p9k_${_p9k__prompt_side}_prompt_segment" "prompt_${_p9k__segment_name}${state:+_${${(U)state}//ฤฐ/I}}" \
"$bg" "${fg:-$_p9k_color1}" "$icon" "$expand" "$cond" "$text"
return 0
;;
display)
if (( ARGC == 1 )); then
print -rP -- $__p9k_p10k_display_usage >&2
return 1
fi
shift
local -i k dump
local opt prev new pair list name var
while getopts ':har' opt; do
case $opt in
r)
# Request a prompt redisplay; defer it if resets are blocked.
if (( __p9k_reset_state > 0 )); then
__p9k_reset_state=2
else
__p9k_reset_state=-1
fi
;;
a) dump=1;;
h) print -rP -- $__p9k_p10k_display_usage; return 0;;
?) print -rP -- $__p9k_p10k_display_usage >&2; return 1;;
esac
done
# -a: populate `reply` with (part, state) pairs matching the patterns.
if (( dump )); then
reply=()
shift $((OPTIND-1))
(( ARGC )) || set -- '*'
for opt; do
for k in ${(u@)_p9k_display_k[(I)$opt]:/(#m)*/$_p9k_display_k[$MATCH]}; do
reply+=($_p9k__display_v[k,k+1])
done
done
if (( __p9k_reset_state == -1 )); then
_p9k_reset_prompt
fi
return 0
fi
local REPLY
local -a reply
# Each argument is part-pattern=state-list; rotate each matched part to the
# next state in its list.
for opt in "${@:$OPTIND}"; do
pair=(${(s:=:)opt})
list=(${(s:,:)${pair[2]}})
if [[ ${(b)pair[1]} == $pair[1] ]]; then # this branch is purely for optimization
local ks=($_p9k_display_k[$pair[1]])
else
local ks=(${(u@)_p9k_display_k[(I)$pair[1]]:/(#m)*/$_p9k_display_k[$MATCH]})
fi
for k in $ks; do
if (( $#list == 1 )); then # this branch is purely for optimization
[[ $_p9k__display_v[k+1] == $list[1] ]] && continue
new=$list[1]
else
new=${list[list[(I)$_p9k__display_v[k+1]]+1]:-$list[1]}
[[ $_p9k__display_v[k+1] == $new ]] && continue
fi
_p9k__display_v[k+1]=$new
name=$_p9k__display_v[k]
# Propagate the new state into the per-part control variables.
if [[ $name == (empty_line|ruler) ]]; then
var=_p9k__${name}_i
[[ $new == show ]] && unset $var || typeset -gi $var=3
elif [[ $name == (#b)(<->)(*) ]]; then
var=_p9k__${match[1]}${${${${match[2]//\/}/#left/l}/#right/r}/#gap/g}
[[ $new == hide ]] && typeset -g $var= || unset $var
fi
if (( __p9k_reset_state > 0 )); then
__p9k_reset_state=2
else
__p9k_reset_state=-1
fi
done
done
if (( __p9k_reset_state == -1 )); then
_p9k_reset_prompt
fi
;;
configure)
if (( ARGC > 1 )); then
print -rP -- $__p9k_p10k_configure_usage >&2
return 1
fi
local REPLY
local -a reply
p9k_configure "$@" || return
;;
reload)
if (( ARGC > 1 )); then
print -rP -- $__p9k_p10k_reload_usage >&2
return 1
fi
# Force _p9k_must_init to return 0 on the next prompt.
(( $+_p9k__force_must_init )) || return 0
_p9k__force_must_init=1
;;
help)
local var=__p9k_p10k_$2_usage
if (( $+parameters[$var] )); then
print -rP -- ${(P)var}
return 0
elif (( ARGC == 1 )); then
print -rP -- $__p9k_p10k_usage
return 0
else
print -rP -- $__p9k_p10k_usage >&2
return 1
fi
;;
finalize)
# `p10k finalize` with extra args falls through to here; plain `finalize`
# was already intercepted at the top.
print -rP -- $__p9k_p10k_finalize_usage >&2
return 1
;;
clear-instant-prompt)
if (( $+__p9k_instant_prompt_active )); then
_p9k_clear_instant_prompt
unset __p9k_instant_prompt_active
fi
return 0
;;
*)
print -rP -- $__p9k_p10k_usage >&2
return 1
;;
esac
}
# Hook for zplugin.
powerlevel10k_plugin_unload() { prompt_powerlevel9k_teardown; }
# Final stage of instant-prompt handling, called at the very end of zshrc:
# restore the prompt_cr/prompt_sp options that the instant prompt suppressed.
function p10k-instant-prompt-finalize() {
unsetopt local_options
(( ${+__p9k_instant_prompt_active} )) && unsetopt prompt_cr prompt_sp || setopt prompt_cr prompt_sp
}
# Load the zsh modules and hook machinery the theme relies on.
autoload -Uz add-zsh-hook
zmodload zsh/datetime
zmodload zsh/mathfunc
zmodload zsh/parameter 2>/dev/null # https://github.com/romkatv/gitstatus/issues/58#issuecomment-553407177
zmodload zsh/system
zmodload zsh/termcap
zmodload zsh/terminfo
zmodload zsh/zleparameter
zmodload -F zsh/stat b:zstat
zmodload -F zsh/net/socket b:zsocket
zmodload -F zsh/files b:zf_mv b:zf_rm
# Invalidate stale dump files: the instant-prompt dump moved, or was produced
# by a different version of the instant-prompt code.
if [[ $__p9k_dump_file != $__p9k_instant_prompt_dump_file && -n $__p9k_instant_prompt_dump_file ]]; then
_p9k_delete_instant_prompt
zf_rm -f -- $__p9k_dump_file{,.zwc} 2>/dev/null
zf_rm -f -- $__p9k_instant_prompt_dump_file{,.zwc} 2>/dev/null
fi
if [[ $+__p9k_instant_prompt_sourced == 1 && $__p9k_instant_prompt_sourced != $__p9k_instant_prompt_version ]]; then
_p9k_delete_instant_prompt
zf_rm -f -- $__p9k_dump_file{,.zwc} 2>/dev/null
fi
# Detect SSH sessions, then install the theme.
_p9k_init_ssh
prompt_powerlevel9k_setup
|
# Bubble Sort Algorithm
def bubble_sort(nums):
    """Sort ``nums`` in place with bubble sort and return the same list.

    Args:
        nums: Mutable sequence of mutually comparable items.

    Returns:
        The input list, sorted in ascending order (also mutated in place).
    """
    # Shrink the unsorted region from the right: after each pass the largest
    # remaining element has bubbled up to index n.
    for n in range(len(nums) - 1, 0, -1):
        swapped = False  # early exit: a pass with no swaps means already sorted
        for k in range(n):
            # Swap adjacent out-of-order neighbours (Pythonic tuple swap
            # instead of a temp variable).
            if nums[k] > nums[k + 1]:
                nums[k], nums[k + 1] = nums[k + 1], nums[k]
                swapped = True
        if not swapped:
            break
    return nums
//Greedy Algorithms
#include <stdio.h>
#include <cs50.h>
#include <math.h>
float change(void);
// Compute the minimum number of coins (quarters, dimes, nickels, pennies)
// for the change owed, using the greedy algorithm: always hand out the
// largest coin that still fits.
int main(void)
{
    float n = change();
    // Round to whole cents to sidestep binary float imprecision (e.g. 0.1).
    int cents = round(n * 100);
    int coins = 0;
    int quarters = 0;
    int dimes = 0;
    int nickels = 0;
    int pennies = 0;
    // while-loops instead of for-loops with unused counters: subtract one
    // coin per iteration and count it.
    while (cents >= 25)
    {
        cents = cents - 25;
        coins++;
        quarters++;
    }
    while (cents >= 10)
    {
        cents = cents - 10;
        coins++;
        dimes++;
    }
    while (cents >= 5)
    {
        cents = cents - 5;
        coins++;
        nickels++;
    }
    while (cents >= 1)
    {
        cents = cents - 1;
        coins++;
        pennies++;
    }
    //print num of coins
    printf("number of coins you should give to the customer: %i\n", coins);
    //print how many quarters, dimes, nickels, pennies
    printf("%i quarters.\n%i dimes.\n%i nickels.\n%i pennies.\n", quarters, dimes, nickels, pennies);
}
//prompt the user for change
// Re-prompt until the user enters a non-negative amount. Zero is a valid
// amount of change owed, so only negative input is rejected.
float change(void)
{
    float n;
    do
    {
        n = get_float("Change owed: ");
    }
    // Bug fix: was `n < 0.001`, which wrongly re-prompted on a legitimate
    // input of 0 (and on sub-cent positive amounts).
    while (n < 0);
    return n;
}
/*we can use while loops for simplicity*/ |
/*
* $Id: alist.h,v 1.2 2001/07/13 18:10:08 ljb Exp $
*/
#ifndef _ALIST_H
#define _ALIST_H
#define MAX_ALIST 100
/*
 * One entry of an access list: a permit/deny rule matched against a prefix.
 */
typedef struct _condition_t {
int permit; /* nonzero = permit, zero = deny (flag) */
prefix_t *prefix; /* prefix to match; NOTE(review): "all in case of null" —
                     presumably NULL matches everything; confirm in impl */
prefix_t *wildcard; /* optional wildcard mask; may be NULL */
int exact; /* flag: require an exact prefix match */
int refine; /* flag: match more-specific prefixes */
} condition_t;
/* Add a rule to access list `num`; see condition_t fields for semantics. */
int add_access_list (int num, int permit, prefix_t *prefix, prefix_t *wildcard,
int exact, int refine);
/* Remove the matching rule from access list `num`. */
int remove_access_list (int num, int permit, prefix_t *prefix,
prefix_t *wildcard, int exact, int refine);
/* Delete access list `num` entirely. */
int del_access_list (int num);
/* Test a single condition against a prefix. */
int apply_condition (condition_t *condition, prefix_t *prefix);
/* Evaluate access list `num` against a prefix. */
int apply_access_list (int num, prefix_t *prefix);
/* Print/serialize access list `num` via callback `fn`. */
void access_list_out (int num, void_fn_t fn);
#endif /* _ALIST_H */
|
def remove_duplicate_words(string):
# Split the string into a list
word_list = string.split(" ")
# Create a set to store the unique words
uniques = set()
# Create a new list to store the processed string
processed = []
# Iterate over the list of words
for word in word_list:
# If the word is not in the set, add it
if word not in uniques:
uniques.add(word)
processed.append(word)
# Return the processed string
return " ".join(processed)
# Demo: collapse repeated words in a sample sentence and show the result.
sample_text = "I am going going to to the the store store to to buy buy food food"
deduped = remove_duplicate_words(sample_text)
print(deduped)  # I am going to the store to buy food
/*global describe, before, it, beforeEach */
'use strict';
var fs = require('fs');
var assert = require('assert');
var path = require('path');
var util = require('util');
var generators = require('yeoman-generator');
var helpers = require('yeoman-generator').test;
var _ = require('underscore.string');
describe('Angular generator', function () {
var angular;
// Fresh temp directory and generator instance before every test. The
// karma:app generator is stubbed with a dummy so no real karma run happens.
beforeEach(function (done) {
var deps = [
'../../app',
'../../common',
'../../controller',
'../../main', [
helpers.createDummyGenerator(),
'karma:app'
]
];
helpers.testDirectory(path.join(__dirname, 'temp'), function (err) {
if (err) {
// Bug fix: bail out here. Without the `return`, execution fell through
// and called done() a second time, masking the original error.
return done(err);
}
angular = helpers.createGenerator('angular:app', deps);
angular.options['skip-install'] = true;
done();
});
});
// Running the app generator with default-ish answers must produce the
// standard dotfiles in the temp directory.
it('should generate dotfiles', function (done) {
// Answer the generator's interactive prompts programmatically.
helpers.mockPrompt(angular, {
compass: true,
bootstrap: true,
compassBootstrap: true,
modules: []
});
angular.run({}, function () {
helpers.assertFiles(['.bowerrc', '.gitignore', '.editorconfig', '.jshintrc']);
done();
});
});
// Full JavaScript scaffold: plain paths assert existence; [path, regexp]
// entries additionally assert the file's content matches the pattern.
it('creates expected files', function (done) {
var expected = ['app/.htaccess',
'app/404.html',
'app/favicon.ico',
'app/robots.txt',
'app/styles/main.scss',
'app/views/main.html',
['.bowerrc', /"directory": "app\/bower_components"/],
'Gruntfile.js',
'package.json',
['bower.json', /"name":\s+"temp"/],
'app/scripts/app.js',
'app/index.html',
'app/scripts/controllers/main.js',
'test/spec/controllers/main.js'
];
helpers.mockPrompt(angular, {
compass: true,
bootstrap: true,
compassBootstrap: true,
modules: []
});
angular.run({}, function() {
helpers.assertFiles(expected);
done();
});
});
// Same scaffold check as above, but with the coffee option enabled the
// script files must be generated as .coffee instead of .js.
it('creates coffeescript files', function (done) {
var expected = ['app/.htaccess',
'app/404.html',
'app/favicon.ico',
'app/robots.txt',
'app/styles/main.scss',
'app/views/main.html',
['.bowerrc', /"directory": "app\/bower_components"/],
'Gruntfile.js',
'package.json',
['bower.json', /"name":\s+"temp"/],
'app/scripts/app.coffee',
'app/index.html',
'app/scripts/controllers/main.coffee',
'test/spec/controllers/main.coffee'
];
helpers.mockPrompt(angular, {
compass: true,
bootstrap: true,
compassBootstrap: true,
modules: []
});
// Enable CoffeeScript output for this run only.
angular.env.options.coffee = true;
angular.run([], function () {
helpers.assertFiles(expected);
done();
});
});
/**
 * Shared driver for generators that emit a source file plus a spec file.
 *
 * Runs the `angular:<generatorType>` sub-generator for a dummy item named
 * 'foo', then asserts that both generated files exist and contain the
 * expected registration / describe text.
 *
 * @param generatorType Sub-generator to run, e.g. 'filter'.
 * @param specType Spec template family, e.g. 'service' (all service-like
 *        types share the Service spec template).
 * @param targetDirectory Directory under 'app/scripts' / 'test/spec'
 *        receiving the generated files, e.g. 'directives'.
 * @param scriptNameFn Casing function for the item name in the source file
 *        (_.classify -> 'Foo', _.camelize -> 'foo').
 * @param specNameFn Casing function for the name used in the spec's
 *        describe text.
 * @param suffix Optional suffix appended to the item name, e.g. 'Ctrl'.
 * @param done Mocha completion callback.
 */
function generatorTest(generatorType, specType, targetDirectory, scriptNameFn, specNameFn, suffix, done) {
  var itemName = 'foo';
  var generator = helpers.createGenerator(
    'angular:' + generatorType,
    [path.join('../..', generatorType)],
    [itemName]
  );
  helpers.mockPrompt(angular, {
    compass: true,
    bootstrap: true,
    compassBootstrap: true,
    modules: []
  });
  // Expected file locations and content patterns (pure computation, so it
  // is safe to build them before the generators run).
  var sourceFile = path.join('app/scripts', targetDirectory, itemName + '.js');
  var specFile = path.join('test/spec', targetDirectory, itemName + '.js');
  var sourcePattern = new RegExp(generatorType + '\\(\'' + scriptNameFn(itemName) + suffix + '\'', 'g');
  var specPattern = new RegExp('describe\\(\'' + _.classify(specType) + ': ' + specNameFn(itemName) + suffix + '\'', 'g');
  angular.run([], function () {
    generator.run([], function () {
      helpers.assertFiles([
        [sourceFile, sourcePattern],
        [specFile, specPattern]
      ]);
      done();
    });
  });
}
// Each artifact type delegates to generatorTest with its own casing rules.
describe('Controller', function () {
it('should generate a new controller', function (done) {
// Controllers are classified ('Foo') and suffixed with 'Ctrl' -> 'FooCtrl'.
generatorTest('controller', 'controller', 'controllers', _.classify, _.classify, 'Ctrl', done);
});
});
describe('Directive', function () {
it('should generate a new directive', function (done) {
// Directives use lower camel case ('foo') and no suffix.
generatorTest('directive', 'directive', 'directives', _.camelize, _.camelize, '', done);
});
});
describe('Filter', function () {
it('should generate a new filter', function (done) {
// Filters use lower camel case ('foo') and no suffix.
generatorTest('filter', 'filter', 'filters', _.camelize, _.camelize, '', done);
});
});
describe('Service', function () {
// All service-flavored generators (constant, value, factory, provider,
// service) share the 'service' spec template and the 'services' directory;
// only the generator type and the name casing differ.
function serviceTest (generatorType, nameFn, done) {
generatorTest(generatorType, 'service', 'services', nameFn, nameFn, '', done);
}
it('should generate a new constant', function (done) {
serviceTest('constant', _.camelize, done);
});
it('should generate a new service', function (done) {
// Services are the only flavor named in upper camel case ('Foo').
serviceTest('service', _.classify, done);
});
it('should generate a new factory', function (done) {
serviceTest('factory', _.camelize, done);
});
it('should generate a new provider', function (done) {
serviceTest('provider', _.camelize, done);
});
it('should generate a new value', function (done) {
serviceTest('value', _.camelize, done);
});
});
describe('View', function () {
  // Runs the view generator for `viewName` and asserts that the expected
  // HTML file was created, then signals mocha via `done`. Factors out the
  // identical setup shared by both cases below.
  function viewTest(viewName, expectedFile, done) {
    var generator = helpers.createGenerator('angular:view', ['../../view'], [viewName]);
    helpers.mockPrompt(angular, {
      compass: true,
      bootstrap: true,
      compassBootstrap: true,
      modules: []
    });
    angular.run([], function () {
      generator.run([], function () {
        helpers.assertFile(
          [expectedFile]
        );
        done();
      });
    });
  }
  it('should generate a new view', function (done) {
    viewTest('foo', 'app/views/foo.html', done);
  });
  it('should generate a new view in subdirectories', function (done) {
    // A name containing a path separator lands in a nested directory.
    viewTest('foo/bar', 'app/views/foo/bar.html', done);
  });
});
});
|
#!/usr/bin/env bash
#
# BASH Script to run soluble isotropic simulation using
# amberff14sb.
#
# The workflow is a chain of checkpointed steps: each step is skipped when
# its output file already exists, so re-running the script resumes a
# partially completed preparation/equilibration/production pipeline.
# Set seed for random number generation
# Change this between different replicates
# MD is deterministic
SEED=917
# Set devices available for simulations
# Only useful for CUDA platform
export CUDA_VISIBLE_DEVICES="0"
# Set shortcut for OpenMM/MD script directory
SDIR="../md_scripts/openmm/"
# Set path to initial structure
initialStructure="system_ini.cif"
## You should not need to edit anything below unless you want
## to change simulation parameters/protocols.
# Copy compiled '.so' file for distance calculations
cp ${SDIR}/lib/*.so .
# Set scripts for AMBER (much better pun with CHARMM)
SDIR=${SDIR}/amberff
if [ ! -f "$initialStructure" ]
then
echo "Initial structure not found: $initialStructure"
exit 1
fi
# (1) Build 'system': add Hs, process through force field.
# Each step below captures all output in a .runlog file and aborts the
# whole pipeline if the python step exits non-zero.
if [ ! -f "solute_H.cif" ]
then
echo ">> Building system ..."
python ${SDIR}/buildSystem.py $initialStructure \
--seed $SEED \
--output solute_H.cif &> buildSystem.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'buildSystem' finished successfully before..."
fi
# (2) Add periodic box
if [ ! -f "solute_PBC.cif" ]
then
echo ">> Adding periodic boundary conditions ..."
python ${SDIR}/setPeriodicBox.py solute_H.cif \
--seed $SEED \
--padding 1.1 \
--boxtype cubic \
--output solute_PBC.cif &> setPeriodicBox.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'setPeriodicBox' finished successfully before..."
fi
# (3) Do restrained energy minimization to optimize Hs and clear strains
if [ ! -f "solute_EM.cif" ]
then
echo ">> Minimizing system in vacuum ..."
python ${SDIR}/minimizeSystem.py solute_PBC.cif \
--iterations 1000 \
--posre \
--seed $SEED \
--output solute_EM.cif &> solute_EM.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'minimizeSystem' in vacuum finished successfully before..."
fi
# (4) Add solvent and counter ions to physiological concentration
if [ ! -f "solvated.cif" ]
then
echo ">> Solvating solute and neutralizing system ..."
python ${SDIR}/solvateBox.py solute_EM.cif \
--neutralize \
--seed $SEED \
--output solvated.cif &> solvateBox.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'solvateBox' finished successfully before..."
fi
# (5) Minimize the solvated system to optimize solvent and ion positions
if [ ! -f "solvated_EM.cif" ]
then
echo ">> Minimizing solvated system ..."
python ${SDIR}/minimizeSystem.py solvated.cif \
--posre \
--seed $SEED \
--output solvated_EM.cif &> solvated_EM.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'minimizeSystem' in explicit solvent finished successfully before..."
fi
# (6) Heat the system to the desired temperature in gradual steps
if [ ! -f "solvated_Heat.cif" ]
then
echo ">> Heating system ..."
# NOTE(review): --output here is a basename (no .cif extension), unlike
# steps 1-5; presumably heatSystem.py appends .cif/.cpt itself so the
# guard above matches -- confirm against heatSystem.py.
python ${SDIR}/heatSystem.py solvated_EM.cif \
--temperature 310 \
--seed $SEED \
--output solvated_Heat &> heatSystem.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "'heatSystem' finished successfully before..."
fi
#
# Equilibrate the system
#
# (7) Do first round of equilibration: NVT
if [ ! -f "Eq_NVT.cif" ]
then
echo ">> Equilibration under NVT ..."
python ${SDIR}/equilibrate_NVT.py solvated_Heat.cif \
--temperature 310 \
--seed $SEED \
--state solvated_Heat.cpt \
--runtime 5 \
--restraint-heavy-atom \
--restraint-heavy-atom-k 500 \
--output Eq_NVT &> Eq_NVT.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "NVT finished successfully before..."
fi
# (8) Do second round of equilibration: NPT, strong restraints
if [ ! -f "Eq_NPT_k500.cif" ]
then
echo ">> Equilibration under NPT (k=500) ..."
python ${SDIR}/equilibrate_NPT.py Eq_NVT.cif \
--temperature 310 \
--barostat isotropic \
--seed $SEED \
--state Eq_NVT.cpt \
--runtime 5 \
--restraint-heavy-atom \
--restraint-heavy-atom-k 500 \
--output Eq_NPT_k500 &> Eq_NPT_k500.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "NPT (k=500) finished successfully before..."
fi
# (9) Do third round of equilibration: NPT, weak restraints
if [ ! -f "Eq_NPT_k250.cif" ]
then
echo ">> Equilibration under NPT (k=250) ..."
python ${SDIR}/equilibrate_NPT.py Eq_NPT_k500.cif \
--temperature 310 \
--barostat isotropic \
--seed $SEED \
--state Eq_NPT_k500.cpt \
--runtime 5 \
--restraint-heavy-atom \
--restraint-heavy-atom-k 250 \
--output Eq_NPT_k250 &> Eq_NPT_k250.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "NPT (k=250) finished successfully before..."
fi
# (10) Do fourth round of equilibration: NPT, very weak restraints
if [ ! -f "Eq_NPT_k50.cif" ]
then
echo ">> Equilibration under NPT (k=50) ..."
python ${SDIR}/equilibrate_NPT.py Eq_NPT_k250.cif \
--temperature 310 \
--barostat isotropic \
--seed $SEED \
--state Eq_NPT_k250.cpt \
--runtime 5 \
--restraint-heavy-atom \
--restraint-heavy-atom-k 50 \
--output Eq_NPT_k50 &> Eq_NPT_k50.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "NPT (k=50) finished successfully before..."
fi
# (11) Last round of equilibration. No restraints.
# Load state from XML file labelled noDUM.
# NOTE(review): the noDUM input files are not produced by any step in this
# script; presumably a dummy-atom-removal step creates them between (10)
# and (11) -- confirm against the md_scripts pipeline.
if [ ! -f "Eq_NPT_noPR.cif" ]
then
echo ">> Equilibration under NPT (k=0) ..."
python ${SDIR}/equilibrate_NPT.py Eq_NPT_k50_noDUM.cif \
--temperature 310 \
--barostat isotropic \
--seed $SEED \
--state Eq_NPT_k50_noDUM.xml \
--runtime 5 \
--output Eq_NPT_noPR &> Eq_NPT_noPR.runlog
[[ "$?" -ne 0 ]] && exit 1
else
echo "NPT (k=0) finished successfully before..."
fi
# Check equilibration finished successfully
if [ ! -f "Eq_NPT_noPR.xml" ]
then
echo "Equilibration did not finish successfully"
exit 1
fi
#
# Production Simulation
#
# (12) Run production simulation
# 5 fs time step with HMR
# xyz every .1 ns, log every .1 ns
if [ ! -f "production.cif" ]
then
# An existing trajectory (.dcd) means a previous production run was
# interrupted: restart from the checkpoint and append to the runlog
# instead of overwriting it.
if [ -f "production.dcd" ]
then
echo ">> Continuing production simulation ..."
python ${SDIR}/runProduction.py Eq_NPT_noPR_noDUM.cif \
--continuation \
--hmr \
--state production.cpt \
--xyz-frequency 20000 \
--log-frequency 20000 \
--barostat isotropic \
--runtime 200 \
--seed $SEED \
--output production >> production.runlog 2>&1
else
echo ">> Running production simulation ..."
python ${SDIR}/runProduction.py Eq_NPT_noPR_noDUM.cif \
--hmr \
--state Eq_NPT_noPR_noDUM.xml \
--xyz-frequency 20000 \
--log-frequency 20000 \
--barostat isotropic \
--runtime 200 \
--seed $SEED \
--output production &> production.runlog
fi
else
echo "'runProduction' finished successfully before..."
fi
echo "Nothing to do here .."
exit 0
// <gh_stars>0
// NOTE(review): `returnEmpty` is never referenced in this script; it looks
// like dead code -- confirm before deleting.
const returnEmpty = function() {
  return null;
};
const path = require('path');
const fs = require('fs');
// Server-side-rendering entry point produced by the release build.
const ssr = require('../../release/page.generated').default;
const cwd = process.cwd();
// Render the page to an HTML string.
let ssrHtml = ssr();
// Inline each release stylesheet ahead of the rendered markup so the
// generated ssr.html is self-contained.
const stylesheets = ['page.css'];
stylesheets.forEach(function(fileName) {
  const content = fs.readFileSync(path.join(cwd, 'release', 'stylesheets', fileName), 'utf8');
  ssrHtml = `<style>\n${content}\n</style>\n${ssrHtml}`;
});
// Write the final document. Bug fix: the original logged
// 'write file complete' even when the write failed; report success only
// when there is no error.
fs.writeFile(path.join(cwd, 'release', 'ssr.html'), ssrHtml, function (err) {
  if (err) {
    console.error(err);
    return;
  }
  console.log('write file complete');
});
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# NOTE: spack-completion.bash is auto-generated by:
#
# $ spack commands --aliases --format=bash
# --header=bash/spack-completion.in --update=spack-completion.bash
#
# Please do not manually modify this file.
# The following global variables are set by Bash programmable completion:
#
# COMP_CWORD: An index into ${COMP_WORDS} of the word containing the
# current cursor position
# COMP_KEY: The key (or final key of a key sequence) used to invoke
# the current completion function
# COMP_LINE: The current command line
# COMP_POINT: The index of the current cursor position relative to the
# beginning of the current command
# COMP_TYPE: Set to an integer value corresponding to the type of
# completion attempted that caused a completion function
# to be called
# COMP_WORDBREAKS: The set of characters that the readline library treats
# as word separators when performing word completion
# COMP_WORDS: An array variable consisting of the individual words in
# the current command line
#
# The following global variable is used by Bash programmable completion:
#
# COMPREPLY: An array variable from which bash reads the possible
# completions generated by a shell function invoked by the
# programmable completion facility
#
# See `man bash` for more details.
# When sourced from zsh, enable bash-compatible completion and re-source
# this same file ($0:A is zsh for "absolute path of this script") under
# sh emulation, then stop interpreting the rest of the file natively.
if test -n "${ZSH_VERSION:-}" ; then
if [[ "$(emulate)" = zsh ]] ; then
# ensure base completion support is enabled, ignore insecure directories
autoload -U +X compinit && compinit -i
# ensure bash compatible completion support is enabled
autoload -U +X bashcompinit && bashcompinit
emulate sh -c "source '$0:A'"
return # stop interpreting file
fi
fi
# Bash programmable completion for Spack
# Top-level completion entry point: strips flags out of COMP_WORDS, maps the
# remaining words onto a per-subcommand helper (_spack, _spack_install, ...)
# and fills COMPREPLY from whatever that helper puts in SPACK_COMPREPLY.
_bash_completion_spack() {
# In all following examples, let the cursor be denoted by brackets, i.e. []
# For our purposes, flags should not affect tab completion. For instance,
# `spack install []` and `spack -d install --jobs 8 []` should both give the same
# possible completions. Therefore, we need to ignore any flags in COMP_WORDS.
local COMP_WORDS_NO_FLAGS=()
local index=0
while [[ "$index" -lt "$COMP_CWORD" ]]
do
if [[ "${COMP_WORDS[$index]}" == [a-z]* ]]
then
COMP_WORDS_NO_FLAGS+=("${COMP_WORDS[$index]}")
fi
let index++
done
# Options will be listed by a subfunction named after non-flag arguments.
# For example, `spack -d install []` will call _spack_install
# and `spack compiler add []` will call _spack_compiler_add
local subfunction=$(IFS='_'; echo "_${COMP_WORDS_NO_FLAGS[*]}")
# Translate dashes to underscores, as dashes are not permitted in
# compatibility mode. See https://github.com/spack/spack/pull/4079
subfunction=${subfunction//-/_}
# However, the word containing the current cursor position needs to be
# added regardless of whether or not it is a flag. This allows us to
# complete something like `spack install --keep-st[]`
COMP_WORDS_NO_FLAGS+=("${COMP_WORDS[$COMP_CWORD]}")
# Since we have removed all words after COMP_CWORD, we can safely assume
# that COMP_CWORD_NO_FLAGS is simply the index of the last element
local COMP_CWORD_NO_FLAGS=$((${#COMP_WORDS_NO_FLAGS[@]} - 1))
# There is no guarantee that the cursor is at the end of the command line
# when tab completion is invoked. For example, in the following situation:
# `spack -d [] install`
# if the user presses the TAB key, a list of valid flags should be listed.
# Note that we cannot simply ignore everything after the cursor. In the
# previous scenario, the user should expect to see a list of flags, but
# not of other subcommands. Obviously, `spack -d list install` would be
# invalid syntax. To accomplish this, we use the variable list_options
# which is true if the current word starts with '-' or if the cursor is
# not at the end of the line.
local list_options=false
if [[ "${COMP_WORDS[$COMP_CWORD]}" == -* || "$COMP_POINT" -ne "${#COMP_LINE}" ]]
then
list_options=true
fi
# In general, when invoking tab completion, the user is not expecting to
# see optional flags mixed in with subcommands or package names. Tab
# completion is used by those who are either lazy or just bad at spelling.
# If someone doesn't remember what flag to use, seeing single letter flags
# in their results won't help them, and they should instead consult the
# documentation. However, if the user explicitly declares that they are
# looking for a flag, we can certainly help them out.
# `spack install -[]`
# and
# `spack install --[]`
# should list all flags and long flags, respectively. Furthermore, if a
# subcommand has no non-flag completions, such as `spack arch []`, it
# should list flag completions.
local cur=${COMP_WORDS_NO_FLAGS[$COMP_CWORD_NO_FLAGS]}
# If the cursor is in the middle of the line, like:
# `spack -d [] install`
# COMP_WORDS will not contain the empty character, so we have to add it.
if [[ "${COMP_LINE:$COMP_POINT:1}" == " " ]]
then
cur=""
fi
# Uncomment this line to enable logging
#_test_vars >> temp
# Make sure function exists before calling it
local rgx #this dance is necessary to cover bash and zsh regex
rgx="$subfunction.*function.* "
if [[ "$(type $subfunction 2>&1)" =~ $rgx ]]
then
$subfunction
COMPREPLY=($(compgen -W "$SPACK_COMPREPLY" -- "$cur"))
fi
}
# Helper functions for subcommands
# Results of each query are cached via environment variables
# Completion source: all spack subcommand names, memoized in an environment
# variable so `spack commands` runs at most once per shell session.
_subcommands() {
if [[ -z "${SPACK_SUBCOMMANDS:-}" ]]
then
SPACK_SUBCOMMANDS="$(spack commands)"
fi
SPACK_COMPREPLY="$SPACK_SUBCOMMANDS"
}
# Completion source: every known package name (`spack list`), memoized.
_all_packages() {
if [[ -z "${SPACK_ALL_PACKAGES:-}" ]]
then
SPACK_ALL_PACKAGES="$(spack list)"
fi
SPACK_COMPREPLY="$SPACK_ALL_PACKAGES"
}
# Completion source: hashes from `spack resource list`, memoized.
_all_resource_hashes() {
# Bug fix: the guard previously tested SPACK_ALL_RESOURCES_HASHES (extra
# 'S'), a variable that is never assigned, so the cache never hit and the
# spack call ran on every completion. Test the variable assigned below.
if [[ -z "${SPACK_ALL_RESOURCE_HASHES:-}" ]]
then
SPACK_ALL_RESOURCE_HASHES="$(spack resource list --only-hashes)"
fi
SPACK_COMPREPLY="$SPACK_ALL_RESOURCE_HASHES"
}
# Completion source: installed package names (`spack find`), memoized.
# --color=never keeps ANSI escapes out of the completion words.
_installed_packages() {
if [[ -z "${SPACK_INSTALLED_PACKAGES:-}" ]]
then
SPACK_INSTALLED_PACKAGES="$(spack --color=never find --no-groups)"
fi
SPACK_COMPREPLY="$SPACK_INSTALLED_PACKAGES"
}
# Completion source: configured compiler specs, memoized. The filter drops
# the separator/header lines of `spack compilers` output (lines starting
# with '-' or '=').
_installed_compilers() {
if [[ -z "${SPACK_INSTALLED_COMPILERS:-}" ]]
then
# `egrep` is deprecated; `grep -E` is the portable POSIX spelling.
SPACK_INSTALLED_COMPILERS="$(spack compilers | grep -E -v "^(-|=)")"
fi
SPACK_COMPREPLY="$SPACK_INSTALLED_COMPILERS"
}
# Remaining cache-backed completion sources: each runs its spack query once
# per shell session and memoizes the result in an environment variable.
_providers() {
if [[ -z "${SPACK_PROVIDERS:-}" ]]
then
SPACK_PROVIDERS="$(spack providers)"
fi
SPACK_COMPREPLY="$SPACK_PROVIDERS"
}
# Mirror names only (first column of `spack mirror list`).
_mirrors() {
if [[ -z "${SPACK_MIRRORS:-}" ]]
then
SPACK_MIRRORS="$(spack mirror list | awk '{print $1}')"
fi
SPACK_COMPREPLY="$SPACK_MIRRORS"
}
# Repository namespaces only (first column of `spack repo list`).
_repos() {
if [[ -z "${SPACK_REPOS:-}" ]]
then
SPACK_REPOS="$(spack repo list | awk '{print $1}')"
fi
SPACK_COMPREPLY="$SPACK_REPOS"
}
_tests() {
if [[ -z "${SPACK_TESTS:-}" ]]
then
SPACK_TESTS="$(spack test -l)"
fi
SPACK_COMPREPLY="$SPACK_TESTS"
}
_environments() {
if [[ -z "${SPACK_ENVIRONMENTS:-}" ]]
then
SPACK_ENVIRONMENTS="$(spack env list)"
fi
SPACK_COMPREPLY="$SPACK_ENVIRONMENTS"
}
_keys() {
if [[ -z "${SPACK_KEYS:-}" ]]
then
SPACK_KEYS="$(spack gpg list)"
fi
SPACK_COMPREPLY="$SPACK_KEYS"
}
_config_sections() {
if [[ -z "${SPACK_CONFIG_SECTIONS:-}" ]]
then
SPACK_CONFIG_SECTIONS="$(spack config list)"
fi
SPACK_COMPREPLY="$SPACK_CONFIG_SECTIONS"
}
_extensions() {
if [[ -z "${SPACK_EXTENSIONS:-}" ]]
then
SPACK_EXTENSIONS="$(spack extensions)"
fi
SPACK_COMPREPLY="$SPACK_EXTENSIONS"
}
# Testing functions
# Function for unit testing tab completion
# Syntax: _spack_completions spack install py-
# Simulates a completion request for unit testing, e.g.:
#   _spack_completions spack install py-
# Builds the COMP_* variables exactly as bash would, runs the completion
# entry point, and echoes the resulting candidates.
_spack_completions() {
# All COMP_* variables are declared local so a test run does not clobber
# the caller's real completion state.
local COMP_CWORD COMP_KEY COMP_LINE COMP_POINT COMP_TYPE COMP_WORDS COMPREPLY
# Set each variable the way bash would
COMP_LINE="$*"
COMP_POINT=${#COMP_LINE}
COMP_WORDS=("$@")
# A trailing space means the user finished the last word; bash would then
# report an empty current word.
if [[ ${COMP_LINE: -1} == ' ' ]]
then
COMP_WORDS+=('')
fi
COMP_CWORD=$((${#COMP_WORDS[@]} - 1))
COMP_KEY=9 # ASCII 09: Horizontal Tab
COMP_TYPE=64 # ASCII 64: '@', to list completions if the word is not unmodified
# Run Spack's tab completion function
_bash_completion_spack
# Return the result
echo "${COMPREPLY[@]:-}"
}
# Log the environment variables used
# Syntax: _test_vars >> temp
# Debug helper: dumps every completion-related variable. Intended to be
# appended to a file from inside _bash_completion_spack, e.g.:
#   _test_vars >> temp
_test_vars() {
echo "-----------------------------------------------------"
echo "Variables set by bash:"
echo
echo "COMP_LINE: '$COMP_LINE'"
echo "# COMP_LINE: '${#COMP_LINE}'"
echo "COMP_WORDS: $(_pretty_print COMP_WORDS[@])"
echo "# COMP_WORDS: '${#COMP_WORDS[@]}'"
echo "COMP_CWORD: '$COMP_CWORD'"
echo "COMP_KEY: '$COMP_KEY'"
echo "COMP_POINT: '$COMP_POINT'"
echo "COMP_TYPE: '$COMP_TYPE'"
echo "COMP_WORDBREAKS: '$COMP_WORDBREAKS'"
echo
echo "Intermediate variables:"
echo
echo "COMP_WORDS_NO_FLAGS: $(_pretty_print COMP_WORDS_NO_FLAGS[@])"
echo "# COMP_WORDS_NO_FLAGS: '${#COMP_WORDS_NO_FLAGS[@]}'"
echo "COMP_CWORD_NO_FLAGS: '$COMP_CWORD_NO_FLAGS'"
echo
echo "Subfunction: '$subfunction'"
if $list_options
then
echo "List options: 'True'"
else
echo "List options: 'False'"
fi
echo "Current word: '$cur'"
}
# Pretty-prints one or more arrays
# Syntax: _pretty_print array1[@] ...
# Pretty-prints one or more arrays passed by name, e.g.:
#   _pretty_print COMP_WORDS[@]
# prints: COMP_WORDS[@]: ['spack', 'install']
# Fixes over the original: "$@" is quoted (names with spaces no longer
# word-split), the array name is passed as a printf argument rather than
# inside the format string (a '%' in the name can no longer corrupt the
# output), and single-element arrays no longer print a spurious ", ''"
# (the old `printf ", '%s'" "${array[@]:1}"` ran once even with zero args).
_pretty_print() {
local arg
for arg in "$@"
do
local array=("${!arg}")
printf '%s: [' "$arg"
local i
for ((i = 0; i < ${#array[@]}; i++))
do
# Separator before every element except the first.
[[ $i -gt 0 ]] && printf ', '
printf "'%s'" "${array[$i]}"
done
echo "]"
done
}
# Register the completion entry point for the spack command itself.
complete -o bashdefault -o default -F _bash_completion_spack spack
# Completion for spacktivate
complete -o bashdefault -o default -F _bash_completion_spack spacktivate
# `spacktivate` is an alias for `spack env activate`, so its completions
# simply delegate to that subcommand's helper.
_spacktivate() {
_spack_env_activate
}
# Spack commands
#
# Everything below here is auto-generated.
_spack() {
if $list_options
then
SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
else
SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
fi
}
_spack_activate() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -v --view"
else
_installed_packages
fi
}
_spack_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --list-name"
else
_all_packages
fi
}
_spack_arch() {
SPACK_COMPREPLY="-h --help --known-targets -p --platform -o --operating-system -t --target -f --frontend -b --backend"
}
_spack_blame() {
if $list_options
then
SPACK_COMPREPLY="-h --help -t --time -p --percent -g --git"
else
_all_packages
fi
}
_spack_build_env() {
if $list_options
then
SPACK_COMPREPLY="-h --help --clean --dirty --dump --pickle"
else
_all_packages
fi
}
_spack_buildcache() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="create install list keys preview check download get-buildcache-name save-yaml copy update-index"
fi
}
_spack_buildcache_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --rebuild-index -y --spec-yaml --only"
else
_all_packages
fi
}
_spack_buildcache_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch"
else
_all_packages
fi
}
_spack_buildcache_list() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -a --allarch"
else
_all_packages
fi
}
_spack_buildcache_keys() {
SPACK_COMPREPLY="-h --help -i --install -t --trust -f --force"
}
_spack_buildcache_preview() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_installed_packages
fi
}
_spack_buildcache_check() {
SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec -y --spec-yaml --rebuild-on-error"
}
_spack_buildcache_download() {
SPACK_COMPREPLY="-h --help -s --spec -y --spec-yaml -p --path -c --require-cdashid"
}
_spack_buildcache_get_buildcache_name() {
SPACK_COMPREPLY="-h --help -s --spec -y --spec-yaml"
}
_spack_buildcache_save_yaml() {
SPACK_COMPREPLY="-h --help --root-spec --root-spec-yaml -s --specs -y --yaml-dir"
}
_spack_buildcache_copy() {
SPACK_COMPREPLY="-h --help --base-dir --spec-yaml --destination-url"
}
_spack_buildcache_update_index() {
SPACK_COMPREPLY="-h --help -d --mirror-url -k --keys"
}
_spack_cd() {
if $list_options
then
SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env"
else
_all_packages
fi
}
_spack_checksum() {
if $list_options
then
SPACK_COMPREPLY="-h --help --keep-stage -b --batch"
else
_all_packages
fi
}
_spack_ci() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="generate rebuild"
fi
}
_spack_ci_generate() {
SPACK_COMPREPLY="-h --help --output-file --copy-to --optimize --dependencies"
}
_spack_ci_rebuild() {
SPACK_COMPREPLY="-h --help"
}
_spack_clean() {
if $list_options
then
SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -a --all"
else
_all_packages
fi
}
_spack_clone() {
if $list_options
then
SPACK_COMPREPLY="-h --help -r --remote"
else
SPACK_COMPREPLY=""
fi
}
_spack_commands() {
if $list_options
then
SPACK_COMPREPLY="-h --help --update-completion -a --aliases --format --header --update"
else
SPACK_COMPREPLY=""
fi
}
_spack_compiler() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="find add remove rm list info"
fi
}
_spack_compiler_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
SPACK_COMPREPLY=""
fi
}
_spack_compiler_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
SPACK_COMPREPLY=""
fi
}
_spack_compiler_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all --scope"
else
_installed_compilers
fi
}
_spack_compiler_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all --scope"
else
_installed_compilers
fi
}
_spack_compiler_list() {
SPACK_COMPREPLY="-h --help --scope"
}
_spack_compiler_info() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_installed_compilers
fi
}
_spack_compilers() {
SPACK_COMPREPLY="-h --help --scope"
}
_spack_concretize() {
SPACK_COMPREPLY="-h --help -f --force"
}
_spack_config() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
SPACK_COMPREPLY="get blame edit list add remove rm update revert"
fi
}
_spack_config_get() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_config_sections
fi
}
_spack_config_blame() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_config_sections
fi
}
_spack_config_edit() {
if $list_options
then
SPACK_COMPREPLY="-h --help --print-file"
else
_config_sections
fi
}
_spack_config_list() {
SPACK_COMPREPLY="-h --help"
}
_spack_config_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --file"
else
SPACK_COMPREPLY=""
fi
}
_spack_config_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_config_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_config_update() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_config_sections
fi
}
_spack_config_revert() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_config_sections
fi
}
_spack_containerize() {
SPACK_COMPREPLY="-h --help"
}
_spack_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help --keep-stage -n --name -t --template -r --repo -N --namespace -f --force --skip-editor -b --batch"
else
SPACK_COMPREPLY=""
fi
}
_spack_deactivate() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -v --view -a --all"
else
_installed_packages
fi
}
_spack_debug() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="create-db-tarball report"
fi
}
_spack_debug_create_db_tarball() {
SPACK_COMPREPLY="-h --help"
}
_spack_debug_report() {
SPACK_COMPREPLY="-h --help"
}
_spack_dependencies() {
if $list_options
then
SPACK_COMPREPLY="-h --help -i --installed -t --transitive --deptype -V --no-expand-virtuals"
else
_all_packages
fi
}
_spack_dependents() {
if $list_options
then
SPACK_COMPREPLY="-h --help -i --installed -t --transitive"
else
_all_packages
fi
}
_spack_deprecate() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all -d --dependencies -D --no-dependencies -i --install-deprecator -I --no-install-deprecator -l --link-type"
else
_all_packages
fi
}
_spack_dev_build() {
if $list_options
then
SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --keep-prefix --skip-patch -q --quiet --drop-in --test -b --before -u --until --clean --dirty"
else
_all_packages
fi
}
_spack_develop() {
if $list_options
then
SPACK_COMPREPLY="-h --help -p --path --no-clone --clone -f --force"
else
_all_packages
fi
}
_spack_docs() {
SPACK_COMPREPLY="-h --help"
}
_spack_edit() {
if $list_options
then
SPACK_COMPREPLY="-h --help -b --build-system -c --command -d --docs -t --test -m --module -r --repo -N --namespace"
else
_all_packages
fi
}
_spack_env() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="activate deactivate create remove rm list ls status st loads view update revert"
fi
}
_spack_env_activate() {
if $list_options
then
SPACK_COMPREPLY="-h --help --sh --csh --fish -v --with-view -V --without-view -d --dir -p --prompt"
else
_environments
fi
}
_spack_env_deactivate() {
SPACK_COMPREPLY="-h --help --sh --csh --fish"
}
_spack_env_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --dir --without-view --with-view"
else
_environments
fi
}
_spack_env_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_environments
fi
}
_spack_env_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_environments
fi
}
_spack_env_list() {
SPACK_COMPREPLY="-h --help"
}
_spack_env_ls() {
SPACK_COMPREPLY="-h --help"
}
_spack_env_status() {
SPACK_COMPREPLY="-h --help"
}
_spack_env_st() {
SPACK_COMPREPLY="-h --help"
}
_spack_env_loads() {
if $list_options
then
SPACK_COMPREPLY="-h --help -m --module-type --input-only -p --prefix -x --exclude -r --dependencies"
else
_environments
fi
}
_spack_env_view() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_env_update() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_environments
fi
}
_spack_env_revert() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_environments
fi
}
_spack_extensions() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --long -L --very-long -d --deps -p --paths -s --show -v --view"
else
_extensions
fi
}
_spack_external() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="find list"
fi
}
_spack_external_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help --not-buildable --scope -t --tag"
else
_all_packages
fi
}
_spack_external_list() {
SPACK_COMPREPLY="-h --help"
}
_spack_fetch() {
if $list_options
then
SPACK_COMPREPLY="-h --help -n --no-checksum -m --missing -D --dependencies"
else
_all_packages
fi
}
_spack_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date"
else
_installed_packages
fi
}
_spack_flake8() {
if $list_options
then
SPACK_COMPREPLY="-h --help -b --base -a --all -o --output -r --root-relative -U --no-untracked --no-flake8 --no-mypy --black"
else
SPACK_COMPREPLY=""
fi
}
_spack_gc() {
SPACK_COMPREPLY="-h --help -y --yes-to-all"
}
_spack_gpg() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="verify trust untrust sign create list init export publish"
fi
}
_spack_gpg_verify() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_installed_packages
fi
}
_spack_gpg_trust() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_gpg_untrust() {
if $list_options
then
SPACK_COMPREPLY="-h --help --signing"
else
_keys
fi
}
_spack_gpg_sign() {
if $list_options
then
SPACK_COMPREPLY="-h --help --output --key --clearsign"
else
_installed_packages
fi
}
_spack_gpg_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help --comment --expires --export"
else
SPACK_COMPREPLY=""
fi
}
_spack_gpg_list() {
SPACK_COMPREPLY="-h --help --trusted --signing"
}
_spack_gpg_init() {
SPACK_COMPREPLY="-h --help --from"
}
_spack_gpg_export() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_keys
fi
}
_spack_gpg_publish() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url --rebuild-index"
else
_keys
fi
}
_spack_graph() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --ascii -d --dot -s --static -i --installed --deptype"
else
_all_packages
fi
}
_spack_help() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all --spec"
else
_subcommands
fi
}
_spack_info() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
else
_all_packages
fi
}
_spack_license() {
if $list_options
then
SPACK_COMPREPLY="-h --help --root"
else
SPACK_COMPREPLY="list-files verify update-copyright-year"
fi
}
_spack_license_list_files() {
SPACK_COMPREPLY="-h --help"
}
_spack_license_verify() {
SPACK_COMPREPLY="-h --help"
}
_spack_license_update_copyright_year() {
SPACK_COMPREPLY="-h --help"
}
_spack_list() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --search-description --format --update -v --virtuals -t --tag"
else
_all_packages
fi
}
_spack_load() {
if $list_options
then
SPACK_COMPREPLY="-h --help -r --dependencies --sh --csh --fish --first --only"
else
_installed_packages
fi
}
_spack_location() {
if $list_options
then
SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env"
else
_all_packages
fi
}
_spack_log_parse() {
if $list_options
then
SPACK_COMPREPLY="-h --help --show -c --context -p --profile -w --width -j --jobs"
else
SPACK_COMPREPLY=""
fi
}
_spack_maintainers() {
if $list_options
then
SPACK_COMPREPLY="-h --help --maintained --unmaintained -a --all --by-user"
else
_all_packages
fi
}
_spack_mark() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all -e --explicit -i --implicit"
else
_installed_packages
fi
}
_spack_mirror() {
if $list_options
then
SPACK_COMPREPLY="-h --help -n --no-checksum"
else
SPACK_COMPREPLY="create add remove rm set-url list"
fi
}
_spack_mirror_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --directory -a --all -f --file --exclude-file --exclude-specs --skip-unstable-versions -D --dependencies -n --versions-per-spec"
else
_all_packages
fi
}
_spack_mirror_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_mirrors
fi
}
_spack_mirror_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_mirrors
fi
}
_spack_mirror_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_mirrors
fi
}
_spack_mirror_set_url() {
if $list_options
then
SPACK_COMPREPLY="-h --help --push --scope"
else
_mirrors
fi
}
_spack_mirror_list() {
SPACK_COMPREPLY="-h --help --scope"
}
_spack_module() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="lmod tcl"
fi
}
_spack_module_lmod() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="refresh find rm loads setdefault"
fi
}
_spack_module_lmod_refresh() {
if $list_options
then
SPACK_COMPREPLY="-h --help --delete-tree --upstream-modules -y --yes-to-all"
else
_installed_packages
fi
}
_spack_module_lmod_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help --full-path -r --dependencies"
else
_installed_packages
fi
}
_spack_module_lmod_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_installed_packages
fi
}
_spack_module_lmod_loads() {
if $list_options
then
SPACK_COMPREPLY="-h --help --input-only -p --prefix -x --exclude -r --dependencies"
else
_installed_packages
fi
}
_spack_module_lmod_setdefault() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_installed_packages
fi
}
_spack_module_tcl() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="refresh find rm loads"
fi
}
_spack_module_tcl_refresh() {
if $list_options
then
SPACK_COMPREPLY="-h --help --delete-tree --upstream-modules -y --yes-to-all"
else
_installed_packages
fi
}
_spack_module_tcl_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help --full-path -r --dependencies"
else
_installed_packages
fi
}
_spack_module_tcl_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
_installed_packages
fi
}
_spack_module_tcl_loads() {
if $list_options
then
SPACK_COMPREPLY="-h --help --input-only -p --prefix -x --exclude -r --dependencies"
else
_installed_packages
fi
}
_spack_patch() {
if $list_options
then
SPACK_COMPREPLY="-h --help -n --no-checksum"
else
_all_packages
fi
}
_spack_pkg() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="add list diff added changed removed"
fi
}
_spack_pkg_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_pkg_list() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_pkg_diff() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_pkg_added() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_pkg_changed() {
if $list_options
then
SPACK_COMPREPLY="-h --help -t --type"
else
SPACK_COMPREPLY=""
fi
}
_spack_pkg_removed() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_providers() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_providers
fi
}
_spack_pydoc() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_python() {
if $list_options
then
SPACK_COMPREPLY="-h --help -V --version -c -i -m"
else
SPACK_COMPREPLY=""
fi
}
_spack_reindex() {
SPACK_COMPREPLY="-h --help"
}
_spack_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all -l --list-name -f --force"
else
_all_packages
fi
}
_spack_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all -l --list-name -f --force"
else
_all_packages
fi
}
_spack_repo() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="create list add remove rm"
fi
}
_spack_repo_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_repos
fi
}
_spack_repo_list() {
SPACK_COMPREPLY="-h --help --scope"
}
_spack_repo_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
SPACK_COMPREPLY=""
fi
}
_spack_repo_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_repos
fi
}
_spack_repo_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help --scope"
else
_repos
fi
}
_spack_resource() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="list show"
fi
}
_spack_resource_list() {
SPACK_COMPREPLY="-h --help --only-hashes"
}
_spack_resource_show() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_resource_hashes
fi
}
_spack_restage() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_solve() {
if $list_options
then
SPACK_COMPREPLY="-h --help --show --models -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats"
else
_all_packages
fi
}
_spack_spec() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types"
else
_all_packages
fi
}
_spack_stage() {
if $list_options
then
SPACK_COMPREPLY="-h --help -n --no-checksum -p --path"
else
_all_packages
fi
}
_spack_style() {
if $list_options
then
SPACK_COMPREPLY="-h --help -b --base -a --all -o --output -r --root-relative -U --no-untracked --no-flake8 --no-mypy --black"
else
SPACK_COMPREPLY=""
fi
}
_spack_test() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="run list find status results remove"
fi
}
_spack_test_run() {
if $list_options
then
SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty"
else
_installed_packages
fi
}
_spack_test_list() {
SPACK_COMPREPLY="-h --help"
}
_spack_test_find() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_test_status() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi
}
_spack_test_results() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --logs -f --failed"
else
SPACK_COMPREPLY=""
fi
}
_spack_test_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help -y --yes-to-all"
else
SPACK_COMPREPLY=""
fi
}
_spack_test_env() {
if $list_options
then
SPACK_COMPREPLY="-h --help --clean --dirty --dump --pickle"
else
_all_packages
fi
}
_spack_tutorial() {
SPACK_COMPREPLY="-h --help -y --yes-to-all"
}
_spack_undevelop() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all"
else
_all_packages
fi
}
_spack_uninstall() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all"
else
_installed_packages
fi
}
_spack_unit_test() {
if $list_options
then
SPACK_COMPREPLY="-h --help -H --pytest-help -l --list -L --list-long -N --list-names --extension -s -k --showlocals"
else
_tests
fi
}
_spack_unload() {
if $list_options
then
SPACK_COMPREPLY="-h --help --sh --csh --fish -a --all"
else
_installed_packages
fi
}
_spack_url() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="parse list summary stats"
fi
}
_spack_url_parse() {
if $list_options
then
SPACK_COMPREPLY="-h --help -s --spider"
else
SPACK_COMPREPLY=""
fi
}
_spack_url_list() {
SPACK_COMPREPLY="-h --help -c --color -e --extrapolation -n --incorrect-name -N --correct-name -v --incorrect-version -V --correct-version"
}
_spack_url_summary() {
SPACK_COMPREPLY="-h --help"
}
_spack_url_stats() {
SPACK_COMPREPLY="-h --help"
}
_spack_verify() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --local -j --json -a --all -s --specs -f --files"
else
_all_packages
fi
}
_spack_versions() {
if $list_options
then
SPACK_COMPREPLY="-h --help -s --safe --safe-only -r --remote -n --new -c --concurrency"
else
_all_packages
fi
}
_spack_view() {
if $list_options
then
SPACK_COMPREPLY="-h --help -v --verbose -e --exclude -d --dependencies"
else
SPACK_COMPREPLY="symlink add soft hardlink hard copy relocate remove rm statlink status check"
fi
}
_spack_view_symlink() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_add() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_soft() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_hardlink() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_hard() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_copy() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_relocate() {
if $list_options
then
SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts"
else
_all_packages
fi
}
_spack_view_remove() {
if $list_options
then
SPACK_COMPREPLY="-h --help --no-remove-dependents -a --all"
else
_all_packages
fi
}
_spack_view_rm() {
if $list_options
then
SPACK_COMPREPLY="-h --help --no-remove-dependents -a --all"
else
_all_packages
fi
}
_spack_view_statlink() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_view_status() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
_spack_view_check() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
else
_all_packages
fi
}
|
package elasta.webutils.impl;
import elasta.webutils.UriToEventAddressMap;
import elasta.webutils.UriToEventAddressTranslator;
import elasta.webutils.exceptions.UriToEventAddressTranslationException;
import elasta.webutils.model.UriAndHttpMethodPair;
import java.util.Objects;
/**
* Created by Jango on 11/7/2016.
*/
/**
 * {@link UriToEventAddressTranslator} backed by a fixed lookup table.
 *
 * <p>Resolves a (URI, HTTP method) pair to the event bus address registered
 * for it, failing loudly instead of returning {@code null} when no mapping
 * exists.
 */
public final class UriToEventAddressTranslatorImpl implements UriToEventAddressTranslator {

    /** Lookup table from (URI, HTTP method) pairs to event addresses. */
    private final UriToEventAddressMap uriToEventAddressMap;

    /**
     * @param uriToEventAddressMap mapping used for translation; must not be {@code null}
     */
    public UriToEventAddressTranslatorImpl(UriToEventAddressMap uriToEventAddressMap) {
        // requireNonNull returns its argument, so null-check and assignment
        // collapse into one statement; the message names the offending param.
        this.uriToEventAddressMap = Objects.requireNonNull(uriToEventAddressMap, "uriToEventAddressMap");
    }

    /**
     * Translates the given pair to its event address.
     *
     * @throws UriToEventAddressTranslationException if no address is mapped
     */
    @Override
    public String apply(UriAndHttpMethodPair uriAndHttpMethodPair) throws Throwable {
        return getAddress(uriAndHttpMethodPair);
    }

    /** Looks up the address for the pair, throwing when it is absent. */
    private String getAddress(UriAndHttpMethodPair uriAndHttpMethodPair) {
        final String address = uriToEventAddressMap.getMap().get(uriAndHttpMethodPair);
        if (address == null) {
            throw new UriToEventAddressTranslationException(
                "No event address found for '" + uriAndHttpMethodPair + "'");
        }
        return address;
    }
}
|
<filename>src/content/ReactPage/index.js
// Barrel file: re-export the page component as this directory's default.
export { default } from "./ReactPage";
|
/* Debug-logging support: compiling with -DDEBUG exposes the shared debug
 * output stream; without it this header contributes nothing. */
#ifndef DEBUG_H
#define DEBUG_H
#if DEBUG
#include <stdio.h>
/* Stream that debug messages are written to; defined in one .c file. */
extern FILE *debug;
#endif
#endif /* DEBUG_H */
|
<reponame>kdubiel/bh-events
import { waitFor } from '@testing-library/dom';
import React from 'react';
import { render, screen } from 'test-utils';
import { API } from 'utils';
import { EventsList } from './EventsList';
jest.mock('../../../utils/api.ts');
const mockedApi = API as jest.Mocked<typeof API>;
// Unit tests for <EventsList />: the API layer is mocked (jest.mock above),
// so these tests exercise loading, success and error rendering paths only.
describe('<EventsList />', () => {
  afterEach(() => {
    // Reset call counts/implementations so tests stay independent.
    jest.clearAllMocks();
  });

  it('should render heading', async () => {
    render(<EventsList />);
    // 'events:events-list' is the i18n key; tests render untranslated keys.
    expect(screen.getByText('events:events-list')).toBeInTheDocument();
  });

  it('should fetch events on mount', async () => {
    const spy = jest.spyOn(API, 'get');
    render(<EventsList />);
    // A loader appears while the request is in flight...
    await waitFor(() => {
      expect(screen.getByTestId('component-loader')).toBeInTheDocument();
    });
    // ...and exactly the 'events' endpoint is hit on mount.
    await waitFor(() => {
      expect(API.get).toBeCalledWith('events');
    });
    spy.mockRestore();
  });

  it('should pass fetched events to <EventsGrid />', async () => {
    // Shape mirrors the API response: { data: Event[] }.
    mockedApi.get.mockResolvedValue({
      data: [
        {
          _id: '12345',
          title: 'TestEvent#1',
          date: '1993-11-18T16:25:17.761Z',
          user: {
            firstName: 'Kamil',
            lastName: 'Dubiel',
            email: '<EMAIL>',
          },
        },
      ],
    });
    render(<EventsList />);
    await waitFor(() => {
      expect(screen.getByText('TestEvent#1')).toBeInTheDocument();
    });
  });

  it('should pass error message to <EventsGrid />', async () => {
    // Rejection value is rendered directly as the error message.
    mockedApi.get.mockRejectedValue('Test Error');
    render(<EventsList />);
    await waitFor(() => {
      expect(screen.getByText('Test Error')).toBeInTheDocument();
    });
  });
});
|
import React from 'react';
import styles from './VirtualDevice.less';
// Stateless page stub for the virtual-device management screen.
// NOTE(review): the heading text looks like mis-decoded UTF-8 (presumably
// Chinese for "virtual device management") — confirm against the source
// repo before correcting; it is a runtime string and is left untouched here.
const VirtualDevice = () => (
  <div className={styles['virtual-device-manage']}>
    <h3 className={styles.title}>่ๆ่ฎพๅค็ฎก็</h3>
  </div>
);
export default VirtualDevice;
|
import React, { useState } from 'react';
import Arrow from '@material-ui/icons/SubdirectoryArrowRightOutlined';
import { Link, Tooltip } from '@material-ui/core';
import Modal from 'src/views/manager/ManagerView';
// Floating "Manager" link pinned to the top-left corner; clicking it opens
// the manager dialog (ManagerView rendered as a modal).
const Manager = () => {
  // Controls whether the modal is mounted/open.
  const [openDialog, setOpenDialog] = useState(false);
  const handleDialogClose = () => {
    setOpenDialog(false);
  };
  return (
    <>
      <Tooltip title="Manager">
        <Link
          component="button"
          variant="body2"
          style={{
            position: 'fixed', top: 32, left: 32, textDecoration: 'none'
          }}
          onClick={() => {
            setOpenDialog(true);
          }}
        >
          <Arrow fontSize="medium" />
          {/* NOTE(review): the characters below appear to be mojibake
              (likely a mis-decoded ideographic space before "Manager") —
              confirm intent before changing; runtime text left as-is. */}
          ใ
คManager
        </Link>
      </Tooltip>
      {openDialog && <Modal open={openDialog} onClose={handleDialogClose} />}
    </>
  );
};
export default Manager;
|
def alphabet_positions(text: str) -> str:
    """Return the 1-based alphabet positions of the letters in ``text``.

    Letters are matched case-insensitively; every non-letter character is
    skipped. Example: ``"abZ"`` -> ``"1 2 26"``.

    BUG FIX: the original filtered with ``str.isalpha()``, which also accepts
    non-ASCII letters (e.g. ``'é'``); ``alphabet.find`` then returned -1 and a
    bogus position ``"0"`` was emitted. Only ASCII a-z are counted now.

    :param text: arbitrary input string (may be empty).
    :return: space-separated positions, or ``""`` if no ASCII letters occur.
    """
    positions = [
        # ord arithmetic is O(1) per char, unlike the original O(26) find().
        str(ord(char) - ord('a') + 1)
        for char in text.lower()
        if 'a' <= char <= 'z'
    ]
    return ' '.join(positions)
#!/usr/bin/env bash
# Build wheel packages containing both CLI product and tests. The script doesn't rely on a pre-existing virtual
# environment.
set -ev
##############################################
# clean up and dir search
mkdir -p ./artifacts
echo `git rev-parse --verify HEAD` > ./artifacts/build.sha
mkdir -p ./artifacts/build
mkdir -p ./artifacts/source
mkdir -p ./artifacts/testsrc
output_dir=$(cd artifacts/build && pwd)
sdist_dir=$(cd artifacts/source && pwd)
testsrc_dir=$(cd artifacts/testsrc && pwd)
script_dir=`cd $(dirname $BASH_SOURCE[0]); pwd`
target_profile=${AZURE_CLI_TEST_TARGET_PROFILE:-latest}
if [ "$target_profile" != "latest" ]; then
# example: hybrid-2019-03-01. Python module name can't begin with a digit.
target_profile=hybrid_${target_profile//-/_}
fi
echo Pick up profile: $target_profile
##############################################
# Define colored output func
# Print the given message in bold light-green, then reset the terminal color.
# $1 - message to print
function title {
    # `local` keeps the color codes out of the global environment; the
    # original assigned plain LGREEN/CLEAR globals that leaked to the script.
    local lgreen='\033[1;32m'
    local clear='\033[0m'
    # Quote the expansion so whitespace in the message survives word
    # splitting and no glob expansion occurs (original left it unquoted).
    echo -e "${lgreen}$1${clear}"
}
##############################################
# Update version strings
title 'Determine version'
. $script_dir/version.sh $1
# echo -n $version > ./artifacts/version
##############################################
# build product packages
title 'Build Azure CLI and its command modules'
for setup_file in $(find src -name 'setup.py'); do
pushd $(dirname ${setup_file}) >/dev/null
echo "Building module at $(pwd) ..."
python setup.py -q bdist_wheel -d $output_dir
python setup.py -q sdist -d $sdist_dir
popd >/dev/null
done
##############################################
# copy private packages
# BUG FIX: `[ -z ./privates ]` tests whether the literal string "./privates"
# is empty — always false — so private wheels were never copied. Test for
# the directory's existence instead, matching the comment's stated intent.
if [ -d ./privates ]; then
    cp ./privates/*.whl $output_dir
fi
##############################################
# build test packages
title 'Build Azure CLI tests package'
for test_src in $(find src/azure-cli/azure/cli/command_modules -name tests -type d); do
rel_path=${test_src##src/azure-cli/}
mkdir -p $testsrc_dir/$rel_path
cp -R $test_src/* $testsrc_dir/$rel_path
done
if [ "$target_profile" == "latest" ]; then
# don't pack core tests for profiles other than latest
for test_src in $(find src -name tests | grep -v command_modules); do
rel_path=${test_src##src/}
rel_path=(${rel_path/\// })
rel_path=${rel_path[1]}
mkdir -p $testsrc_dir/$rel_path
cp -R $test_src/* $testsrc_dir/$rel_path
done
fi
cat >$testsrc_dir/setup.py <<EOL
#!/usr/bin/env python
from setuptools import setup
VERSION = "1.0.0.$version"
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
]
DEPENDENCIES = [
'azure-cli',
'azure-cli-testsdk'
]
setup(
name='azure-cli-fulltest',
version=VERSION,
description='Microsoft Azure Command-Line Tools Full Tests',
license='MIT',
author='Microsoft Corporation',
author_email='azpycli@microsoft.com',
url='https://github.com/Azure/azure-cli',
zip_safe=False,
classifiers=CLASSIFIERS,
packages=[
EOL
if [ "$target_profile" == "latest" ]; then
echo " 'azure.cli.core.tests'," >>$testsrc_dir/setup.py
fi
for name in `ls src/azure-cli/azure/cli/command_modules`; do
test_folder=src/azure-cli/azure/cli/command_modules/$name/tests
if [ -d $test_folder ]; then
echo " 'azure.cli.command_modules.$name.tests'," >>$testsrc_dir/setup.py
if [ -d $test_folder/$target_profile ]; then
echo " 'azure.cli.command_modules.$name.tests.$target_profile'," >>$testsrc_dir/setup.py
fi
fi
done
cat >>$testsrc_dir/setup.py <<EOL
],
package_data={'': ['*.bat',
'*.byok',
'*.cer',
'*.js',
'*.json',
'*.kql',
'*.md',
'*.pem',
'*.pfx',
'*.sql',
'*.txt',
'*.txt',
'*.xml',
'*.yml',
'*.zip',
'**/*.bat',
'**/*.byok',
'**/*.cer',
'**/*.ipynb',
'**/*.jar',
'**/*.js',
'**/*.json',
'**/*.kql',
'**/*.md',
'**/*.pem',
'**/*.pfx',
'**/*.sql',
'**/*.txt',
'**/*.txt',
'**/*.xml',
'data/*.whl',
'data/*.yaml',
'data/*.zip',
'recordings/*.yaml']},
install_requires=DEPENDENCIES
)
EOL
cat >>$testsrc_dir/setup.cfg <<EOL
[bdist_wheel]
universal=1
EOL
cat >>$testsrc_dir/README.txt <<EOL
Azure CLI Test Cases
EOL
pushd $testsrc_dir >/dev/null
python setup.py -q bdist_wheel -d $output_dir
python setup.py -q sdist -d $sdist_dir
popd >/dev/null
##############################################
# clear afterwards
rm -rf $testsrc_dir
git checkout src
##############################################
# summary
title 'Results'
echo $(ls $sdist_dir | wc -l) packages created.
|
import styled from "styled-components";
import type { TinyProps } from "./Tiny.types";
import type React from "react";
const TinyMarkup: React.FC<TinyProps> = (props) => {
return <small className={`${props.className} tiny`}>{props.children}</small>;
};
// Styled wrapper around TinyMarkup: themes anchor elements nested inside
// the <small>. Color values come from CSS custom properties defined by the
// app theme (--blue-500 etc.); the template literal is passed verbatim to
// styled-components and is left byte-identical here.
const Tiny: React.FC<TinyProps> = styled(TinyMarkup)`
a {
color: var(--blue-500);
font-weight: var(--font-weight-bold);
}
a:hover {
color: var(--blue-900);
}
`;
export default Tiny;
|
#include <vector>
#include <utility>
// Interval endpoint used in the constraint domains.
struct DataType {
    int value;
    // Constructor and other methods are omitted for brevity
};

// Forward declaration: Scheduler stores a Product* but Product is defined
// below. (BUG FIX: the original used Product in Scheduler's constructor
// before any declaration of it existed — a compile error.)
class Product;

// Holds the variable domains and the product whose constraints narrow them.
class Scheduler {
public:
    Scheduler(const std::vector<std::pair<DataType, DataType>>& domains, const Product* product)
        : domains_(domains), product_(product) {}

    // Read-only view of the domains.
    const std::vector<std::pair<DataType, DataType>>& getDomains() const {
        return domains_;
    }

    // Mutable access so propagation can narrow the domains.
    // (BUG FIX: the original exposed only the const accessor, yet
    // propagate() assigned through it — a compile error.)
    std::vector<std::pair<DataType, DataType>>& getDomains() {
        return domains_;
    }

    // Other methods are omitted for brevity
private:
    std::vector<std::pair<DataType, DataType>> domains_;
    const Product* product_;
};

// Abstract constraint source.
class Product {
public:
    virtual ~Product() = default;  // virtual dtor: deleted through base ptr
    virtual bool propagate(Scheduler& scheduler) = 0;
    // Other methods are omitted for brevity
};

class ConcreteProduct : public Product {
public:
    // Toy propagation: pins the two domain intervals to fixed values and
    // reports success.
    bool propagate(Scheduler& scheduler) override {
        scheduler.getDomains()[0].first.value = 2;
        scheduler.getDomains()[0].second.value = 3;
        scheduler.getDomains()[1].first.value = 4;
        scheduler.getDomains()[1].second.value = 6;
        return true;
    }
    // Other methods are omitted for brevity
};

int main() {
    std::vector<std::pair<DataType, DataType>> domains;
    domains.push_back({DataType{10}, DataType{12}});
    // BUG FIX: propagate() writes to index 1, but the original supplied only
    // one pair — out-of-range access. Provide a second domain.
    domains.push_back({DataType{20}, DataType{24}});

    // Stack-allocate the product: the original `new ConcreteProduct()` was
    // never freed (leak) and its propagate() was never even invoked.
    ConcreteProduct product;
    Scheduler scheduler(domains, &product);
    product.propagate(scheduler);

    bool success = scheduler.getDomains()[0].first.value == 2 &&
                   scheduler.getDomains()[0].second.value == 3 &&
                   scheduler.getDomains()[1].first.value == 4 &&
                   scheduler.getDomains()[1].second.value == 6;
    // Perform the necessary checks using the testing framework
    // Example: BOOST_CHECK(success);
    return success ? 0 : 1;
}
<reponame>michaldivis/MaterialDesignExtensions
// Knockout view model driving the demo site: a navigation drawer, a set of
// documentation pages, and Sammy-based hash routing that loads HTML
// snippets into the content div.
function AppViewModel(contentDivId, drawer) {
    let self = this;

    self.contentDivId = contentDivId;
    self.drawer = drawer;

    // Currently selected top-level navigation item (Knockout observable).
    self.selectedNavigationItem = ko.observable();

    self.navigationItems = [
        new NavigationItem('home', 'Home', 'home', 'snippets/home.html'),
        new NavigationItem('releasenotes', 'Release notes', 'subject', 'snippets/releasenotes.html'),
        new NavigationItem('documentation', 'Documentation', 'help', 'snippets/documentation.html'),
        new NavigationItem('license', 'License', 'receipt', 'snippets/license.html')
    ];

    self.documentationItems = [
        new DocumentationItem('appbar', 'App bar', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/MaterialWindow.png', 'snippets/documentation/appbar.html'),
        new DocumentationItem('autocomplete', 'Autocomplete', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/Autocomplete.png', 'snippets/documentation/autocomplete.html'),
        new DocumentationItem('busyoverlay', 'Busy overlay', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/BusyOverlay.png', 'snippets/documentation/busyoverlay.html'),
        new DocumentationItem('filesystemcontrols', 'File system controls', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/OpenFileControl1.png', 'snippets/documentation/filesystemcontrols.html'),
        new DocumentationItem('gridlist', 'Grid list', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/GridList.png', 'snippets/documentation/gridlist.html'),
        new DocumentationItem('materialwindow', 'Material window', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/MaterialWindow.png', 'snippets/documentation/materialwindow.html'),
        new DocumentationItem('navigation', 'Navigation', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/SideNavigation.png', 'snippets/documentation/navigation.html'),
        new DocumentationItem('oversizednumberspinner', 'Oversized number spinner', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/OversizedNumberSpinner.png', 'snippets/documentation/oversizednumberspinner.html'),
        new DocumentationItem('search', 'Search', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/PersistentSearch.png', 'snippets/documentation/search.html'),
        new DocumentationItem('stepper', 'Stepper', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/HorizontalStepper.png', 'snippets/documentation/stepper.html'),
        new DocumentationItem('tabs', 'Tabs', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/TabControl1.png', 'snippets/documentation/tabs.html'),
        new DocumentationItem('textboxsuggestions', 'Text box suggestions', 'https://raw.githubusercontent.com/spiegelp/MaterialDesignExtensions/master/screenshots/TextBoxSuggestions.png', 'snippets/documentation/textboxsuggestions.html')
    ];

    // Navigation is done purely via the URL hash; Sammy picks it up below.
    self.goToNavigationItem = function (navigationItem) {
        self.goToNavigationItemId(navigationItem.id);
    };

    self.goToNavigationItemId = function (navigationItemId) {
        location.hash = navigationItemId;
    };

    self.goToDocumentationItem = function (documentationItem) {
        self.goToDocumentationItemId(documentationItem.id);
    };

    self.goToDocumentationItemId = function (documentationItemId) {
        location.hash = 'documentation/' + documentationItemId;
    };

    self.setHtmlForNavigationItem = function (navigationItem) {
        self.setHtmlForUrl(navigationItem.contentUrl);
    };

    // Loads the snippet into the content div, then closes the drawer and
    // scrolls back to the top of the page.
    self.setHtmlForUrl = function (url) {
        $('#' + self.contentDivId).load(url, null, function () { self.drawer.open = false; window.scrollTo(0, 0); });
    };

    self.prepareCodeSnippets = function () {
        /*let codeElements = $("code[class='language-markup']");

        for (let i = 0; i < codeElements.length; i++) {
            codeElements[i].innerHTML = codeElements[i].innerHTML.replace('<', '&lt;');
        }*/

        Prism.highlightAll();
    };

    Sammy(function () {
        this.get('#:navigationItemId', function () {
            let navigationItemId = this.params.navigationItemId;

            for (let i = 0; i < self.navigationItems.length; i++) {
                if (navigationItemId === self.navigationItems[i].id) {
                    // BUG FIX: the original overwrote the observable itself
                    // (`self.selectedNavigationItem = item`), destroying it and
                    // silently breaking any bindings subscribed to it. Write
                    // the value through the observable instead.
                    self.selectedNavigationItem(self.navigationItems[i]);
                    self.navigationItems[i].isSelected(true);

                    self.setHtmlForNavigationItem(self.navigationItems[i]);
                } else {
                    self.navigationItems[i].isSelected(false);
                }
            }
        });

        this.get('#:navigationItemId/:documentationItemId', function () {
            let navigationItemId = this.params.navigationItemId;

            if (navigationItemId === 'documentation') {
                let documentationItemId = this.params.documentationItemId;

                for (let i = 0; i < self.documentationItems.length; i++) {
                    if (self.documentationItems[i].id === documentationItemId) {
                        self.setHtmlForUrl(self.documentationItems[i].contentUrl);
                        break;
                    }
                }
            }
        });

        // Empty hash: route to the home page.
        this.get('', function () { this.app.runRoute('get', '#home'); });
    }).run();
}
|
// Realm (MongoDB Atlas App Services) function: toggles location sharing for
// the calling user within one of their groups.
// Returns { success: true } or { error: { message } } — never throws to the
// caller.
exports = async function setShareLocation(groupId, shareLocation) {
  if (!groupId)
    return { error: { message: 'Please provide which group to change.' }};

  if (typeof shareLocation !== 'boolean')
    return { error: { message: 'Please provide whether or not to share location.' }};

  const db = context.services.get('mongodb-atlas').db('findourdevices');
  const realmUser = context.user;

  try {
    // BUG FIX: BSON.ObjectId throws on a malformed id string; the original
    // converted OUTSIDE the try block, so a bad groupId crashed the function
    // instead of returning an { error } payload like every other failure path.
    const userId = BSON.ObjectId(realmUser.id);
    groupId = BSON.ObjectId(groupId);

    // We can use MongoDB's "arrayFilters" to the element that match the condition.
    // db.collection.updateOne(
    //    { <query conditions> },
    //    { <update operator>: { "<array>.$[<identifier>]" : value } },
    //    { arrayFilters: [ { <identifier>: <condition> } ] }
    // )
    await db.collection('User').updateOne(
      { _id: userId },
      { $set: { 'groups.$[group].shareLocation': shareLocation } },
      { arrayFilters: [ { 'group.groupId': groupId } ] }
    );

    return { success: true };
  }
  catch (err) {
    console.error('Error setting shareLocation: ', err.message);
    return { error: { message: err.message || 'There was an error changing location sharing setting.' } };
  }
};
|
<filename>src/Graphics/ViewableEllipsoid.cpp
/*
* Gray: A Ray Tracing-based Monte Carlo Simulator for PET
*
* Copyright (c) 2018, <NAME>, <NAME>, <NAME>, <NAME>
*
* This software is distributed under the terms of the MIT License unless
* otherwise noted. See LICENSE for further details.
*
*/
#include <math.h>
#include "Gray/Graphics/ViewableEllipsoid.h"
#include "Gray/Graphics/ViewableSphere.h"
#include "Gray/VrMath/PolynomialRC.h"
// Returns an intersection if found with distance maxDistance
// viewDir must be a unit vector.
// intersectDistance and visPoint are returned values.
// Returns an intersection if found with distance maxDistance
// viewDir must be a unit vector.
// intersectDistance and visPoint are returned values.
//
// Works in the ellipsoid's scaled frame: AxisA/B/C presumably carry a
// 1/Radius scaling so that points on the surface satisfy
// (p.uA)^2 + (p.uB)^2 + (p.uC)^2 = 1 — TODO confirm against the class
// definition (not visible in this file).
bool ViewableEllipsoid::FindIntersectionNT (
    const VectorR3& viewPos, const VectorR3& viewDir, double maxDistance,
    double *intersectDistance, VisiblePoint& returnedPoint ) const
{
    // Ray origin relative to the ellipsoid center.
    VectorR3 v = viewPos;
    v -= Center;
    // Components of the relative origin (p) and direction (u) along the axes.
    double pdotuA = v^AxisA;
    double pdotuB = v^AxisB;
    double pdotuC = v^AxisC;
    double udotuA = viewDir^AxisA;
    double udotuB = viewDir^AxisB;
    double udotuC = viewDir^AxisC;
    // Quadratic A t^2 + B t + C = 0 for |p + t u|^2 = 1 in the scaled frame.
    // C > 0 means the ray starts outside the ellipsoid.
    double C = Square(pdotuA) + Square(pdotuB) + Square(pdotuC) - 1.0;
    double B = ( pdotuA*udotuA + pdotuB*udotuB + pdotuC*udotuC );
    if ( C>0.0 && B>=0.0 ) {
        return false;       // Pointing away from the ellipsoid
    }
    B += B;                 // Double B to get final factor of 2.
    double A = Square(udotuA) + Square(udotuB) + Square(udotuC);

    double alpha1, alpha2;  // presumably returned with alpha1 <= alpha2 — confirm
    int numRoots = QuadraticSolveRealSafe( A, B, C, &alpha1, &alpha2 );
    if ( numRoots==0 ) {
        return false;
    }
    if ( alpha1>0.0 ) {
        if ( alpha1>=maxDistance ) {
            return false;               // Too far away
        }
        // Found an intersection from outside.
        returnedPoint.SetFrontFace();
        returnedPoint.SetMaterial(GetMaterialInner());
        *intersectDistance = alpha1;
    } else if ( numRoots==2 && alpha2>0.0 && alpha2<maxDistance ) {
        // Found an intersection from inside.
        returnedPoint.SetBackFace();
        returnedPoint.SetMaterial(GetMaterialOuter());
        *intersectDistance = alpha2;
    } else {
        return false;   // Both intersections behind us (should never get here)
    }

    // Calculate intersection position
    v=viewDir;
    v *= (*intersectDistance);
    v += viewPos;
    returnedPoint.SetPosition( v );     // Intersection Position

    v -= Center;                        // Now v is the relative position
    double vdotuA = v^AxisA;
    double vdotuB = v^AxisB;
    double vdotuC = v^AxisC;

    // Outward normal direction at the hit point (gradient of the implicit
    // surface in the scaled frame).
    v = vdotuA*AxisA + vdotuB*AxisB + vdotuC*AxisC;
    v.Normalize();
    // NOTE(review): the normalized normal in v is computed but never stored
    // on returnedPoint (no SetNormal call) — confirm whether this is a bug
    // or the normal is recomputed elsewhere before shading.
    return true;
}
// Computes the extent of the ellipsoid along direction u: the planes
// u.x = *minDot and u.x = *maxDot tightly bound the ellipsoid.
void ViewableEllipsoid::CalcBoundingPlanes( const VectorR3& u, double *minDot, double *maxDot ) const
{
    // Projection of the center onto u; deltaDot is the half-width of the
    // ellipsoid's projection.
    // NOTE(review): the Radius^2 factors suggest AxisA/B/C carry a 1/Radius
    // scaling (so Radius^2 * (u^Axis) = Radius * (u . unit-axis)) — confirm
    // against the class definition.
    double centerDot = (u^Center);
    double deltaDot = sqrt(Square(RadiusA*RadiusA*(u^AxisA))
                            +Square(RadiusB*RadiusB*(u^AxisB))
                            +Square(RadiusC*RadiusC*(u^AxisC)));
    *maxDot = centerDot + deltaDot;
    *minDot = centerDot - deltaDot;
}
|
"""Plot mock training metrics (loss/accuracy per epoch) and save to PNG."""
import matplotlib.pyplot as plt
import numpy as np

# Seed the generator so the "mock" metrics are reproducible across runs
# (the original produced different curves on every execution).
np.random.seed(0)

# Generate mock data for loss and accuracy metrics over multiple epochs.
epochs = 10
loss = np.random.rand(epochs)
accuracy = np.random.rand(epochs)

# Plot both curves against 1-based epoch numbers.
epoch_range = range(1, epochs + 1)
plt.plot(epoch_range, loss, label='Loss')
plt.plot(epoch_range, accuracy, label='Accuracy')

# Set labels, title, and a legend distinguishing the two curves.
plt.xlabel("Epoch #")
plt.ylabel("Loss/Accuracy")
plt.title("Neural Network Training Metrics")
plt.legend()

# Save before plt.show(): show() may clear the active figure, which would
# yield an empty image if savefig() ran afterwards.
output_file = "training_metrics_plot.png"
plt.savefig(output_file)

# Display the plot (optional; no-op in headless environments)
plt.show()
<gh_stars>0
import { css, CSSResultGroup, html, PropertyValues, TemplateResult } from "lit";
import { customElement, property, state } from "lit/decorators";
import memoizeOne from "memoize-one";
import { mainWindow } from "../../../homeassistant-frontend/src/common/dom/get_main_window";
import { computeRTL } from "../../../homeassistant-frontend/src/common/util/compute_rtl";
import "../../../homeassistant-frontend/src/components/ha-alert";
import "../../../homeassistant-frontend/src/components/ha-circular-progress";
import "../../../homeassistant-frontend/src/components/ha-form/ha-form";
import { HaFormSchema } from "../../../homeassistant-frontend/src/components/ha-form/types";
import { showConfirmationDialog } from "../../../homeassistant-frontend/src/dialogs/generic/show-dialog-box";
import { Repository } from "../../data/common";
import {
getRepositories,
repositoryInstall,
repositoryInstallVersion,
repositorySetVersion,
repositoryToggleBeta,
repositoryUpdate,
} from "../../data/websocket";
import { HacsStyles } from "../../styles/hacs-common-style";
import { generateLovelaceURL } from "../../tools/added-to-lovelace";
import { updateLovelaceResources } from "../../tools/update-lovelace-resources";
import "../hacs-link";
import "./hacs-dialog";
import { HacsDialogBase } from "./hacs-dialog-base";
@customElement("hacs-download-dialog")
// NOTE(review): class name misspells "Download"; kept because the exported
// name is part of the public interface.
export class HacsDonwloadDialog extends HacsDialogBase {
  /** Repository id to download, provided by the dialog opener. */
  @property() public repository?: string;

  /** True while a backend call is in flight; disables the form and button. */
  @state() private _toggle = true;

  /** True while the install itself is running; shows the progress spinner. */
  @state() private _installing = false;

  /** Last error received on the "hacs/error" event stream, if any. */
  @state() private _error?: any;

  /** Resolved repository entry for `this.repository`. */
  @state() public _repository?: Repository;

  /** Form state: beta toggle and the version/branch selected for download. */
  @state() private _downloadRepositoryData = { beta: false, version: "" };

  shouldUpdate(changedProperties: PropertyValues) {
    changedProperties.forEach((_oldValue, propName) => {
      if (propName === "hass") {
        this.sidebarDocked = window.localStorage.getItem("dockedSidebar") === '"docked"';
      }
      if (propName === "repositories") {
        this._repository = this._getRepository(this.hacs.repositories, this.repository!);
      }
    });
    return (
      changedProperties.has("sidebarDocked") ||
      changedProperties.has("narrow") ||
      changedProperties.has("active") ||
      changedProperties.has("_toggle") ||
      changedProperties.has("_error") ||
      changedProperties.has("_repository") ||
      changedProperties.has("_downloadRepositoryData") ||
      changedProperties.has("_installing")
    );
  }

  /** Resolve a repository id to its Repository entry (memoized). */
  private _getRepository = memoizeOne((repositories: Repository[], repository: string) =>
    repositories?.find((repo) => repo.id === repository)
  );

  /** Local install path; themes install a single file inside their folder. */
  private _getInstallPath = memoizeOne((repository: Repository) => {
    let path: string = repository.local_path;
    if (repository.category === "theme") {
      path = `${path}/${repository.file_name}`;
    }
    return path;
  });

  protected async firstUpdated() {
    this._repository = this._getRepository(this.hacs.repositories, this.repository!);
    // Ensure extended info (releases etc.) is loaded before showing the form.
    if (!this._repository?.updated_info) {
      await repositoryUpdate(this.hass, this._repository!.id);
      const repositories = await getRepositories(this.hass);
      this.dispatchEvent(
        new CustomEvent("update-hacs", {
          detail: { repositories },
          bubbles: true,
          composed: true,
        })
      );
      this._repository = this._getRepository(repositories, this.repository!);
    }
    this._toggle = false;
    this.hass.connection.subscribeEvents((msg) => (this._error = (msg as any).data), "hacs/error");
    // Assign a NEW object instead of mutating the existing one: Lit's default
    // change detection is reference equality, so field mutation of a @state
    // object would not trigger a re-render.
    this._downloadRepositoryData = {
      beta: this._repository!.beta,
      version:
        this._repository?.version_or_commit === "version" ? this._repository.releases[0] : "",
    };
  }

  protected render(): TemplateResult | void {
    if (!this.active || !this._repository) return html``;
    const installPath = this._getInstallPath(this._repository);
    // Schema for the download form: beta toggle plus a version/branch select
    // (only offered for version-tracked repositories).
    const downloadRepositorySchema: HaFormSchema[] = [
      {
        type: "boolean",
        name: "beta",
      },
      {
        type: "select",
        name: "version",
        optional: true,
        //@ts-ignore
        options:
          this._repository.version_or_commit === "version"
            ? this._repository.releases
                .map((version) => [version, version])
                .concat(
                  this._repository.full_name === "hacs/integration" ||
                    this._repository.hide_default_branch
                    ? []
                    : [[this._repository.default_branch, this._repository.default_branch]]
                )
            : [],
      },
    ];
    return html`
      <hacs-dialog
        .active=${this.active}
        .narrow=${this.narrow}
        .hass=${this.hass}
        .secondary=${this.secondary}
        .title=${this._repository.name}
      >
        <div class="content">
          ${this._repository.version_or_commit === "version"
            ? html`
                <ha-form
                  .disabled=${this._toggle}
                  ?narrow=${this.narrow}
                  .data=${this._downloadRepositoryData}
                  .schema=${downloadRepositorySchema}
                  .computeLabel=${(schema: HaFormSchema) =>
                    schema.name === "beta"
                      ? this.hacs.localize("dialog_download.show_beta")
                      : this.hacs.localize("dialog_download.select_version")}
                  @value-changed=${this._valueChanged}
                >
                </ha-form>
              `
            : ""}
          ${!this._repository.can_install
            ? html`<ha-alert alert-type="error" .rtl=${computeRTL(this.hass)}>
                ${this.hacs.localize("confirm.home_assistant_version_not_correct", {
                  haversion: this.hass.config.version,
                  minversion: this._repository.homeassistant,
                })}
              </ha-alert>`
            : ""}
          <div class="note">
            ${this.hacs.localize("dialog_download.note_downloaded", {
              location: html`<code>'${installPath}'</code>`,
            })}
            ${this._repository.category === "plugin" && this.hacs.status.lovelace_mode !== "storage"
              ? html`
                  <p>${this.hacs.localize(`dialog_download.lovelace_instruction`)}</p>
                  <pre>
url: ${generateLovelaceURL({ repository: this._repository, skipTag: true })}
type: module
                </pre
                  >
                `
              : ""}
            ${this._repository.category === "integration"
              ? html`<p>${this.hacs.localize("dialog_download.restart")}</p>`
              : ""}
          </div>
          ${this._error?.message
            ? html`<ha-alert alert-type="error" .rtl=${computeRTL(this.hass)}>
                ${this._error.message}
              </ha-alert>`
            : ""}
        </div>
        <mwc-button
          raised
          slot="primaryaction"
          ?disabled=${!this._repository.can_install ||
          this._toggle ||
          (this._repository.version_or_commit === "version"
            ? !this._downloadRepositoryData.version
            : false)}
          @click=${this._installRepository}
        >
          ${this._installing
            ? html`<ha-circular-progress active size="small"></ha-circular-progress>`
            : this.hacs.localize("common.download")}
        </mwc-button>
        <hacs-link slot="secondaryaction" .url="https://github.com/${this._repository.full_name}">
          <mwc-button> ${this.hacs.localize("common.repository")} </mwc-button>
        </hacs-link>
      </hacs-dialog>
    `;
  }

  /**
   * Handle form changes: persist the beta toggle and/or selected version to
   * the backend, refresh repositories when needed, then store the new form
   * state.
   */
  private async _valueChanged(ev) {
    let updateNeeded = false;
    if (this._downloadRepositoryData.beta !== ev.detail.value.beta) {
      updateNeeded = true;
      this._toggle = true;
      await repositoryToggleBeta(this.hass, this.repository!);
    }
    if (ev.detail.value.version) {
      updateNeeded = true;
      this._toggle = true;
      await repositorySetVersion(this.hass, this.repository!, ev.detail.value.version);
    }
    if (updateNeeded) {
      const repositories = await getRepositories(this.hass);
      this.dispatchEvent(
        new CustomEvent("update-hacs", {
          detail: { repositories },
          bubbles: true,
          composed: true,
        })
      );
      this._repository = this._getRepository(repositories, this.repository!);
      this._toggle = false;
    }
    this._downloadRepositoryData = ev.detail.value;
  }

  /**
   * Download the repository at the selected version (or HEAD for
   * commit-tracked repositories), update Lovelace resources for plugins in
   * storage mode, close the dialog(s), and prompt for a reload when needed.
   */
  private async _installRepository(): Promise<void> {
    // Guard BEFORE flipping the progress flag so an early return cannot
    // leave the spinner stuck on.
    if (!this._repository) {
      return;
    }
    this._installing = true;
    try {
      const selectedVersion =
        this._downloadRepositoryData.version ||
        this._repository.available_version ||
        this._repository.default_branch;
      if (this._repository.version_or_commit !== "commit") {
        await repositoryInstallVersion(this.hass, this._repository.id, selectedVersion);
      } else {
        await repositoryInstall(this.hass, this._repository.id);
      }
      this.hacs.log.debug(this._repository.category, "_installRepository");
      this.hacs.log.debug(this.hacs.status.lovelace_mode, "_installRepository");
      if (this._repository.category === "plugin" && this.hacs.status.lovelace_mode === "storage") {
        await updateLovelaceResources(this.hass, this._repository, selectedVersion);
      }
    } finally {
      // Always clear the progress state, even if an install call throws.
      this._installing = false;
    }
    this.dispatchEvent(
      new Event("hacs-secondary-dialog-closed", {
        bubbles: true,
        composed: true,
      })
    );
    this.dispatchEvent(
      new Event("hacs-dialog-closed", {
        bubbles: true,
        composed: true,
      })
    );
    if (this._repository.category === "plugin" && this.hacs.status.lovelace_mode === "storage") {
      showConfirmationDialog(this, {
        title: this.hacs.localize!("common.reload"),
        text: html`${this.hacs.localize!("dialog.reload.description")}</br>${this.hacs.localize!(
          "dialog.reload.confirm"
        )}`,
        dismissText: this.hacs.localize!("common.cancel"),
        confirmText: this.hacs.localize!("common.reload"),
        confirm: () => {
          // eslint-disable-next-line
          mainWindow.location.href = mainWindow.location.href;
        },
      });
    }
  }

  static get styles(): CSSResultGroup {
    return [
      HacsStyles,
      css`
        .note {
          margin-top: 12px;
        }
        .lovelace {
          margin-top: 8px;
        }
        pre {
          white-space: pre-line;
          user-select: all;
        }
      `,
    ];
  }
}
|
<!-- Keyword/tag search form: submits the query as parameter "q" via GET to /search. -->
<form action="/search" method="GET">
    <input type="text" name="q" placeholder="Search by keyword or tag..."/>
    <input type="submit" value="Search"/>
</form>
#!/bin/bash
# Regenerate metadata definitions and clean up imports in the output.
#
# Usage: <script> <generator-arg> <output-path-relative-to-repo-root>
#
# Abort on any command failure, unset variable, or pipeline error so a failed
# `go run` never lets a stale file reach goimports.
set -euo pipefail

# Absolute directory containing this script, regardless of invocation CWD.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Run the generator: reads metadata.yaml, writes generated code to the target.
go run "${DIR}/main.go" "${DIR}/gen-metadata-defs" "${1}" "${DIR}/metadata.yaml" "${DIR}/../../${2}"

# Normalize imports in the generated file (istio.io imports grouped as local).
goimports -w -local istio.io "${DIR}/../../${2}"
|
<gh_stars>1-10
const { Command, MayfiEmbed, Constants } = require('../../')
let moment = require("moment")
module.exports = class Guildinfo extends Command {
constructor (client) {
super({
name: 'guildinfo',
aliases: ['si', 'serverinfo'],
category: 'utility',
parameters: [{
type: 'guild', full: true, required: false
}]
}, client)
}
async run ({ channel, t, language, author }, guild = channel.guild) {
moment.locale(language)
let embed = new MayfiEmbed(author)
.setAuthor(guild.name, guild.iconURL)
.addField("ID", guild.id, true)
.addField(t("commands:guildinfo.owner"), `${guild.owner.user.tag ? guild.owner.user.tag : t("commands:guildinfo.invalidOwner")}`, true)
.addField(t("commands:guildinfo.region"), t(`regions:${guild.region}`))
.addField(t("commands:guildinfo.channels"), guild.channels.size)
.addField(t("commands:guildinfo.roles"), guild.roles.size)
.addField(t("commands:guildinfo.joinedAt"), `${moment(guild.joinedTimestamp).format('LLL')}\n(${moment(guild.joinedTimestamp).fromNow()})`, true)
.addField(t("commands:guildinfo.createdAt"), `${moment(guild.createdAt).format('LLL')}\n(${moment(guild.createdAt).fromNow()})`, true)
.addField(t('commands:guildinfo.members', { count: guild.members.size }), [
`${Constants.streaming} ${t('commands:guildinfo.streaming', { count: guild.members.filter(m => m.game === 'streaming').size })}`,
`${Constants.online} Online: **${guild.members.filter(m => m.presence.status === 'online').size}**`,
`${Constants.idle} ${t('commands:guildinfo.idle', { count: guild.members.filter(m => m.presence.status === 'idle').size })}`,
`${Constants.dnd} ${t('commands:guildinfo.dnd', { count: guild.members.filter(m => m.presence.status === 'dnd').size })}`,
`${Constants.offline} Offline: **${guild.members.filter(m => m.presence.status === 'offline').size}**\n`,
t('commands:guildinfo.users', { count: guild.members.filter(m => !m.user.bot).size }),
t('commands:guildinfo.bots', { count: guild.members.filter(m => m.user.bot).size })
].join('\n'))
.setThumbnail(guild.iconURL)
channel.send({embed})
}
}
|
# models.py
from django.contrib.auth.models import User
from django.db import models
class EmailTemplate(models.Model):
    """A reusable email template: a short name plus subject and body text."""
    name = models.CharField(max_length=100)
    subject = models.CharField(max_length=200)
    body = models.TextField()
    def __str__(self):
        # Shown in the admin and anywhere the template renders as text.
        return self.name
class SentEmail(models.Model):
    """One batch send of an EmailTemplate to a set of users."""
    # Deleting the template also deletes its send records.
    template = models.ForeignKey(EmailTemplate, on_delete=models.CASCADE)
    # Recipients of this batch.
    users = models.ManyToManyField(User)
    created_at = models.DateTimeField(auto_now_add=True)
    # Delivery lifecycle of the batch as a whole.
    status = models.CharField(max_length=20, choices=[('pending', 'Pending'), ('sent', 'Sent'), ('failed', 'Failed')])
    def total_emails(self):
        """Number of recipients; shown as an admin list column."""
        return self.users.count()
    # Column header used by the admin for the total_emails() callable.
    total_emails.short_description = 'Total Emails'
    def __str__(self):
        return f"{self.template.name} - {self.created_at}"
# admin.py
from django.contrib import admin
from .models import EmailTemplate, SentEmail
@admin.register(EmailTemplate)
class EmailTemplateAdmin(admin.ModelAdmin):
    """Admin for EmailTemplate; lists name and subject columns."""
    list_display = ('name', 'subject')
@admin.register(SentEmail)
class SentEmailAdmin(admin.ModelAdmin):
    """Admin for SentEmail batches with a dual-pane recipient picker."""
    filter_horizontal = ('users',)
    # 'subject' is not a field or attribute of SentEmail (it lives on
    # EmailTemplate), so including it in list_display fails Django's system
    # check admin.E108 at startup; show the related template instead.
    list_display = ('template', 'created_at', 'total_emails', 'status')
<filename>src/utils/xrLC_Light/net_execution_factory_register.cpp
#include "stdafx.h"
#include "net_execution_factory.h"
#include "net_execution_lightmaps.h"
namespace lc_net{
// Concrete network-execution class for a given compile-time execution type:
// derives from tnet_execution_base<etype> and forwards every virtual to a
// contained net_execution_impl object.
template <	execution_types etype > class tnet_execution :
	public tnet_execution_base< etype >
{
public:
			tnet_execution	( u32 id ): tnet_execution_base< etype >(id){}
private:
	// Implementation object that performs the actual send/receive/execute work.
	net_execution_impl execution_impl;
	virtual	net_execution_impl	&implementation	( )
	{
		return execution_impl;
	};
	virtual	void	send_task		( IGridUser& user, IGenericStream* outStream, u32 id )
	{
		// Base class writes the task header first, then the implementation
		// appends its payload to the same stream.
		tnet_execution_base< etype >::send_task( user, outStream, id );
		execution_impl.send_task( user, outStream, id );
	};
	virtual LPCSTR	data_files		()
	{
		return execution_impl.data_files();
	}
	virtual void	receive_result	( IGenericStream* outStream )
	{
		execution_impl.receive_result(outStream);
	};
	virtual bool	receive_task	( IAgent* agent, DWORD sessionId, IGenericStream* inStream )
	{
		return execution_impl.receive_task( agent, sessionId, inStream );
	};
	virtual void	send_result		( IGenericStream* outStream )
	{
		execution_impl.send_result( outStream );
	};
	virtual bool	execute			()
	{
		return execution_impl.execute ();
	};
};
//template < execution_types etype >
//tnet_execution_base< etype > *factory::create( const execution_types etype )
//{
//	return xr_new< tnet_execution< etype > > ();
//}
// Factory creator for one execution class: creates instances by net id and
// reports the class_type used as the registry key.
template<typename execution >
class execution_type_creator:
	public base_execution_type_creator
{
	//static const u32 class_type = execution::class_type;
	virtual net_execution* create( u32 _net_id )
	{
		return xr_new<execution>(_net_id);
	}
	virtual u32 type() { return execution::class_type; }
};
// Register a creator for `execution` with the global execution_factory.
template<typename execution>
static void register_type()
{
	execution_factory.register_type( xr_new< execution_type_creator<execution> >() );
}
// Compile-time recursion: instantiating it<i> registers the execution type
// for i and, via the `ni` member, every following type up to et_last.
template < execution_types i >
struct it
{
	static const execution_types et = (execution_types)(i);
	static const execution_types next_et = (execution_types)(i+1);
	typedef it<next_et> next;
	next ni;
	it(){ register_type< tnet_execution< et > >(); }
} ;
// Recursion terminator: et_last registers nothing.
template<> struct it<et_last>
{};
// Register creators for all execution types from et_lightmaps through
// et_last-1 (driven by the it<> template chain above).
void factory::register_all( )
{
	vec_types.resize( et_last, 0 );
	it< et_lightmaps > i;
	//static const execution_types et = it<et_lightmaps>::et ;
	//lc_net::register_type< tnet_execution< et > >( );
}
};
#! /bin/sh
## DO NOT EDIT - This file generated from ./build-aux/ltmain.in
## by inline-source v2014-01-03.01
# libtool (GNU libtool) 2.4.2.444.28-053d
# Provide generalized library-building support services.
# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
# Copyright (C) 1996-2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# GNU Libtool is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# As a special exception to the GNU General Public License,
# if you distribute this file as part of a program or library that
# is built using GNU Libtool, you may include this file under the
# same distribution terms that you use for the rest of that program.
#
# GNU Libtool is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
PROGRAM=libtool
PACKAGE=libtool
VERSION=2.4.2.444.28-053d
package_revision=2.4.2.444.28
## ------ ##
## Usage. ##
## ------ ##
# Run './libtool --help' for help with using this script from the
# command line.
## ------------------------------- ##
## User overridable command paths. ##
## ------------------------------- ##
# After configure completes, it has a better idea of some of the
# shell tools we need than the defaults used by the functions shared
# with bootstrap, so set those here where they can still be over-
# ridden by the user, but otherwise take precedence.
: ${AUTOCONF="autoconf"}
: ${AUTOMAKE="automake"}
## -------------------------- ##
## Source external libraries. ##
## -------------------------- ##
# Much of our low-level functionality needs to be sourced from external
# libraries, which are installed to $pkgauxdir.
# Set a version string for this script.
scriptversion=2014-02-10.13; # UTC
# General shell script boiler plate, and helper functions.
# Written by Gary V. Vaughan, 2004
# Copyright (C) 2004-2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# As a special exception to the GNU General Public License, if you distribute
# this file as part of a program or library that is built using GNU Libtool,
# you may include this file under the same distribution terms that you use
# for the rest of that program.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Please report bugs or propose patches to gary@gnu.org.
## ------ ##
## Usage. ##
## ------ ##
# Evaluate this file near the top of your script to gain access to
# the functions and variables defined here:
#
# . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh
#
# If you need to override any of the default environment variable
# settings, do that before evaluating this file.
## -------------------- ##
## Shell normalisation. ##
## -------------------- ##
# Some shells need a little help to be as Bourne compatible as possible.
# Before doing anything else, make sure all that help has been provided!
DUALCASE=1; export DUALCASE # for MKS sh
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
else
case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac
fi
# NLS nuisances: We save the old values in case they are required later.
_G_user_locale=
_G_safe_locale=
for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
do
eval "if test set = \"\${$_G_var+set}\"; then
save_$_G_var=\$$_G_var
$_G_var=C
export $_G_var
_G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\"
_G_safe_locale=\"$_G_var=C; \$_G_safe_locale\"
fi"
done
# CDPATH.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# Make sure IFS has a sensible default
sp=' '
nl='
'
IFS="$sp $nl"
# There are apparently some retarded systems that use ';' as a PATH separator!
if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
PATH_SEPARATOR=';'
}
fi
## ------------------------- ##
## Locate command utilities. ##
## ------------------------- ##
# func_executable_p FILE
# ----------------------
# Check that FILE is an executable regular file.
func_executable_p ()
{
test -f "$1" && test -x "$1"
}
# func_path_progs PROGS_LIST CHECK_FUNC [PATH]
# --------------------------------------------
# Search for either a program that responds to --version with output
# containing "GNU", or else returned by CHECK_FUNC otherwise, by
# trying all the directories in PATH with each of the elements of
# PROGS_LIST.
#
# CHECK_FUNC should accept the path to a candidate program, and
# set $func_check_prog_result if it truncates its output less than
# $_G_path_prog_max characters.
func_path_progs ()
{
_G_progs_list=$1
_G_check_func=$2
_G_PATH=${3-"$PATH"}
_G_path_prog_max=0
_G_path_prog_found=false
_G_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for _G_dir in $_G_PATH; do
IFS=$_G_save_IFS
test -z "$_G_dir" && _G_dir=.
for _G_prog_name in $_G_progs_list; do
for _exeext in '' .EXE; do
_G_path_prog=$_G_dir/$_G_prog_name$_exeext
func_executable_p "$_G_path_prog" || continue
case `"$_G_path_prog" --version 2>&1` in
*GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;;
*) $_G_check_func $_G_path_prog
func_path_progs_result=$func_check_prog_result
;;
esac
$_G_path_prog_found && break 3
done
done
done
IFS=$_G_save_IFS
test -z "$func_path_progs_result" && {
echo "no acceptable sed could be found in \$PATH" >&2
exit 1
}
}
# We want to be able to use the functions in this file before configure
# has figured out where the best binaries are kept, which means we have
# to search for them ourselves - except when the results are already set
# where we skip the searches.
# Unless the user overrides by setting SED, search the path for either GNU
# sed, or the sed that truncates its output the least.
test -z "$SED" && {
_G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
for _G_i in 1 2 3 4 5 6 7; do
_G_sed_script=$_G_sed_script$nl$_G_sed_script
done
echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed
_G_sed_script=
func_check_prog_sed ()
{
_G_path_prog=$1
_G_count=0
printf 0123456789 >conftest.in
while :
do
cat conftest.in conftest.in >conftest.tmp
mv conftest.tmp conftest.in
cp conftest.in conftest.nl
echo '' >> conftest.nl
"$_G_path_prog" -f conftest.sed <conftest.nl >conftest.out 2>/dev/null || break
diff conftest.out conftest.nl >/dev/null 2>&1 || break
_G_count=`expr $_G_count + 1`
if test "$_G_count" -gt "$_G_path_prog_max"; then
# Best one so far, save it but keep looking for a better one
func_check_prog_result=$_G_path_prog
_G_path_prog_max=$_G_count
fi
# 10*(2^10) chars as input seems more than enough
test 10 -lt "$_G_count" && break
done
rm -f conftest.in conftest.tmp conftest.nl conftest.out
}
func_path_progs "sed gsed" func_check_prog_sed $PATH:/usr/xpg4/bin
rm -f conftest.sed
SED=$func_path_progs_result
}
# Unless the user overrides by setting GREP, search the path for either GNU
# grep, or the grep that truncates its output the least.
test -z "$GREP" && {
func_check_prog_grep ()
{
_G_path_prog=$1
_G_count=0
_G_path_prog_max=0
printf 0123456789 >conftest.in
while :
do
cat conftest.in conftest.in >conftest.tmp
mv conftest.tmp conftest.in
cp conftest.in conftest.nl
echo 'GREP' >> conftest.nl
"$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' <conftest.nl >conftest.out 2>/dev/null || break
diff conftest.out conftest.nl >/dev/null 2>&1 || break
_G_count=`expr $_G_count + 1`
if test "$_G_count" -gt "$_G_path_prog_max"; then
# Best one so far, save it but keep looking for a better one
func_check_prog_result=$_G_path_prog
_G_path_prog_max=$_G_count
fi
# 10*(2^10) chars as input seems more than enough
test 10 -lt "$_G_count" && break
done
rm -f conftest.in conftest.tmp conftest.nl conftest.out
}
func_path_progs "grep ggrep" func_check_prog_grep $PATH:/usr/xpg4/bin
GREP=$func_path_progs_result
}
## ------------------------------- ##
## User overridable command paths. ##
## ------------------------------- ##
# All uppercase variable names are used for environment variables. These
# variables can be overridden by the user before calling a script that
# uses them if a suitable command of that name is not already available
# in the command search PATH.
: ${CP="cp -f"}
: ${ECHO="printf %s\n"}
: ${EGREP="$GREP -E"}
: ${FGREP="$GREP -F"}
: ${LN_S="ln -s"}
: ${MAKE="make"}
: ${MKDIR="mkdir"}
: ${MV="mv -f"}
: ${RM="rm -f"}
: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
## -------------------- ##
## Useful sed snippets. ##
## -------------------- ##
sed_dirname='s|/[^/]*$||'
sed_basename='s|^.*/||'
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
sed_quote_subst='s|\([`"$\\]\)|\\\1|g'
# Same as above, but do not quote variable references.
sed_double_quote_subst='s/\(["`\\]\)/\\\1/g'
# Sed substitution that turns a string into a regex matching for the
# string literally.
sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g'
# Sed substitution that converts a w32 file name or path
# that contains forward slashes, into one that contains
# (escaped) backslashes. A very naive implementation.
sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
# Re-'\' parameter expansions in output of sed_double_quote_subst that
# were '\'-ed in input to the same. If an odd number of '\' preceded a
# '$' in input to sed_double_quote_subst, that '$' was protected from
# expansion. Since each input '\' is now two '\'s, look for any number
# of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'.
_G_bs='\\'
_G_bs2='\\\\'
_G_bs4='\\\\\\\\'
_G_dollar='\$'
sed_double_backslash="\
s/$_G_bs4/&\\
/g
s/^$_G_bs2$_G_dollar/$_G_bs&/
s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g
s/\n//g"
## ----------------- ##
## Global variables. ##
## ----------------- ##
# Except for the global variables explicitly listed below, the following
# functions in the '^func_' namespace, and the '^require_' namespace
# variables initialised in the 'Resource management' section, sourcing
# this file will not pollute your global namespace with anything
# else. There's no portable way to scope variables in Bourne shell
# though, so actually running these functions will sometimes place
# results into a variable named after the function, and often use
# temporary variables in the '^_G_' namespace. If you are careful to
# avoid using those namespaces casually in your sourcing script, things
# should continue to work as you expect. And, of course, you can freely
# overwrite any of the functions or variables defined here before
# calling anything to customize them.
EXIT_SUCCESS=0
EXIT_FAILURE=1
EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing.
EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake.
# Allow overriding, eg assuming that you follow the convention of
# putting '$debug_cmd' at the start of all your functions, you can get
# bash to show function call trace with:
#
# debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name
debug_cmd=${debug_cmd-":"}
exit_cmd=:
# By convention, finish your script with:
#
# exit $exit_status
#
# so that you can set exit_status to non-zero if you want to indicate
# something went wrong during execution without actually bailing out at
# the point of failure.
exit_status=$EXIT_SUCCESS
# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
# is ksh but when the shell is invoked as "sh" and the current value of
# the _XPG environment variable is not equal to 1 (one), the special
# positional parameter $0, within a function call, is the name of the
# function.
progpath=$0
# The name of this program.
progname=`$ECHO "$progpath" |$SED "$sed_basename"`
# Make sure we have an absolute progpath for reexecution:
case $progpath in
[\\/]*|[A-Za-z]:\\*) ;;
*[\\/]*)
progdir=`$ECHO "$progpath" |$SED "$sed_dirname"`
progdir=`cd "$progdir" && pwd`
progpath=$progdir/$progname
;;
*)
_G_IFS=$IFS
IFS=${PATH_SEPARATOR-:}
for progdir in $PATH; do
IFS=$_G_IFS
test -x "$progdir/$progname" && break
done
IFS=$_G_IFS
test -n "$progdir" || progdir=`pwd`
progpath=$progdir/$progname
;;
esac
## ----------------- ##
## Standard options. ##
## ----------------- ##
# The following options affect the operation of the functions defined
# below, and should be set appropriately depending on run-time para-
# meters passed on the command line.
opt_dry_run=false
opt_quiet=false
opt_verbose=false
# Categories 'all' and 'none' are always available. Append any others
# you will pass as the first argument to func_warning from your own
# code.
warning_categories=
# By default, display warnings according to 'opt_warning_types'. Set
# 'warning_func' to ':' to elide all warnings, or func_fatal_error to
# treat the next displayed warning as a fatal error.
warning_func=func_warn_and_continue
# Set to 'all' to display all warnings, 'none' to suppress all
# warnings, or a space delimited list of some subset of
# 'warning_categories' to display only the listed warnings.
opt_warning_types=all
## -------------------- ##
## Resource management. ##
## -------------------- ##
# This section contains definitions for functions that each ensure a
# particular resource (a file, or a non-empty configuration variable for
# example) is available, and if appropriate to extract default values
# from pertinent package files. Call them using their associated
# 'require_*' variable to ensure that they are executed, at most, once.
#
# It's entirely deliberate that calling these functions can set
# variables that don't obey the namespace limitations obeyed by the rest
# of this file, in order that that they be as useful as possible to
# callers.
# require_term_colors
# -------------------
# Allow display of bold text on terminals that support it.
require_term_colors=func_require_term_colors
func_require_term_colors ()
{
$debug_cmd
test -t 1 && {
# COLORTERM and USE_ANSI_COLORS environment variables take
# precedence, because most terminfo databases neglect to describe
# whether color sequences are supported.
test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"}
if test 1 = "$USE_ANSI_COLORS"; then
# Standard ANSI escape sequences
tc_reset='[0m'
tc_bold='[1m'; tc_standout='[7m'
tc_red='[31m'; tc_green='[32m'
tc_blue='[34m'; tc_cyan='[36m'
else
# Otherwise trust the terminfo database after all.
test -n "`tput sgr0 2>/dev/null`" && {
tc_reset=`tput sgr0`
test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold`
tc_standout=$tc_bold
test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso`
test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1`
test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2`
test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4`
test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5`
}
fi
}
require_term_colors=:
}
## ----------------- ##
## Function library. ##
## ----------------- ##
# This section contains a variety of useful functions to call in your
# scripts. Take note of the portable wrappers for features provided by
# some modern shells, which will fall back to slower equivalents on
# less featureful shells.
# func_append VAR VALUE
# ---------------------
# Append VALUE onto the existing contents of VAR.
# We should try to minimise forks, especially on Windows where they are
# unreasonably slow, so skip the feature probes when bash or zsh are
# being used:
if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then
: ${_G_HAVE_ARITH_OP="yes"}
: ${_G_HAVE_XSI_OPS="yes"}
# The += operator was introduced in bash 3.1
case $BASH_VERSION in
[12].* | 3.0 | 3.0*) ;;
*)
: ${_G_HAVE_PLUSEQ_OP="yes"}
;;
esac
fi
# _G_HAVE_PLUSEQ_OP
# Can be empty, in which case the shell is probed, "yes" if += is
# useable or anything else if it does not work.
test -z "$_G_HAVE_PLUSEQ_OP" \
&& (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \
&& _G_HAVE_PLUSEQ_OP=yes
if test yes = "$_G_HAVE_PLUSEQ_OP"
then
# This is an XSI compatible shell, allowing a faster implementation...
eval 'func_append ()
{
$debug_cmd
eval "$1+=\$2"
}'
else
# ...otherwise fall back to using expr, which is often a shell builtin.
func_append ()
{
$debug_cmd
eval "$1=\$$1\$2"
}
fi
# func_append_quoted VAR VALUE
# ----------------------------
# Quote VALUE and append to the end of shell variable VAR, separated
# by a space.
if test yes = "$_G_HAVE_PLUSEQ_OP"; then
eval 'func_append_quoted ()
{
$debug_cmd
func_quote_for_eval "$2"
eval "$1+=\\ \$func_quote_for_eval_result"
}'
else
func_append_quoted ()
{
$debug_cmd
func_quote_for_eval "$2"
eval "$1=\$$1\\ \$func_quote_for_eval_result"
}
fi
# func_append_uniq VAR VALUE
# --------------------------
# Append unique VALUE onto the existing contents of VAR, assuming
# entries are delimited by the first character of VALUE. For example:
#
# func_append_uniq options " --another-option option-argument"
#
# will only append to $options if " --another-option option-argument "
# is not already present somewhere in $options already (note spaces at
# each end implied by leading space in second argument).
func_append_uniq ()
{
$debug_cmd
eval _G_current_value='`$ECHO $'$1'`'
_G_delim=`expr "$2" : '\(.\)'`
case $_G_delim$_G_current_value$_G_delim in
*"$2$_G_delim"*) ;;
*) func_append "$@" ;;
esac
}
# func_arith TERM...
# ------------------
# Set func_arith_result to the result of evaluating TERMs.
test -z "$_G_HAVE_ARITH_OP" \
&& (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \
&& _G_HAVE_ARITH_OP=yes
if test yes = "$_G_HAVE_ARITH_OP"; then
eval 'func_arith ()
{
$debug_cmd
func_arith_result=$(( $* ))
}'
else
func_arith ()
{
$debug_cmd
func_arith_result=`expr "$@"`
}
fi
# func_basename FILE
# ------------------
# Set func_basename_result to FILE with everything up to and including
# the last / stripped.
# _b and _d hold the shared implementation snippets that are spliced
# (via eval) into func_basename, func_dirname and
# func_dirname_and_basename below, so the XSI-vs-sed choice is made
# only once.
if test yes = "$_G_HAVE_XSI_OPS"; then
  # If this shell supports suffix pattern removal, then use it to avoid
  # forking. Hide the definitions single quotes in case the shell chokes
  # on unsupported syntax...
  _b='func_basename_result=${1##*/}'
  _d='case $1 in
    */*) func_dirname_result=${1%/*}$2 ;;
    *  ) func_dirname_result=$3 ;;
  esac'

else
  # ...otherwise fall back to using sed.
  _b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`'
  _d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"`
  if test "X$func_dirname_result" = "X$1"; then
    func_dirname_result=$3
  else
    func_append func_dirname_result "$2"
  fi'
fi

# func_basename FILE
# ------------------
# Set func_basename_result to FILE with everything up to and including
# the last / stripped.
eval 'func_basename ()
{
    $debug_cmd

    '"$_b"'
}'


# func_dirname FILE APPEND NONDIR_REPLACEMENT
# -------------------------------------------
# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
# otherwise set result to NONDIR_REPLACEMENT.
eval 'func_dirname ()
{
    $debug_cmd

    '"$_d"'
}'


# func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT
# --------------------------------------------------------
# Perform func_basename and func_dirname in a single function
# call:
#   dirname:  Compute the dirname of FILE.  If nonempty,
#             add APPEND to the result, otherwise set result
#             to NONDIR_REPLACEMENT.
#             value returned in "$func_dirname_result"
#   basename: Compute filename of FILE.
#             value returned in "$func_basename_result"
# For efficiency, we do not delegate to the functions above but instead
# duplicate the functionality here.
eval 'func_dirname_and_basename ()
{
    $debug_cmd

    '"$_b"'
    '"$_d"'
}'
# func_echo ARG...
# ----------------
# Echo program name prefixed message.
func_echo ()
{
    $debug_cmd

    _G_message=$*

    # Split the message on newlines ($nl) by temporarily changing IFS,
    # and prefix each resulting line with the program name.  IFS is
    # restored inside the loop body so that normal word handling applies
    # while each line is printed.
    func_echo_IFS=$IFS
    IFS=$nl
    for _G_line in $_G_message; do
      IFS=$func_echo_IFS
      $ECHO "$progname: $_G_line"
    done
    IFS=$func_echo_IFS
}
# func_echo_all ARG...
# --------------------
# Invoke $ECHO with all args, space-separated.
# func_echo_all ARG...
# --------------------
# Print every argument on a single line, joined by spaces, via $ECHO.
func_echo_all ()
{
    _G_all_args=$*
    $ECHO "$_G_all_args"
}
# func_echo_infix_1 INFIX ARG...
# ------------------------------
# Echo program name, followed by INFIX on the first line, with any
# additional lines not showing INFIX.
func_echo_infix_1 ()
{
    $debug_cmd

    $require_term_colors

    _G_infix=$1; shift
    _G_indent=$_G_infix
    _G_prefix="$progname: $_G_infix: "
    _G_message=$*

    # Strip color escape sequences before counting printable length,
    # so continuation-line indentation matches the visible prefix width.
    for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan"
    do
      test -n "$_G_tc" && {
        _G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"`
        _G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"`
      }
    done
    # Replace every remaining character with a space to build the indent.
    _G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes

    # Emit the first line with the INFIX prefix, subsequent lines with
    # the matching whitespace indent; everything goes to stderr.
    func_echo_infix_1_IFS=$IFS
    IFS=$nl
    for _G_line in $_G_message; do
      IFS=$func_echo_infix_1_IFS
      $ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2
      _G_prefix=$_G_indent
    done
    IFS=$func_echo_infix_1_IFS
}
# func_error ARG...
# -----------------
# Echo program name prefixed message to standard error.
# func_error ARG...
# -----------------
# Echo program name prefixed message to standard error.
func_error ()
{
    $debug_cmd

    $require_term_colors

    # Highlight the 'error' tag in standout red, then delegate the
    # prefixed, line-wrapped output to func_echo_infix_1 on stderr.
    _G_error_infix=" $tc_standout${tc_red}error$tc_reset"
    func_echo_infix_1 "$_G_error_infix" "$*" >&2
}
# func_fatal_error ARG...
# -----------------------
# Echo program name prefixed message to standard error, and exit.
func_fatal_error ()
{
    $debug_cmd

    # Report the message via func_error, then terminate the whole
    # script with the standard failure status.
    func_error "$*"
    exit $EXIT_FAILURE
}
# func_grep EXPRESSION FILENAME
# -----------------------------
# Check whether EXPRESSION matches any line of FILENAME, without output.
func_grep ()
{
    $debug_cmd

    # Only the exit status matters; all grep output is discarded.
    $GREP "$1" "$2" >/dev/null 2>&1
}
# func_len STRING
# ---------------
# Set func_len_result to the length of STRING. STRING may not
# start with a hyphen.
# _G_HAVE_XSI_OPS
# Probe (in a subshell) for XSI parameter expansions: ${#x}, ${x%%...},
# ${x%...}, ${x#...} and ${x##...}.
test -z "$_G_HAVE_XSI_OPS" \
  && (eval 'x=a/b/c;
      test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
  && _G_HAVE_XSI_OPS=yes

if test yes = "$_G_HAVE_XSI_OPS"; then
  # Fast path: ${#1} expands to the length of $1 without forking.
  eval 'func_len ()
  {
    $debug_cmd

    func_len_result=${#1}
  }'
else
  # Fallback: let expr count the characters; if that fails, assume the
  # platform's maximum command-line length as a safe upper bound.
  func_len ()
  {
    $debug_cmd

    func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
  }
fi
# func_mkdir_p DIRECTORY-PATH
# ---------------------------
# Make sure the entire path to DIRECTORY-PATH is available.
func_mkdir_p ()
{
    $debug_cmd

    _G_directory_path=$1
    _G_dir_list=

    if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then

      # Protect directory names starting with '-'
      case $_G_directory_path in
        -*) _G_directory_path=./$_G_directory_path ;;
      esac

      # While some portion of DIR does not yet exist...
      while test ! -d "$_G_directory_path"; do
        # ...make a list in topmost first order.  Use a colon delimited
        # list in case some portion of path contains whitespace.
        _G_dir_list=$_G_directory_path:$_G_dir_list

        # If the last portion added has no slash in it, the list is done
        case $_G_directory_path in */*) ;; *) break ;; esac

        # ...otherwise throw away the child directory and loop
        _G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"`
      done
      # Strip the trailing colon(s) left by the loop above.
      _G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'`

      # Create each missing component, topmost first.
      func_mkdir_p_IFS=$IFS; IFS=:
      for _G_dir in $_G_dir_list; do
	IFS=$func_mkdir_p_IFS
        # mkdir can fail with a 'File exists' error if two processes
        # try to create one of the directories concurrently.  Don't
        # stop in that case!
        $MKDIR "$_G_dir" 2>/dev/null || :
      done
      IFS=$func_mkdir_p_IFS

      # Bail out if we (or some other process) failed to create a directory.
      test -d "$_G_directory_path" || \
        func_fatal_error "Failed to create '$1'"
    fi
}
# func_mktempdir [BASENAME]
# -------------------------
# Make a temporary directory that won't clash with other running
# libtool processes, and avoids race conditions if possible. If
# given, BASENAME is the basename for that directory.
func_mktempdir ()
{
    $debug_cmd

    _G_template=${TMPDIR-/tmp}/${1-$progname}

    if test : = "$opt_dry_run"; then
      # Return a directory name, but don't create it in dry-run mode
      _G_tmpdir=$_G_template-$$
    else

      # If mktemp works, use that first and foremost
      _G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null`

      if test ! -d "$_G_tmpdir"; then
        # Failing that, at least try and use $RANDOM to avoid a race
        _G_tmpdir=$_G_template-${RANDOM-0}$$

        # Create the directory with owner-only permissions, restoring
        # the caller's umask afterwards.
        func_mktempdir_umask=`umask`
        umask 0077
        $MKDIR "$_G_tmpdir"
        umask $func_mktempdir_umask
      fi

      # If we're not in dry-run mode, bomb out on failure
      test -d "$_G_tmpdir" || \
        func_fatal_error "cannot create temporary directory '$_G_tmpdir'"
    fi

    # The caller reads the result from stdout.
    $ECHO "$_G_tmpdir"
}
# func_normal_abspath PATH
# ------------------------
# Remove doubled-up and trailing slashes, "." path components,
# and cancel out any ".." path components in PATH after making
# it an absolute path.
func_normal_abspath ()
{
    $debug_cmd

    # These SED scripts presuppose an absolute path with a trailing slash.
    _G_pathcar='s|^/\([^/]*\).*$|\1|'
    _G_pathcdr='s|^/[^/]*||'
    _G_removedotparts=':dotsl
s|/\./|/|g
t dotsl
s|/\.$|/|'
    _G_collapseslashes='s|/\{1,\}|/|g'
    _G_finalslash='s|/*$|/|'

    # Start from root dir and reassemble the path.
    func_normal_abspath_result=
    func_normal_abspath_tpath=$1
    func_normal_abspath_altnamespace=
    case $func_normal_abspath_tpath in
      "")
        # Empty path, that just means $cwd.
        func_stripname '' '/' "`pwd`"
        func_normal_abspath_result=$func_stripname_result
        return
        ;;
      # The next three entries are used to spot a run of precisely
      # two leading slashes without using negated character classes;
      # we take advantage of case's first-match behaviour.
      ///*)
        # Unusual form of absolute path, do nothing.
        ;;
      //*)
        # Not necessarily an ordinary path; POSIX reserves leading '//'
        # and for example Cygwin uses it to access remote file shares
        # over CIFS/SMB, so we conserve a leading double slash if found.
        func_normal_abspath_altnamespace=/
        ;;
      /*)
        # Absolute path, do nothing.
        ;;
      *)
        # Relative path, prepend $cwd.
        func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
        ;;
    esac

    # Cancel out all the simple stuff to save iterations.  We also want
    # the path to end with a slash for ease of parsing, so make sure
    # there is one (and only one) here.
    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"`
    while :; do
      # Processed it all yet?
      if test / = "$func_normal_abspath_tpath"; then
        # If we ascended to the root using ".." the result may be empty now.
        if test -z "$func_normal_abspath_result"; then
          func_normal_abspath_result=/
        fi
        break
      fi
      # Peel off the leading path component ("car") and keep the rest
      # ("cdr") for the next iteration.
      func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_pathcar"`
      func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_pathcdr"`
      # Figure out what to do with it
      case $func_normal_abspath_tcomponent in
        "")
          # Trailing empty path component, ignore it.
          ;;
        ..)
          # Parent dir; strip last assembled component from result.
          func_dirname "$func_normal_abspath_result"
          func_normal_abspath_result=$func_dirname_result
          ;;
        *)
          # Actual path component, append it.
          func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent"
          ;;
      esac
    done

    # Restore leading double-slash if one was found on entry.
    func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
}
# func_notquiet ARG...
# --------------------
# Echo program name prefixed message only when not in quiet mode.
# func_notquiet ARG...
# --------------------
# Echo program name prefixed message only when not in quiet mode.
func_notquiet ()
{
    $debug_cmd

    # Forward everything to func_echo unless quiet mode is in effect.
    if $opt_quiet; then :; else
      func_echo ${1+"$@"}
    fi

    # A bug in bash halts the script if the last line of a function
    # fails when set -e is in force, so we need another command to
    # work around that:
    :
}
# func_relative_path SRCDIR DSTDIR
# --------------------------------
# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR.
func_relative_path ()
{
    $debug_cmd

    func_relative_path_result=
    # Canonicalise both endpoints first.
    func_normal_abspath "$1"
    func_relative_path_tlibdir=$func_normal_abspath_result
    func_normal_abspath "$2"
    func_relative_path_tbindir=$func_normal_abspath_result

    # Ascend the tree starting from libdir, adding one '../' per level
    # until a common ancestor of bindir is reached.
    while :; do
      # check if we have found a prefix of bindir
      case $func_relative_path_tbindir in
        $func_relative_path_tlibdir)
          # found an exact match
          func_relative_path_tcancelled=
          break
          ;;
        $func_relative_path_tlibdir*)
          # found a matching prefix; the remainder of bindir still has
          # to be appended to the result below.
          func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
          func_relative_path_tcancelled=$func_stripname_result
          if test -z "$func_relative_path_result"; then
            func_relative_path_result=.
          fi
          break
          ;;
        *)
          func_dirname $func_relative_path_tlibdir
          func_relative_path_tlibdir=$func_dirname_result
          if test -z "$func_relative_path_tlibdir"; then
            # Have to descend all the way to the root!
            func_relative_path_result=../$func_relative_path_result
            func_relative_path_tcancelled=$func_relative_path_tbindir
            break
          fi
          func_relative_path_result=../$func_relative_path_result
          ;;
      esac
    done

    # Now calculate path; take care to avoid doubling-up slashes.
    func_stripname '' '/' "$func_relative_path_result"
    func_relative_path_result=$func_stripname_result
    func_stripname '/' '/' "$func_relative_path_tcancelled"
    if test -n "$func_stripname_result"; then
      func_append func_relative_path_result "/$func_stripname_result"
    fi

    # Normalisation. If bindir is libdir, return '.' else relative path.
    if test -n "$func_relative_path_result"; then
      func_stripname './' '' "$func_relative_path_result"
      func_relative_path_result=$func_stripname_result
    fi

    test -n "$func_relative_path_result" || func_relative_path_result=.

    :
}
# func_quote_for_eval ARG...
# --------------------------
# Aesthetically quote ARGs to be evaled later.
# This function returns two values:
# i) func_quote_for_eval_result
# double-quoted, suitable for a subsequent eval
# ii) func_quote_for_eval_unquoted_result
# has all characters that are still active within double
# quotes backslashified.
func_quote_for_eval ()
{
    $debug_cmd

    func_quote_for_eval_unquoted_result=
    func_quote_for_eval_result=
    # Process each argument in turn, accumulating both result strings
    # space-separated.
    while test 0 -lt $#; do
      case $1 in
        *[\\\`\"\$]*)
	  # Backslashify the characters that stay active inside double
	  # quotes (backslash, backquote, double quote, dollar).
	  _G_unquoted_arg=`printf '%s\n' "$1" |$SED "$sed_quote_subst"` ;;
        *)
          _G_unquoted_arg=$1 ;;
      esac
      if test -n "$func_quote_for_eval_unquoted_result"; then
	func_append func_quote_for_eval_unquoted_result " $_G_unquoted_arg"
      else
        func_append func_quote_for_eval_unquoted_result "$_G_unquoted_arg"
      fi

      case $_G_unquoted_arg in
        # Double-quote args containing shell metacharacters to delay
        # word splitting, command substitution and variable expansion
        # for a subsequent eval.
        # Many Bourne shells cannot handle close brackets correctly
        # in scan sets, so we specify it separately.
        *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
          _G_quoted_arg=\"$_G_unquoted_arg\"
          ;;
        *)
          _G_quoted_arg=$_G_unquoted_arg
          ;;
      esac

      if test -n "$func_quote_for_eval_result"; then
	func_append func_quote_for_eval_result " $_G_quoted_arg"
      else
	func_append func_quote_for_eval_result "$_G_quoted_arg"
      fi
      shift
    done
}
# func_quote_for_expand ARG
# -------------------------
# Aesthetically quote ARG to be evaled later; same as above,
# but do not quote variable references.
func_quote_for_expand ()
{
    $debug_cmd

    case $1 in
      *[\\\`\"]*)
	# Escape backslashes, backquotes and double quotes, but leave
	# '$' alone so variable references still expand on a later eval.
	_G_arg=`$ECHO "$1" | $SED \
	    -e "$sed_double_quote_subst" -e "$sed_double_backslash"` ;;
      *)
        _G_arg=$1 ;;
    esac

    case $_G_arg in
      # Double-quote args containing shell metacharacters to delay
      # word splitting and command substitution for a subsequent eval.
      # Many Bourne shells cannot handle close brackets correctly
      # in scan sets, so we specify it separately.
      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
        _G_arg=\"$_G_arg\"
        ;;
    esac

    func_quote_for_expand_result=$_G_arg
}
# func_stripname PREFIX SUFFIX NAME
# ---------------------------------
# strip PREFIX and SUFFIX from NAME, and store in func_stripname_result.
# PREFIX and SUFFIX must not contain globbing or regex special
# characters, hashes, percent signs, but SUFFIX may contain a leading
# dot (in which case that matches only a dot).
if test yes = "$_G_HAVE_XSI_OPS"; then
  # Fast path: XSI prefix/suffix pattern removal, no fork.
  eval 'func_stripname ()
  {
    $debug_cmd

    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
    # positional parameters, so assign one to ordinary variable first.
    func_stripname_result=$3
    func_stripname_result=${func_stripname_result#"$1"}
    func_stripname_result=${func_stripname_result%"$2"}
  }'
else
  # Fallback: emulate with sed.  A SUFFIX beginning with '.' needs the
  # dot escaped so it matches literally instead of "any character".
  func_stripname ()
  {
    $debug_cmd

    case $2 in
      .*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;;
      *)  func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;;
    esac
  }
fi
# func_show_eval CMD [FAIL_EXP]
# -----------------------------
# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is
# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it.
func_show_eval ()
{
    $debug_cmd

    _G_cmd=$1
    # FAIL_EXP defaults to the no-op ':' when not given.
    _G_fail_exp=${2-':'}

    # Display the (re-quoted) command unless in quiet mode.
    func_quote_for_expand "$_G_cmd"
    eval "func_notquiet $func_quote_for_expand_result"

    $opt_dry_run || {
      eval "$_G_cmd"
      _G_status=$?
      if test 0 -ne "$_G_status"; then
	# '(exit N)' re-establishes $? so FAIL_EXP can inspect it.
	eval "(exit $_G_status); $_G_fail_exp"
      fi
    }
}
# func_show_eval_locale CMD [FAIL_EXP]
# ------------------------------------
# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is
# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it. Use the saved locale for evaluation.
func_show_eval_locale ()
{
    $debug_cmd

    _G_cmd=$1
    # FAIL_EXP defaults to the no-op ':' when not given.
    _G_fail_exp=${2-':'}

    $opt_quiet || {
      func_quote_for_expand "$_G_cmd"
      eval "func_echo $func_quote_for_expand_result"
    }

    $opt_dry_run || {
      # Run CMD under the user's saved locale, then switch back to the
      # script's safe locale before evaluating the failure expression.
      eval "$_G_user_locale
	    $_G_cmd"
      _G_status=$?
      eval "$_G_safe_locale"
      if test 0 -ne "$_G_status"; then
	# '(exit N)' re-establishes $? so FAIL_EXP can inspect it.
	eval "(exit $_G_status); $_G_fail_exp"
      fi
    }
}
# func_tr_sh
# ----------
# Turn $1 into a string suitable for a shell variable name.
# Result is stored in $func_tr_sh_result. All characters
# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
# if $1 begins with a digit, a '_' is prepended as well.
# func_tr_sh
# ----------
# Turn $1 into a string suitable for a shell variable name.
# Result is stored in $func_tr_sh_result.  All characters
# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
# if $1 begins with a digit, a '_' is prepended as well.
func_tr_sh ()
{
    $debug_cmd

    # Names that contain a non-word character or start with a digit need
    # the sed treatment; anything else is already a valid variable name
    # and passes through untouched.
    case $1 in
      *[!a-zA-Z0-9_]* | [0-9]*)
        func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'`
        ;;
      *)
        func_tr_sh_result=$1
        ;;
    esac
}
# func_verbose ARG...
# -------------------
# Echo program name prefixed message in verbose mode only.
# func_verbose ARG...
# -------------------
# Echo program name prefixed message in verbose mode only.
func_verbose ()
{
    $debug_cmd

    # Only speak when --verbose was given; the trailing ':' guarantees a
    # zero exit status either way (works around a set -e bash bug).
    if $opt_verbose; then
      func_echo "$*"
    fi

    :
}
# func_warn_and_continue ARG...
# -----------------------------
# Echo program name prefixed warning message to standard error.
# func_warn_and_continue ARG...
# -----------------------------
# Echo program name prefixed warning message to standard error.
func_warn_and_continue ()
{
    $debug_cmd

    $require_term_colors

    # Tag the message with a red 'warning' marker and emit it on stderr
    # without terminating the script.
    _G_warn_infix=${tc_red}warning$tc_reset
    func_echo_infix_1 "$_G_warn_infix" "$*" >&2
}
# func_warning CATEGORY ARG...
# ----------------------------
# Echo program name prefixed warning message to standard error. Warning
# messages can be filtered according to CATEGORY, where this function
# elides messages where CATEGORY is not listed in the global variable
# 'opt_warning_types'.
func_warning ()
{
    $debug_cmd

    # CATEGORY must be in the warning_categories list!
    case " $warning_categories " in
      *" $1 "*) ;;
      *) func_internal_error "invalid warning category '$1'" ;;
    esac

    _G_category=$1
    shift

    # Only emit the message when its category was enabled via -W;
    # $warning_func is func_echo, func_fatal_error or ':' depending on
    # the -W mode in effect.
    case " $opt_warning_types " in
      *" $_G_category "*) $warning_func ${1+"$@"} ;;
    esac
}
# func_sort_ver VER1 VER2
# -----------------------
# 'sort -V' is not generally available.
# Note this deviates from the version comparison in automake
# in that it treats 1.5 < 1.5.0, and treats 1.4-p12a < 1.4-p3a
# but this should suffice as we won't be specifying old
# version formats or redundant trailing .0 in bootstrap.conf.
# If we did want full compatibility then we should probably
# use m4_version_compare from autoconf.
func_sort_ver ()
{
    $debug_cmd

    # Sort the two versions numerically on up to nine dot-separated
    # fields; the smaller version is printed first.
    printf '%s\n%s\n' "$1" "$2" \
      | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n
}
# func_lt_ver PREV CURR
# ---------------------
# Return true if PREV and CURR are in the correct order according to
# func_sort_ver, otherwise false. Use it like this:
#
# func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..."
# func_lt_ver PREV CURR
# ---------------------
# Return true if PREV and CURR are in the correct order according to
# func_sort_ver, otherwise false.  Use it like this:
#
#  func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..."
func_lt_ver ()
{
    $debug_cmd

    # PREV sorts first exactly when the two versions are already ordered.
    _G_sorted_first=`func_sort_ver "$1" "$2" | $SED 1q`
    test "x$1" = "x$_G_sorted_first"
}
# Local variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
# time-stamp-time-zone: "UTC"
# End:
#! /bin/sh
# Set a version string for this script (the options-parser library).
scriptversion=2014-01-07.03; # UTC
# A portable, pluggable option parser for Bourne shell.
# Written by Gary V. Vaughan, 2010
# Copyright (C) 2010-2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Please report bugs or propose patches to gary@gnu.org.
## ------ ##
## Usage. ##
## ------ ##
# This file is a library for parsing options in your shell scripts along
# with assorted other useful supporting features that you can make use
# of too.
#
# For the simplest scripts you might need only:
#
# #!/bin/sh
# . relative/path/to/funclib.sh
# . relative/path/to/options-parser
# scriptversion=1.0
# func_options ${1+"$@"}
# eval set dummy "$func_options_result"; shift
# ...rest of your script...
#
# In order for the '--version' option to work, you will need to have a
# suitably formatted comment like the one at the top of this file
# starting with '# Written by ' and ending with '# warranty; '.
#
# For '-h' and '--help' to work, you will also need a one line
# description of your script's purpose in a comment directly above the
# '# Written by ' line, like the one at the top of this file.
#
# The default options also support '--debug', which will turn on shell
# execution tracing (see the comment above debug_cmd below for another
# use), and '--verbose' and the func_verbose function to allow your script
# to display verbose messages only when your user has specified
# '--verbose'.
#
# After sourcing this file, you can plug processing for additional
# options by amending the variables from the 'Configuration' section
# below, and following the instructions in the 'Option parsing'
# section further down.
## -------------- ##
## Configuration. ##
## -------------- ##
# You should override these variables in your script after sourcing this
# file so that they reflect the customisations you have added to the
# option parser.
# The usage line for option parsing errors and the start of '-h' and
# '--help' output messages. You can embed shell variables for delayed
# expansion at the time the message is displayed, but you will need to
# quote other shell meta-characters carefully to prevent them being
# expanded when the contents are evaled.
# Single-quoted so the variables expand only when the message is shown.
usage='$progpath [OPTION]...'

# Short help message in response to '-h' and '--help'.  Add to this or
# override it after sourcing this library to reflect the full set of
# options your script accepts.
usage_message="\
--debug enable verbose shell tracing
-W, --warnings=CATEGORY
report the warnings falling in CATEGORY [all]
-v, --verbose verbosely report processing
--version print version information and exit
-h, --help print short or long help message and exit
"

# Additional text appended to 'usage_message' in response to '--help'.
long_help_message="
Warning categories include:
'all' show all warnings
'none' turn off all the warnings
'error' warnings are treated as fatal errors"

# Help message printed before fatal option parsing errors.
# '\$progname' is escaped so it expands at display time, not here.
fatal_help="Try '\$progname --help' for more information."
## ------------------------- ##
## Hook function management. ##
## ------------------------- ##
# This section contains functions for adding, removing, and running hooks
# to the main code. A hook is just a named list of functions that can
# be run in order later on.
# func_hookable FUNC_NAME
# -----------------------
# Declare that FUNC_NAME will run hooks added with
# 'func_add_hook FUNC_NAME ...'.
func_hookable ()
{
    $debug_cmd

    # Record FUNC_NAME in the global whitelist consulted by
    # func_add_hook and func_run_hooks.
    func_append hookable_fns " $1"
}
# func_add_hook FUNC_NAME HOOK_FUNC
# ---------------------------------
# Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must
# first have been declared "hookable" by a call to 'func_hookable'.
func_add_hook ()
{
    $debug_cmd

    # Refuse to attach hooks to functions not declared hookable.
    case " $hookable_fns " in
      *" $1 "*) ;;
      *) func_fatal_error "'$1' does not accept hook functions." ;;
    esac

    # Append HOOK_FUNC to the dynamically-named list variable
    # '<FUNC_NAME>_hooks'.
    eval func_append ${1}_hooks '" $2"'
}
# func_remove_hook FUNC_NAME HOOK_FUNC
# ------------------------------------
# Remove HOOK_FUNC from the list of functions called by FUNC_NAME.
func_remove_hook ()
{
    $debug_cmd

    # Delete the ' HOOK_FUNC' entry from the dynamically-named list
    # variable '<FUNC_NAME>_hooks' with a sed substitution.
    eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`'
}
# func_run_hooks FUNC_NAME [ARG]...
# ---------------------------------
# Run all hook functions registered to FUNC_NAME.
# It is assumed that the list of hook functions contains nothing more
# than a whitespace-delimited list of legal shell function names, and
# no effort is wasted trying to catch shell meta-characters or preserve
# whitespace.
# (Fixed a garbled error message: "does not support hook funcions.n"
# -> "does not support hook functions.")
func_run_hooks ()
{
    $debug_cmd

    # FUNC_NAME must have been declared hookable via func_hookable.
    case " $hookable_fns " in
      *" $1 "*) ;;
      *) func_fatal_error "'$1' does not support hook functions." ;;
    esac

    # Fetch the hook list from the dynamically-named variable
    # '<FUNC_NAME>_hooks', then drop FUNC_NAME from the argument list.
    eval _G_hook_fns=\$$1_hooks; shift

    for _G_hook in $_G_hook_fns; do
      eval $_G_hook '"$@"'

      # store returned options list back into positional
      # parameters for next 'cmd' execution.
      eval _G_hook_result=\$${_G_hook}_result
      eval set dummy "$_G_hook_result"; shift
    done

    # Return the (possibly rewritten) argument list, quoted for eval.
    func_quote_for_eval ${1+"$@"}
    func_run_hooks_result=$func_quote_for_eval_result
}
## --------------- ##
## Option parsing. ##
## --------------- ##
# In order to add your own option parsing hooks, you must accept the
# full positional parameter list in your hook function, remove any
# options that you action, and then pass back the remaining unprocessed
# options in '<hooked_function_name>_result', escaped suitably for
# 'eval'. Like this:
#
# my_options_prep ()
# {
# $debug_cmd
#
# # Extend the existing usage message.
# usage_message=$usage_message'
# -s, --silent don'\''t print informational messages
# '
#
# func_quote_for_eval ${1+"$@"}
# my_options_prep_result=$func_quote_for_eval_result
# }
# func_add_hook func_options_prep my_options_prep
#
#
# my_silent_option ()
# {
# $debug_cmd
#
# # Note that for efficiency, we parse as many options as we can
# # recognise in a loop before passing the remainder back to the
# # caller on the first unrecognised argument we encounter.
# while test $# -gt 0; do
# opt=$1; shift
# case $opt in
# --silent|-s) opt_silent=: ;;
# # Separate non-argument short options:
# -s*) func_split_short_opt "$_G_opt"
# set dummy "$func_split_short_opt_name" \
# "-$func_split_short_opt_arg" ${1+"$@"}
# shift
# ;;
# *) set dummy "$_G_opt" "$*"; shift; break ;;
# esac
# done
#
# func_quote_for_eval ${1+"$@"}
# my_silent_option_result=$func_quote_for_eval_result
# }
# func_add_hook func_parse_options my_silent_option
#
#
# my_option_validation ()
# {
# $debug_cmd
#
# $opt_silent && $opt_verbose && func_fatal_help "\
# '--silent' and '--verbose' options are mutually exclusive."
#
# func_quote_for_eval ${1+"$@"}
# my_option_validation_result=$func_quote_for_eval_result
# }
# func_add_hook func_validate_options my_option_validation
#
# You'll also need to manually amend $usage_message to reflect the extra
# options you parse. It's preferable to append if you can, so that
# multiple option parsing hooks can be added safely.
# func_options [ARG]...
# ---------------------
# All the functions called inside func_options are hookable. See the
# individual implementations for details.
func_hookable func_options
func_options ()
{
    $debug_cmd

    # Run the three option-parsing phases in order, feeding each phase
    # the (eval-quoted) positional parameters returned by the previous
    # one, then run any hooks attached directly to func_options.
    func_options_prep ${1+"$@"}
    eval func_parse_options \
        ${func_options_prep_result+"$func_options_prep_result"}
    eval func_validate_options \
        ${func_parse_options_result+"$func_parse_options_result"}

    eval func_run_hooks func_options \
        ${func_validate_options_result+"$func_validate_options_result"}

    # save modified positional parameters for caller
    func_options_result=$func_run_hooks_result
}
# func_options_prep [ARG]...
# --------------------------
# All initialisations required before starting the option parse loop.
# Note that when calling hook functions, we pass through the list of
# positional parameters. If a hook function modifies that list, and
# needs to propagate that back to the rest of this script, then the complete
# modified list must be put in 'func_run_hooks_result' before
# returning.
func_hookable func_options_prep
func_options_prep ()
{
    $debug_cmd

    # Option defaults:
    opt_verbose=false
    opt_warning_types=

    # Give registered prep hooks a chance to add their own defaults.
    func_run_hooks func_options_prep ${1+"$@"}

    # save modified positional parameters for caller
    func_options_prep_result=$func_run_hooks_result
}
# func_parse_options [ARG]...
# ---------------------------
# The main option parsing loop.
func_hookable func_parse_options
func_parse_options ()
{
    $debug_cmd

    func_parse_options_result=

    # this just eases exit handling
    while test $# -gt 0; do
      # Defer to hook functions for initial option parsing, so they
      # get priority in the event of reusing an option name.
      func_run_hooks func_parse_options ${1+"$@"}

      # Adjust func_parse_options positional parameters to match
      eval set dummy "$func_run_hooks_result"; shift

      # Break out of the loop if we already parsed every option.
      test $# -gt 0 || break

      _G_opt=$1
      shift
      case $_G_opt in
        --debug|-x)   debug_cmd='set -x'
                      func_echo "enabling shell trace mode"
                      $debug_cmd
                      ;;

        --no-warnings|--no-warning|--no-warn)
                      # Rewritten as '--warnings none' and reparsed.
                      set dummy --warnings none ${1+"$@"}
                      shift
		      ;;

        --warnings|--warning|-W)
		      test $# = 0 && func_missing_arg $_G_opt && break
                      case " $warning_categories $1" in
                        *" $1 "*)
                          # trailing space prevents matching last $1 above
                          func_append_uniq opt_warning_types " $1"
                          ;;
                        *all)
                          opt_warning_types=$warning_categories
                          ;;
                        *none)
                          opt_warning_types=none
                          warning_func=:
                          ;;
                        *error)
                          opt_warning_types=$warning_categories
                          warning_func=func_fatal_error
                          ;;
                        *)
                          func_fatal_error \
                             "unsupported warning category: '$1'"
                          ;;
                      esac
                      shift
                      ;;

        --verbose|-v) opt_verbose=: ;;
        --version)    func_version ;;
        -\?|-h)       func_usage ;;
        --help)       func_help ;;

        # Separate optargs to long options (plugins may need this):
        --*=*)        func_split_equals "$_G_opt"
                      set dummy "$func_split_equals_lhs" \
                          "$func_split_equals_rhs" ${1+"$@"}
                      shift
                      ;;

        # Separate optargs to short options:
        -W*)
                      func_split_short_opt "$_G_opt"
                      set dummy "$func_split_short_opt_name" \
                          "$func_split_short_opt_arg" ${1+"$@"}
                      shift
                      ;;

        # Separate non-argument short options:
        -\?*|-h*|-v*|-x*)
                      func_split_short_opt "$_G_opt"
                      set dummy "$func_split_short_opt_name" \
                          "-$func_split_short_opt_arg" ${1+"$@"}
                      shift
                      ;;

        --)           break ;;
        -*)           func_fatal_help "unrecognised option: '$_G_opt'" ;;
        *)            set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
      esac
    done

    # save modified positional parameters for caller
    func_quote_for_eval ${1+"$@"}
    func_parse_options_result=$func_quote_for_eval_result
}
# func_validate_options [ARG]...
# ------------------------------
# Perform any sanity checks on option settings and/or unconsumed
# arguments.
func_hookable func_validate_options
func_validate_options ()
{
    $debug_cmd

    # Display all warnings if -W was not given.
    test -n "$opt_warning_types" || opt_warning_types=" $warning_categories"

    func_run_hooks func_validate_options ${1+"$@"}

    # Bail if the options were screwed!
    # ($exit_cmd is ':' normally, 'exit' after func_missing_arg.)
    $exit_cmd $EXIT_FAILURE

    # save modified positional parameters for caller
    func_validate_options_result=$func_run_hooks_result
}
## ----------------- ##
## Helper functions. ##
## ----------------- ##
# This section contains the helper functions used by the rest of the
# hookable option parser framework in ascii-betical order.
# func_fatal_help ARG...
# ----------------------
# Echo program name prefixed message to standard error, followed by
# a help hint, and exit.
func_fatal_help ()
{
    $debug_cmd

    # 'eval' expands any shell variables embedded in $usage/$fatal_help
    # at display time.
    eval \$ECHO \""Usage: $usage"\"
    eval \$ECHO \""$fatal_help"\"
    func_error ${1+"$@"}
    exit $EXIT_FAILURE
}
# func_help
# ---------
# Echo long help message to standard output and exit.
func_help ()
{
    $debug_cmd

    # Short usage text first, then the extended help, then exit success.
    func_usage_message
    $ECHO "$long_help_message"
    exit 0
}
# func_missing_arg ARGNAME
# ------------------------
# Echo program name prefixed message to standard error and set global
# exit_cmd.
func_missing_arg ()
{
    $debug_cmd

    func_error "Missing argument for '$1'."
    # func_validate_options runs '$exit_cmd $EXIT_FAILURE' later, so
    # parsing can continue and report all missing arguments first.
    exit_cmd=exit
}
# func_split_equals STRING
# ------------------------
# Set func_split_equals_lhs and func_split_equals_rhs shell variables after
# splitting STRING at the '=' sign.
# Re-probe for XSI expansions in case this parser is used stand-alone
# (harmless if the probe already ran above; it runs in a subshell).
test -z "$_G_HAVE_XSI_OPS" \
  && (eval 'x=a/b/c;
      test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
  && _G_HAVE_XSI_OPS=yes

if test yes = "$_G_HAVE_XSI_OPS"
then
  # This is an XSI compatible shell, allowing a faster implementation...
  eval 'func_split_equals ()
  {
    $debug_cmd

    func_split_equals_lhs=${1%%=*}
    func_split_equals_rhs=${1#*=}
    # No '=' at all: lhs equals the whole string, so rhs is empty.
    test "x$func_split_equals_lhs" = "x$1" \
      && func_split_equals_rhs=
  }'
else
  # ...otherwise fall back to using expr, which is often a shell builtin.
  func_split_equals ()
  {
    $debug_cmd

    func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'`
    func_split_equals_rhs=
    test "x$func_split_equals_lhs" = "x$1" \
      || func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'`
  }
fi #func_split_equals
# func_split_short_opt SHORTOPT
# -----------------------------
# Set func_split_short_opt_name and func_split_short_opt_arg shell
# variables after splitting SHORTOPT after the 2nd character.
if test yes = "$_G_HAVE_XSI_OPS"
then
  # This is an XSI compatible shell, allowing a faster implementation...
  eval 'func_split_short_opt ()
  {
    $debug_cmd

    # Everything after the first two characters is the argument; the
    # name is whatever remains after removing that suffix.
    func_split_short_opt_arg=${1#??}
    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
  }'
else
  # ...otherwise fall back to using expr, which is often a shell builtin.
  func_split_short_opt ()
  {
    $debug_cmd

    func_split_short_opt_name=`expr "x$1" : 'x-\(.\)'`
    func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'`
  }
fi #func_split_short_opt
# func_usage
# ----------
# Echo short help message to standard output and exit.
func_usage ()
{
    $debug_cmd

    func_usage_message
    # Point at --help for the full text, honouring the user's $PAGER.
    $ECHO "Run '$progname --help |${PAGER-more}' for full usage"
    exit 0
}
# func_usage_message
# ------------------
# Echo short help message to standard output.
func_usage_message ()
{
    $debug_cmd

    eval \$ECHO \""Usage: $usage"\"
    echo
    # Scan the leading '# ' comment block of this script: strip the
    # comment prefix, hold each line, and on reaching the first
    # 'Written by' line print the previously-held line (the one-line
    # program description), then quit.
    $SED -n 's|^# ||
        /^Written by/{
          x;p;x
        }
        h
        /^Written by/q' < "$progpath"
    echo
    eval \$ECHO \""$usage_message"\"
}
# func_version
# ------------
# Echo version message to standard output and exit.
func_version ()
{
    $debug_cmd

    printf '%s\n' "$progname $scriptversion"
    # Extract and print the copyright/authorship notice from the
    # leading comment block of this script, joining any multi-line
    # '(C)' notice onto one line and compressing the year list.
    $SED -n '
        /(C)/!b go
        :more
        /\./!{
          N
          s|\n# | |
          b more
        }
        :go
        /^# Written by /,/# warranty; / {
          s|^# ||
          s|^# *$||
          s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2|
          p
        }
        /^# Written by / {
          s|^# ||
          p
        }
        /^warranty; /q' < "$progpath"

    exit $?
}
# Local variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
# time-stamp-time-zone: "UTC"
# End:
# Set a version string; printed by func_version and embedded in
# generated output.
scriptversion='(GNU libtool) 2.4.2.444.28-053d'
# func_echo ARG...
# ----------------
# Libtool also displays the current mode in messages, so override
# funclib.sh func_echo with this custom definition.
func_echo ()
{
    $debug_cmd

    _G_message=$*

    # Split the message on newlines (by temporarily setting IFS to $nl)
    # so that every line is printed with the "$progname: $opt_mode:"
    # prefix; IFS is restored inside and after the loop.
    func_echo_IFS=$IFS
    IFS=$nl
    for _G_line in $_G_message; do
      IFS=$func_echo_IFS
      $ECHO "$progname${opt_mode+: $opt_mode}: $_G_line"
    done
    IFS=$func_echo_IFS
}
# func_warning ARG...
# -------------------
# Libtool warnings are not categorized, so override funclib.sh
# func_warning with this simpler definition.
func_warning ()
{
    $debug_cmd

    # Forward all arguments to the configured warning printer.
    # NOTE(review): $warning_func is set elsewhere (not visible in this
    # chunk) — presumably to a funclib helper or ':' when suppressed.
    $warning_func ${1+"$@"}
}
## ---------------- ##
## Options parsing. ##
## ---------------- ##
# Hook in the functions to make sure our own options are parsed during
# the option parsing loop.
# Single-quoted so that $progpath is expanded later, at display time.
usage='$progpath [OPTION]... [MODE-ARG]...'

# Short help message in response to '-h'.
usage_message="Options:
       --config             show all configuration variables
       --debug              enable verbose shell tracing
   -n, --dry-run            display commands without modifying any files
       --features           display basic configuration information and exit
       --mode=MODE          use operation mode MODE
       --no-warnings        equivalent to '-Wnone'
       --preserve-dup-deps  don't remove duplicate dependency libraries
       --quiet, --silent    don't print informational messages
       --tag=TAG            use configuration variables from tag TAG
   -v, --verbose            print more informational messages than default
       --version            print version information
   -W, --warnings=CATEGORY  report the warnings falling in CATEGORY [all]
   -h, --help, --help-all   print short, long, or detailed help message
"

# Additional text appended to 'usage_message' in response to '--help'.
long_help_message=$long_help_message"

MODE must be one of the following:

       clean           remove files from the build directory
       compile         compile a source file into a libtool object
       execute         automatically set library path, then run a program
       finish          complete the installation of libtool libraries
       install         install libraries or executables
       link            create a library or an executable
       uninstall       remove libraries from an installed directory

MODE-ARGS vary depending on the MODE.  When passed as first option,
'--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that.
Try '$progname --help --mode=MODE' for a more detailed description of MODE.

When reporting a bug, please describe a test case to reproduce it and
include the following information:

       host-triplet:   $host
       shell:          $SHELL
       compiler:       $LTCC
       compiler flags: $LTCFLAGS
       linker:         $LD (gnu? $with_gnu_ld)
       version:        $progname (GNU libtool) 2.4.2.444.28-053d
       automake:       `($AUTOMAKE --version) 2>/dev/null |$SED 1q`
       autoconf:       `($AUTOCONF --version) 2>/dev/null |$SED 1q`

Report bugs to <bug-libtool@gnu.org>.
GNU libtool home page: <http://www.gnu.org/s/libtool/>.
General help using GNU software: <http://www.gnu.org/gethelp/>."
# func_lo2o OBJECT-NAME
# ---------------------
# Transform OBJECT-NAME from a '.lo' suffix to the platform specific
# object suffix.
# Sed substitutions mapping between '.lo' libtool object names and the
# platform's real '.$objext' object names (and back).
lo2o=s/\\.lo\$/.$objext/
o2lo=s/\\.$objext\$/.lo/

if test yes = "$_G_HAVE_XSI_OPS"; then
  # XSI shell: do the suffix swap with parameter expansion, no fork.
  eval 'func_lo2o ()
  {
    case $1 in
      *.lo) func_lo2o_result=${1%.lo}.$objext ;;
      * ) func_lo2o_result=$1 ;;
    esac
  }'

  # func_xform LIBOBJ-OR-SOURCE
  # ---------------------------
  # Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise)
  # suffix to a '.lo' libtool-object suffix.
  eval 'func_xform ()
  {
    func_xform_result=${1%.*}.lo
  }'
else
  # ...otherwise fall back to using sed.
  func_lo2o ()
  {
    func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"`
  }

  func_xform ()
  {
    func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'`
  }
fi
# func_fatal_configuration ARG...
# -------------------------------
# Echo program name prefixed message to standard error, followed by
# a configuration failure hint, and exit.
func_fatal_configuration ()
{
    # Print the caller's message(s) to standard error, followed by a
    # pointer at the package documentation, and exit.
    # Fix: the original called 'func__fatal_error' (double underscore),
    # which is never defined anywhere; the helper provided by funclib
    # is 'func_fatal_error'.  (Known upstream libtool 2.4.x typo,
    # corrected in libtool 2.4.7.)
    func_fatal_error ${1+"$@"} \
      "See the $PACKAGE documentation for more information." \
      "Fatal configuration error."
}
# func_config
# -----------
# Display the configuration for all the tags in this script.
func_config ()
{
    re_begincf='^# ### BEGIN LIBTOOL'
    re_endcf='^# ### END LIBTOOL'

    # Default configuration: print the section of this very script
    # delimited by the BEGIN/END LIBTOOL CONFIG markers.
    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"

    # Now print the configurations for the tags.
    for tagname in $taglist; do
      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
    done

    exit $?
}
# func_features
# -------------
# Display the features supported by this script.
# Report the host triplet and whether shared/static libraries were
# configured, one line each, then exit.
func_features ()
{
    echo "host: $host"
    case $build_libtool_libs in
      yes) echo "enable shared libraries" ;;
      *)   echo "disable shared libraries" ;;
    esac
    case $build_old_libs in
      yes) echo "enable static libraries" ;;
      *)   echo "disable static libraries" ;;
    esac

    exit $?
}
# func_enable_tag TAGNAME
# -----------------------
# Verify that TAGNAME is valid, and either flag an error and exit, or
# enable the TAGNAME tag. We also add TAGNAME to the global $taglist
# variable here.
func_enable_tag ()
{
    # Global variable:
    tagname=$1

    re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
    re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
    sed_extractcf=/$re_begincf/,/$re_endcf/p

    # Validate tagname: only alphanumerics and -_,/ are accepted, since
    # the name is interpolated into sed patterns below.
    case $tagname in
      *[!-_A-Za-z0-9,/]*)
        func_fatal_error "invalid tag name: $tagname"
        ;;
    esac

    # Don't test for the "default" C tag, as we know it's
    # there but not specially marked.
    case $tagname in
        CC) ;;
    *)
        if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
          taglist="$taglist $tagname"

          # Evaluate the configuration.  Be careful to quote the path
          # and the sed script, to avoid splitting on whitespace, but
          # also don't use non-portable quotes within backquotes within
          # quotes we have to do it in 2 steps:
          extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
          eval "$extractedcf"
        else
          func_error "ignoring unknown tag $tagname"
        fi
        ;;
    esac
}
# func_check_version_match
# ------------------------
# Ensure that we are using m4 macros, and libtool script from the same
# release of libtool.
func_check_version_match ()
{
    # Three mismatch cases: aclocal macros from an older release (no
    # $macro_version at all), from a different release, or from the
    # same release but a different revision.
    if test "$package_revision" != "$macro_revision"; then
      if test "$VERSION" != "$macro_version"; then
        if test -z "$macro_version"; then
          cat >&2 <<_LT_EOF
$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
$progname: definition of this LT_INIT comes from an older release.
$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
$progname: and run autoconf again.
_LT_EOF
        else
          cat >&2 <<_LT_EOF
$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
$progname: and run autoconf again.
_LT_EOF
        fi
      else
        cat >&2 <<_LT_EOF
$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
$progname: but the definition of this LT_INIT comes from revision $macro_revision.
$progname: You should recreate aclocal.m4 with macros from revision $package_revision
$progname: of $PACKAGE $VERSION and run autoconf again.
_LT_EOF
      fi

      exit $EXIT_MISMATCH
    fi
}
# libtool_options_prep [ARG]...
# -----------------------------
# Preparation for options parsed by libtool.
libtool_options_prep ()
{
    # Fix: the original expanded '$debug_mode', which is never defined;
    # every sibling hook here uses '$debug_cmd' (the funclib tracing
    # hook).  Known upstream typo, corrected in libtool 2.4.7.
    $debug_cmd

    # Option defaults:
    opt_config=false
    opt_dlopen=
    opt_dry_run=false
    opt_help=false
    opt_mode=
    opt_preserve_dup_deps=false
    opt_quiet=false

    nonopt=
    preserve_args=

    # Shorthand for --mode=foo, only valid as the first argument
    case $1 in
    clean|clea|cle|cl)
      shift; set dummy --mode clean ${1+"$@"}; shift
      ;;
    compile|compil|compi|comp|com|co|c)
      shift; set dummy --mode compile ${1+"$@"}; shift
      ;;
    execute|execut|execu|exec|exe|ex|e)
      shift; set dummy --mode execute ${1+"$@"}; shift
      ;;
    finish|finis|fini|fin|fi|f)
      shift; set dummy --mode finish ${1+"$@"}; shift
      ;;
    install|instal|insta|inst|ins|in|i)
      shift; set dummy --mode install ${1+"$@"}; shift
      ;;
    link|lin|li|l)
      shift; set dummy --mode link ${1+"$@"}; shift
      ;;
    uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
      shift; set dummy --mode uninstall ${1+"$@"}; shift
      ;;
    esac

    # Pass back the list of options.
    func_quote_for_eval ${1+"$@"}
    libtool_options_prep_result=$func_quote_for_eval_result
}
func_add_hook func_options_prep libtool_options_prep
# libtool_parse_options [ARG]...
# ---------------------------------
# Provide handling for libtool specific options.
libtool_parse_options ()
{
    $debug_cmd

    # Perform our own loop to consume as many options as possible in
    # each iteration.
    while test $# -gt 0; do
      _G_opt=$1
      shift
      case $_G_opt in
        --dry-run|--dryrun|-n)
                        opt_dry_run=:
                        ;;

        --config)       func_config ;;

        --dlopen|-dlopen)
                        # Accumulate -dlopen arguments, newline-separated.
                        # The embedded literal newline in the assignment
                        # below is intentional.
                        opt_dlopen="${opt_dlopen+$opt_dlopen
}$1"
                        shift
                        ;;

        --preserve-dup-deps)
                        opt_preserve_dup_deps=: ;;

        --features)     func_features ;;

        --finish)       set dummy --mode finish ${1+"$@"}; shift ;;

        --help)         opt_help=: ;;

        --help-all)     opt_help=': help-all' ;;

        --mode)         test $# = 0 && func_missing_arg $_G_opt && break
                        opt_mode=$1
                        case $1 in
                          # Valid mode arguments:
                          clean|compile|execute|finish|install|link|relink|uninstall) ;;

                          # Catch anything else as an error
                          *) func_error "invalid argument for $_G_opt"
                             exit_cmd=exit
                             break
                             ;;
                        esac
                        shift
                        ;;

        --no-silent|--no-quiet)
                        opt_quiet=false
                        func_append preserve_args " $_G_opt"
                        ;;

        --no-warnings|--no-warning|--no-warn)
                        opt_warning=false
                        func_append preserve_args " $_G_opt"
                        ;;

        --no-verbose)
                        opt_verbose=false
                        func_append preserve_args " $_G_opt"
                        ;;

        --silent|--quiet)
                        opt_quiet=:
                        opt_verbose=false
                        func_append preserve_args " $_G_opt"
                        ;;

        --tag)          test $# = 0 && func_missing_arg $_G_opt && break
                        opt_tag=$1
                        func_append preserve_args " $_G_opt $1"
                        func_enable_tag "$1"
                        shift
                        ;;

        --verbose|-v)   opt_quiet=false
                        opt_verbose=:
                        func_append preserve_args " $_G_opt"
                        ;;

        # An option not handled by this hook function:
        *)              set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
      esac
    done

    # save modified positional parameters for caller
    func_quote_for_eval ${1+"$@"}
    libtool_parse_options_result=$func_quote_for_eval_result
}
func_add_hook func_parse_options libtool_parse_options
# libtool_validate_options [ARG]...
# ---------------------------------
# Perform any sanity checks on option settings and/or unconsumed
# arguments.
libtool_validate_options ()
{
    # save first non-option argument
    if test 0 -lt $#; then
      nonopt=$1
      shift
    fi

    # preserve --debug
    test : = "$debug_cmd" || func_append preserve_args " --debug"

    case $host in
      # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452
      # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788
      *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2*)
        # don't eliminate duplications in $postdeps and $predeps
        opt_duplicate_compiler_generated_deps=:
        ;;
      *)
        opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
        ;;
    esac

    # Skip the expensive checks when only help output was requested.
    $opt_help || {
      # Sanity checks first:
      func_check_version_match

      test yes != "$build_libtool_libs" \
        && test yes != "$build_old_libs" \
        && func_fatal_configuration "not configured to build any kind of library"

      # Darwin sucks
      eval std_shrext=\"$shrext_cmds\"

      # Only execute mode is allowed to have -dlopen flags.
      if test -n "$opt_dlopen" && test execute != "$opt_mode"; then
        func_error "unrecognized option '-dlopen'"
        $ECHO "$help" 1>&2
        exit $EXIT_FAILURE
      fi

      # Change the help message to a mode-specific one.
      generic_help=$help
      help="Try '$progname --help --mode=$opt_mode' for more information."
    }

    # Pass back the unparsed argument list
    func_quote_for_eval ${1+"$@"}
    libtool_validate_options_result=$func_quote_for_eval_result
}
func_add_hook func_validate_options libtool_validate_options
# Process options as early as possible so that --help and --version
# can return quickly.
# Run the funclib option-parsing machinery (with the hooks registered
# above); the normalized argument list comes back in $func_options_result.
func_options ${1+"$@"}
eval set dummy "$func_options_result"; shift


## ----------- ##
##    Main.    ##
## ----------- ##

# Sentinel strings embedded in generated wrapper files so libtool can
# recognize them later (see e.g. func_ltwrapper_executable_p).
magic='%%%MAGIC variable%%%'
magic_exe='%%%MAGIC EXE variable%%%'

# Global variables.
extracted_archives=
extracted_serial=0

# If this variable is set in any of the actions, the command in it
# will be execed at the end.  This prevents here-documents from being
# left over by shells.
exec_cmd=
# A function that is used when there is no print builtin or printf.
# A function that is used when there is no print builtin or printf.
# The here-document emits $1 verbatim, one trailing newline added.
func_fallback_echo ()
{
    eval 'cat <<_LTECHO_EOF
$1
_LTECHO_EOF'
}
# func_lalib_p file
# True iff FILE is a libtool '.la' library or '.lo' object file.
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_lalib_p ()
{
    # Look for the "Generated by ... $PACKAGE" marker within the first
    # four lines of the file.
    test -f "$1" &&
      $SED -e 4q "$1" 2>/dev/null \
        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
}
# func_lalib_unsafe_p file
# True iff FILE is a libtool '.la' library or '.lo' object file.
# This function implements the same check as func_lalib_p without
# resorting to external programs. To this end, it redirects stdin and
# closes it afterwards, without saving the original file descriptor.
# As a safety measure, use it only where a negative result would be
# fatal anyway. Works if 'file' does not exist.
func_lalib_unsafe_p ()
{
    lalib_p=no
    # Redirect stdin from $1 (stashing the original stdin on fd 5) and
    # scan the first four lines for the generated-by marker, without
    # spawning sed/grep.
    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
      for lalib_p_l in 1 2 3 4
      do
        read lalib_p_line
        case $lalib_p_line in
          \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
        esac
      done
      # Restore the original stdin and close the saved descriptor.
      exec 0<&5 5<&-
    fi
    test yes = "$lalib_p"
}
# func_ltwrapper_script_p file
# True iff FILE is a libtool wrapper script
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_ltwrapper_script_p ()
{
    # Wrapper scripts carry the same generated-by header as .la files.
    func_lalib_p "$1"
}
# func_ltwrapper_executable_p file
# True iff FILE is a libtool wrapper executable
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
# True iff FILE is a libtool wrapper executable: probe FILE (appending
# '.exe' first unless it already ends that way) for the embedded
# $magic_exe marker string.
func_ltwrapper_executable_p ()
{
    case $1 in
      *.exe) func_ltwrapper_exec_suffix= ;;
      *)     func_ltwrapper_exec_suffix=.exe ;;
    esac
    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
}
# func_ltwrapper_scriptname file
# Assumes file is an ltwrapper_executable
# uses $file to determine the appropriate filename for a
# temporary ltwrapper_script.
func_ltwrapper_scriptname ()
{
    # Build <dir>/$objdir/<basename-minus-.exe>_ltshwrapper from $1.
    func_dirname_and_basename "$1" "" "."
    func_stripname '' '.exe' "$func_basename_result"
    func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper
}
# func_ltwrapper_p file
# True iff FILE is a libtool wrapper script or wrapper executable
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_ltwrapper_p ()
{
    # Either form of wrapper counts.
    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
}
# func_execute_cmds commands fail_cmd
# Execute tilde-delimited COMMANDS.
# If FAIL_CMD is given, eval that upon failure.
# FAIL_CMD may read-access the current command in variable CMD!
func_execute_cmds ()
{
    $debug_cmd

    # Commands in $1 are separated by '~'.  Each one is expanded with
    # the normal IFS restored, then shown/executed via func_show_eval.
    save_ifs=$IFS; IFS='~'
    for cmd in $1; do
      IFS=$sp$nl
      eval cmd=\"$cmd\"
      IFS=$save_ifs
      func_show_eval "$cmd" "${2-:}"
    done
    IFS=$save_ifs
}
# func_source file
# Source FILE, adding directory component if necessary.
# Note that it is not necessary on cygwin/mingw to append a dot to
# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
# behavior happens only for exec(3), not for open(2)! Also, sourcing
# 'FILE.' does not work on cygwin managed mounts.
# Source FILE, prefixing './' when the name carries no directory
# component, so that the dot command never searches $PATH for it.
func_source ()
{
    $debug_cmd

    _G_file=$1
    case $_G_file in
      */* | *\\*) ;;
      *) _G_file=./$_G_file ;;
    esac
    . "$_G_file"
}
# func_resolve_sysroot PATH
# Replace a leading = in PATH with a sysroot. Store the result into
# func_resolve_sysroot_result
# Replace a leading '=' in PATH with $lt_sysroot; any other PATH is
# passed through unchanged.  Result in func_resolve_sysroot_result.
func_resolve_sysroot ()
{
    case $1 in
      =*)
        func_stripname '=' '' "$1"
        func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
        ;;
      *)
        func_resolve_sysroot_result=$1
        ;;
    esac
}
# func_replace_sysroot PATH
# If PATH begins with the sysroot, replace it with = and
# store the result into func_replace_sysroot_result.
func_replace_sysroot ()
{
    # The '?*:' prefix in the pattern ensures $lt_sysroot is non-empty
    # before attempting the replacement.
    case $lt_sysroot:$1 in
      ?*:"$lt_sysroot"*)
        func_stripname "$lt_sysroot" '' "$1"
        func_replace_sysroot_result='='$func_stripname_result
        ;;
      *)
        # Including no sysroot.
        func_replace_sysroot_result=$1
        ;;
    esac
}
# func_infer_tag arg
# Infer tagged configuration to use if any are available and
# if one wasn't chosen via the "--tag" command line option.
# Only attempt this if the compiler in the base compile
# command doesn't match the default compiler.
# arg is usually of the form 'gcc ...'
func_infer_tag ()
{
    $debug_cmd

    if test -n "$available_tags" && test -z "$tagname"; then
      CC_quoted=
      for arg in $CC; do
        func_append_quoted CC_quoted "$arg"
      done
      CC_expanded=`func_echo_all $CC`
      CC_quoted_expanded=`func_echo_all $CC_quoted`
      case $@ in
      # Blanks in the command may have been stripped by the calling shell,
      # but not from the CC environment variable when configure was run.
      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
      # Blanks at the start of $base_compile will cause this to fail
      # if we don't check for them as well.
      *)
        # The base compiler does not match the default; try each tagged
        # configuration embedded in this script in turn.
        for z in $available_tags; do
          if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
            # Evaluate the configuration.
            eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
            CC_quoted=
            for arg in $CC; do
              # Double-quote args containing other shell metacharacters.
              func_append_quoted CC_quoted "$arg"
            done
            CC_expanded=`func_echo_all $CC`
            CC_quoted_expanded=`func_echo_all $CC_quoted`
            case "$@ " in
            " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
            " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
              # The compiler in the base compile command matches
              # the one in the tagged configuration.
              # Assume this is the tagged configuration we want.
              tagname=$z
              break
              ;;
            esac
          fi
        done
        # If $tagname still isn't set, then no tagged configuration
        # was found and let the user know that the "--tag" command
        # line option must be used.
        if test -z "$tagname"; then
          func_echo "unable to infer tagged configuration"
          func_fatal_error "specify a tag with '--tag'"
#       else
#         func_verbose "using $tagname tagged configuration"
        fi
        ;;
      esac
    fi
}
# func_write_libtool_object output_name pic_name nonpic_name
# Create a libtool object file (analogous to a ".la" file),
# but don't create it if we're doing a dry run.
func_write_libtool_object ()
{
write_libobj=$1
if test yes = "$build_libtool_libs"; then
write_lobj=\'$2\'
else
write_lobj=none
fi
if test yes = "$build_old_libs"; then
write_oldobj=\'$3\'
else
write_oldobj=none
fi
$opt_dry_run || {
cat >${write_libobj}T <<EOF
# $write_libobj - a libtool object file
# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# Name of the PIC object.
pic_object=$write_lobj
# Name of the non-PIC object
non_pic_object=$write_oldobj
EOF
$MV "${write_libobj}T" "$write_libobj"
}
}
##################################################
# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
##################################################
# func_convert_core_file_wine_to_w32 ARG
# Helper function used by file name conversion functions when $build is *nix,
# and $host is mingw, cygwin, or some other w32 environment. Relies on a
# correctly configured wine environment available, with the winepath program
# in $build's $PATH.
#
# ARG is the $build file name to be converted to w32 format.
# Result is available in $func_convert_core_file_wine_to_w32_result, and will
# be empty on error (or when ARG is empty)
func_convert_core_file_wine_to_w32 ()
{
    $debug_cmd

    func_convert_core_file_wine_to_w32_result=$1
    if test -n "$1"; then
      # Unfortunately, winepath does not exit with a non-zero error code, so we
      # are forced to check the contents of stdout.  On the other hand, if the
      # command is not found, the shell will set an exit code of 127 and print
      # *an error message* to stdout.  So we must check for both error code of
      # zero AND non-empty stdout, which explains the odd construction:
      func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
      if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then
        # Normalize forward slashes to backslashes in the result.
        func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
          $SED -e "$sed_naive_backslashify"`
      else
        func_convert_core_file_wine_to_w32_result=
      fi
    fi
}
# end: func_convert_core_file_wine_to_w32
# func_convert_core_path_wine_to_w32 ARG
# Helper function used by path conversion functions when $build is *nix, and
# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
# configured wine environment available, with the winepath program in $build's
# $PATH. Assumes ARG has no leading or trailing path separator characters.
#
# ARG is path to be converted from $build format to win32.
# Result is available in $func_convert_core_path_wine_to_w32_result.
# Unconvertible file (directory) names in ARG are skipped; if no directory names
# are convertible, then the result may be empty.
func_convert_core_path_wine_to_w32 ()
{
    $debug_cmd

    # unfortunately, winepath doesn't convert paths, only file names
    func_convert_core_path_wine_to_w32_result=
    if test -n "$1"; then
      # Split on ':' and convert each component individually; components
      # that fail to convert are silently dropped.
      oldIFS=$IFS
      IFS=:
      for func_convert_core_path_wine_to_w32_f in $1; do
        IFS=$oldIFS
        func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
        if test -n "$func_convert_core_file_wine_to_w32_result"; then
          if test -z "$func_convert_core_path_wine_to_w32_result"; then
            func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result
          else
            # Rejoin converted components with the w32 separator ';'.
            func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
          fi
        fi
      done
      IFS=$oldIFS
    fi
}
# end: func_convert_core_path_wine_to_w32
# func_cygpath ARGS...
# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when
# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
# (2), returns the Cygwin file name or path in func_cygpath_result (input
# file name or path is assumed to be in w32 format, as previously converted
# from $build's *nix or MSYS format). In case (3), returns the w32 file name
# or path in func_cygpath_result (input file name or path is assumed to be in
# Cygwin format). Returns an empty string on error.
#
# ARGS are passed to cygpath, with the last one being the file name or path to
# be converted.
#
# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
# environment variable; do not put it in $PATH.
func_cygpath ()
{
    $debug_cmd

    # LT_CYGPATH must name an existing cygpath binary; it is deliberately
    # looked up via this variable rather than $PATH.
    if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
      func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
      if test "$?" -ne 0; then
        # on failure, ensure result is empty
        func_cygpath_result=
      fi
    else
      func_cygpath_result=
      func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'"
    fi
}
#end: func_cygpath
# func_convert_core_msys_to_w32 ARG
# Convert file name or path ARG from MSYS format to w32 format. Return
# result in func_convert_core_msys_to_w32_result.
func_convert_core_msys_to_w32 ()
{
    $debug_cmd

    # awkward: cmd appends spaces to result
    # Let cmd.exe expand the MSYS name, strip its trailing blanks, and
    # normalize slashes to backslashes.
    func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
      $SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"`
}
#end: func_convert_core_msys_to_w32
# func_convert_file_check ARG1 ARG2
# Verify that ARG1 (a file name in $build format) was converted to $host
# format in ARG2. Otherwise, emit an error message, but continue (resetting
# func_to_host_file_result to ARG1).
func_convert_file_check ()
{
    $debug_cmd

    # Conversion produced nothing for a non-empty input: warn and fall
    # back to the unconverted name rather than aborting.
    if test -z "$2" && test -n "$1"; then
      func_error "Could not determine host file name corresponding to"
      func_error "  '$1'"
      func_error "Continuing, but uninstalled executables may not work."
      # Fallback:
      func_to_host_file_result=$1
    fi
}
# end func_convert_file_check
# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
# Verify that FROM_PATH (a path in $build format) was converted to $host
# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
# func_to_host_file_result to a simplistic fallback value (see below).
func_convert_path_check ()
{
    $debug_cmd

    if test -z "$4" && test -n "$3"; then
      func_error "Could not determine the host path corresponding to"
      func_error "  '$3'"
      func_error "Continuing, but uninstalled executables may not work."
      # Fallback.  This is a deliberately simplistic "conversion" and
      # should not be "improved".  See libtool.info.
      if test "x$1" != "x$2"; then
        # Just swap the path separator characters ($1 -> $2).
        lt_replace_pathsep_chars="s|$1|$2|g"
        func_to_host_path_result=`echo "$3" |
          $SED -e "$lt_replace_pathsep_chars"`
      else
        func_to_host_path_result=$3
      fi
    fi
}
# end func_convert_path_check
# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
# and appending REPL if ORIG matches BACKPAT.
func_convert_path_front_back_pathsep ()
{
    $debug_cmd

    # Re-attach a leading separator (as REPL) if the original had one...
    case $4 in
      $1 ) func_to_host_path_result=$3$func_to_host_path_result
        ;;
    esac

    # ...and likewise a trailing separator.
    case $4 in
      $2 ) func_append func_to_host_path_result "$3"
        ;;
    esac
}
# end func_convert_path_front_back_pathsep
##################################################
# $build to $host FILE NAME CONVERSION FUNCTIONS #
##################################################
# invoked via '$to_host_file_cmd ARG'
#
# In each case, ARG is the path to be converted from $build to $host format.
# Result will be available in $func_to_host_file_result.
# func_to_host_file ARG
# Converts the file name ARG from $build format to $host format. Return result
# in func_to_host_file_result.
func_to_host_file ()
{
    $debug_cmd

    # Dispatch through the configured conversion function "pointer".
    $to_host_file_cmd "$1"
}
# end func_to_host_file
# func_to_tool_file ARG LAZY
# converts the file name ARG from $build format to toolchain format. Return
# result in func_to_tool_file_result. If the conversion in use is listed
# in (the comma separated) LAZY, no conversion takes place.
func_to_tool_file ()
{
    $debug_cmd

    # The ',$2,' wrapping makes the comma-separated LAZY list matchable
    # with a simple case pattern.
    case ,$2, in
      *,"$to_tool_file_cmd",*)
        # Conversion listed as lazy: pass the name through unchanged.
        func_to_tool_file_result=$1
        ;;
      *)
        $to_tool_file_cmd "$1"
        func_to_tool_file_result=$func_to_host_file_result
        ;;
    esac
}
# end func_to_tool_file
# func_convert_file_noop ARG
# Copy ARG to func_to_host_file_result.
func_convert_file_noop ()
{
    # Identity conversion: $build and $host formats coincide.
    func_to_host_file_result=$1
}
# end func_convert_file_noop
# func_convert_file_msys_to_w32 ARG
# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
# conversion to w32 is not available inside the cwrapper. Returns result in
# func_to_host_file_result.
func_convert_file_msys_to_w32 ()
{
    $debug_cmd

    func_to_host_file_result=$1
    if test -n "$1"; then
      func_convert_core_msys_to_w32 "$1"
      func_to_host_file_result=$func_convert_core_msys_to_w32_result
    fi
    # Warn (and fall back to $1) if conversion produced nothing.
    func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_msys_to_w32
# func_convert_file_cygwin_to_w32 ARG
# Convert file name ARG from Cygwin to w32 format. Returns result in
# func_to_host_file_result.
func_convert_file_cygwin_to_w32 ()
{
    $debug_cmd

    func_to_host_file_result=$1
    if test -n "$1"; then
      # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
      # LT_CYGPATH in this case.
      func_to_host_file_result=`cygpath -m "$1"`
    fi
    func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_cygwin_to_w32
# func_convert_file_nix_to_w32 ARG
# Convert file name ARG from *nix to w32 format. Requires a wine environment
# and a working winepath. Returns result in func_to_host_file_result.
func_convert_file_nix_to_w32 ()
{
    $debug_cmd

    func_to_host_file_result=$1
    if test -n "$1"; then
      func_convert_core_file_wine_to_w32 "$1"
      func_to_host_file_result=$func_convert_core_file_wine_to_w32_result
    fi
    # Warn (and fall back to $1) if conversion produced nothing.
    func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_nix_to_w32
# func_convert_file_msys_to_cygwin ARG
# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
# Returns result in func_to_host_file_result.
func_convert_file_msys_to_cygwin ()
{
    $debug_cmd

    func_to_host_file_result=$1
    if test -n "$1"; then
      # Two-step conversion: MSYS -> w32, then w32 -> Cygwin via cygpath.
      func_convert_core_msys_to_w32 "$1"
      func_cygpath -u "$func_convert_core_msys_to_w32_result"
      func_to_host_file_result=$func_cygpath_result
    fi
    func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_msys_to_cygwin
# func_convert_file_nix_to_cygwin ARG
# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
# in func_to_host_file_result.
func_convert_file_nix_to_cygwin ()
{
    $debug_cmd

    func_to_host_file_result=$1
    if test -n "$1"; then
      # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
      func_convert_core_file_wine_to_w32 "$1"
      func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
      func_to_host_file_result=$func_cygpath_result
    fi
    func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_nix_to_cygwin
#############################################
# $build to $host PATH CONVERSION FUNCTIONS #
#############################################
# invoked via '$to_host_path_cmd ARG'
#
# In each case, ARG is the path to be converted from $build to $host format.
# The result will be available in $func_to_host_path_result.
#
# Path separators are also converted from $build format to $host format. If
# ARG begins or ends with a path separator character, it is preserved (but
# converted to $host format) on output.
#
# All path conversion functions are named using the following convention:
# file name conversion function : func_convert_file_X_to_Y ()
# path conversion function : func_convert_path_X_to_Y ()
# where, for any given $build/$host combination the 'X_to_Y' value is the
# same. If conversion functions are added for new $build/$host combinations,
# the two new functions must follow this pattern, or func_init_to_host_path_cmd
# will break.
# func_init_to_host_path_cmd
# Ensures that function "pointer" variable $to_host_path_cmd is set to the
# appropriate value, based on the value of $to_host_file_cmd.
to_host_path_cmd=
func_init_to_host_path_cmd ()
{
    $debug_cmd

    # Lazily derive the path converter name from the file converter:
    # func_convert_file_X_to_Y -> func_convert_path_X_to_Y.
    if test -z "$to_host_path_cmd"; then
      func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
      to_host_path_cmd=func_convert_path_$func_stripname_result
    fi
}
# func_to_host_path ARG
# Converts the path ARG from $build format to $host format. Return result
# in func_to_host_path_result.
func_to_host_path ()
{
    $debug_cmd

    # Resolve the converter "pointer" on first use, then dispatch.
    func_init_to_host_path_cmd
    $to_host_path_cmd "$1"
}
# end func_to_host_path
# func_convert_path_noop ARG
# Copy ARG to func_to_host_path_result.
func_convert_path_noop ()
{
    # Identity conversion: $build and $host formats coincide.
    func_to_host_path_result=$1
}
# end func_convert_path_noop
# func_convert_path_msys_to_w32 ARG
# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
# conversion to w32 is not available inside the cwrapper. Returns result in
# func_to_host_path_result.
func_convert_path_msys_to_w32 ()
{
    $debug_cmd

    func_to_host_path_result=$1
    if test -n "$1"; then
      # Remove leading and trailing path separator characters from ARG.  MSYS
      # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
      # and winepath ignores them completely.
      func_stripname : : "$1"
      func_to_host_path_tmp1=$func_stripname_result
      func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
      func_to_host_path_result=$func_convert_core_msys_to_w32_result
      # Warn on failure, then restore any leading/trailing separator
      # (converted from ':' to ';').
      func_convert_path_check : ";" \
        "$func_to_host_path_tmp1" "$func_to_host_path_result"
      func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
    fi
}
# end func_convert_path_msys_to_w32
# func_convert_path_cygwin_to_w32 ARG
# Convert path ARG from Cygwin to w32 format. Returns result in
# func_to_host_path_result.
func_convert_path_cygwin_to_w32 ()
{
  $debug_cmd

  # An empty ARG converts to itself.
  func_to_host_path_result=$1
  test -z "$1" && return 0

  # Strip the end separators first (see func_convert_path_msys_to_w32),
  # then let cygpath convert the whole ':'-separated list at once.
  func_stripname : : "$1"
  func_to_host_path_tmp1=$func_stripname_result
  func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`

  func_convert_path_check : ';' \
    "$func_to_host_path_tmp1" "$func_to_host_path_result"
  func_convert_path_front_back_pathsep ':*' '*:' ';' "$1"
}
# end func_convert_path_cygwin_to_w32
# func_convert_path_nix_to_w32 ARG
# Convert path ARG from *nix to w32 format. Requires a wine environment and
# a working winepath. Returns result in func_to_host_path_result.
func_convert_path_nix_to_w32 ()
{
  $debug_cmd

  # An empty ARG converts to itself.
  func_to_host_path_result=$1
  test -z "$1" && return 0

  # Strip the end separators first (see func_convert_path_msys_to_w32),
  # then convert the remaining list through winepath.
  func_stripname : : "$1"
  func_to_host_path_tmp1=$func_stripname_result

  func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
  func_to_host_path_result=$func_convert_core_path_wine_to_w32_result

  func_convert_path_check : ';' \
    "$func_to_host_path_tmp1" "$func_to_host_path_result"
  func_convert_path_front_back_pathsep ':*' '*:' ';' "$1"
}
# end func_convert_path_nix_to_w32
# func_convert_path_msys_to_cygwin ARG
# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
# Returns result in func_to_host_path_result.
func_convert_path_msys_to_cygwin ()
{
  $debug_cmd

  # An empty ARG converts to itself.
  func_to_host_path_result=$1
  test -z "$1" && return 0

  # Strip the end separators first (see func_convert_path_msys_to_w32).
  # Conversion goes in two hops: MSYS -> w32, then w32 -> Cygwin via the
  # LT_CYGPATH helper.
  func_stripname : : "$1"
  func_to_host_path_tmp1=$func_stripname_result

  func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
  func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
  func_to_host_path_result=$func_cygpath_result

  # Cygwin keeps ':' as the path separator, so restore ends with ':'.
  func_convert_path_check : : \
    "$func_to_host_path_tmp1" "$func_to_host_path_result"
  func_convert_path_front_back_pathsep ':*' '*:' : "$1"
}
# end func_convert_path_msys_to_cygwin
# func_convert_path_nix_to_cygwin ARG
# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in
# a wine environment, working winepath, and LT_CYGPATH set. Returns result in
# func_to_host_path_result.
func_convert_path_nix_to_cygwin ()
{
  $debug_cmd

  # An empty ARG converts to itself.
  func_to_host_path_result=$1
  test -z "$1" && return 0

  # Remove leading and trailing path separator characters from ARG; msys
  # behavior is inconsistent here (cygpath turns them into '.;' and ';.',
  # winepath ignores them completely), so the ends are handled below.
  func_stripname : : "$1"
  func_to_host_path_tmp1=$func_stripname_result

  # Two hops: *nix -> w32 via winepath, then w32 -> Cygwin via LT_CYGPATH.
  func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
  func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
  func_to_host_path_result=$func_cygpath_result

  func_convert_path_check : : \
    "$func_to_host_path_tmp1" "$func_to_host_path_result"
  func_convert_path_front_back_pathsep ':*' '*:' : "$1"
}
# end func_convert_path_nix_to_cygwin
# func_dll_def_p FILE
# True iff FILE is a Windows DLL '.def' file.
# Keep in sync with _LT_DLL_DEF_P in libtool.m4
func_dll_def_p ()
{
$debug_cmd
# Emit 'DEF' iff the first line of "$1" that is neither blank nor a ';'
# comment starts with EXPORTS or LIBRARY -- i.e. the file looks like a
# Windows module-definition (.def) file; the sed quits ('q') after that
# first significant line so only one line is ever examined.
func_dll_def_p_tmp=`$SED -n \
-e 's/^[ ]*//' \
-e '/^\(;.*\)*$/d' \
-e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \
-e q \
"$1"`
# Function status is the result: 0 (true) only when the marker was found.
test DEF = "$func_dll_def_p_tmp"
}
# func_mode_compile arg...
func_mode_compile ()
{
    $debug_cmd

    # Parse the compile-mode command line: accumulate the compiler command
    # in base_compile, track the source file in srcfile and the '-o' target
    # in libobj, then run the PIC and/or non-PIC compilations.
    base_compile=
    srcfile=$nonopt # always keep a non-empty value in "srcfile"
    suppress_opt=yes
    suppress_output=
    arg_mode=normal
    libobj=
    later=
    pie_flag=

    for arg
    do
      case $arg_mode in
      arg )
        # do not "continue". Instead, add this to base_compile
        lastarg=$arg
        arg_mode=normal
        ;;

      target )
        libobj=$arg
        arg_mode=normal
        continue
        ;;

      normal )
        # Accept any command-line options.
        case $arg in
        -o)
          test -n "$libobj" && \
            func_fatal_error "you cannot specify '-o' more than once"
          arg_mode=target
          continue
          ;;

        -pie | -fpie | -fPIE)
          func_append pie_flag " $arg"
          continue
          ;;

        -shared | -static | -prefer-pic | -prefer-non-pic)
          # Deferred: processed after parsing, see the $later loop below.
          func_append later " $arg"
          continue
          ;;

        -no-suppress)
          suppress_opt=no
          continue
          ;;

        -Xcompiler)
          arg_mode=arg # the next one goes into the "base_compile" arg list
          continue # The current "srcfile" will either be retained or
          ;; # replaced later. I would guess that would be a bug.

        -Wc,*)
          # Split the comma-separated flag list and quote each piece.
          func_stripname '-Wc,' '' "$arg"
          args=$func_stripname_result
          lastarg=
          save_ifs=$IFS; IFS=,
          for arg in $args; do
            IFS=$save_ifs
            func_append_quoted lastarg "$arg"
          done
          IFS=$save_ifs
          func_stripname ' ' '' "$lastarg"
          lastarg=$func_stripname_result

          # Add the arguments to base_compile.
          func_append base_compile " $lastarg"
          continue
          ;;

        *)
          # Accept the current argument as the source file.
          # The previous "srcfile" becomes the current argument.
          #
          lastarg=$srcfile
          srcfile=$arg
          ;;
        esac # case $arg
        ;;
      esac # case $arg_mode

      # Aesthetically quote the previous argument.
      func_append_quoted base_compile "$lastarg"
    done # for arg

    case $arg_mode in
    arg)
      # The user-visible option is '-Xcompiler' (the message used to say
      # '-Xcompile', which is not an option this mode accepts).
      func_fatal_error "you must specify an argument for -Xcompiler"
      ;;
    target)
      func_fatal_error "you must specify a target with '-o'"
      ;;
    *)
      # Get the name of the library object.
      test -z "$libobj" && {
        func_basename "$srcfile"
        libobj=$func_basename_result
      }
      ;;
    esac

    # Recognize several different file suffixes.
    # If the user specifies -o file.o, it is replaced with file.lo
    case $libobj in
    *.[cCFSifmso] | \
    *.ada | *.adb | *.ads | *.asm | \
    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
    *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
      func_xform "$libobj"
      libobj=$func_xform_result
      ;;
    esac

    case $libobj in
    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
    *)
      func_fatal_error "cannot determine name of library object from '$libobj'"
      ;;
    esac

    func_infer_tag $base_compile

    # Apply the deferred -shared/-static/-prefer-* options now that the
    # whole command line has been seen.
    for arg in $later; do
      case $arg in
      -shared)
        test yes = "$build_libtool_libs" \
          || func_fatal_configuration "cannot build a shared library"
        build_old_libs=no
        continue
        ;;

      -static)
        build_libtool_libs=no
        build_old_libs=yes
        continue
        ;;

      -prefer-pic)
        pic_mode=yes
        continue
        ;;

      -prefer-non-pic)
        pic_mode=no
        continue
        ;;
      esac
    done

    func_quote_for_eval "$libobj"
    test "X$libobj" != "X$func_quote_for_eval_result" \
      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \
      && func_warning "libobj name '$libobj' may not contain shell special characters."
    func_dirname_and_basename "$obj" "/" ""
    objname=$func_basename_result
    xdir=$func_dirname_result
    lobj=$xdir$objdir/$objname

    test -z "$base_compile" && \
      func_fatal_help "you must specify a compilation command"

    # Delete any leftover library objects.
    if test yes = "$build_old_libs"; then
      removelist="$obj $lobj $libobj ${libobj}T"
    else
      removelist="$lobj $libobj ${libobj}T"
    fi

    # On Cygwin there's no "real" PIC flag so we must build both object types
    case $host_os in
    cygwin* | mingw* | pw32* | os2* | cegcc*)
      pic_mode=default
      ;;
    esac
    if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then
      # non-PIC code in shared libraries is not supported
      pic_mode=default
    fi

    # Calculate the filename of the output object if compiler does
    # not support -o with -c
    if test no = "$compiler_c_o"; then
      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext
      lockfile=$output_obj.lock
    else
      output_obj=
      need_locks=no
      lockfile=
    fi

    # Lock this critical section if it is needed
    # We use this script file to make the link, it avoids creating a new file
    if test yes = "$need_locks"; then
      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
        func_echo "Waiting for $lockfile to be removed"
        sleep 2
      done
    elif test warn = "$need_locks"; then
      if test -f "$lockfile"; then
        $ECHO "\
*** ERROR, $lockfile exists and contains:
`cat $lockfile 2>/dev/null`
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
        $opt_dry_run || $RM $removelist
        exit $EXIT_FAILURE
      fi
      func_append removelist " $output_obj"
      $ECHO "$srcfile" > "$lockfile"
    fi

    $opt_dry_run || $RM $removelist
    func_append removelist " $lockfile"
    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15

    func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
    srcfile=$func_to_tool_file_result
    func_quote_for_eval "$srcfile"
    qsrcfile=$func_quote_for_eval_result

    # Only build a PIC object if we are building libtool libraries.
    if test yes = "$build_libtool_libs"; then
      # Without this assignment, base_compile gets emptied.
      fbsd_hideous_sh_bug=$base_compile

      if test no != "$pic_mode"; then
        command="$base_compile $qsrcfile $pic_flag"
      else
        # Don't build PIC code
        command="$base_compile $qsrcfile"
      fi

      func_mkdir_p "$xdir$objdir"

      if test -z "$output_obj"; then
        # Place PIC objects in $objdir
        func_append command " -o $lobj"
      fi

      func_show_eval_locale "$command" \
        'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'

      if test warn = "$need_locks" &&
         test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
        $ECHO "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
        $opt_dry_run || $RM $removelist
        exit $EXIT_FAILURE
      fi

      # Just move the object if needed, then go on to compile the next one
      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
        func_show_eval '$MV "$output_obj" "$lobj"' \
          'error=$?; $opt_dry_run || $RM $removelist; exit $error'
      fi

      # Allow error messages only from the first compilation.
      if test yes = "$suppress_opt"; then
        suppress_output=' >/dev/null 2>&1'
      fi
    fi

    # Only build a position-dependent object if we build old libraries.
    if test yes = "$build_old_libs"; then
      if test yes != "$pic_mode"; then
        # Don't build PIC code
        command="$base_compile $qsrcfile$pie_flag"
      else
        command="$base_compile $qsrcfile $pic_flag"
      fi
      if test yes = "$compiler_c_o"; then
        func_append command " -o $obj"
      fi

      # Suppress compiler output if we already did a PIC compilation.
      func_append command "$suppress_output"
      func_show_eval_locale "$command" \
        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'

      if test warn = "$need_locks" &&
         test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
        $ECHO "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
        $opt_dry_run || $RM $removelist
        exit $EXIT_FAILURE
      fi

      # Just move the object if needed
      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
        func_show_eval '$MV "$output_obj" "$obj"' \
          'error=$?; $opt_dry_run || $RM $removelist; exit $error'
      fi
    fi

    $opt_dry_run || {
      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"

      # Unlock the critical section if it was locked
      if test no != "$need_locks"; then
        removelist=$lockfile
        $RM "$lockfile"
      fi
    }

    exit $EXIT_SUCCESS
}
# Dispatch to compile mode now, unless we are only collecting help text.
$opt_help || {
test compile = "$opt_mode" && func_mode_compile ${1+"$@"}
}
func_mode_help ()
{
# Print the help text for the mode named by $opt_mode; an empty mode
# prints the generic usage extracted from this script's header comments.
# We need to display help for each of the modes.
case $opt_mode in
"")
# Generic help is extracted from the usage comments
# at the start of this file.
func_help
;;
clean)
$ECHO \
"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
Remove files from the build directory.
RM is the name of the program to use to delete files associated with each FILE
(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
to RM.
If FILE is a libtool library, object or program, all the files associated
with it are deleted. Otherwise, only FILE itself is deleted using RM."
;;
compile)
$ECHO \
"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
Compile a source file into a libtool library object.
This mode accepts the following additional options:
-o OUTPUT-FILE set the output file name to OUTPUT-FILE
-no-suppress do not suppress compiler output for multiple passes
-prefer-pic try to build PIC objects only
-prefer-non-pic try to build non-PIC objects only
-shared do not build a '.o' file suitable for static linking
-static only build a '.o' file suitable for static linking
-Wc,FLAG pass FLAG directly to the compiler
COMPILE-COMMAND is a command to be used in creating a 'standard' object file
from the given SOURCEFILE.
The output file name is determined by removing the directory component from
SOURCEFILE, then substituting the C source code suffix '.c' with the
library object suffix, '.lo'."
;;
execute)
$ECHO \
"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
Automatically set library path, then run a program.
This mode accepts the following additional options:
-dlopen FILE add the directory containing FILE to the library path
This mode sets the library path environment variable according to '-dlopen'
flags.
If any of the ARGS are libtool executable wrappers, then they are translated
into their corresponding uninstalled binary, and any of their required library
directories are added to the library path.
Then, COMMAND is executed, with ARGS as arguments."
;;
finish)
$ECHO \
"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
Complete the installation of libtool libraries.
Each LIBDIR is a directory that contains libtool libraries.
The commands that this mode executes may require superuser privileges. Use
the '--dry-run' option if you just want to see what would be executed."
;;
install)
$ECHO \
"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
Install executables or libraries.
INSTALL-COMMAND is the installation command. The first component should be
either the 'install' or 'cp' program.
The following components of INSTALL-COMMAND are treated specially:
-inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation
The rest of the components are interpreted as arguments to that command (only
BSD-compatible install options are recognized)."
;;
link)
$ECHO \
"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
Link object files or libraries together to form another library, or to
create an executable program.
LINK-COMMAND is a command using the C compiler that you would use to create
a program from several object files.
The following components of LINK-COMMAND are treated specially:
-all-static do not do any dynamic linking at all
-avoid-version do not add a version suffix if possible
-bindir BINDIR specify path to binaries directory (for systems where
libraries must be found in the PATH setting at runtime)
-dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime
-dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
-export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
-export-symbols SYMFILE
try to export only the symbols listed in SYMFILE
-export-symbols-regex REGEX
try to export only the symbols matching REGEX
-LLIBDIR search LIBDIR for required installed libraries
-lNAME OUTPUT-FILE requires the installed library libNAME
-module build a library that can dlopened
-no-fast-install disable the fast-install mode
-no-install link a not-installable executable
-no-undefined declare that a library does not refer to external symbols
-o OUTPUT-FILE create OUTPUT-FILE from the specified objects
-objectlist FILE Use a list of object files found in FILE to specify objects
-precious-files-regex REGEX
don't remove output files matching REGEX
-release RELEASE specify package release information
-rpath LIBDIR the created library will eventually be installed in LIBDIR
-R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
-shared only do dynamic linking of libtool libraries
-shrext SUFFIX override the standard shared library file extension
-static do not do any dynamic linking of uninstalled libtool libraries
-static-libtool-libs
do not do any dynamic linking of libtool libraries
-version-info CURRENT[:REVISION[:AGE]]
specify library version info [each variable defaults to 0]
-weak LIBNAME declare that the target provides the LIBNAME interface
-Wc,FLAG
-Xcompiler FLAG pass linker-specific FLAG directly to the compiler
-Wl,FLAG
-Xlinker FLAG pass linker-specific FLAG directly to the linker
-XCClinker FLAG pass link-specific FLAG to the compiler driver (CC)
All other options (arguments beginning with '-') are ignored.
Every other argument is treated as a filename. Files ending in '.la' are
treated as uninstalled libtool libraries, other files are standard or library
object files.
If the OUTPUT-FILE ends in '.la', then a libtool library is created,
only library objects ('.lo' files) may be specified, and '-rpath' is
required, except when creating a convenience library.
If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created
using 'ar' and 'ranlib', or on Windows using 'lib'.
If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file
is created, otherwise an executable program is created."
;;
uninstall)
$ECHO \
"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
Remove libraries from an installation directory.
RM is the name of the program to use to delete files associated with each FILE
(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
to RM.
If FILE is a libtool library, all the files associated with it are deleted.
Otherwise, only FILE itself is deleted using RM."
;;
*)
func_fatal_help "invalid operation mode '$opt_mode'"
;;
esac
# Trailing pointer to the other modes' help.
echo
$ECHO "Try '$progname --help' for more information about other modes."
}
# Now that we've collected a possible --mode arg, show help if necessary
# With '--help' alone, show the current mode's help; with '--help-all'
# ($opt_help is ': help-all'), stitch together every mode's help: the first
# sed pass builds a combined usage summary, the second pass reformats each
# mode's text and moves the bug-reporting footer to the end.
if $opt_help; then
if test : = "$opt_help"; then
func_mode_help
else
{
func_help noexit
for opt_mode in compile link execute install finish uninstall clean; do
func_mode_help
done
} | $SED -n '1p; 2,$s/^Usage:/ or: /p'
{
func_help noexit
for opt_mode in compile link execute install finish uninstall clean; do
echo
func_mode_help
done
} |
$SED '1d
/^When reporting/,/^Report/{
H
d
}
$x
/information about other modes/d
/more detailed .*MODE/d
s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
fi
exit $?
fi
# func_mode_execute arg...
func_mode_execute ()
{
$debug_cmd

# Run COMMAND with the library search path set up so that uninstalled
# libtool libraries and wrapper scripts can be used in place.  The real
# command line is left in exec_cmd for the caller to eval.
# The first argument is the command name.
cmd=$nonopt
test -z "$cmd" && \
func_fatal_help "you must specify a COMMAND"
# Handle -dlopen flags immediately.
for file in $opt_dlopen; do
test -f "$file" \
|| func_fatal_help "'$file' is not a file"
dir=
case $file in
*.la)
func_resolve_sysroot "$file"
file=$func_resolve_sysroot_result
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$file" \
|| func_fatal_help "'$lib' is not a valid libtool archive"
# Read the libtool library.
dlname=
library_names=
func_source "$file"
# Skip this library if it cannot be dlopened.
if test -z "$dlname"; then
# Warn if it was a shared library.
test -n "$library_names" && \
func_warning "'$file' was not linked with '-export-dynamic'"
continue
fi
func_dirname "$file" "" "."
dir=$func_dirname_result
# Prefer the uninstalled copy in $objdir, else the one beside the .la.
if test -f "$dir/$objdir/$dlname"; then
func_append dir "/$objdir"
else
if test ! -f "$dir/$dlname"; then
func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'"
fi
fi
;;
*.lo)
# Just add the directory containing the .lo file.
func_dirname "$file" "" "."
dir=$func_dirname_result
;;
*)
func_warning "'-dlopen' is ignored for non-libtool libraries and objects"
continue
;;
esac
# Get the absolute pathname.
absdir=`cd "$dir" && pwd`
test -n "$absdir" && dir=$absdir
# Now add the directory to shlibpath_var.
if eval "test -z \"\$$shlibpath_var\""; then
eval "$shlibpath_var=\"\$dir\""
else
eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
fi
done
# This variable tells wrapper scripts just to set shlibpath_var
# rather than running their programs.
libtool_execute_magic=$magic
# Check if any of the arguments is a wrapper script.
args=
for file
do
case $file in
-* | *.la | *.lo ) ;;
*)
# Do a test to see if this is really a libtool program.
if func_ltwrapper_script_p "$file"; then
func_source "$file"
# Transform arg to wrapped name.
file=$progdir/$program
elif func_ltwrapper_executable_p "$file"; then
func_ltwrapper_scriptname "$file"
func_source "$func_ltwrapper_scriptname_result"
# Transform arg to wrapped name.
file=$progdir/$program
fi
;;
esac
# Quote arguments (to preserve shell metacharacters).
func_append_quoted args "$file"
done
if $opt_dry_run; then
# Display what would be done.
if test -n "$shlibpath_var"; then
eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
echo "export $shlibpath_var"
fi
$ECHO "$cmd$args"
exit $EXIT_SUCCESS
else
if test -n "$shlibpath_var"; then
# Export the shlibpath_var.
eval "export $shlibpath_var"
fi
# Restore saved environment variables
for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
do
eval "if test \"\${save_$lt_var+set}\" = set; then
$lt_var=\$save_$lt_var; export $lt_var
else
$lt_unset $lt_var
fi"
done
# Now prepare to actually exec the command.
exec_cmd=\$cmd$args
fi
}
# Run this mode's handler if execute mode was selected.
test execute = "$opt_mode" && func_mode_execute ${1+"$@"}
# func_mode_finish arg...
func_mode_finish ()
{
$debug_cmd

# Complete the installation of libtool libraries: strip sysroot markers
# from installed .la files, run the configured finish_cmds/finish_eval for
# each LIBDIR, and print post-install advice for the user.
libs=
libdirs=
admincmds=
for opt in "$nonopt" ${1+"$@"}
do
if test -d "$opt"; then
func_append libdirs " $opt"
elif test -f "$opt"; then
if func_lalib_unsafe_p "$opt"; then
func_append libs " $opt"
else
func_warning "'$opt' is not a valid libtool archive"
fi
else
func_fatal_error "invalid argument '$opt'"
fi
done
if test -n "$libs"; then
if test -n "$lt_sysroot"; then
sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
else
sysroot_cmd=
fi
# Remove sysroot references
if $opt_dry_run; then
for lib in $libs; do
echo "removing references to $lt_sysroot and '=' prefixes from $lib"
done
else
# Rewrite each .la through a temp file so a failed sed cannot
# truncate the original.
tmpdir=`func_mktempdir`
for lib in $libs; do
$SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
> $tmpdir/tmp-la
mv -f $tmpdir/tmp-la $lib
done
${RM}r "$tmpdir"
fi
fi
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
for libdir in $libdirs; do
if test -n "$finish_cmds"; then
# Do each command in the finish commands.
func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
'"$cmd"'"'
fi
if test -n "$finish_eval"; then
# Do the single finish_eval.
eval cmds=\"$finish_eval\"
$opt_dry_run || eval "$cmds" || func_append admincmds "
$cmds"
fi
done
fi
# Exit here if they wanted silent mode.
$opt_quiet && exit $EXIT_SUCCESS
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
echo "----------------------------------------------------------------------"
echo "Libraries have been installed in:"
for libdir in $libdirs; do
$ECHO " $libdir"
done
echo
echo "If you ever happen to want to link against installed libraries"
echo "in a given directory, LIBDIR, you must either use libtool, and"
echo "specify the full pathname of the library, or use the '-LLIBDIR'"
echo "flag during linking and do at least one of the following:"
if test -n "$shlibpath_var"; then
echo " - add LIBDIR to the '$shlibpath_var' environment variable"
echo " during execution"
fi
if test -n "$runpath_var"; then
echo " - add LIBDIR to the '$runpath_var' environment variable"
echo " during linking"
fi
if test -n "$hardcode_libdir_flag_spec"; then
libdir=LIBDIR
eval flag=\"$hardcode_libdir_flag_spec\"
$ECHO " - use the '$flag' linker flag"
fi
if test -n "$admincmds"; then
$ECHO " - have your system administrator run these commands:$admincmds"
fi
if test -f /etc/ld.so.conf; then
echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'"
fi
echo
echo "See any operating system documentation about shared libraries for"
case $host in
solaris2.[6789]|solaris2.1[0-9])
echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
echo "pages."
;;
*)
echo "more information, such as the ld(1) and ld.so(8) manual pages."
;;
esac
echo "----------------------------------------------------------------------"
fi
exit $EXIT_SUCCESS
}
# Run this mode's handler if finish mode was selected.
test finish = "$opt_mode" && func_mode_finish ${1+"$@"}
# func_mode_install arg...
func_mode_install ()
{
$debug_cmd

# Install libtool libraries (.la), libtool objects (.lo), static archives
# and programs.  The wrapped install program is parsed from the command
# line, each FILE is dispatched on its suffix, and libraries that still
# need 'libtool --finish' are collected in future_libdirs/current_libdirs.
# There may be an optional sh(1) argument at the beginning of
# install_prog (especially on Windows NT).
if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" ||
# Allow the use of GNU shtool's install command.
case $nonopt in *shtool*) :;; *) false;; esac
then
# Aesthetically quote it.
func_quote_for_eval "$nonopt"
install_prog="$func_quote_for_eval_result "
arg=$1
shift
else
install_prog=
arg=$nonopt
fi
# The real first argument should be the name of the installation program.
# Aesthetically quote it.
func_quote_for_eval "$arg"
func_append install_prog "$func_quote_for_eval_result"
install_shared_prog=$install_prog
case " $install_prog " in
*[\\\ /]cp\ *) install_cp=: ;;
*) install_cp=false ;;
esac
# We need to accept at least all the BSD install flags.
dest=
files=
opts=
prev=
install_type=
isdir=false
stripme=
no_mode=:
for arg
do
arg2=
if test -n "$dest"; then
func_append files " $dest"
dest=$arg
continue
fi
case $arg in
-d) isdir=: ;;
-f)
if $install_cp; then :; else
prev=$arg
fi
;;
-g | -m | -o)
prev=$arg
;;
-s)
stripme=" -s"
continue
;;
-*)
;;
*)
# If the previous option needed an argument, then skip it.
if test -n "$prev"; then
if test X-m = "X$prev" && test -n "$install_override_mode"; then
arg2=$install_override_mode
no_mode=false
fi
prev=
else
dest=$arg
continue
fi
;;
esac
# Aesthetically quote the argument.
func_quote_for_eval "$arg"
func_append install_prog " $func_quote_for_eval_result"
if test -n "$arg2"; then
func_quote_for_eval "$arg2"
fi
func_append install_shared_prog " $func_quote_for_eval_result"
done
test -z "$install_prog" && \
func_fatal_help "you must specify an install program"
test -n "$prev" && \
func_fatal_help "the '$prev' option requires an argument"
if test -n "$install_override_mode" && $no_mode; then
if $install_cp; then :; else
func_quote_for_eval "$install_override_mode"
func_append install_shared_prog " -m $func_quote_for_eval_result"
fi
fi
if test -z "$files"; then
if test -z "$dest"; then
func_fatal_help "no file or destination specified"
else
func_fatal_help "you must specify a destination"
fi
fi
# Strip any trailing slash from the destination.
func_stripname '' '/' "$dest"
dest=$func_stripname_result
# Check to see that the destination is a directory.
test -d "$dest" && isdir=:
if $isdir; then
destdir=$dest
destname=
else
func_dirname_and_basename "$dest" "" "."
destdir=$func_dirname_result
destname=$func_basename_result
# Not a directory, so check to see that there is only one file specified.
set dummy $files; shift
test "$#" -gt 1 && \
func_fatal_help "'$dest' is not a directory"
fi
case $destdir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
for file in $files; do
case $file in
*.lo) ;;
*)
func_fatal_help "'$destdir' must be an absolute directory name"
;;
esac
done
;;
esac
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic=$magic
staticlibs=
future_libdirs=
current_libdirs=
for file in $files; do
# Do each installation.
case $file in
*.$libext)
# Do the static libraries later.
func_append staticlibs " $file"
;;
*.la)
func_resolve_sysroot "$file"
file=$func_resolve_sysroot_result
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$file" \
|| func_fatal_help "'$file' is not a valid libtool archive"
library_names=
old_library=
relink_command=
func_source "$file"
# Add the libdir to current_libdirs if it is the destination.
if test "X$destdir" = "X$libdir"; then
case "$current_libdirs " in
*" $libdir "*) ;;
*) func_append current_libdirs " $libdir" ;;
esac
else
# Note the libdir as a future libdir.
case "$future_libdirs " in
*" $libdir "*) ;;
*) func_append future_libdirs " $libdir" ;;
esac
fi
func_dirname "$file" "/" ""
dir=$func_dirname_result
func_append dir "$objdir"
if test -n "$relink_command"; then
# Determine the prefix the user has applied to our future dir.
inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
# Don't allow the user to place us outside of our expected
# location b/c this prevents finding dependent libraries that
# are installed to the same prefix.
# At present, this check doesn't affect windows .dll's that
# are installed into $libdir/../bin (currently, that works fine)
# but it's something to keep an eye on.
test "$inst_prefix_dir" = "$destdir" && \
func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir"
if test -n "$inst_prefix_dir"; then
# Stick the inst_prefix_dir data into the link command.
relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
else
relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
fi
func_warning "relinking '$file'"
func_show_eval "$relink_command" \
'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"'
fi
# See the names of the shared library.
set dummy $library_names; shift
if test -n "$1"; then
realname=$1
shift
# A relinked library was written to <realname>T by relink_command.
srcname=$realname
test -n "$relink_command" && srcname=${realname}T
# Install the shared library and build the symlinks.
func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
'exit $?'
tstripme=$stripme
case $host_os in
cygwin* | mingw* | pw32* | cegcc*)
# Never strip an import library.
case $realname in
*.dll.a)
tstripme=
;;
esac
;;
esac
if test -n "$tstripme" && test -n "$striplib"; then
func_show_eval "$striplib $destdir/$realname" 'exit $?'
fi
if test "$#" -gt 0; then
# Delete the old symlinks, and create new ones.
# Try 'ln -sf' first, because the 'ln' binary might depend on
# the symlink we replace! Solaris /bin/ln does not understand -f,
# so we also need to try rm && ln -s.
for linkname
do
test "$linkname" != "$realname" \
&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
done
fi
# Do each command in the postinstall commands.
lib=$destdir/$realname
func_execute_cmds "$postinstall_cmds" 'exit $?'
fi
# Install the pseudo-library for information purposes.
func_basename "$file"
name=$func_basename_result
instname=$dir/${name}i
func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
# Maybe install the static library, too.
test -n "$old_library" && func_append staticlibs " $dir/$old_library"
;;
*.lo)
# Install (i.e. copy) a libtool object.
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile=$destdir/$destname
else
func_basename "$file"
destfile=$func_basename_result
destfile=$destdir/$destfile
fi
# Deduce the name of the destination old-style object file.
case $destfile in
*.lo)
func_lo2o "$destfile"
staticdest=$func_lo2o_result
;;
*.$objext)
staticdest=$destfile
destfile=
;;
*)
func_fatal_help "cannot copy a libtool object to '$destfile'"
;;
esac
# Install the libtool object if requested.
test -n "$destfile" && \
func_show_eval "$install_prog $file $destfile" 'exit $?'
# Install the old object if enabled.
if test yes = "$build_old_libs"; then
# Deduce the name of the old-style object file.
func_lo2o "$file"
staticobj=$func_lo2o_result
func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
fi
exit $EXIT_SUCCESS
;;
*)
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile=$destdir/$destname
else
func_basename "$file"
destfile=$func_basename_result
destfile=$destdir/$destfile
fi
# If the file is missing, and there is a .exe on the end, strip it
# because it is most likely a libtool script we actually want to
# install
stripped_ext=
case $file in
*.exe)
if test ! -f "$file"; then
func_stripname '' '.exe' "$file"
file=$func_stripname_result
stripped_ext=.exe
fi
;;
esac
# Do a test to see if this is really a libtool program.
case $host in
*cygwin* | *mingw*)
if func_ltwrapper_executable_p "$file"; then
func_ltwrapper_scriptname "$file"
wrapper=$func_ltwrapper_scriptname_result
else
func_stripname '' '.exe' "$file"
wrapper=$func_stripname_result
fi
;;
*)
wrapper=$file
;;
esac
if func_ltwrapper_script_p "$wrapper"; then
notinst_deplibs=
relink_command=
func_source "$wrapper"
# Check the variables that should have been set.
test -z "$generated_by_libtool_version" && \
func_fatal_error "invalid libtool wrapper script '$wrapper'"
finalize=:
for lib in $notinst_deplibs; do
# Check to see that each library is installed.
libdir=
if test -f "$lib"; then
func_source "$lib"
fi
libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'`
if test -n "$libdir" && test ! -f "$libfile"; then
func_warning "'$lib' has not been installed in '$libdir'"
finalize=false
fi
done
# Re-source the wrapper: the func_source of each lib above may have
# clobbered relink_command.
relink_command=
func_source "$wrapper"
outputname=
if test no = "$fast_install" && test -n "$relink_command"; then
$opt_dry_run || {
if $finalize; then
tmpdir=`func_mktempdir`
func_basename "$file$stripped_ext"
file=$func_basename_result
outputname=$tmpdir/$file
# Replace the output file specification.
relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
$opt_quiet || {
func_quote_for_expand "$relink_command"
eval "func_echo $func_quote_for_expand_result"
}
if eval "$relink_command"; then :
else
func_error "error: relink '$file' with the above command before installing it"
$opt_dry_run || ${RM}r "$tmpdir"
continue
fi
file=$outputname
else
func_warning "cannot relink '$file'"
fi
}
else
# Install the binary that we compiled earlier.
file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
fi
fi
# remove .exe since cygwin /usr/bin/install will append another
# one anyway
case $install_prog,$host in
*/usr/bin/install*,*cygwin*)
case $file:$destfile in
*.exe:*.exe)
# this is ok
;;
*.exe:*)
destfile=$destfile.exe
;;
*:*.exe)
func_stripname '' '.exe' "$destfile"
destfile=$func_stripname_result
;;
esac
;;
esac
func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
$opt_dry_run || if test -n "$outputname"; then
${RM}r "$tmpdir"
fi
;;
esac
done
for file in $staticlibs; do
func_basename "$file"
name=$func_basename_result
# Set up the ranlib parameters.
oldlib=$destdir/$name
func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
tool_oldlib=$func_to_tool_file_result
func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
if test -n "$stripme" && test -n "$old_striplib"; then
func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
fi
# Do each command in the postinstall commands.
func_execute_cmds "$old_postinstall_cmds" 'exit $?'
done
test -n "$future_libdirs" && \
func_warning "remember to run '$progname --finish$future_libdirs'"
if test -n "$current_libdirs"; then
# Maybe just do a dry run.
$opt_dry_run && current_libdirs=" -n$current_libdirs"
exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs'
else
exit $EXIT_SUCCESS
fi
}
# Run this mode's handler if install mode was selected.
test install = "$opt_mode" && func_mode_install ${1+"$@"}
# func_generate_dlsyms outputname originator pic_p
# Extract symbols from dlprefiles and create ${outputname}S.o with
# a dlpreopen symbol table.  The table maps symbol names to addresses so
# that lt_dlopen can emulate dlopen for preloaded modules.  On success the
# @SYMFILE@ placeholder in $compile_command/$finalize_command is replaced
# by the compiled symbol object (plus a .def file on win32 hosts).
func_generate_dlsyms ()
{
$debug_cmd
my_outputname=$1
my_originator=$2
my_pic_p=${3-false}
# Mangle ORIGINATOR into a legal C identifier fragment for the generated
# lt_<prefix>_LTX_preloaded_symbols table name.
my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'`
my_dlsyms=
# A symbol table is only needed when something is dlpreopened (or the
# program dlopens itself), and can only be built when we know how to list
# global symbols on this platform.
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
if test -n "$NM" && test -n "$global_symbol_pipe"; then
my_dlsyms=${my_outputname}S.c
else
func_error "not configured to extract global symbols from dlpreopened files"
fi
fi
if test -n "$my_dlsyms"; then
case $my_dlsyms in
"") ;;
*.c)
# Discover the nlist of each of the dlfiles.
nlist=$output_objdir/$my_outputname.nm
func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
# Parse the name list into a source file.
func_verbose "creating $output_objdir/$my_dlsyms"
# Everything between the quote after this $ECHO and the closing quote is
# emitted verbatim (after shell expansion) as C source.
$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
/* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */
/* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */
#ifdef __cplusplus
extern \"C\" {
#endif
#if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
#endif
/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE
/* DATA imports from DLLs on WIN32 can't be const, because runtime
relocations are performed -- see ld's documentation on pseudo-relocs. */
# define LT_DLSYM_CONST
#elif defined __osf__
/* This system does not cope well with relocations in const data. */
# define LT_DLSYM_CONST
#else
# define LT_DLSYM_CONST const
#endif
#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
/* External symbol declarations for the compiler. */\
"
if test yes = "$dlself"; then
func_verbose "generating symbol list for '$output'"
$opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
# Add our own program objects to the symbol list.
progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
for progfile in $progfiles; do
func_to_tool_file "$progfile" func_convert_file_msys_to_w32
func_verbose "extracting global C symbols from '$func_to_tool_file_result'"
$opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
done
# Drop symbols the user asked to exclude.
if test -n "$exclude_expsyms"; then
$opt_dry_run || {
eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
}
fi
# Keep only symbols matching the export regex, if one was given.
if test -n "$export_symbols_regex"; then
$opt_dry_run || {
eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
}
fi
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
export_symbols=$output_objdir/$outputname.exp
$opt_dry_run || {
$RM $export_symbols
eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
case $host in
*cygwin* | *mingw* | *cegcc* )
eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
;;
esac
}
else
# An explicit export list exists: turn it into anchored regexes and keep
# only the matching nlist entries.
$opt_dry_run || {
eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
case $host in
*cygwin* | *mingw* | *cegcc* )
eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
;;
esac
}
fi
fi
# Append the symbols of every -dlpreopened file to the name list.
for dlprefile in $dlprefiles; do
func_verbose "extracting global C symbols from '$dlprefile'"
func_basename "$dlprefile"
name=$func_basename_result
case $host in
*cygwin* | *mingw* | *cegcc* )
# if an import library, we need to obtain dlname
if func_win32_import_lib_p "$dlprefile"; then
func_tr_sh "$dlprefile"
eval "curr_lafile=\$libfile_$func_tr_sh_result"
dlprefile_dlbasename=
if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
# Use subshell, to avoid clobbering current variable values
dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
if test -n "$dlprefile_dlname"; then
func_basename "$dlprefile_dlname"
dlprefile_dlbasename=$func_basename_result
else
# no lafile. user explicitly requested -dlpreopen <import library>.
$sharedlib_from_linklib_cmd "$dlprefile"
dlprefile_dlbasename=$sharedlib_from_linklib_result
fi
fi
$opt_dry_run || {
if test -n "$dlprefile_dlbasename"; then
eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
else
func_warning "Could not compute DLL name from $name"
eval '$ECHO ": $name " >> "$nlist"'
fi
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
$SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
}
else # not an import lib
$opt_dry_run || {
eval '$ECHO ": $name " >> "$nlist"'
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
}
fi
;;
*)
$opt_dry_run || {
eval '$ECHO ": $name " >> "$nlist"'
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
}
;;
esac
done
$opt_dry_run || {
# Make sure we have at least an empty file.
test -f "$nlist" || : > "$nlist"
if test -n "$exclude_expsyms"; then
$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
$MV "$nlist"T "$nlist"
fi
# Try sorting and uniquifying the output.
if $GREP -v "^: " < "$nlist" |
if sort -k 3 </dev/null >/dev/null 2>&1; then
sort -k 3
else
sort +2
fi |
uniq > "$nlist"S; then
:
else
$GREP -v "^: " < "$nlist" > "$nlist"S
fi
if test -f "$nlist"S; then
eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
else
echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
fi
func_show_eval '$RM "${nlist}I"'
# ${nlist}I lists symbols that must be resolved at runtime (imports).
if test -n "$global_symbol_to_import"; then
eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I'
fi
echo >> "$output_objdir/$my_dlsyms" "\
/* The mapping between symbol names and symbols. */
typedef struct {
const char *name;
void *address;
} lt_dlsymlist;
extern LT_DLSYM_CONST lt_dlsymlist
lt_${my_prefix}_LTX_preloaded_symbols[];\
"
if test -s "$nlist"I; then
echo >> "$output_objdir/$my_dlsyms" "\
static void lt_syminit(void)
{
LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols;
for (; symbol->name; ++symbol)
{"
$SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms"
echo >> "$output_objdir/$my_dlsyms" "\
}
}"
fi
# FIX: this previously emitted '(void *) <_syminit' (the '&lt' of
# '&lt_syminit' had been mangled into '<'), which is invalid C.  The
# @INIT@ table entry must carry the address of the lt_syminit helper
# generated just above.
if test -s "$nlist"I; then
echo >> "$output_objdir/$my_dlsyms" "\
{\"@INIT@\", (void *) &lt_syminit},"
fi
case $need_lib_prefix in
no)
eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
;;
*)
eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
;;
esac
echo >> "$output_objdir/$my_dlsyms" "\
{0, (void *) 0}
};
/* This works around a problem in FreeBSD linker */
#ifdef FREEBSD_WORKAROUND
static const void *lt_preloaded_setup() {
return lt_${my_prefix}_LTX_preloaded_symbols;
}
#endif
#ifdef __cplusplus
}
#endif\
"
} # !$opt_dry_run
# Decide whether to compile the symbol table with PIC flags.
pic_flag_for_symtable=
case "$compile_command " in
*" -static "*) ;;
*)
case $host in
# compiling the symbol table file with pic_flag works around
# a FreeBSD bug that causes programs to crash when -lm is
# linked before any other PIC object. But we must not use
# pic_flag when linking with -static. The problem exists in
# FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
*-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
*-*-hpux*)
pic_flag_for_symtable=" $pic_flag" ;;
*)
$my_pic_p && pic_flag_for_symtable=" $pic_flag"
;;
esac
;;
esac
# PIE flags are dropped: the symtab object is linked into the final
# binary, and -pie/-fpie would be wrong for it.
symtab_cflags=
for arg in $LTCFLAGS; do
case $arg in
-pie | -fpie | -fPIE) ;;
*) func_append symtab_cflags " $arg" ;;
esac
done
# Now compile the dynamic symbol file.
func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
# Clean up the generated files.
func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"'
# Transform the symbol file into the correct name.
symfileobj=$output_objdir/${my_outputname}S.$objext
case $host in
*cygwin* | *mingw* | *cegcc* )
if test -f "$output_objdir/$my_outputname.def"; then
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
else
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
fi
;;
*)
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
;;
esac
;;
*)
func_fatal_error "unknown suffix for '$my_dlsyms'"
;;
esac
else
# We keep going just in case the user didn't refer to
# lt_preloaded_symbols. The linker will fail if global_symbol_pipe
# really was required.
# Nullify the symbol file.
compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
fi
}
# func_cygming_gnu_implib_p ARG
# This predicate returns with zero status (TRUE) if
# ARG is a GNU/binutils-style import library.  Returns
# with nonzero status (FALSE) otherwise.
func_cygming_gnu_implib_p ()
{
  $debug_cmd

  # Dump the symbols of ARG (converted to a toolchain-usable path) and
  # look for the marker symbols binutils writes into every import
  # library it creates: '_head_<lib>_<a|dll>' or '<lib>_<a|dll>_iname'.
  func_to_tool_file "$1" func_convert_file_msys_to_w32
  func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" |
    eval "$global_symbol_pipe" |
    $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
  test -n "$func_cygming_gnu_implib_tmp"
}
# func_cygming_ms_implib_p ARG
# This predicate returns with zero status (TRUE) if
# ARG is an MS-style import library.  Returns
# with nonzero status (FALSE) otherwise.
func_cygming_ms_implib_p ()
{
  $debug_cmd

  # MS-generated import libraries always carry the special
  # _NULL_IMPORT_DESCRIPTOR symbol; its presence identifies them.
  func_to_tool_file "$1" func_convert_file_msys_to_w32
  func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" |
    eval "$global_symbol_pipe" |
    $GREP '_NULL_IMPORT_DESCRIPTOR'`
  test -n "$func_cygming_ms_implib_tmp"
}
# func_win32_libid arg
# return the library type of file 'arg'
#
# Need a lot of goo to handle *both* DLLs and import libs
# Has to be a shell function in order to 'eat' the argument
# that is supplied when $file_magic_command is called.
# Despite the name, also deal with 64 bit binaries.
func_win32_libid ()
{
$debug_cmd
# Default result when no case below matches.
win32_libid_type=unknown
# Classify ARG with file(1); -L follows symlinks.
win32_fileres=`file -L $1 2>/dev/null`
case $win32_fileres in
*ar\ archive\ import\ library*) # definitely import
win32_libid_type="x86 archive import"
;;
*ar\ archive*) # could be an import, or static
# Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
$EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
# It is a PE archive; decide import vs. static by probing its symbols.
case $nm_interface in
"MS dumpbin")
# MS dumpbin cannot scan members the way GNU nm does; fall back to
# the import-library predicates defined elsewhere in this file.
if func_cygming_ms_implib_p "$1" ||
func_cygming_gnu_implib_p "$1"
then
win32_nmres=import
else
win32_nmres=
fi
;;
*)
# GNU nm: print 'import' and stop as soon as an import-section
# symbol (' I ') shows up among the first 100 symbols.
func_to_tool_file "$1" func_convert_file_msys_to_w32
win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
$SED -n -e '
1,100{
/ I /{
s|.*|import|
p
q
}
}'`
;;
esac
case $win32_nmres in
import*) win32_libid_type="x86 archive import";;
*) win32_libid_type="x86 archive static";;
esac
fi
;;
*DLL*)
win32_libid_type="x86 DLL"
;;
*executable*) # but shell scripts are "executable" too...
case $win32_fileres in
*MS\ Windows\ PE\ Intel*)
win32_libid_type="x86 DLL"
;;
esac
;;
esac
# The caller ($file_magic_command) consumes the result on stdout.
$ECHO "$win32_libid_type"
}
# func_cygming_dll_for_implib ARG
#
# Platform-specific function to extract the
# name of the DLL associated with the specified
# import library ARG.
# Invoked by eval'ing the libtool variable
# $sharedlib_from_linklib_cmd
# Result is available in the variable
# $sharedlib_from_linklib_result
func_cygming_dll_for_implib ()
{
$debug_cmd
# --identify prints the DLL name recorded in the import library;
# --identify-strict makes dlltool fail if the archive members disagree
# about that name instead of picking one arbitrarily.
sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
}
# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
#
# The is the core of a fallback implementation of a
# platform-specific function to extract the name of the
# DLL associated with the specified import library LIBNAME.
#
# SECTION_NAME is either .idata$6 or .idata$7, depending
# on the platform and compiler that created the implib.
#
# Echos the name of the DLL associated with the
# specified import library.
func_cygming_dll_for_implib_fallback_core ()
{
$debug_cmd
# Escape regex metacharacters in SECTION_NAME so it can be embedded
# literally in the sed address below.
match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
# Hex-dump the requested .idata section of every archive member, then
# strip objdump's framing: mark each member's section header, drop
# uninteresting lines, and cut the 43-column offset/hex prefix so only
# the stringified section contents remain.
$OBJDUMP -s --section "$1" "$2" 2>/dev/null |
$SED '/^Contents of section '"$match_literal"':/{
# Place marker at beginning of archive member dllname section
s/.*/====MARK====/
p
d
}
# These lines can sometimes be longer than 43 characters, but
# are always uninteresting
/:[ ]*file format pe[i]\{,1\}-/d
/^In archive [^:]*:/d
# Ensure marker is printed
/^====MARK====/p
# Remove all lines with less than 43 characters
/^.\{43\}/!d
# From remaining lines, remove first 43 characters
s/^.\{43\}//' |
# Second pass: join all dump lines belonging to one member into a single
# line, drop the marker, and trim the trailing padding dots/whitespace.
$SED -n '
# Join marker and all lines until next marker into a single line
/^====MARK====/ b para
H
$ b para
b
:para
x
s/\n//g
# Remove the marker
s/^====MARK====//
# Remove trailing dots and whitespace
s/[\. \t]*$//
# Print
/./p' |
# we now have a list, one entry per line, of the stringified
# contents of the appropriate section of all members of the
# archive that possess that section. Heuristic: eliminate
# all those that have a first or second character that is
# a '.' (that is, objdump's representation of an unprintable
# character.) This should work for all archives with less than
# 0x302f exports -- but will fail for DLLs whose name actually
# begins with a literal '.' or a single character followed by
# a '.'.
#
# Of those that remain, print the first one.
$SED -e '/^\./d;/^.\./d;q'
}
# func_cygming_dll_for_implib_fallback ARG
# Platform-specific function to extract the
# name of the DLL associated with the specified
# import library ARG.
#
# This fallback implementation is for use when $DLLTOOL
# does not support the --identify-strict option.
# Invoked by eval'ing the libtool variable
# $sharedlib_from_linklib_cmd
# Result is available in the variable
# $sharedlib_from_linklib_result
func_cygming_dll_for_implib_fallback ()
{
  $debug_cmd

  # The DLL name lives in a different .idata section depending on which
  # tool created the import library; probe each known flavor and scrape
  # the matching section.  Unrecognized flavors yield an empty result.
  sharedlib_from_linklib_result=
  if func_cygming_gnu_implib_p "$1"; then
    # binutils import library: dllname is stored in .idata$7
    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
  elif func_cygming_ms_implib_p "$1"; then
    # ms-generated import library: dllname is stored in .idata$6
    sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
  fi
}
# func_extract_an_archive dir oldlib
#
# Unpack every member of OLDLIB into DIR, serializing with a lock file
# when the platform requires it, and abort on duplicate member names.
func_extract_an_archive ()
{
  $debug_cmd

  f_ex_an_ar_dir=$1; shift
  f_ex_an_ar_oldlib=$1

  # Some platforms cannot run concurrent extractions safely; take a
  # hard-link lock on the archive first and wait for any holder.
  if test yes = "$lock_old_archive_extraction"; then
    lockfile=$f_ex_an_ar_oldlib.lock
    until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
      func_echo "Waiting for $lockfile to be removed"
      sleep 2
    done
  fi
  # Extract in a subshell so the cd does not leak; drop the lock even
  # when the archiver fails.
  func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
    'stat=$?; rm -f "$lockfile"; exit $stat'
  test yes != "$lock_old_archive_extraction" ||
    $opt_dry_run || rm -f "$lockfile"

  # Duplicate member names would silently overwrite one another during
  # extraction; detect that condition and bail out.
  ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1) ||
    func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
}
# func_extract_archives gentop oldlib ...
#
# Extract the objects of every OLDLIB into its own subdirectory of
# GENTOP and return the collected object list in
# $func_extract_archives_result.  On Darwin, fat (multi-architecture)
# archives are split per-arch with lipo, extracted separately, and the
# per-arch objects re-merged so the extracted objects remain fat.
func_extract_archives ()
{
$debug_cmd
my_gentop=$1; shift
my_oldlibs=${1+"$@"}
my_oldobjs=
my_xlib=
my_xabs=
my_xdir=
for my_xlib in $my_oldlibs; do
# Extract the objects.
# Absolute path to the archive ($AR runs after we cd elsewhere).
case $my_xlib in
[\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;;
*) my_xabs=`pwd`"/$my_xlib" ;;
esac
func_basename "$my_xlib"
my_xlib=$func_basename_result
my_xlib_u=$my_xlib
# Uniquify the extraction directory in case two archives share a
# basename: prefix 'lt<serial>-' until the name is unused.
while :; do
case " $extracted_archives " in
*" $my_xlib_u "*)
func_arith $extracted_serial + 1
extracted_serial=$func_arith_result
my_xlib_u=lt$extracted_serial-$my_xlib ;;
*) break ;;
esac
done
extracted_archives="$extracted_archives $my_xlib_u"
my_xdir=$my_gentop/$my_xlib_u
func_mkdir_p "$my_xdir"
case $host in
*-darwin*)
func_verbose "Extracting $my_xabs"
# Do not bother doing anything if just a dry run
$opt_dry_run || {
darwin_orig_dir=`pwd`
cd $my_xdir || exit $?
darwin_archive=$my_xabs
darwin_curdir=`pwd`
func_basename "$darwin_archive"
darwin_base_archive=$func_basename_result
# lipo -info reports the architectures of a fat archive; empty
# output (or lipo failure) means the archive is thin.
darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
if test -n "$darwin_arches"; then
darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
darwin_arch=
func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
# Split out one thin archive per architecture and extract each in
# its own unfat-$$ staging directory.
for darwin_arch in $darwin_arches; do
func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch"
$LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive"
cd "unfat-$$/$darwin_base_archive-$darwin_arch"
func_extract_an_archive "`pwd`" "$darwin_base_archive"
cd "$darwin_curdir"
$RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive"
done # $darwin_arches
## Okay now we've a bunch of thin objects, gotta fatten them up :)
darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
darwin_file=
darwin_files=
# Re-merge the per-arch copies of each object back into a fat object.
for darwin_file in $darwin_filelist; do
darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
$LIPO -create -output "$darwin_file" $darwin_files
done # $darwin_filelist
$RM -rf unfat-$$
cd "$darwin_orig_dir"
else
# Thin archive: a plain extraction is sufficient.
cd $darwin_orig_dir
func_extract_an_archive "$my_xdir" "$my_xabs"
fi # $darwin_arches
} # !$opt_dry_run
;;
*)
func_extract_an_archive "$my_xdir" "$my_xabs"
;;
esac
# Collect every object just extracted from this archive.
my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
done
func_extract_archives_result=$my_oldobjs
}
# func_emit_wrapper [arg=no]
#
# Emit a libtool wrapper script on stdout.
# Don't directly open a file because we may want to
# incorporate the script contents within a cygwin/mingw
# wrapper executable. Must ONLY be called from within
# func_mode_link because it depends on a number of variables
# set therein.
#
# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
# variable will take. If 'yes', then the emitted script
# will assume that the directory where it is stored is
# the $objdir directory. This is a cygwin/mingw-specific
# behavior.
#
# NOTE: the multi-line double-quoted arguments to $ECHO below ARE the
# wrapper script being emitted (after shell expansion); their content
# must not be edited as if it were this file's own code.
func_emit_wrapper ()
{
func_emit_wrapper_arg1=${1-no}
$ECHO "\
#! $SHELL
# $output - temporary wrapper script for $objdir/$outputname
# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
#
# The $output program cannot be directly executed until all the libtool
# libraries that it depends on are installed.
#
# This wrapper script should never be moved out of the build directory.
# If it is, it will not operate correctly.
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
sed_quote_subst='$sed_quote_subst'
# Be Bourne compatible
if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
emulate sh
NULLCMD=:
# Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
# is contrary to our usage. Disable this feature.
alias -g '\${1+\"\$@\"}'='\"\$@\"'
setopt NO_GLOB_SUBST
else
case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
fi
BIN_SH=xpg4; export BIN_SH # for Tru64
DUALCASE=1; export DUALCASE # for MKS sh
# The HP-UX ksh and POSIX shell print the target directory to stdout
# if CDPATH is set.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
relink_command=\"$relink_command\"
# This environment variable determines our operation mode.
if test \"\$libtool_install_magic\" = \"$magic\"; then
# install mode needs the following variables:
generated_by_libtool_version='$macro_version'
notinst_deplibs='$notinst_deplibs'
else
# When we are sourced in execute mode, \$file and \$ECHO are already set.
if test \"\$libtool_execute_magic\" != \"$magic\"; then
file=\"\$0\""
# $ECHO may itself contain shell metacharacters; backslashify it so the
# emitted wrapper can safely reuse it as its own ECHO.
qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
$ECHO "\
# A function that is used when there is no print builtin or printf.
func_fallback_echo ()
{
eval 'cat <<_LTECHO_EOF
\$1
_LTECHO_EOF'
}
ECHO=\"$qECHO\"
fi
# Very basic option parsing. These options are (a) specific to
# the libtool wrapper, (b) are identical between the wrapper
# /script/ and the wrapper /executable/ that is used only on
# windows platforms, and (c) all begin with the string "--lt-"
# (application programs are unlikely to have options that match
# this pattern).
#
# There are only two supported options: --lt-debug and
# --lt-dump-script. There is, deliberately, no --lt-help.
#
# The first argument to this parsing function should be the
# script's $0 value, followed by "$@".
lt_option_debug=
func_parse_lt_options ()
{
lt_script_arg0=\$0
shift
for lt_opt
do
case \"\$lt_opt\" in
--lt-debug) lt_option_debug=1 ;;
--lt-dump-script)
lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
cat \"\$lt_dump_D/\$lt_dump_F\"
exit 0
;;
--lt-*)
\$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
exit 1
;;
esac
done
# Print the debug banner immediately:
if test -n \"\$lt_option_debug\"; then
echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2
fi
}
# Used when --lt-debug. Prints its arguments to stdout
# (redirection is the responsibility of the caller)
func_lt_dump_args ()
{
lt_dump_args_N=1;
for lt_arg
do
\$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\"
lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
done
}
# Core function for launching the target application
func_exec_program_core ()
{
"
# The exec line differs per host: plain-windows hosts join $progdir and
# $program with a backslash instead of a slash.
case $host in
# Backslashes separate directories on plain windows
*-*-mingw | *-*-os2* | *-cegcc*)
$ECHO "\
if test -n \"\$lt_option_debug\"; then
\$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2
func_lt_dump_args \${1+\"\$@\"} 1>&2
fi
exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
"
;;
*)
$ECHO "\
if test -n \"\$lt_option_debug\"; then
\$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2
func_lt_dump_args \${1+\"\$@\"} 1>&2
fi
exec \"\$progdir/\$program\" \${1+\"\$@\"}
"
;;
esac
$ECHO "\
\$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
exit 1
}
# A function to encapsulate launching the target application
# Strips options in the --lt-* namespace from \$@ and
# launches target application with the remaining arguments.
func_exec_program ()
{
case \" \$* \" in
*\\ --lt-*)
for lt_wr_arg
do
case \$lt_wr_arg in
--lt-*) ;;
*) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
esac
shift
done ;;
esac
func_exec_program_core \${1+\"\$@\"}
}
# Parse options
func_parse_lt_options \"\$0\" \${1+\"\$@\"}
# Find the directory that this script lives in.
thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
test \"x\$thisdir\" = \"x\$file\" && thisdir=.
# Follow symbolic links until we get to the real thisdir.
file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
while test -n \"\$file\"; do
destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
# If there was a directory component, then change thisdir.
if test \"x\$destdir\" != \"x\$file\"; then
case \"\$destdir\" in
[\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
*) thisdir=\"\$thisdir/\$destdir\" ;;
esac
fi
file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
done
# Usually 'no', except on cygwin/mingw when embedded into
# the cwrapper.
WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
# special case for '.'
if test \"\$thisdir\" = \".\"; then
thisdir=\`pwd\`
fi
# remove .libs from thisdir
case \"\$thisdir\" in
*[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
$objdir ) thisdir=. ;;
esac
fi
# Try to get the absolute directory name.
absdir=\`cd \"\$thisdir\" && pwd\`
test -n \"\$absdir\" && thisdir=\"\$absdir\"
"
# With fast_install the real binary is named lt-$outputname inside
# $objdir and may need to be relinked on first use; otherwise it is
# simply $outputname inside $objdir.
if test yes = "$fast_install"; then
$ECHO "\
program=lt-'$outputname'$exeext
progdir=\"\$thisdir/$objdir\"
if test ! -f \"\$progdir/\$program\" ||
{ file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\
test \"X\$file\" != \"X\$progdir/\$program\"; }; then
file=\"\$\$-\$program\"
if test ! -d \"\$progdir\"; then
$MKDIR \"\$progdir\"
else
$RM \"\$progdir/\$file\"
fi"
$ECHO "\
# relink executable if necessary
if test -n \"\$relink_command\"; then
if relink_command_output=\`eval \$relink_command 2>&1\`; then :
else
$ECHO \"\$relink_command_output\" >&2
$RM \"\$progdir/\$file\"
exit 1
fi
fi
$MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
{ $RM \"\$progdir/\$program\";
$MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
$RM \"\$progdir/\$file\"
fi"
else
$ECHO "\
program='$outputname'
progdir=\"\$thisdir/$objdir\"
"
fi
$ECHO "\
if test -f \"\$progdir/\$program\"; then"
# fixup the dll searchpath if we need to.
#
# Fix the DLL searchpath if we need to. Do this before prepending
# to shlibpath, because on Windows, both are PATH and uninstalled
# libraries must come first.
if test -n "$dllsearchpath"; then
$ECHO "\
# Add the dll search path components to the executable PATH
PATH=$dllsearchpath:\$PATH
"
fi
# Export our shlibpath_var if we have one.
if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
$ECHO "\
# Add our own library path to $shlibpath_var
$shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
# Some systems cannot cope with colon-terminated $shlibpath_var
# The second colon is a workaround for a bug in BeOS R4 sed
$shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
export $shlibpath_var
"
fi
$ECHO "\
if test \"\$libtool_execute_magic\" != \"$magic\"; then
# Run the actual program with our arguments.
func_exec_program \${1+\"\$@\"}
fi
else
# The program doesn't exist.
\$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2
\$ECHO \"This script is just a wrapper for \$program.\" 1>&2
\$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
exit 1
fi
fi\
"
}
# func_emit_cwrapperexe_src
# emit the source code for a wrapper executable on stdout
# Must ONLY be called from within func_mode_link because
# it depends on a number of variable set therein.
func_emit_cwrapperexe_src ()
{
cat <<EOF
/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
Generated by $PROGRAM (GNU $PACKAGE) $VERSION
The $output program cannot be directly executed until all the libtool
libraries that it depends on are installed.
This wrapper executable should never be moved out of the build directory.
If it is, it will not operate correctly.
*/
EOF
cat <<"EOF"
#ifdef _MSC_VER
# define _CRT_SECURE_NO_DEPRECATE 1
#endif
#include <stdio.h>
#include <stdlib.h>
#ifdef _MSC_VER
# include <direct.h>
# include <process.h>
# include <io.h>
#else
# include <unistd.h>
# include <stdint.h>
# ifdef __CYGWIN__
# include <io.h>
# endif
#endif
#include <malloc.h>
#include <stdarg.h>
#include <assert.h>
#include <string.h>
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
/* declarations of non-ANSI functions */
#if defined __MINGW32__
# ifdef __STRICT_ANSI__
int _putenv (const char *);
# endif
#elif defined __CYGWIN__
# ifdef __STRICT_ANSI__
char *realpath (const char *, char *);
int putenv (char *);
int setenv (const char *, const char *, int);
# endif
/* #elif defined other_platform || defined ... */
#endif
/* portability defines, excluding path handling macros */
#if defined _MSC_VER
# define setmode _setmode
# define stat _stat
# define chmod _chmod
# define getcwd _getcwd
# define putenv _putenv
# define S_IXUSR _S_IEXEC
#elif defined __MINGW32__
# define setmode _setmode
# define stat _stat
# define chmod _chmod
# define getcwd _getcwd
# define putenv _putenv
#elif defined __CYGWIN__
# define HAVE_SETENV
# define FOPEN_WB "wb"
/* #elif defined other platforms ... */
#endif
#if defined PATH_MAX
# define LT_PATHMAX PATH_MAX
#elif defined MAXPATHLEN
# define LT_PATHMAX MAXPATHLEN
#else
# define LT_PATHMAX 1024
#endif
#ifndef S_IXOTH
# define S_IXOTH 0
#endif
#ifndef S_IXGRP
# define S_IXGRP 0
#endif
/* path handling portability macros */
#ifndef DIR_SEPARATOR
# define DIR_SEPARATOR '/'
# define PATH_SEPARATOR ':'
#endif
#if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \
defined __OS2__
# define HAVE_DOS_BASED_FILE_SYSTEM
# define FOPEN_WB "wb"
# ifndef DIR_SEPARATOR_2
# define DIR_SEPARATOR_2 '\\'
# endif
# ifndef PATH_SEPARATOR_2
# define PATH_SEPARATOR_2 ';'
# endif
#endif
#ifndef DIR_SEPARATOR_2
# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
#else /* DIR_SEPARATOR_2 */
# define IS_DIR_SEPARATOR(ch) \
(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
#endif /* DIR_SEPARATOR_2 */
#ifndef PATH_SEPARATOR_2
# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
#else /* PATH_SEPARATOR_2 */
# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
#endif /* PATH_SEPARATOR_2 */
#ifndef FOPEN_WB
# define FOPEN_WB "w"
#endif
#ifndef _O_BINARY
# define _O_BINARY 0
#endif
#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
#define XFREE(stale) do { \
if (stale) { free (stale); stale = 0; } \
} while (0)
#if defined LT_DEBUGWRAPPER
static int lt_debug = 1;
#else
static int lt_debug = 0;
#endif
const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
void *xmalloc (size_t num);
char *xstrdup (const char *string);
const char *base_name (const char *name);
char *find_executable (const char *wrapper);
char *chase_symlinks (const char *pathspec);
int make_executable (const char *path);
int check_executable (const char *path);
char *strendzap (char *str, const char *pat);
void lt_debugprintf (const char *file, int line, const char *fmt, ...);
void lt_fatal (const char *file, int line, const char *message, ...);
static const char *nonnull (const char *s);
static const char *nonempty (const char *s);
void lt_setenv (const char *name, const char *value);
char *lt_extend_str (const char *orig_value, const char *add, int to_end);
void lt_update_exe_path (const char *name, const char *value);
void lt_update_lib_path (const char *name, const char *value);
char **prepare_spawn (char **argv);
void lt_dump_script (FILE *f);
EOF
cat <<EOF
volatile const char * MAGIC_EXE = "$magic_exe";
const char * LIB_PATH_VARNAME = "$shlibpath_var";
EOF
if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
func_to_host_path "$temp_rpath"
cat <<EOF
const char * LIB_PATH_VALUE = "$func_to_host_path_result";
EOF
else
cat <<"EOF"
const char * LIB_PATH_VALUE = "";
EOF
fi
if test -n "$dllsearchpath"; then
func_to_host_path "$dllsearchpath:"
cat <<EOF
const char * EXE_PATH_VARNAME = "PATH";
const char * EXE_PATH_VALUE = "$func_to_host_path_result";
EOF
else
cat <<"EOF"
const char * EXE_PATH_VARNAME = "";
const char * EXE_PATH_VALUE = "";
EOF
fi
if test yes = "$fast_install"; then
cat <<EOF
const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
EOF
else
cat <<EOF
const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
EOF
fi
cat <<"EOF"
#define LTWRAPPER_OPTION_PREFIX "--lt-"
static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
static const char *debug_opt = LTWRAPPER_OPTION_PREFIX "debug";
int
main (int argc, char *argv[])
{
char **newargz;
int newargc;
char *tmp_pathspec;
char *actual_cwrapper_path;
char *actual_cwrapper_name;
char *target_name;
char *lt_argv_zero;
int rval = 127;
int i;
program_name = (char *) xstrdup (base_name (argv[0]));
newargz = XMALLOC (char *, (size_t) argc + 1);
/* very simple arg parsing; don't want to rely on getopt
* also, copy all non cwrapper options to newargz, except
* argz[0], which is handled differently
*/
newargc=0;
for (i = 1; i < argc; i++)
{
if (STREQ (argv[i], dumpscript_opt))
{
EOF
case $host in
*mingw* | *cygwin* )
# make stdout use "unix" line endings
echo " setmode(1,_O_BINARY);"
;;
esac
cat <<"EOF"
lt_dump_script (stdout);
return 0;
}
if (STREQ (argv[i], debug_opt))
{
lt_debug = 1;
continue;
}
if (STREQ (argv[i], ltwrapper_option_prefix))
{
/* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
namespace, but it is not one of the ones we know about and
             have already dealt with, above (including dump-script), then
report an error. Otherwise, targets might begin to believe
they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
namespace. The first time any user complains about this, we'll
need to make LTWRAPPER_OPTION_PREFIX a configure-time option
or a configure.ac-settable value.
*/
lt_fatal (__FILE__, __LINE__,
"unrecognized %s option: '%s'",
ltwrapper_option_prefix, argv[i]);
}
/* otherwise ... */
newargz[++newargc] = xstrdup (argv[i]);
}
newargz[++newargc] = NULL;
EOF
cat <<EOF
/* The GNU banner must be the first non-error debug message */
lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE) $VERSION\n");
EOF
cat <<"EOF"
lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
tmp_pathspec = find_executable (argv[0]);
if (tmp_pathspec == NULL)
lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
lt_debugprintf (__FILE__, __LINE__,
"(main) found exe (before symlink chase) at: %s\n",
tmp_pathspec);
actual_cwrapper_path = chase_symlinks (tmp_pathspec);
lt_debugprintf (__FILE__, __LINE__,
"(main) found exe (after symlink chase) at: %s\n",
actual_cwrapper_path);
XFREE (tmp_pathspec);
actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
strendzap (actual_cwrapper_path, actual_cwrapper_name);
/* wrapper name transforms */
strendzap (actual_cwrapper_name, ".exe");
tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
XFREE (actual_cwrapper_name);
actual_cwrapper_name = tmp_pathspec;
tmp_pathspec = 0;
/* target_name transforms -- use actual target program name; might have lt- prefix */
target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
strendzap (target_name, ".exe");
tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
XFREE (target_name);
target_name = tmp_pathspec;
tmp_pathspec = 0;
lt_debugprintf (__FILE__, __LINE__,
"(main) libtool target name: %s\n",
target_name);
EOF
cat <<EOF
newargz[0] =
XMALLOC (char, (strlen (actual_cwrapper_path) +
strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
strcpy (newargz[0], actual_cwrapper_path);
strcat (newargz[0], "$objdir");
strcat (newargz[0], "/");
EOF
cat <<"EOF"
/* stop here, and copy so we don't have to do this twice */
tmp_pathspec = xstrdup (newargz[0]);
/* do NOT want the lt- prefix here, so use actual_cwrapper_name */
strcat (newargz[0], actual_cwrapper_name);
/* DO want the lt- prefix here if it exists, so use target_name */
lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
XFREE (tmp_pathspec);
tmp_pathspec = NULL;
EOF
case $host_os in
mingw*)
cat <<"EOF"
{
char* p;
while ((p = strchr (newargz[0], '\\')) != NULL)
{
*p = '/';
}
while ((p = strchr (lt_argv_zero, '\\')) != NULL)
{
*p = '/';
}
}
EOF
;;
esac
cat <<"EOF"
XFREE (target_name);
XFREE (actual_cwrapper_path);
XFREE (actual_cwrapper_name);
lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
  lt_setenv ("DUALCASE", "1");  /* for MKS sh */
/* Update the DLL searchpath. EXE_PATH_VALUE ($dllsearchpath) must
be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
because on Windows, both *_VARNAMEs are PATH but uninstalled
libraries must come first. */
lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
nonnull (lt_argv_zero));
for (i = 0; i < newargc; i++)
{
lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
i, nonnull (newargz[i]));
}
EOF
case $host_os in
mingw*)
cat <<"EOF"
/* execv doesn't actually work on mingw as expected on unix */
newargz = prepare_spawn (newargz);
rval = (int) _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
if (rval == -1)
{
/* failed to start process */
lt_debugprintf (__FILE__, __LINE__,
"(main) failed to launch target \"%s\": %s\n",
lt_argv_zero, nonnull (strerror (errno)));
return 127;
}
return rval;
EOF
;;
*)
cat <<"EOF"
execv (lt_argv_zero, newargz);
return rval; /* =127, but avoids unused variable warning */
EOF
;;
esac
cat <<"EOF"
}
void *
xmalloc (size_t num)
{
void *p = (void *) malloc (num);
if (!p)
lt_fatal (__FILE__, __LINE__, "memory exhausted");
return p;
}
char *
xstrdup (const char *string)
{
return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
string) : NULL;
}
const char *
base_name (const char *name)
{
const char *base;
#if defined HAVE_DOS_BASED_FILE_SYSTEM
/* Skip over the disk name in MSDOS pathnames. */
if (isalpha ((unsigned char) name[0]) && name[1] == ':')
name += 2;
#endif
for (base = name; *name; name++)
if (IS_DIR_SEPARATOR (*name))
base = name + 1;
return base;
}
int
check_executable (const char *path)
{
struct stat st;
lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
nonempty (path));
if ((!path) || (!*path))
return 0;
if ((stat (path, &st) >= 0)
&& (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
return 1;
else
return 0;
}
int
make_executable (const char *path)
{
int rval = 0;
struct stat st;
lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
nonempty (path));
if ((!path) || (!*path))
return 0;
if (stat (path, &st) >= 0)
{
rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
}
return rval;
}
/* Searches for the full path of the wrapper. Returns
newly allocated full path name if found, NULL otherwise
Does not chase symlinks, even on platforms that support them.
*/
char *
find_executable (const char *wrapper)
{
int has_slash = 0;
const char *p;
const char *p_next;
/* static buffer for getcwd */
char tmp[LT_PATHMAX + 1];
size_t tmp_len;
char *concat_name;
lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
nonempty (wrapper));
if ((wrapper == NULL) || (*wrapper == '\0'))
return NULL;
/* Absolute path? */
#if defined HAVE_DOS_BASED_FILE_SYSTEM
if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
{
concat_name = xstrdup (wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
else
{
#endif
if (IS_DIR_SEPARATOR (wrapper[0]))
{
concat_name = xstrdup (wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
#if defined HAVE_DOS_BASED_FILE_SYSTEM
}
#endif
for (p = wrapper; *p; p++)
if (*p == '/')
{
has_slash = 1;
break;
}
if (!has_slash)
{
/* no slashes; search PATH */
const char *path = getenv ("PATH");
if (path != NULL)
{
for (p = path; *p; p = p_next)
{
const char *q;
size_t p_len;
for (q = p; *q; q++)
if (IS_PATH_SEPARATOR (*q))
break;
p_len = (size_t) (q - p);
p_next = (*q == '\0' ? q : q + 1);
if (p_len == 0)
{
/* empty path: current directory */
if (getcwd (tmp, LT_PATHMAX) == NULL)
lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
nonnull (strerror (errno)));
tmp_len = strlen (tmp);
concat_name =
XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, tmp, tmp_len);
concat_name[tmp_len] = '/';
strcpy (concat_name + tmp_len + 1, wrapper);
}
else
{
concat_name =
XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, p, p_len);
concat_name[p_len] = '/';
strcpy (concat_name + p_len + 1, wrapper);
}
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
}
/* not found in PATH; assume curdir */
}
/* Relative path | not found in path: prepend cwd */
if (getcwd (tmp, LT_PATHMAX) == NULL)
lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
nonnull (strerror (errno)));
tmp_len = strlen (tmp);
concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, tmp, tmp_len);
concat_name[tmp_len] = '/';
strcpy (concat_name + tmp_len + 1, wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
return NULL;
}
char *
chase_symlinks (const char *pathspec)
{
#ifndef S_ISLNK
return xstrdup (pathspec);
#else
char buf[LT_PATHMAX];
struct stat s;
char *tmp_pathspec = xstrdup (pathspec);
char *p;
int has_symlinks = 0;
while (strlen (tmp_pathspec) && !has_symlinks)
{
lt_debugprintf (__FILE__, __LINE__,
"checking path component for symlinks: %s\n",
tmp_pathspec);
if (lstat (tmp_pathspec, &s) == 0)
{
if (S_ISLNK (s.st_mode) != 0)
{
has_symlinks = 1;
break;
}
/* search backwards for last DIR_SEPARATOR */
p = tmp_pathspec + strlen (tmp_pathspec) - 1;
while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
p--;
if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
{
/* no more DIR_SEPARATORS left */
break;
}
*p = '\0';
}
else
{
lt_fatal (__FILE__, __LINE__,
"error accessing file \"%s\": %s",
tmp_pathspec, nonnull (strerror (errno)));
}
}
XFREE (tmp_pathspec);
if (!has_symlinks)
{
return xstrdup (pathspec);
}
tmp_pathspec = realpath (pathspec, buf);
if (tmp_pathspec == 0)
{
lt_fatal (__FILE__, __LINE__,
"could not follow symlinks for %s", pathspec);
}
return xstrdup (tmp_pathspec);
#endif
}
char *
strendzap (char *str, const char *pat)
{
size_t len, patlen;
assert (str != NULL);
assert (pat != NULL);
len = strlen (str);
patlen = strlen (pat);
if (patlen <= len)
{
str += len - patlen;
if (STREQ (str, pat))
*str = '\0';
}
return str;
}
void
lt_debugprintf (const char *file, int line, const char *fmt, ...)
{
va_list args;
if (lt_debug)
{
(void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
va_start (args, fmt);
(void) vfprintf (stderr, fmt, args);
va_end (args);
}
}
static void
lt_error_core (int exit_status, const char *file,
int line, const char *mode,
const char *message, va_list ap)
{
fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
vfprintf (stderr, message, ap);
fprintf (stderr, ".\n");
if (exit_status >= 0)
exit (exit_status);
}
void
lt_fatal (const char *file, int line, const char *message, ...)
{
va_list ap;
va_start (ap, message);
lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
va_end (ap);
}
static const char *
nonnull (const char *s)
{
return s ? s : "(null)";
}
static const char *
nonempty (const char *s)
{
return (s && !*s) ? "(empty)" : nonnull (s);
}
void
lt_setenv (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_setenv) setting '%s' to '%s'\n",
nonnull (name), nonnull (value));
{
#ifdef HAVE_SETENV
/* always make a copy, for consistency with !HAVE_SETENV */
char *str = xstrdup (value);
setenv (name, str, 1);
#else
size_t len = strlen (name) + 1 + strlen (value) + 1;
char *str = XMALLOC (char, len);
sprintf (str, "%s=%s", name, value);
if (putenv (str) != EXIT_SUCCESS)
{
XFREE (str);
}
#endif
}
}
char *
lt_extend_str (const char *orig_value, const char *add, int to_end)
{
char *new_value;
if (orig_value && *orig_value)
{
size_t orig_value_len = strlen (orig_value);
size_t add_len = strlen (add);
new_value = XMALLOC (char, add_len + orig_value_len + 1);
if (to_end)
{
strcpy (new_value, orig_value);
strcpy (new_value + orig_value_len, add);
}
else
{
strcpy (new_value, add);
strcpy (new_value + add_len, orig_value);
}
}
else
{
new_value = xstrdup (add);
}
return new_value;
}
void
lt_update_exe_path (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
nonnull (name), nonnull (value));
if (name && *name && value && *value)
{
char *new_value = lt_extend_str (getenv (name), value, 0);
/* some systems can't cope with a ':'-terminated path #' */
size_t len = strlen (new_value);
while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
{
new_value[--len] = '\0';
}
lt_setenv (name, new_value);
XFREE (new_value);
}
}
void
lt_update_lib_path (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
nonnull (name), nonnull (value));
if (name && *name && value && *value)
{
char *new_value = lt_extend_str (getenv (name), value, 0);
lt_setenv (name, new_value);
XFREE (new_value);
}
}
EOF
case $host_os in
mingw*)
cat <<"EOF"
/* Prepares an argument vector before calling spawn().
Note that spawn() does not by itself call the command interpreter
(getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
GetVersionEx(&v);
v.dwPlatformId == VER_PLATFORM_WIN32_NT;
}) ? "cmd.exe" : "command.com").
Instead it simply concatenates the arguments, separated by ' ', and calls
CreateProcess(). We must quote the arguments since Win32 CreateProcess()
interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
special way:
- Space and tab are interpreted as delimiters. They are not treated as
delimiters if they are surrounded by double quotes: "...".
- Unescaped double quotes are removed from the input. Their only effect is
that within double quotes, space and tab are treated like normal
characters.
- Backslashes not followed by double quotes are not special.
- But 2*n+1 backslashes followed by a double quote become
n backslashes followed by a double quote (n >= 0):
\" -> "
\\\" -> \"
\\\\\" -> \\"
*/
#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
char **
prepare_spawn (char **argv)
{
size_t argc;
char **new_argv;
size_t i;
/* Count number of arguments. */
for (argc = 0; argv[argc] != NULL; argc++)
;
/* Allocate new argument vector. */
new_argv = XMALLOC (char *, argc + 1);
/* Put quoted arguments into the new argument vector. */
for (i = 0; i < argc; i++)
{
const char *string = argv[i];
if (string[0] == '\0')
new_argv[i] = xstrdup ("\"\"");
else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
{
int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
size_t length;
unsigned int backslashes;
const char *s;
char *quoted_string;
char *p;
length = 0;
backslashes = 0;
if (quote_around)
length++;
for (s = string; *s != '\0'; s++)
{
char c = *s;
if (c == '"')
length += backslashes + 1;
length++;
if (c == '\\')
backslashes++;
else
backslashes = 0;
}
if (quote_around)
length += backslashes + 1;
quoted_string = XMALLOC (char, length + 1);
p = quoted_string;
backslashes = 0;
if (quote_around)
*p++ = '"';
for (s = string; *s != '\0'; s++)
{
char c = *s;
if (c == '"')
{
unsigned int j;
for (j = backslashes + 1; j > 0; j--)
*p++ = '\\';
}
*p++ = c;
if (c == '\\')
backslashes++;
else
backslashes = 0;
}
if (quote_around)
{
unsigned int j;
for (j = backslashes; j > 0; j--)
*p++ = '\\';
*p++ = '"';
}
*p = '\0';
new_argv[i] = quoted_string;
}
else
new_argv[i] = (char *) string;
}
new_argv[argc] = NULL;
return new_argv;
}
EOF
;;
esac
cat <<"EOF"
void lt_dump_script (FILE* f)
{
EOF
func_emit_wrapper yes |
$SED -n -e '
s/^\(.\{79\}\)\(..*\)/\1\
\2/
h
s/\([\\"]\)/\\\1/g
s/$/\\n/
s/\([^\n]*\).*/ fputs ("\1", f);/p
g
D'
cat <<"EOF"
}
EOF
}
# end: func_emit_cwrapperexe_src
# func_win32_import_lib_p ARG
# True if ARG is an import lib, as indicated by $file_magic_cmd
func_win32_import_lib_p ()
{
    $debug_cmd

    # Probe ARG with the configured file-magic command, keeping only the
    # first ten lines of output; the function succeeds exactly when that
    # text mentions "import" somewhere.
    win32_magic_text=`eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q`
    case $win32_magic_text in
      *import*) : ;;
      *) false ;;
    esac
}
# func_mode_link arg...
func_mode_link ()
{
$debug_cmd
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
# It is impossible to link a dll without this setting, and
# we shouldn't force the makefile maintainer to figure out
# what system we are compiling for in order to pass an extra
# flag for every libtool invocation.
# allow_undefined=no
# FIXME: Unfortunately, there are problems with the above when trying
# to make a dll that has undefined symbols, in which case not
# even a static library is built. For now, we need to specify
# -no-undefined on the libtool link line when we can be certain
# that all symbols are satisfied, otherwise we get a static library.
allow_undefined=yes
;;
*)
allow_undefined=yes
;;
esac
libtool_args=$nonopt
base_compile="$nonopt $@"
compile_command=$nonopt
finalize_command=$nonopt
compile_rpath=
finalize_rpath=
compile_shlibpath=
finalize_shlibpath=
convenience=
old_convenience=
deplibs=
old_deplibs=
compiler_flags=
linker_flags=
dllsearchpath=
lib_search_path=`pwd`
inst_prefix_dir=
new_inherited_linker_flags=
avoid_version=no
bindir=
dlfiles=
dlprefiles=
dlself=no
export_dynamic=no
export_symbols=
export_symbols_regex=
generated=
libobjs=
ltlibs=
module=no
no_install=no
objs=
non_pic_objects=
precious_files_regex=
prefer_static_libs=no
preload=false
prev=
prevarg=
release=
rpath=
xrpath=
perm_rpath=
temp_rpath=
thread_safe=no
vinfo=
vinfo_number=no
weak_libs=
single_module=$wl-single_module
func_infer_tag $base_compile
# We need to know -static, to get the right output filenames.
for arg
do
case $arg in
-shared)
test yes != "$build_libtool_libs" \
&& func_fatal_configuration "cannot build a shared library"
build_old_libs=no
break
;;
-all-static | -static | -static-libtool-libs)
case $arg in
-all-static)
if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then
func_warning "complete static linking is impossible in this configuration"
fi
if test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=yes
;;
-static)
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=built
;;
-static-libtool-libs)
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=yes
;;
esac
build_libtool_libs=no
build_old_libs=yes
break
;;
esac
done
# See if our shared archives depend on static archives.
test -n "$old_archive_from_new_cmds" && build_old_libs=yes
# Go through the arguments, transforming them on the way.
while test "$#" -gt 0; do
arg=$1
shift
func_quote_for_eval "$arg"
qarg=$func_quote_for_eval_unquoted_result
func_append libtool_args " $func_quote_for_eval_result"
# If the previous option needs an argument, assign it.
if test -n "$prev"; then
case $prev in
output)
func_append compile_command " @OUTPUT@"
func_append finalize_command " @OUTPUT@"
;;
esac
case $prev in
bindir)
bindir=$arg
prev=
continue
;;
dlfiles|dlprefiles)
$preload || {
# Add the symbol object into the linking commands.
func_append compile_command " @SYMFILE@"
func_append finalize_command " @SYMFILE@"
preload=:
}
case $arg in
*.la | *.lo) ;; # We handle these cases below.
force)
if test no = "$dlself"; then
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
self)
if test dlprefiles = "$prev"; then
dlself=yes
elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then
dlself=yes
else
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
*)
if test dlfiles = "$prev"; then
func_append dlfiles " $arg"
else
func_append dlprefiles " $arg"
fi
prev=
continue
;;
esac
;;
expsyms)
export_symbols=$arg
test -f "$arg" \
|| func_fatal_error "symbol file '$arg' does not exist"
prev=
continue
;;
expsyms_regex)
export_symbols_regex=$arg
prev=
continue
;;
framework)
case $host in
*-*-darwin*)
case "$deplibs " in
*" $qarg.ltframework "*) ;;
*) func_append deplibs " $qarg.ltframework" # this is fixed later
;;
esac
;;
esac
prev=
continue
;;
inst_prefix)
inst_prefix_dir=$arg
prev=
continue
;;
mllvm)
# Clang does not use LLVM to link, so we can simply discard any
# '-mllvm $arg' options when doing the link step.
prev=
continue
;;
objectlist)
if test -f "$arg"; then
save_arg=$arg
moreargs=
for fil in `cat "$save_arg"`
do
# func_append moreargs " $fil"
arg=$fil
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if func_lalib_unsafe_p "$arg"; then
pic_object=
non_pic_object=
# Read the .lo file
func_source "$arg"
if test -z "$pic_object" ||
test -z "$non_pic_object" ||
test none = "$pic_object" &&
test none = "$non_pic_object"; then
func_fatal_error "cannot find name of object for '$arg'"
fi
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
if test none != "$pic_object"; then
# Prepend the subdirectory the object is found in.
pic_object=$xdir$pic_object
if test dlfiles = "$prev"; then
if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
func_append dlfiles " $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test dlprefiles = "$prev"; then
# Preload the old-style object.
func_append dlprefiles " $pic_object"
prev=
fi
# A PIC object.
func_append libobjs " $pic_object"
arg=$pic_object
fi
# Non-PIC object.
if test none != "$non_pic_object"; then
# Prepend the subdirectory the object is found in.
non_pic_object=$xdir$non_pic_object
# A standard non-PIC object
func_append non_pic_objects " $non_pic_object"
if test -z "$pic_object" || test none = "$pic_object"; then
arg=$non_pic_object
fi
else
# If the PIC object exists, use it instead.
# $xdir was prepended to $pic_object above.
non_pic_object=$pic_object
func_append non_pic_objects " $non_pic_object"
fi
else
# Only an error if not doing a dry-run.
if $opt_dry_run; then
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
func_lo2o "$arg"
pic_object=$xdir$objdir/$func_lo2o_result
non_pic_object=$xdir$func_lo2o_result
func_append libobjs " $pic_object"
func_append non_pic_objects " $non_pic_object"
else
func_fatal_error "'$arg' is not a valid libtool object"
fi
fi
done
else
func_fatal_error "link input file '$arg' does not exist"
fi
arg=$save_arg
prev=
continue
;;
precious_regex)
precious_files_regex=$arg
prev=
continue
;;
release)
release=-$arg
prev=
continue
;;
rpath | xrpath)
# We need an absolute path.
case $arg in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
func_fatal_error "only absolute run-paths are allowed"
;;
esac
if test rpath = "$prev"; then
case "$rpath " in
*" $arg "*) ;;
*) func_append rpath " $arg" ;;
esac
else
case "$xrpath " in
*" $arg "*) ;;
*) func_append xrpath " $arg" ;;
esac
fi
prev=
continue
;;
shrext)
shrext_cmds=$arg
prev=
continue
;;
weak)
func_append weak_libs " $arg"
prev=
continue
;;
xcclinker)
func_append linker_flags " $qarg"
func_append compiler_flags " $qarg"
prev=
func_append compile_command " $qarg"
func_append finalize_command " $qarg"
continue
;;
xcompiler)
func_append compiler_flags " $qarg"
prev=
func_append compile_command " $qarg"
func_append finalize_command " $qarg"
continue
;;
xlinker)
func_append linker_flags " $qarg"
func_append compiler_flags " $wl$qarg"
prev=
func_append compile_command " $wl$qarg"
func_append finalize_command " $wl$qarg"
continue
;;
*)
eval "$prev=\"\$arg\""
prev=
continue
;;
esac
fi # test -n "$prev"
prevarg=$arg
case $arg in
-all-static)
if test -n "$link_static_flag"; then
# See comment for -static flag below, for more details.
func_append compile_command " $link_static_flag"
func_append finalize_command " $link_static_flag"
fi
continue
;;
-allow-undefined)
# FIXME: remove this flag sometime in the future.
func_fatal_error "'-allow-undefined' must not be used because it is the default"
;;
-avoid-version)
avoid_version=yes
continue
;;
-bindir)
prev=bindir
continue
;;
-dlopen)
prev=dlfiles
continue
;;
-dlpreopen)
prev=dlprefiles
continue
;;
-export-dynamic)
export_dynamic=yes
continue
;;
-export-symbols | -export-symbols-regex)
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
func_fatal_error "more than one -exported-symbols argument is not allowed"
fi
if test X-export-symbols = "X$arg"; then
prev=expsyms
else
prev=expsyms_regex
fi
continue
;;
-framework)
prev=framework
continue
;;
-inst-prefix-dir)
prev=inst_prefix
continue
;;
# The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
# so, if we see these flags be careful not to treat them like -L
-L[A-Z][A-Z]*:*)
case $with_gcc/$host in
no/*-*-irix* | /*-*-irix*)
func_append compile_command " $arg"
func_append finalize_command " $arg"
;;
esac
continue
;;
-L*)
func_stripname "-L" '' "$arg"
if test -z "$func_stripname_result"; then
if test "$#" -gt 0; then
func_fatal_error "require no space between '-L' and '$1'"
else
func_fatal_error "need path for '-L' option"
fi
fi
func_resolve_sysroot "$func_stripname_result"
dir=$func_resolve_sysroot_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
absdir=`cd "$dir" && pwd`
test -z "$absdir" && \
func_fatal_error "cannot determine absolute directory name of '$dir'"
dir=$absdir
;;
esac
case "$deplibs " in
*" -L$dir "* | *" $arg "*)
# Will only happen for absolute or sysroot arguments
;;
*)
# Preserve sysroot, but never include relative directories
case $dir in
[\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
*) func_append deplibs " -L$dir" ;;
esac
func_append lib_search_path " $dir"
;;
esac
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
case :$dllsearchpath: in
*":$dir:"*) ;;
::) dllsearchpath=$dir;;
*) func_append dllsearchpath ":$dir";;
esac
case :$dllsearchpath: in
*":$testbindir:"*) ;;
::) dllsearchpath=$testbindir;;
*) func_append dllsearchpath ":$testbindir";;
esac
;;
esac
continue
;;
-l*)
if test X-lc = "X$arg" || test X-lm = "X$arg"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
# These systems don't actually have a C or math library (as such)
continue
;;
*-*-os2*)
# These systems don't actually have a C library (as such)
test X-lc = "X$arg" && continue
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*)
# Do not include libc due to us having libc/libc_r.
test X-lc = "X$arg" && continue
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C and math libraries are in the System framework
func_append deplibs " System.ltframework"
continue
;;
*-*-sco3.2v5* | *-*-sco5v6*)
# Causes problems with __ctype
test X-lc = "X$arg" && continue
;;
*-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
# Compiler inserts libc in the correct place for threads to work
test X-lc = "X$arg" && continue
;;
esac
elif test X-lc_r = "X$arg"; then
case $host in
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*)
# Do not include libc_r directly, use -pthread flag.
continue
;;
esac
fi
func_append deplibs " $arg"
continue
;;
-mllvm)
prev=mllvm
continue
;;
-module)
module=yes
continue
;;
# Tru64 UNIX uses -model [arg] to determine the layout of C++
# classes, name mangling, and exception handling.
# Darwin uses the -arch flag to determine output architecture.
-model|-arch|-isysroot|--sysroot)
func_append compiler_flags " $arg"
func_append compile_command " $arg"
func_append finalize_command " $arg"
prev=xcompiler
continue
;;
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
|-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
func_append compiler_flags " $arg"
func_append compile_command " $arg"
func_append finalize_command " $arg"
case "$new_inherited_linker_flags " in
*" $arg "*) ;;
* ) func_append new_inherited_linker_flags " $arg" ;;
esac
continue
;;
-multi_module)
single_module=$wl-multi_module
continue
;;
-no-fast-install)
fast_install=no
continue
;;
-no-install)
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
# The PATH hackery in wrapper scripts is required on Windows
# and Darwin in order for the loader to find any dlls it needs.
func_warning "'-no-install' is ignored for $host"
func_warning "assuming '-no-fast-install' instead"
fast_install=no
;;
*) no_install=yes ;;
esac
continue
;;
-no-undefined)
allow_undefined=no
continue
;;
-objectlist)
prev=objectlist
continue
;;
-o) prev=output ;;
-precious-files-regex)
prev=precious_regex
continue
;;
-release)
prev=release
continue
;;
-rpath)
prev=rpath
continue
;;
-R)
prev=xrpath
continue
;;
-R*)
func_stripname '-R' '' "$arg"
dir=$func_stripname_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
=*)
func_stripname '=' '' "$dir"
dir=$lt_sysroot$func_stripname_result
;;
*)
func_fatal_error "only absolute run-paths are allowed"
;;
esac
case "$xrpath " in
*" $dir "*) ;;
*) func_append xrpath " $dir" ;;
esac
continue
;;
-shared)
# The effects of -shared are defined in a previous loop.
continue
;;
-shrext)
prev=shrext
continue
;;
-static | -static-libtool-libs)
# The effects of -static are defined in a previous loop.
# We used to do the same as -all-static on platforms that
# didn't have a PIC flag, but the assumption that the effects
# would be equivalent was wrong. It would break on at least
# Digital Unix and AIX.
continue
;;
-thread-safe)
thread_safe=yes
continue
;;
-version-info)
prev=vinfo
continue
;;
-version-number)
prev=vinfo
vinfo_number=yes
continue
;;
-weak)
prev=weak
continue
;;
-Wc,*)
func_stripname '-Wc,' '' "$arg"
args=$func_stripname_result
arg=
save_ifs=$IFS; IFS=,
for flag in $args; do
IFS=$save_ifs
func_quote_for_eval "$flag"
func_append arg " $func_quote_for_eval_result"
func_append compiler_flags " $func_quote_for_eval_result"
done
IFS=$save_ifs
func_stripname ' ' '' "$arg"
arg=$func_stripname_result
;;
-Wl,*)
func_stripname '-Wl,' '' "$arg"
args=$func_stripname_result
arg=
save_ifs=$IFS; IFS=,
for flag in $args; do
IFS=$save_ifs
func_quote_for_eval "$flag"
func_append arg " $wl$func_quote_for_eval_result"
func_append compiler_flags " $wl$func_quote_for_eval_result"
func_append linker_flags " $func_quote_for_eval_result"
done
IFS=$save_ifs
func_stripname ' ' '' "$arg"
arg=$func_stripname_result
;;
-Xcompiler)
prev=xcompiler
continue
;;
-Xlinker)
prev=xlinker
continue
;;
-XCClinker)
prev=xcclinker
continue
;;
# -msg_* for osf cc
-msg_*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
# Flags to be passed through unchanged, with rationale:
# -64, -mips[0-9] enable 64-bit mode for the SGI compiler
# -r[0-9][0-9]* specify processor for the SGI compiler
# -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
# +DA*, +DD* enable 64-bit mode for the HP compiler
# -q* compiler args for the IBM compiler
# -m*, -t[45]*, -txscale* architecture-specific flags for GCC
# -F/path path to uninstalled frameworks, gcc on darwin
# -p, -pg, --coverage, -fprofile-* profiling flags for GCC
# @file GCC response files
# -tp=* Portland pgcc target processor selection
# --sysroot=* for sysroot support
# -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
# -stdlib=* select c++ std lib with clang
-64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
-t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
-O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-stdlib=*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
func_append compile_command " $arg"
func_append finalize_command " $arg"
func_append compiler_flags " $arg"
continue
;;
# Some other compiler flag.
-* | +*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
*.$objext)
# A standard object.
func_append objs " $arg"
;;
*.lo)
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if func_lalib_unsafe_p "$arg"; then
pic_object=
non_pic_object=
# Read the .lo file
func_source "$arg"
if test -z "$pic_object" ||
test -z "$non_pic_object" ||
test none = "$pic_object" &&
test none = "$non_pic_object"; then
func_fatal_error "cannot find name of object for '$arg'"
fi
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
test none = "$pic_object" || {
# Prepend the subdirectory the object is found in.
pic_object=$xdir$pic_object
if test dlfiles = "$prev"; then
if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
func_append dlfiles " $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test dlprefiles = "$prev"; then
# Preload the old-style object.
func_append dlprefiles " $pic_object"
prev=
fi
# A PIC object.
func_append libobjs " $pic_object"
arg=$pic_object
}
# Non-PIC object.
if test none != "$non_pic_object"; then
# Prepend the subdirectory the object is found in.
non_pic_object=$xdir$non_pic_object
# A standard non-PIC object
func_append non_pic_objects " $non_pic_object"
if test -z "$pic_object" || test none = "$pic_object"; then
arg=$non_pic_object
fi
else
# If the PIC object exists, use it instead.
# $xdir was prepended to $pic_object above.
non_pic_object=$pic_object
func_append non_pic_objects " $non_pic_object"
fi
else
# Only an error if not doing a dry-run.
if $opt_dry_run; then
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
func_lo2o "$arg"
pic_object=$xdir$objdir/$func_lo2o_result
non_pic_object=$xdir$func_lo2o_result
func_append libobjs " $pic_object"
func_append non_pic_objects " $non_pic_object"
else
func_fatal_error "'$arg' is not a valid libtool object"
fi
fi
;;
*.$libext)
# An archive.
func_append deplibs " $arg"
func_append old_deplibs " $arg"
continue
;;
*.la)
# A libtool-controlled library.
func_resolve_sysroot "$arg"
if test dlfiles = "$prev"; then
# This library was specified with -dlopen.
func_append dlfiles " $func_resolve_sysroot_result"
prev=
elif test dlprefiles = "$prev"; then
# The library was specified with -dlpreopen.
func_append dlprefiles " $func_resolve_sysroot_result"
prev=
else
func_append deplibs " $func_resolve_sysroot_result"
fi
continue
;;
# Some other compiler argument.
*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
esac # arg
# Now actually substitute the argument into the commands.
if test -n "$arg"; then
func_append compile_command " $arg"
func_append finalize_command " $arg"
fi
done # argument parsing loop
# Abort if the last option on the command line still expected an argument
# (e.g. a trailing '-o' or '-rpath' with nothing after it).
test -n "$prev" && \
func_fatal_help "the '$prevarg' option requires an argument"
# If -export-dynamic was requested and this toolchain defines a flag for it,
# expand the flag spec and append it to both the compile and relink commands.
if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then
eval arg=\"$export_dynamic_flag_spec\"
func_append compile_command " $arg"
func_append finalize_command " $arg"
fi
# Derive the various names and directories for the link output, and make
# sure the per-directory object dir (.libs) exists before anything is built.
oldlibs=
# calculate the name of the file, without its directory
func_basename "$output"
outputname=$func_basename_result
# Save the object list so it can be restored after per-pass rewriting.
libobjs_save=$libobjs
if test -n "$shlibpath_var"; then
# get the directories listed in $shlibpath_var
eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\`
else
shlib_search_path=
fi
# Expand the configure-time search path specs for this host/toolchain.
eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
func_dirname "$output" "/" ""
output_objdir=$func_dirname_result$objdir
# Same directory, but in the form the build tool expects (e.g. for cross
# builds with path translation).
func_to_tool_file "$output_objdir/"
tool_output_objdir=$func_to_tool_file_result
# Create the object directory.
func_mkdir_p "$output_objdir"
# Determine the type of output
# The output file's extension selects the overall link mode for the rest
# of this function: oldlib = static archive, obj = single object,
# lib = libtool library, prog = executable.
case $output in
"")
func_fatal_help "you must specify an output file"
;;
*.$libext) linkmode=oldlib ;;
*.lo | *.$objext) linkmode=obj ;;
*.la) linkmode=lib ;;
*) linkmode=prog ;; # Anything else should be a program.
esac
# Libraries listed more than once on the command line are "special":
# their duplicates must NOT be removed during later deplib pruning,
# because repeating them is how circular static-link dependencies are
# resolved (e.g. -la -lb -la).
specialdeplibs=
libs=
# Find all interdependent deplibs by searching for libraries
# that are linked more than once (e.g. -la -lb -la)
for deplib in $deplibs; do
if $opt_preserve_dup_deps; then
case "$libs " in
*" $deplib "*) func_append specialdeplibs " $deplib" ;;
esac
fi
func_append libs " $deplib"
done
if test lib = "$linkmode"; then
libs="$predeps $libs $compiler_lib_search_path $postdeps"
# Compute libraries that are listed more than once in $predeps
# $postdeps and mark them as special (i.e., whose duplicates are
# not to be eliminated).
pre_post_deps=
if $opt_duplicate_compiler_generated_deps; then
# NOTE(review): the append below uses $pre_post_deps (the whole
# accumulator) rather than $pre_post_dep (the current item); this
# matches upstream libtool but looks like a long-standing typo —
# confirm against upstream before changing.
for pre_post_dep in $predeps $postdeps; do
case "$pre_post_deps " in
*" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;;
esac
func_append pre_post_deps " $pre_post_dep"
done
fi
pre_post_deps=
fi
# Reset per-link accumulators before the multi-pass dependency scan.
deplibs=
newdependency_libs=
newlib_search_path=
need_relink=no # whether we're linking any uninstalled libtool libraries
notinst_deplibs= # not-installed libtool libraries
notinst_path= # paths that contain not-installed libtool libraries
# Choose which scanning passes to run, based on the output type:
# libraries need conv/dlpreopen/link; programs additionally scan and
# handle -dlopen; archives and objects only need convenience-library
# conversion.
case $linkmode in
lib)
passes="conv dlpreopen link"
# Libraries may only -dlopen/-dlpreopen other libtool libraries.
for file in $dlfiles $dlprefiles; do
case $file in
*.la) ;;
*)
func_fatal_help "libraries can '-dlopen' only libtool libraries: $file"
;;
esac
done
;;
prog)
compile_deplibs=
finalize_deplibs=
alldeplibs=false
newdlfiles=
newdlprefiles=
passes="conv scan dlopen dlpreopen link"
;;
*) passes="conv"
;;
esac
for pass in $passes; do
# The preopen pass in lib mode reverses $deplibs; put it back here
# so that -L comes before libs that need it for instance...
if test lib,link = "$linkmode,$pass"; then
## FIXME: Find the place where the list is rebuilt in the wrong
## order, and fix it there properly
tmp_deplibs=
for deplib in $deplibs; do
tmp_deplibs="$deplib $tmp_deplibs"
done
deplibs=$tmp_deplibs
fi
if test lib,link = "$linkmode,$pass" ||
test prog,scan = "$linkmode,$pass"; then
libs=$deplibs
deplibs=
fi
if test prog = "$linkmode"; then
case $pass in
dlopen) libs=$dlfiles ;;
dlpreopen) libs=$dlprefiles ;;
link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
esac
fi
if test lib,dlpreopen = "$linkmode,$pass"; then
# Collect and forward deplibs of preopened libtool libs
for lib in $dlprefiles; do
# Ignore non-libtool-libs
dependency_libs=
func_resolve_sysroot "$lib"
case $lib in
*.la) func_source "$func_resolve_sysroot_result" ;;
esac
# Collect preopened libtool deplibs, except any this library
# has declared as weak libs
for deplib in $dependency_libs; do
func_basename "$deplib"
deplib_base=$func_basename_result
case " $weak_libs " in
*" $deplib_base "*) ;;
*) func_append deplibs " $deplib" ;;
esac
done
done
libs=$dlprefiles
fi
if test dlopen = "$pass"; then
# Collect dlpreopened libraries
save_deplibs=$deplibs
deplibs=
fi
for deplib in $libs; do
lib=
found=false
case $deplib in
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
|-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
func_append compiler_flags " $deplib"
if test lib = "$linkmode"; then
case "$new_inherited_linker_flags " in
*" $deplib "*) ;;
* ) func_append new_inherited_linker_flags " $deplib" ;;
esac
fi
fi
continue
;;
-l*)
if test lib != "$linkmode" && test prog != "$linkmode"; then
func_warning "'-l' is ignored for archives/objects"
continue
fi
func_stripname '-l' '' "$deplib"
name=$func_stripname_result
if test lib = "$linkmode"; then
searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
else
searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
fi
for searchdir in $searchdirs; do
for search_ext in .la $std_shrext .so .a; do
# Search the libtool library
lib=$searchdir/lib$name$search_ext
if test -f "$lib"; then
if test .la = "$search_ext"; then
found=:
else
found=false
fi
break 2
fi
done
done
if $found; then
# deplib is a libtool library
# If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
# We need to do some special things here, and not later.
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $deplib "*)
if func_lalib_p "$lib"; then
library_names=
old_library=
func_source "$lib"
for l in $old_library $library_names; do
ll=$l
done
if test "X$ll" = "X$old_library"; then # only static version available
found=false
func_dirname "$lib" "" "."
ladir=$func_dirname_result
lib=$ladir/$old_library
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
fi
;;
*) ;;
esac
fi
else
# deplib doesn't seem to be a libtool library
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
;; # -l
*.ltframework)
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
if test lib = "$linkmode"; then
case "$new_inherited_linker_flags " in
*" $deplib "*) ;;
* ) func_append new_inherited_linker_flags " $deplib" ;;
esac
fi
fi
continue
;;
-L*)
case $linkmode in
lib)
deplibs="$deplib $deplibs"
test conv = "$pass" && continue
newdependency_libs="$deplib $newdependency_libs"
func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
prog)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
continue
fi
if test scan = "$pass"; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
*)
func_warning "'-L' is ignored for archives/objects"
;;
esac # linkmode
continue
;; # -L
-R*)
if test link = "$pass"; then
func_stripname '-R' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
dir=$func_resolve_sysroot_result
# Make sure the xrpath contains only unique directories.
case "$xrpath " in
*" $dir "*) ;;
*) func_append xrpath " $dir" ;;
esac
fi
deplibs="$deplib $deplibs"
continue
;;
*.la)
func_resolve_sysroot "$deplib"
lib=$func_resolve_sysroot_result
;;
*.$libext)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
continue
fi
case $linkmode in
lib)
# Linking convenience modules into shared libraries is allowed,
# but linking other static libraries is non-portable.
case " $dlpreconveniencelibs " in
*" $deplib "*) ;;
*)
valid_a_lib=false
case $deplibs_check_method in
match_pattern*)
set dummy $deplibs_check_method; shift
match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
| $EGREP "$match_pattern_regex" > /dev/null; then
valid_a_lib=:
fi
;;
pass_all)
valid_a_lib=:
;;
esac
if $valid_a_lib; then
echo
$ECHO "*** Warning: Linking the shared library $output against the"
$ECHO "*** static library $deplib is not portable!"
deplibs="$deplib $deplibs"
else
echo
$ECHO "*** Warning: Trying to link with static lib archive $deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because the file extensions .$libext of this argument makes me believe"
echo "*** that it is just a static archive that I should not use here."
fi
;;
esac
continue
;;
prog)
if test link != "$pass"; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
continue
;;
esac # linkmode
;; # *.$libext
*.lo | *.$objext)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
elif test prog = "$linkmode"; then
if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then
# If there is no dlopen support or we're linking statically,
# we need to preload.
func_append newdlprefiles " $deplib"
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
func_append newdlfiles " $deplib"
fi
fi
continue
;;
%DEPLIBS%)
alldeplibs=:
continue
;;
esac # case $deplib
$found || test -f "$lib" \
|| func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'"
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$lib" \
|| func_fatal_error "'$lib' is not a valid libtool archive"
func_dirname "$lib" "" "."
ladir=$func_dirname_result
dlname=
dlopen=
dlpreopen=
libdir=
library_names=
old_library=
inherited_linker_flags=
# If the library was installed with an old release of libtool,
# it will not redefine variables installed, or shouldnotlink
installed=yes
shouldnotlink=no
avoidtemprpath=
# Read the .la file
func_source "$lib"
# Convert "-framework foo" to "foo.ltframework"
if test -n "$inherited_linker_flags"; then
tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
case " $new_inherited_linker_flags " in
*" $tmp_inherited_linker_flag "*) ;;
*) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
esac
done
fi
dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
if test lib,link = "$linkmode,$pass" ||
test prog,scan = "$linkmode,$pass" ||
{ test prog != "$linkmode" && test lib != "$linkmode"; }; then
test -n "$dlopen" && func_append dlfiles " $dlopen"
test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
fi
if test conv = "$pass"; then
# Only check for convenience libraries
deplibs="$lib $deplibs"
if test -z "$libdir"; then
if test -z "$old_library"; then
func_fatal_error "cannot find name of link library for '$lib'"
fi
# It is a libtool convenience library, so add in its objects.
func_append convenience " $ladir/$objdir/$old_library"
func_append old_convenience " $ladir/$objdir/$old_library"
elif test prog != "$linkmode" && test lib != "$linkmode"; then
func_fatal_error "'$lib' is not a convenience library"
fi
tmp_libs=
for deplib in $dependency_libs; do
deplibs="$deplib $deplibs"
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $deplib "*) func_append specialdeplibs " $deplib" ;;
esac
fi
func_append tmp_libs " $deplib"
done
continue
fi # $pass = conv
# Get the name of the library we link against.
linklib=
if test -n "$old_library" &&
{ test yes = "$prefer_static_libs" ||
test built,no = "$prefer_static_libs,$installed"; }; then
linklib=$old_library
else
for l in $old_library $library_names; do
linklib=$l
done
fi
if test -z "$linklib"; then
func_fatal_error "cannot find name of link library for '$lib'"
fi
# This library was specified with -dlopen.
if test dlopen = "$pass"; then
test -z "$libdir" \
&& func_fatal_error "cannot -dlopen a convenience library: '$lib'"
if test -z "$dlname" ||
test yes != "$dlopen_support" ||
test no = "$build_libtool_libs"
then
# If there is no dlname, no dlopen support or we're linking
# statically, we need to preload. We also need to preload any
# dependent libraries so libltdl's deplib preloader doesn't
# bomb out in the load deplibs phase.
func_append dlprefiles " $lib $dependency_libs"
else
func_append newdlfiles " $lib"
fi
continue
fi # $pass = dlopen
# We need an absolute path.
case $ladir in
[\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;;
*)
abs_ladir=`cd "$ladir" && pwd`
if test -z "$abs_ladir"; then
func_warning "cannot determine absolute directory name of '$ladir'"
func_warning "passing it literally to the linker, although it might fail"
abs_ladir=$ladir
fi
;;
esac
func_basename "$lib"
laname=$func_basename_result
# Find the relevant object directory and library name.
if test yes = "$installed"; then
if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
func_warning "library '$lib' was moved."
dir=$ladir
absdir=$abs_ladir
libdir=$abs_ladir
else
dir=$lt_sysroot$libdir
absdir=$lt_sysroot$libdir
fi
test yes = "$hardcode_automatic" && avoidtemprpath=yes
else
if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
dir=$ladir
absdir=$abs_ladir
# Remove this search path later
func_append notinst_path " $abs_ladir"
else
dir=$ladir/$objdir
absdir=$abs_ladir/$objdir
# Remove this search path later
func_append notinst_path " $abs_ladir"
fi
fi # $installed = yes
func_stripname 'lib' '.la' "$laname"
name=$func_stripname_result
# This library was specified with -dlpreopen.
if test dlpreopen = "$pass"; then
if test -z "$libdir" && test prog = "$linkmode"; then
func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'"
fi
case $host in
# special handling for platforms with PE-DLLs.
*cygwin* | *mingw* | *cegcc* )
# Linker will automatically link against shared library if both
# static and shared are present. Therefore, ensure we extract
# symbols from the import library if a shared library is present
# (otherwise, the dlopen module name will be incorrect). We do
# this by putting the import library name into $newdlprefiles.
# We recover the dlopen module name by 'saving' the la file
# name in a special purpose variable, and (later) extracting the
# dlname from the la file.
if test -n "$dlname"; then
func_tr_sh "$dir/$linklib"
eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
func_append newdlprefiles " $dir/$linklib"
else
func_append newdlprefiles " $dir/$old_library"
# Keep a list of preopened convenience libraries to check
# that they are being used correctly in the link pass.
test -z "$libdir" && \
func_append dlpreconveniencelibs " $dir/$old_library"
fi
;;
* )
# Prefer using a static library (so that no silly _DYNAMIC symbols
# are required to link).
if test -n "$old_library"; then
func_append newdlprefiles " $dir/$old_library"
# Keep a list of preopened convenience libraries to check
# that they are being used correctly in the link pass.
test -z "$libdir" && \
func_append dlpreconveniencelibs " $dir/$old_library"
# Otherwise, use the dlname, so that lt_dlopen finds it.
elif test -n "$dlname"; then
func_append newdlprefiles " $dir/$dlname"
else
func_append newdlprefiles " $dir/$linklib"
fi
;;
esac
fi # $pass = dlpreopen
if test -z "$libdir"; then
# Link the convenience library
if test lib = "$linkmode"; then
deplibs="$dir/$old_library $deplibs"
elif test prog,link = "$linkmode,$pass"; then
compile_deplibs="$dir/$old_library $compile_deplibs"
finalize_deplibs="$dir/$old_library $finalize_deplibs"
else
deplibs="$lib $deplibs" # used for prog,scan pass
fi
continue
fi
if test prog = "$linkmode" && test link != "$pass"; then
func_append newlib_search_path " $ladir"
deplibs="$lib $deplibs"
linkalldeplibs=false
if test no != "$link_all_deplibs" || test -z "$library_names" ||
test no = "$build_libtool_libs"; then
linkalldeplibs=:
fi
tmp_libs=
for deplib in $dependency_libs; do
case $deplib in
-L*) func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
esac
# Need to link against all dependency_libs?
if $linkalldeplibs; then
deplibs="$deplib $deplibs"
else
# Need to hardcode shared library paths
# or/and link against static libraries
newdependency_libs="$deplib $newdependency_libs"
fi
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $deplib "*) func_append specialdeplibs " $deplib" ;;
esac
fi
func_append tmp_libs " $deplib"
done # for deplib
continue
fi # $linkmode = prog...
if test prog,link = "$linkmode,$pass"; then
if test -n "$library_names" &&
{ { test no = "$prefer_static_libs" ||
test built,yes = "$prefer_static_libs,$installed"; } ||
test -z "$old_library"; }; then
# We need to hardcode the library path
if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then
# Make sure the rpath contains only unique directories.
case $temp_rpath: in
*"$absdir:"*) ;;
*) func_append temp_rpath "$absdir:" ;;
esac
fi
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) func_append compile_rpath " $absdir" ;;
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
;;
esac
fi # $linkmode,$pass = prog,link...
if $alldeplibs &&
{ test pass_all = "$deplibs_check_method" ||
{ test yes = "$build_libtool_libs" &&
test -n "$library_names"; }; }; then
# We only need to search for static libraries
continue
fi
fi
link_static=no # Whether the deplib will be linked statically
use_static_libs=$prefer_static_libs
if test built = "$use_static_libs" && test yes = "$installed"; then
use_static_libs=no
fi
if test -n "$library_names" &&
{ test no = "$use_static_libs" || test -z "$old_library"; }; then
case $host in
*cygwin* | *mingw* | *cegcc*)
# No point in relinking DLLs because paths are not encoded
func_append notinst_deplibs " $lib"
need_relink=no
;;
*)
if test no = "$installed"; then
func_append notinst_deplibs " $lib"
need_relink=yes
fi
;;
esac
# This is a shared library
# Warn about portability, can't link against -module's on some
# systems (darwin). Don't bleat about dlopened modules though!
dlopenmodule=
for dlpremoduletest in $dlprefiles; do
if test "X$dlpremoduletest" = "X$lib"; then
dlopenmodule=$dlpremoduletest
break
fi
done
if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then
echo
if test prog = "$linkmode"; then
$ECHO "*** Warning: Linking the executable $output against the loadable module"
else
$ECHO "*** Warning: Linking the shared library $output against the loadable module"
fi
$ECHO "*** $linklib is not portable!"
fi
if test lib = "$linkmode" &&
test yes = "$hardcode_into_libs"; then
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) func_append compile_rpath " $absdir" ;;
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
;;
esac
fi
if test -n "$old_archive_from_expsyms_cmds"; then
# figure out the soname
set dummy $library_names
shift
realname=$1
shift
libname=`eval "\\$ECHO \"$libname_spec\""`
# use dlname if we got it. it's perfectly good, no?
if test -n "$dlname"; then
soname=$dlname
elif test -n "$soname_spec"; then
# bleh windows
case $host in
*cygwin* | mingw* | *cegcc*)
func_arith $current - $age
major=$func_arith_result
versuffix=-$major
;;
esac
eval soname=\"$soname_spec\"
else
soname=$realname
fi
# Make a new name for the extract_expsyms_cmds to use
soroot=$soname
func_basename "$soroot"
soname=$func_basename_result
func_stripname 'lib' '.dll' "$soname"
newlib=libimp-$func_stripname_result.a
# If the library has no export list, then create one now
if test -f "$output_objdir/$soname-def"; then :
else
func_verbose "extracting exported symbol list from '$soname'"
func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
fi
# Create $newlib
if test -f "$output_objdir/$newlib"; then :; else
func_verbose "generating import library for '$soname'"
func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
fi
# make sure the library variables are pointing to the new library
dir=$output_objdir
linklib=$newlib
fi # test -n "$old_archive_from_expsyms_cmds"
if test prog = "$linkmode" || test relink != "$opt_mode"; then
add_shlibpath=
add_dir=
add=
lib_linked=yes
case $hardcode_action in
immediate | unsupported)
if test no = "$hardcode_direct"; then
add=$dir/$linklib
case $host in
*-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;;
*-*-sysv4*uw2*) add_dir=-L$dir ;;
*-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
*-*-unixware7*) add_dir=-L$dir ;;
*-*-darwin* )
# if the lib is a (non-dlopened) module then we cannot
# link against it, someone is ignoring the earlier warnings
if /usr/bin/file -L $add 2> /dev/null |
$GREP ": [^:]* bundle" >/dev/null; then
if test "X$dlopenmodule" != "X$lib"; then
$ECHO "*** Warning: lib $linklib is a module, not a shared library"
if test -z "$old_library"; then
echo
echo "*** And there doesn't seem to be a static archive available"
echo "*** The link will probably fail, sorry"
else
add=$dir/$old_library
fi
elif test -n "$old_library"; then
add=$dir/$old_library
fi
fi
esac
elif test no = "$hardcode_minus_L"; then
case $host in
*-*-sunos*) add_shlibpath=$dir ;;
esac
add_dir=-L$dir
add=-l$name
elif test no = "$hardcode_shlibpath_var"; then
add_shlibpath=$dir
add=-l$name
else
lib_linked=no
fi
;;
relink)
if test yes = "$hardcode_direct" &&
test no = "$hardcode_direct_absolute"; then
add=$dir/$linklib
elif test yes = "$hardcode_minus_L"; then
add_dir=-L$absdir
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
func_append add_dir " -L$inst_prefix_dir$libdir"
;;
esac
fi
add=-l$name
elif test yes = "$hardcode_shlibpath_var"; then
add_shlibpath=$dir
add=-l$name
else
lib_linked=no
fi
;;
*) lib_linked=no ;;
esac
if test yes != "$lib_linked"; then
func_fatal_configuration "unsupported hardcode properties"
fi
if test -n "$add_shlibpath"; then
case :$compile_shlibpath: in
*":$add_shlibpath:"*) ;;
*) func_append compile_shlibpath "$add_shlibpath:" ;;
esac
fi
if test prog = "$linkmode"; then
test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
test -n "$add" && compile_deplibs="$add $compile_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
if test yes != "$hardcode_direct" &&
test yes != "$hardcode_minus_L" &&
test yes = "$hardcode_shlibpath_var"; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) func_append finalize_shlibpath "$libdir:" ;;
esac
fi
fi
fi
if test prog = "$linkmode" || test relink = "$opt_mode"; then
add_shlibpath=
add_dir=
add=
# Finalize command for both is simple: just hardcode it.
if test yes = "$hardcode_direct" &&
test no = "$hardcode_direct_absolute"; then
add=$libdir/$linklib
elif test yes = "$hardcode_minus_L"; then
add_dir=-L$libdir
add=-l$name
elif test yes = "$hardcode_shlibpath_var"; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) func_append finalize_shlibpath "$libdir:" ;;
esac
add=-l$name
elif test yes = "$hardcode_automatic"; then
if test -n "$inst_prefix_dir" &&
test -f "$inst_prefix_dir$libdir/$linklib"; then
add=$inst_prefix_dir$libdir/$linklib
else
add=$libdir/$linklib
fi
else
# We cannot seem to hardcode it, guess we'll fake it.
add_dir=-L$libdir
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
func_append add_dir " -L$inst_prefix_dir$libdir"
;;
esac
fi
add=-l$name
fi
if test prog = "$linkmode"; then
test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
fi
fi
elif test prog = "$linkmode"; then
# Here we assume that one of hardcode_direct or hardcode_minus_L
# is not unsupported. This is valid on all known static and
# shared platforms.
if test unsupported != "$hardcode_direct"; then
test -n "$old_library" && linklib=$old_library
compile_deplibs="$dir/$linklib $compile_deplibs"
finalize_deplibs="$dir/$linklib $finalize_deplibs"
else
compile_deplibs="-l$name -L$dir $compile_deplibs"
finalize_deplibs="-l$name -L$dir $finalize_deplibs"
fi
elif test yes = "$build_libtool_libs"; then
# Not a shared library
if test pass_all != "$deplibs_check_method"; then
# We're trying link a shared library against a static one
# but the system doesn't support it.
# Just print a warning and add the library to dependency_libs so
# that the program can be linked against the static library.
echo
$ECHO "*** Warning: This system cannot link to static lib archive $lib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
if test yes = "$module"; then
echo "*** But as you try to build a module library, libtool will still create "
echo "*** a static module, that should work as long as the dlopening application"
echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using 'nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** 'nm' from GNU binutils and a full rebuild may help."
fi
if test no = "$build_old_libs"; then
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
else
deplibs="$dir/$old_library $deplibs"
link_static=yes
fi
fi # link shared/static library?
if test lib = "$linkmode"; then
if test -n "$dependency_libs" &&
{ test yes != "$hardcode_into_libs" ||
test yes = "$build_old_libs" ||
test yes = "$link_static"; }; then
# Extract -R from dependency_libs
temp_deplibs=
for libdir in $dependency_libs; do
case $libdir in
-R*) func_stripname '-R' '' "$libdir"
temp_xrpath=$func_stripname_result
case " $xrpath " in
*" $temp_xrpath "*) ;;
*) func_append xrpath " $temp_xrpath";;
esac;;
*) func_append temp_deplibs " $libdir";;
esac
done
dependency_libs=$temp_deplibs
fi
func_append newlib_search_path " $absdir"
# Link against this library
test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
# ... and its dependency_libs
tmp_libs=
for deplib in $dependency_libs; do
newdependency_libs="$deplib $newdependency_libs"
case $deplib in
-L*) func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result";;
*) func_resolve_sysroot "$deplib" ;;
esac
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $func_resolve_sysroot_result "*)
func_append specialdeplibs " $func_resolve_sysroot_result" ;;
esac
fi
func_append tmp_libs " $func_resolve_sysroot_result"
done
if test no != "$link_all_deplibs"; then
# Add the search paths of all dependency libraries
for deplib in $dependency_libs; do
path=
case $deplib in
-L*) path=$deplib ;;
*.la)
func_resolve_sysroot "$deplib"
deplib=$func_resolve_sysroot_result
func_dirname "$deplib" "" "."
dir=$func_dirname_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
func_warning "cannot determine absolute directory name of '$dir'"
absdir=$dir
fi
;;
esac
if $GREP "^installed=no" $deplib > /dev/null; then
case $host in
*-*-darwin*)
depdepl=
eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
if test -n "$deplibrary_names"; then
for tmp in $deplibrary_names; do
depdepl=$tmp
done
if test -f "$absdir/$objdir/$depdepl"; then
depdepl=$absdir/$objdir/$depdepl
darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
if test -z "$darwin_install_name"; then
darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
fi
func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl"
func_append linker_flags " -dylib_file $darwin_install_name:$depdepl"
path=
fi
fi
;;
*)
path=-L$absdir/$objdir
;;
esac
else
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
test -z "$libdir" && \
func_fatal_error "'$deplib' is not a valid libtool archive"
test "$absdir" != "$libdir" && \
func_warning "'$deplib' seems to be moved"
path=-L$absdir
fi
;;
esac
case " $deplibs " in
*" $path "*) ;;
*) deplibs="$path $deplibs" ;;
esac
done
fi # link_all_deplibs != no
fi # linkmode = lib
done # for deplib in $libs
if test link = "$pass"; then
if test prog = "$linkmode"; then
compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
else
compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
fi
fi
dependency_libs=$newdependency_libs
if test dlpreopen = "$pass"; then
# Link the dlpreopened libraries before other libraries
for deplib in $save_deplibs; do
deplibs="$deplib $deplibs"
done
fi
if test dlopen != "$pass"; then
test conv = "$pass" || {
# Make sure lib_search_path contains only unique directories.
lib_search_path=
for dir in $newlib_search_path; do
case "$lib_search_path " in
*" $dir "*) ;;
*) func_append lib_search_path " $dir" ;;
esac
done
newlib_search_path=
}
if test prog,link = "$linkmode,$pass"; then
vars="compile_deplibs finalize_deplibs"
else
vars=deplibs
fi
for var in $vars dependency_libs; do
# Add libraries to $var in reverse order
eval tmp_libs=\"\$$var\"
new_libs=
for deplib in $tmp_libs; do
# FIXME: Pedantically, this is the right thing to do, so
# that some nasty dependency loop isn't accidentally
# broken:
#new_libs="$deplib $new_libs"
# Pragmatically, this seems to cause very few problems in
# practice:
case $deplib in
-L*) new_libs="$deplib $new_libs" ;;
-R*) ;;
*)
# And here is the reason: when a library appears more
# than once as an explicit dependence of a library, or
# is implicitly linked in more than once by the
# compiler, it is considered special, and multiple
# occurrences thereof are not removed. Compare this
# with having the same library being listed as a
# dependency of multiple other libraries: in this case,
# we know (pedantically, we assume) the library does not
# need to be listed more than once, so we keep only the
# last copy. This is not always right, but it is rare
# enough that we require users that really mean to play
# such unportable linking tricks to link the library
# using -Wl,-lname, so that libtool does not consider it
# for duplicate removal.
case " $specialdeplibs " in
*" $deplib "*) new_libs="$deplib $new_libs" ;;
*)
case " $new_libs " in
*" $deplib "*) ;;
*) new_libs="$deplib $new_libs" ;;
esac
;;
esac
;;
esac
done
tmp_libs=
for deplib in $new_libs; do
case $deplib in
-L*)
case " $tmp_libs " in
*" $deplib "*) ;;
*) func_append tmp_libs " $deplib" ;;
esac
;;
*) func_append tmp_libs " $deplib" ;;
esac
done
eval $var=\"$tmp_libs\"
done # for var
fi
# Last step: remove runtime libs from dependency_libs
# (they stay in deplibs)
# $predeps/$postdeps are libraries the compiler driver already links
# implicitly, and $compiler_lib_search_path holds its implicit search
# dirs; recording any of them in dependency_libs would be redundant.
# A match is marked for removal by clearing 'i' inside the case arm,
# then skipped by the -n test below.
tmp_libs=
for i in $dependency_libs; do
case " $predeps $postdeps $compiler_lib_search_path " in
*" $i "*)
i=
;;
esac
if test -n "$i"; then
func_append tmp_libs " $i"
fi
done
dependency_libs=$tmp_libs
done # for pass
if test prog = "$linkmode"; then
dlfiles=$newdlfiles
fi
if test prog = "$linkmode" || test lib = "$linkmode"; then
dlprefiles=$newdlprefiles
fi
case $linkmode in
oldlib)
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
func_warning "'-dlopen' is ignored for archives"
fi
case " $deplibs" in
*\ -l* | *\ -L*)
func_warning "'-l' and '-L' are ignored for archives" ;;
esac
test -n "$rpath" && \
func_warning "'-rpath' is ignored for archives"
test -n "$xrpath" && \
func_warning "'-R' is ignored for archives"
test -n "$vinfo" && \
func_warning "'-version-info/-version-number' is ignored for archives"
test -n "$release" && \
func_warning "'-release' is ignored for archives"
test -n "$export_symbols$export_symbols_regex" && \
func_warning "'-export-symbols' is ignored for archives"
# Now set the variables for building old libraries.
build_libtool_libs=no
oldlibs=$output
func_append objs "$old_deplibs"
;;
lib)
# Make sure we only generate libraries of the form 'libNAME.la'.
case $outputname in
lib*)
func_stripname 'lib' '.la' "$outputname"
name=$func_stripname_result
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
;;
*)
test no = "$module" \
&& func_fatal_help "libtool library '$output' must begin with 'lib'"
if test no != "$need_lib_prefix"; then
# Add the "lib" prefix for modules if required
func_stripname '' '.la' "$outputname"
name=$func_stripname_result
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
else
func_stripname '' '.la' "$outputname"
libname=$func_stripname_result
fi
;;
esac
if test -n "$objs"; then
if test pass_all != "$deplibs_check_method"; then
func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs"
else
echo
$ECHO "*** Warning: Linking the shared library $output against the non-libtool"
$ECHO "*** objects $objs is not portable!"
func_append libobjs " $objs"
fi
fi
test no = "$dlself" \
|| func_warning "'-dlopen self' is ignored for libtool libraries"
set dummy $rpath
shift
test 1 -lt "$#" \
&& func_warning "ignoring multiple '-rpath's for a libtool library"
install_libdir=$1
oldlibs=
if test -z "$rpath"; then
if test yes = "$build_libtool_libs"; then
# Building a libtool convenience library.
# Some compilers have problems with a '.al' extension so
# convenience libraries should have the same extension an
# archive normally would.
oldlibs="$output_objdir/$libname.$libext $oldlibs"
build_libtool_libs=convenience
build_old_libs=yes
fi
test -n "$vinfo" && \
func_warning "'-version-info/-version-number' is ignored for convenience libraries"
test -n "$release" && \
func_warning "'-release' is ignored for convenience libraries"
else
# Parse the version information argument.
save_ifs=$IFS; IFS=:
set dummy $vinfo 0 0 0
shift
IFS=$save_ifs
test -n "$7" && \
func_fatal_help "too many parameters to '-version-info'"
# convert absolute version numbers to libtool ages
# this retains compatibility with .la files and attempts
# to make the code below a bit more comprehensible
case $vinfo_number in
yes)
number_major=$1
number_minor=$2
number_revision=$3
#
# There are really only two kinds -- those that
# use the current revision as the major version
# and those that subtract age and use age as
# a minor version. But, then there is irix
# that has an extra 1 added just for fun
#
case $version_type in
# correct linux to gnu/linux during the next big refactor
darwin|linux|osf|windows|none)
func_arith $number_major + $number_minor
current=$func_arith_result
age=$number_minor
revision=$number_revision
;;
freebsd-aout|freebsd-elf|qnx|sunos)
current=$number_major
revision=$number_minor
age=0
;;
irix|nonstopux)
func_arith $number_major + $number_minor
current=$func_arith_result
age=$number_minor
revision=$number_minor
lt_irix_increment=no
;;
esac
;;
no)
current=$1
revision=$2
age=$3
;;
esac
# Check that each of the things are valid numbers.
# Each glob below accepts a plain decimal integer from 0 to 99999 with
# no sign and no leading zeros; using pattern matching instead of
# 'test -eq' avoids shell errors on non-numeric input.
case $current in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "CURRENT '$current' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
case $revision in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "REVISION '$revision' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
case $age in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "AGE '$age' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
# The current:revision:age scheme implies the oldest supported
# interface is current - age, so age can never exceed current;
# the per-version_type arithmetic below relies on this invariant.
if test "$age" -gt "$current"; then
func_error "AGE '$age' is greater than the current interface number '$current'"
func_fatal_error "'$vinfo' is not valid version information"
fi
# Calculate the version variables.
major=
versuffix=
verstring=
case $version_type in
none) ;;
darwin)
# Like Linux, but with the current version available in
# verstring for coding it into the library header
func_arith $current - $age
major=.$func_arith_result
versuffix=$major.$age.$revision
# Darwin ld doesn't like 0 for these options...
func_arith $current + 1
minor_current=$func_arith_result
# NOTE(review): xlcverstring is set here but not read anywhere in this
# region -- presumably consumed by an archive_cmds template for the IBM
# XLC toolchain; confirm before removing.
xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
# NOTE(review): this verstring assignment is a dead store -- the
# case $CC below unconditionally reassigns verstring in both arms
# (the '*' arm to this exact same value).  Harmless, but redundant.
verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
# On Darwin other compilers
case $CC in
nagfor*)
# NOTE(review): the nagfor arm prefixes each flag with $wl, which
# looks like pass-through syntax for flags routed via the NAG
# Fortran driver to the linker -- confirm against the nagfor docs.
verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
;;
*)
verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
;;
esac
;;
freebsd-aout)
major=.$current
versuffix=.$current.$revision
;;
freebsd-elf)
major=.$current
versuffix=.$current
;;
irix | nonstopux)
if test no = "$lt_irix_increment"; then
func_arith $current - $age
else
func_arith $current - $age + 1
fi
major=$func_arith_result
case $version_type in
nonstopux) verstring_prefix=nonstopux ;;
*) verstring_prefix=sgi ;;
esac
verstring=$verstring_prefix$major.$revision
# Add in all the interfaces that we are compatible with.
loop=$revision
while test 0 -ne "$loop"; do
func_arith $revision - $loop
iface=$func_arith_result
func_arith $loop - 1
loop=$func_arith_result
verstring=$verstring_prefix$major.$iface:$verstring
done
# Before this point, $major must not contain '.'.
major=.$major
versuffix=$major.$revision
;;
linux) # correct to gnu/linux during the next big refactor
func_arith $current - $age
major=.$func_arith_result
versuffix=$major.$age.$revision
;;
osf)
func_arith $current - $age
major=.$func_arith_result
versuffix=.$current.$age.$revision
verstring=$current.$age.$revision
# Add in all the interfaces that we are compatible with.
loop=$age
while test 0 -ne "$loop"; do
func_arith $current - $loop
iface=$func_arith_result
func_arith $loop - 1
loop=$func_arith_result
verstring=$verstring:$iface.0
done
# Make executables depend on our current version.
func_append verstring ":$current.0"
;;
qnx)
major=.$current
versuffix=.$current
;;
sunos)
major=.$current
versuffix=.$current.$revision
;;
windows)
# Use '-' rather than '.', since we only want one
# extension on DOS 8.3 file systems.
func_arith $current - $age
major=$func_arith_result
versuffix=-$major
;;
*)
func_fatal_configuration "unknown library version type '$version_type'"
;;
esac
# Clear the version info if we defaulted, and they specified a release.
if test -z "$vinfo" && test -n "$release"; then
major=
case $version_type in
darwin)
# we can't check for "0.0" in archive_cmds due to quoting
# problems, so we reset it completely
verstring=
;;
*)
verstring=0.0
;;
esac
if test no = "$need_version"; then
versuffix=
else
versuffix=.0.0
fi
fi
# Remove version info from name if versioning should be avoided
if test yes,no = "$avoid_version,$need_version"; then
major=
versuffix=
verstring=
fi
# Check to see if the archive will have undefined symbols.
if test yes = "$allow_undefined"; then
if test unsupported = "$allow_undefined_flag"; then
if test yes = "$build_old_libs"; then
func_warning "undefined symbols not allowed in $host shared libraries; building static only"
build_libtool_libs=no
else
func_fatal_error "can't build $host shared library unless -no-undefined is specified"
fi
fi
else
# Don't allow undefined symbols.
allow_undefined_flag=$no_undefined_flag
fi
fi
func_generate_dlsyms "$libname" "$libname" :
func_append libobjs " $symfileobj"
test " " = "$libobjs" && libobjs=
if test relink != "$opt_mode"; then
# Remove our outputs, but don't remove object files since they
# may have been created when compiling PIC objects.
removelist=
tempremovelist=`$ECHO "$output_objdir/*"`
for p in $tempremovelist; do
case $p in
*.$objext | *.gcno)
;;
$output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*)
if test -n "$precious_files_regex"; then
if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
then
continue
fi
fi
func_append removelist " $p"
;;
*) ;;
esac
done
test -n "$removelist" && \
func_show_eval "${RM}r \$removelist"
fi
# Now set the variables for building old libraries.
if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then
func_append oldlibs " $output_objdir/$libname.$libext"
# Transform .lo files to .o files.
oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP`
fi
# Eliminate all temporary directories.
#for path in $notinst_path; do
# lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
# deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
# dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
#done
if test -n "$xrpath"; then
# If the user specified any rpath flags, then add them.
temp_xrpath=
for libdir in $xrpath; do
func_replace_sysroot "$libdir"
func_append temp_xrpath " -R$func_replace_sysroot_result"
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
done
if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then
dependency_libs="$temp_xrpath $dependency_libs"
fi
fi
# Make sure dlfiles contains only unique files that won't be dlpreopened
# dlpreopened files take precedence: a file already listed in
# dlprefiles is dropped from dlfiles entirely.  Checking the
# partially-rebuilt $dlfiles in the same pattern also removes
# duplicates within dlfiles itself, preserving first-seen order.
old_dlfiles=$dlfiles
dlfiles=
for lib in $old_dlfiles; do
case " $dlprefiles $dlfiles " in
*" $lib "*) ;;
*) func_append dlfiles " $lib" ;;
esac
done
# Make sure dlprefiles contains only unique files
# Same first-seen-order dedup, applied to dlprefiles alone.
old_dlprefiles=$dlprefiles
dlprefiles=
for lib in $old_dlprefiles; do
case "$dlprefiles " in
*" $lib "*) ;;
*) func_append dlprefiles " $lib" ;;
esac
done
if test yes = "$build_libtool_libs"; then
if test -n "$rpath"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
# these systems don't actually have a c library (as such)!
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C library is in the System framework
func_append deplibs " System.ltframework"
;;
*-*-netbsd*)
# Don't link with libc until the a.out ld.so is fixed.
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
# Do not include libc due to us having libc/libc_r.
;;
*-*-sco3.2v5* | *-*-sco5v6*)
# Causes problems with __ctype
;;
*-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
# Compiler inserts libc in the correct place for threads to work
;;
*)
# Add libc to deplibs on all other systems if necessary.
if test yes = "$build_libtool_need_lc"; then
func_append deplibs " -lc"
fi
;;
esac
fi
# Transform deplibs into only deplibs that can be linked in shared.
name_save=$name
libname_save=$libname
release_save=$release
versuffix_save=$versuffix
major_save=$major
# I'm not sure if I'm treating the release correctly. I think
# release should show up in the -l (ie -lgmp5) so we don't want to
# add it in twice. Is that correct?
release=
versuffix=
major=
newdeplibs=
droppeddeps=no
case $deplibs_check_method in
pass_all)
# Don't check for shared/static. Everything works.
# This might be a little naive. We might want to check
# whether the library exists or not. But this is on
# osf3 & osf4 and I'm not really sure... Just
# implementing what was already the behavior.
newdeplibs=$deplibs
;;
test_compile)
# This code stresses the "libraries are programs" paradigm to its
# limits. Maybe even breaks it. We compile a program, linking it
# against the deplibs as a proxy for the library. Then we can check
# whether they linked in statically or dynamically with ldd.
$opt_dry_run || $RM conftest.c
cat > conftest.c <<EOF
int main() { return 0; }
EOF
$opt_dry_run || $RM conftest
if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
ldd_output=`ldd conftest`
for i in $deplibs; do
case $i in
-l*)
func_stripname -l '' "$i"
name=$func_stripname_result
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $i "*)
func_append newdeplibs " $i"
i=
;;
esac
fi
if test -n "$i"; then
libname=`eval "\\$ECHO \"$libname_spec\""`
deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
set dummy $deplib_matches; shift
deplib_match=$1
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then
func_append newdeplibs " $i"
else
droppeddeps=yes
echo
$ECHO "*** Warning: dynamic linker does not accept needed library $i."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which I believe you do not have"
echo "*** because a test_compile did reveal that the linker did not use it for"
echo "*** its dynamic dependency list that programs get resolved with at runtime."
fi
fi
;;
*)
func_append newdeplibs " $i"
;;
esac
done
else
# Error occurred in the first compile. Let's try to salvage
# the situation: Compile a separate program for each library.
for i in $deplibs; do
case $i in
-l*)
func_stripname -l '' "$i"
name=$func_stripname_result
$opt_dry_run || $RM conftest
if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
ldd_output=`ldd conftest`
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $i "*)
func_append newdeplibs " $i"
i=
;;
esac
fi
if test -n "$i"; then
libname=`eval "\\$ECHO \"$libname_spec\""`
deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
set dummy $deplib_matches; shift
deplib_match=$1
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then
func_append newdeplibs " $i"
else
droppeddeps=yes
echo
$ECHO "*** Warning: dynamic linker does not accept needed library $i."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because a test_compile did reveal that the linker did not use this one"
echo "*** as a dynamic dependency that programs can get resolved with at runtime."
fi
fi
else
droppeddeps=yes
echo
$ECHO "*** Warning! Library $i is needed by this library but I was not able to"
echo "*** make it link in! You will probably need to install it or some"
echo "*** library that it depends on before this library will be fully"
echo "*** functional. Installing it before continuing would be even better."
fi
;;
*)
func_append newdeplibs " $i"
;;
esac
done
fi
;;
file_magic*)
set dummy $deplibs_check_method; shift
file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
for a_deplib in $deplibs; do
case $a_deplib in
-l*)
func_stripname -l '' "$a_deplib"
name=$func_stripname_result
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $a_deplib "*)
func_append newdeplibs " $a_deplib"
a_deplib=
;;
esac
fi
if test -n "$a_deplib"; then
libname=`eval "\\$ECHO \"$libname_spec\""`
if test -n "$file_magic_glob"; then
libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
else
libnameglob=$libname
fi
test yes = "$want_nocaseglob" && nocaseglob=`shopt -p nocaseglob`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
if test yes = "$want_nocaseglob"; then
shopt -s nocaseglob
potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
$nocaseglob
else
potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
fi
for potent_lib in $potential_libs; do
# Follow soft links.
# Skip any candidate whose dereferenced listing (ls -lLd) still shows
# an ' -> ' arrow.  NOTE(review): this appears intended to reject
# links that -L cannot fully resolve (e.g. dangling or cyclic chains)
# before the manual walk below -- confirm against ls behavior on the
# supported hosts.
if ls -lLd "$potent_lib" 2>/dev/null |
$GREP " -> " >/dev/null; then
continue
fi
# The statement above tries to avoid entering an
# endless loop below, in case of cyclic links.
# We might still enter an endless loop, since a link
# loop can be closed while we follow links,
# but so what?
potlib=$potent_lib
# Resolve the symlink chain by hand: an absolute target (POSIX path or
# DOS drive-letter path, per the two patterns below) replaces potlib
# outright; a relative target is appended to potlib's directory
# component (the sed strips everything after the last '/').
while test -h "$potlib" 2>/dev/null; do
potliblink=`ls -ld $potlib | $SED 's/.* -> //'`
case $potliblink in
[\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;;
*) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";;
esac
done
if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
$SED -e 10q |
$EGREP "$file_magic_regex" > /dev/null; then
func_append newdeplibs " $a_deplib"
a_deplib=
break 2
fi
done
done
fi
if test -n "$a_deplib"; then
droppeddeps=yes
echo
$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib"; then
$ECHO "*** with $libname but no candidates were found. (...for file magic test)"
else
$ECHO "*** with $libname and none of the candidates passed a file format test"
$ECHO "*** using a file magic. Last file checked: $potlib"
fi
fi
;;
*)
# Add a -L argument.
func_append newdeplibs " $a_deplib"
;;
esac
done # Gone through all deplibs.
;;
match_pattern*)
set dummy $deplibs_check_method; shift
match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
for a_deplib in $deplibs; do
case $a_deplib in
-l*)
func_stripname -l '' "$a_deplib"
name=$func_stripname_result
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $a_deplib "*)
func_append newdeplibs " $a_deplib"
a_deplib=
;;
esac
fi
if test -n "$a_deplib"; then
libname=`eval "\\$ECHO \"$libname_spec\""`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
potlib=$potent_lib # see symlink-check above in file_magic test
if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
$EGREP "$match_pattern_regex" > /dev/null; then
func_append newdeplibs " $a_deplib"
a_deplib=
break 2
fi
done
done
fi
if test -n "$a_deplib"; then
droppeddeps=yes
echo
$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib"; then
$ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
else
$ECHO "*** with $libname and none of the candidates passed a file format test"
$ECHO "*** using a regex pattern. Last file checked: $potlib"
fi
fi
;;
*)
# Add a -L argument.
func_append newdeplibs " $a_deplib"
;;
esac
done # Gone through all deplibs.
;;
none | unknown | *)
newdeplibs=
tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
for i in $predeps $postdeps; do
# can't use Xsed below, because $i might contain '/'
tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"`
done
fi
case $tmp_deplibs in
*[!\ \ ]*)
echo
if test none = "$deplibs_check_method"; then
echo "*** Warning: inter-library dependencies are not supported in this platform."
else
echo "*** Warning: inter-library dependencies are not known to be supported."
fi
echo "*** All declared inter-library dependencies are being dropped."
droppeddeps=yes
;;
esac
;;
esac
versuffix=$versuffix_save
major=$major_save
release=$release_save
libname=$libname_save
name=$name_save
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody replace the C library with the System framework
newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
;;
esac
if test yes = "$droppeddeps"; then
if test yes = "$module"; then
echo
echo "*** Warning: libtool could not satisfy all declared inter-library"
$ECHO "*** dependencies of module $libname. Therefore, libtool will create"
echo "*** a static module, that should work as long as the dlopening"
echo "*** application is linked with the -dlopen flag."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using 'nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** 'nm' from GNU binutils and a full rebuild may help."
fi
if test no = "$build_old_libs"; then
oldlibs=$output_objdir/$libname.$libext
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
else
echo "*** The inter-library dependencies that have been dropped here will be"
echo "*** automatically added whenever a program is linked with this library"
echo "*** or is declared to -dlopen it."
if test no = "$allow_undefined"; then
echo
echo "*** Since this library must not contain undefined symbols,"
echo "*** because either the platform does not support them or"
echo "*** it was explicitly requested with -no-undefined,"
echo "*** libtool will only create a static version of it."
if test no = "$build_old_libs"; then
oldlibs=$output_objdir/$libname.$libext
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
fi
fi
# Done checking deplibs!
deplibs=$newdeplibs
fi
# Time to change all our "foo.ltframework" stuff back to "-framework foo"
case $host in
*-*-darwin*)
newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
;;
esac
# move library search paths that coincide with paths to not yet
# installed libraries to the beginning of the library search list
new_libs=
for path in $notinst_path; do
case " $new_libs " in
*" -L$path/$objdir "*) ;;
*)
case " $deplibs " in
*" -L$path/$objdir "*)
func_append new_libs " -L$path/$objdir" ;;
esac
;;
esac
done
for deplib in $deplibs; do
case $deplib in
-L*)
case " $new_libs " in
*" $deplib "*) ;;
*) func_append new_libs " $deplib" ;;
esac
;;
*) func_append new_libs " $deplib" ;;
esac
done
deplibs=$new_libs
# All the library-specific variables (install_libdir is set above).
library_names=
old_library=
dlname=
# Test again, we may have decided not to build it any more
if test yes = "$build_libtool_libs"; then
# Remove $wl instances when linking with ld.
# FIXME: should test the right _cmds variable.
case $archive_cmds in
*\$LD\ *) wl= ;;
esac
if test yes = "$hardcode_into_libs"; then
# Hardcode the library paths
hardcode_libdirs=
dep_rpath=
rpath=$finalize_rpath
test relink = "$opt_mode" || rpath=$compile_rpath$rpath
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
func_replace_sysroot "$libdir"
libdir=$func_replace_sysroot_result
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append dep_rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) func_append perm_rpath " $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
fi
if test -n "$runpath_var" && test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
func_append rpath "$dir:"
done
eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
fi
test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
fi
shlibpath=$finalize_shlibpath
test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath
if test -n "$shlibpath"; then
eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
fi
# Get the real and link names of the library.
eval shared_ext=\"$shrext_cmds\"
eval library_names=\"$library_names_spec\"
set dummy $library_names
shift
realname=$1
shift
if test -n "$soname_spec"; then
eval soname=\"$soname_spec\"
else
soname=$realname
fi
if test -z "$dlname"; then
dlname=$soname
fi
lib=$output_objdir/$realname
linknames=
for link
do
func_append linknames " $link"
done
# Use standard objects if they are pic
test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
test "X$libobjs" = "X " && libobjs=
delfiles=
if test -n "$export_symbols" && test -n "$include_expsyms"; then
$opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
export_symbols=$output_objdir/$libname.uexp
func_append delfiles " $export_symbols"
fi
orig_export_symbols=
case $host_os in
cygwin* | mingw* | cegcc*)
if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
# exporting using user supplied symfile
func_dll_def_p "$export_symbols" || {
# and it's NOT already a .def file. Must figure out
# which of the given symbols are data symbols and tag
# them as such. So, trigger use of export_symbols_cmds.
# export_symbols gets reassigned inside the "prepare
# the list of exported symbols" if statement, so the
# include_expsyms logic still works.
orig_export_symbols=$export_symbols
export_symbols=
always_export_symbols=yes
}
fi
;;
esac
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then
func_verbose "generating symbol list for '$libname.la'"
export_symbols=$output_objdir/$libname.exp
$opt_dry_run || $RM $export_symbols
cmds=$export_symbols_cmds
save_ifs=$IFS; IFS='~'
for cmd1 in $cmds; do
IFS=$save_ifs
# Take the normal branch if the nm_file_list_spec branch
# doesn't work or if tool conversion is not needed.
case $nm_file_list_spec~$to_tool_file_cmd in
*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
try_normal_branch=yes
eval cmd=\"$cmd1\"
func_len " $cmd"
len=$func_len_result
;;
*)
try_normal_branch=no
;;
esac
if test yes = "$try_normal_branch" \
&& { test "$len" -lt "$max_cmd_len" \
|| test "$max_cmd_len" -le -1; }
then
func_show_eval "$cmd" 'exit $?'
skipped_export=false
elif test -n "$nm_file_list_spec"; then
func_basename "$output"
output_la=$func_basename_result
save_libobjs=$libobjs
save_output=$output
output=$output_objdir/$output_la.nm
func_to_tool_file "$output"
libobjs=$nm_file_list_spec$func_to_tool_file_result
func_append delfiles " $output"
func_verbose "creating $NM input file list: $output"
for obj in $save_libobjs; do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result"
done > "$output"
eval cmd=\"$cmd1\"
func_show_eval "$cmd" 'exit $?'
output=$save_output
libobjs=$save_libobjs
skipped_export=false
else
# The command line is too long to execute in one step.
func_verbose "using reloadable object file for export list..."
skipped_export=:
# Break out early, otherwise skipped_export may be
# set to false by a later but shorter cmd.
break
fi
done
IFS=$save_ifs
if test -n "$export_symbols_regex" && test : != "$skipped_export"; then
func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
fi
fi
fi
if test -n "$export_symbols" && test -n "$include_expsyms"; then
tmp_export_symbols=$export_symbols
test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
$opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
fi
if test : != "$skipped_export" && test -n "$orig_export_symbols"; then
# The given exports_symbols file has to be filtered, so filter it.
func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
# FIXME: $output_objdir/$libname.filter potentially contains lots of
# 's' commands, which not all seds can handle. GNU sed should be fine
# though. Also, the filter scales superlinearly with the number of
# global variables. join(1) would be nice here, but unfortunately
# isn't a blessed tool.
$opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
func_append delfiles " $export_symbols $output_objdir/$libname.filter"
export_symbols=$output_objdir/$libname.def
$opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
fi
tmp_deplibs=
for test_deplib in $deplibs; do
case " $convenience " in
*" $test_deplib "*) ;;
*)
func_append tmp_deplibs " $test_deplib"
;;
esac
done
deplibs=$tmp_deplibs
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec" &&
test yes = "$compiler_needs_object" &&
test -z "$libobjs"; then
# extract the archives, so we have objects to list.
# TODO: could optimize this to just extract one archive.
whole_archive_flag_spec=
fi
if test -n "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
test "X$libobjs" = "X " && libobjs=
else
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $convenience
func_append libobjs " $func_extract_archives_result"
test "X$libobjs" = "X " && libobjs=
fi
fi
if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then
eval flag=\"$thread_safe_flag_spec\"
func_append linker_flags " $flag"
fi
# Make a backup of the uninstalled library when relinking
if test relink = "$opt_mode"; then
$opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
fi
# Do each of the archive commands.
if test yes = "$module" && test -n "$module_cmds"; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
eval test_cmds=\"$module_expsym_cmds\"
cmds=$module_expsym_cmds
else
eval test_cmds=\"$module_cmds\"
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
eval test_cmds=\"$archive_expsym_cmds\"
cmds=$archive_expsym_cmds
else
eval test_cmds=\"$archive_cmds\"
cmds=$archive_cmds
fi
fi
if test : != "$skipped_export" &&
func_len " $test_cmds" &&
len=$func_len_result &&
test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
:
else
# The command line is too long to link in one step, link piecewise
# or, if using GNU ld and skipped_export is not :, use a linker
# script.
# Save the value of $output and $libobjs because we want to
# use them later. If we have whole_archive_flag_spec, we
# want to use save_libobjs as it was before
# whole_archive_flag_spec was expanded, because we can't
# assume the linker understands whole_archive_flag_spec.
# This may have to be revisited, in case too many
# convenience libraries get linked in and end up exceeding
# the spec.
if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
fi
save_output=$output
func_basename "$output"
output_la=$func_basename_result
# Clear the reloadable object creation command queue and
# initialize k to one.
test_cmds=
concat_cmds=
objlist=
last_robj=
k=1
if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then
output=$output_objdir/$output_la.lnkscript
func_verbose "creating GNU ld script: $output"
echo 'INPUT (' > $output
for obj in $save_libobjs
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result" >> $output
done
echo ')' >> $output
func_append delfiles " $output"
func_to_tool_file "$output"
output=$func_to_tool_file_result
elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then
output=$output_objdir/$output_la.lnk
func_verbose "creating linker input file list: $output"
: > $output
set x $save_libobjs
shift
firstobj=
if test yes = "$compiler_needs_object"; then
firstobj="$1 "
shift
fi
for obj
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result" >> $output
done
func_append delfiles " $output"
func_to_tool_file "$output"
output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
else
if test -n "$save_libobjs"; then
func_verbose "creating reloadable object files..."
output=$output_objdir/$output_la-$k.$objext
eval test_cmds=\"$reload_cmds\"
func_len " $test_cmds"
len0=$func_len_result
len=$len0
# Loop over the list of objects to be linked.
for obj in $save_libobjs
do
func_len " $obj"
func_arith $len + $func_len_result
len=$func_arith_result
if test -z "$objlist" ||
test "$len" -lt "$max_cmd_len"; then
func_append objlist " $obj"
else
# The command $test_cmds is almost too long, add a
# command to the queue.
if test 1 -eq "$k"; then
# The first file doesn't have a previous command to add.
reload_objs=$objlist
eval concat_cmds=\"$reload_cmds\"
else
# All subsequent reloadable object files will link in
# the last one created.
reload_objs="$objlist $last_robj"
eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
fi
last_robj=$output_objdir/$output_la-$k.$objext
func_arith $k + 1
k=$func_arith_result
output=$output_objdir/$output_la-$k.$objext
objlist=" $obj"
func_len " $last_robj"
func_arith $len0 + $func_len_result
len=$func_arith_result
fi
done
# Handle the remaining objects by creating one last
# reloadable object file. All subsequent reloadable object
# files will link in the last one created.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
reload_objs="$objlist $last_robj"
eval concat_cmds=\"\$concat_cmds$reload_cmds\"
if test -n "$last_robj"; then
eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
fi
func_append delfiles " $output"
else
output=
fi
${skipped_export-false} && {
func_verbose "generating symbol list for '$libname.la'"
export_symbols=$output_objdir/$libname.exp
$opt_dry_run || $RM $export_symbols
libobjs=$output
# Append the command to create the export file.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
if test -n "$last_robj"; then
eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
fi
}
test -n "$save_libobjs" &&
func_verbose "creating a temporary reloadable object file: $output"
# Loop through the commands generated above and execute them.
save_ifs=$IFS; IFS='~'
for cmd in $concat_cmds; do
IFS=$save_ifs
$opt_quiet || {
func_quote_for_expand "$cmd"
eval "func_echo $func_quote_for_expand_result"
}
$opt_dry_run || eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
( cd "$output_objdir" && \
$RM "${realname}T" && \
$MV "${realname}U" "$realname" )
fi
exit $lt_exit
}
done
IFS=$save_ifs
if test -n "$export_symbols_regex" && ${skipped_export-false}; then
func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
fi
fi
${skipped_export-false} && {
if test -n "$export_symbols" && test -n "$include_expsyms"; then
tmp_export_symbols=$export_symbols
test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
$opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
fi
if test -n "$orig_export_symbols"; then
# The given exports_symbols file has to be filtered, so filter it.
func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
# FIXME: $output_objdir/$libname.filter potentially contains lots of
# 's' commands, which not all seds can handle. GNU sed should be fine
# though. Also, the filter scales superlinearly with the number of
# global variables. join(1) would be nice here, but unfortunately
# isn't a blessed tool.
$opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
func_append delfiles " $export_symbols $output_objdir/$libname.filter"
export_symbols=$output_objdir/$libname.def
$opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
fi
}
libobjs=$output
# Restore the value of output.
output=$save_output
if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
test "X$libobjs" = "X " && libobjs=
fi
# Expand the library linking commands again to reset the
# value of $libobjs for piecewise linking.
# Do each of the archive commands.
if test yes = "$module" && test -n "$module_cmds"; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
cmds=$module_expsym_cmds
else
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
cmds=$archive_expsym_cmds
else
cmds=$archive_cmds
fi
fi
fi
if test -n "$delfiles"; then
# Append the command to remove temporary files to $cmds.
eval cmds=\"\$cmds~\$RM $delfiles\"
fi
# Add any objects from preloaded convenience libraries
if test -n "$dlprefiles"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $dlprefiles
func_append libobjs " $func_extract_archives_result"
test "X$libobjs" = "X " && libobjs=
fi
save_ifs=$IFS; IFS='~'
for cmd in $cmds; do
IFS=$sp$nl
eval cmd=\"$cmd\"
IFS=$save_ifs
$opt_quiet || {
func_quote_for_expand "$cmd"
eval "func_echo $func_quote_for_expand_result"
}
$opt_dry_run || eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
( cd "$output_objdir" && \
$RM "${realname}T" && \
$MV "${realname}U" "$realname" )
fi
exit $lt_exit
}
done
IFS=$save_ifs
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
$opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
if test -n "$convenience"; then
if test -z "$whole_archive_flag_spec"; then
func_show_eval '${RM}r "$gentop"'
fi
fi
exit $EXIT_SUCCESS
fi
# Create links to the real library.
for linkname in $linknames; do
if test "$realname" != "$linkname"; then
func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
fi
done
# If -module or -export-dynamic was specified, set the dlname.
if test yes = "$module" || test yes = "$export_dynamic"; then
# On all known operating systems, these are identical.
dlname=$soname
fi
fi
;;
obj)
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
func_warning "'-dlopen' is ignored for objects"
fi
case " $deplibs" in
*\ -l* | *\ -L*)
func_warning "'-l' and '-L' are ignored for objects" ;;
esac
test -n "$rpath" && \
func_warning "'-rpath' is ignored for objects"
test -n "$xrpath" && \
func_warning "'-R' is ignored for objects"
test -n "$vinfo" && \
func_warning "'-version-info' is ignored for objects"
test -n "$release" && \
func_warning "'-release' is ignored for objects"
case $output in
*.lo)
test -n "$objs$old_deplibs" && \
func_fatal_error "cannot build library object '$output' from non-libtool objects"
libobj=$output
func_lo2o "$libobj"
obj=$func_lo2o_result
;;
*)
libobj=
obj=$output
;;
esac
# Delete the old objects.
$opt_dry_run || $RM $obj $libobj
# Objects from convenience libraries. This assumes
# single-version convenience libraries. Whenever we create
# different ones for PIC/non-PIC, we'll have to duplicate
# the extraction.
reload_conv_objs=
gentop=
# if reload_cmds runs $LD directly, get rid of -Wl from
# whole_archive_flag_spec and hope we can get by with turning comma
# into space.
case $reload_cmds in
*\$LD[\ \$]*) wl= ;;
esac
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags
else
gentop=$output_objdir/${obj}x
func_append generated " $gentop"
func_extract_archives $gentop $convenience
reload_conv_objs="$reload_objs $func_extract_archives_result"
fi
fi
# If we're not building shared, we need to use non_pic_objs
test yes = "$build_libtool_libs" || libobjs=$non_pic_objects
# Create the old-style object.
reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs
output=$obj
func_execute_cmds "$reload_cmds" 'exit $?'
# Exit if we aren't doing a library object file.
if test -z "$libobj"; then
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
exit $EXIT_SUCCESS
fi
test yes = "$build_libtool_libs" || {
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
# Create an invalid libtool object if no PIC, so that we don't
# accidentally link it into a program.
# $show "echo timestamp > $libobj"
# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
exit $EXIT_SUCCESS
}
if test -n "$pic_flag" || test default != "$pic_mode"; then
# Only do commands if we really have different PIC objects.
reload_objs="$libobjs $reload_conv_objs"
output=$libobj
func_execute_cmds "$reload_cmds" 'exit $?'
fi
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
exit $EXIT_SUCCESS
;;
prog)
case $host in
*cygwin*) func_stripname '' '.exe' "$output"
output=$func_stripname_result.exe;;
esac
test -n "$vinfo" && \
func_warning "'-version-info' is ignored for programs"
test -n "$release" && \
func_warning "'-release' is ignored for programs"
$preload \
&& test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \
&& func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support."
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody, replace the C library with the System framework
compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
;;
esac
case $host in
*-*-darwin*)
# Don't allow lazy linking, it breaks C++ global constructors
# But is supposedly fixed on 10.4 or later (yay!).
if test CXX = "$tagname"; then
case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
10.[0123])
func_append compile_command " $wl-bind_at_load"
func_append finalize_command " $wl-bind_at_load"
;;
esac
fi
# Time to change all our "foo.ltframework" stuff back to "-framework foo"
compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
;;
esac
# move library search paths that coincide with paths to not yet
# installed libraries to the beginning of the library search list
new_libs=
for path in $notinst_path; do
case " $new_libs " in
*" -L$path/$objdir "*) ;;
*)
case " $compile_deplibs " in
*" -L$path/$objdir "*)
func_append new_libs " -L$path/$objdir" ;;
esac
;;
esac
done
for deplib in $compile_deplibs; do
case $deplib in
-L*)
case " $new_libs " in
*" $deplib "*) ;;
*) func_append new_libs " $deplib" ;;
esac
;;
*) func_append new_libs " $deplib" ;;
esac
done
compile_deplibs=$new_libs
func_append compile_command " $compile_deplibs"
func_append finalize_command " $finalize_deplibs"
if test -n "$rpath$xrpath"; then
# If the user specified any rpath flags, then add them.
for libdir in $rpath $xrpath; do
# This is the magic to use -rpath.
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
done
fi
# Now hardcode the library paths
rpath=
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) func_append perm_rpath " $libdir" ;;
esac
fi
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'`
case :$dllsearchpath: in
*":$libdir:"*) ;;
::) dllsearchpath=$libdir;;
*) func_append dllsearchpath ":$libdir";;
esac
case :$dllsearchpath: in
*":$testbindir:"*) ;;
::) dllsearchpath=$testbindir;;
*) func_append dllsearchpath ":$testbindir";;
esac
;;
esac
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
compile_rpath=$rpath
rpath=
hardcode_libdirs=
for libdir in $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$finalize_perm_rpath " in
*" $libdir "*) ;;
*) func_append finalize_perm_rpath " $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
finalize_rpath=$rpath
if test -n "$libobjs" && test yes = "$build_old_libs"; then
# Transform all the library objects into standard objects.
compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
fi
func_generate_dlsyms "$outputname" "@PROGRAM@" false
# template prelinking step
if test -n "$prelink_cmds"; then
func_execute_cmds "$prelink_cmds" 'exit $?'
fi
wrappers_required=:
case $host in
*cegcc* | *mingw32ce*)
# Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway.
wrappers_required=false
;;
*cygwin* | *mingw* )
test yes = "$build_libtool_libs" || wrappers_required=false
;;
*)
if test no = "$need_relink" || test yes != "$build_libtool_libs"; then
wrappers_required=false
fi
;;
esac
$wrappers_required || {
# Replace the output file specification.
compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
link_command=$compile_command$compile_rpath
# We have no uninstalled library dependencies, so finalize right now.
exit_status=0
func_show_eval "$link_command" 'exit_status=$?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
# Delete the generated files.
if test -f "$output_objdir/${outputname}S.$objext"; then
func_show_eval '$RM "$output_objdir/${outputname}S.$objext"'
fi
exit $exit_status
}
if test -n "$compile_shlibpath$finalize_shlibpath"; then
compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
fi
if test -n "$finalize_shlibpath"; then
finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
fi
compile_var=
finalize_var=
if test -n "$runpath_var"; then
if test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
func_append rpath "$dir:"
done
compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
if test -n "$finalize_perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $finalize_perm_rpath; do
func_append rpath "$dir:"
done
finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
fi
if test yes = "$no_install"; then
# We don't need to create a wrapper script.
link_command=$compile_var$compile_command$compile_rpath
# Replace the output file specification.
link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
# Delete the old output file.
$opt_dry_run || $RM $output
# Link the executable and exit
func_show_eval "$link_command" 'exit $?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
exit $EXIT_SUCCESS
fi
case $hardcode_action,$fast_install in
relink,*)
# Fast installation is not supported
link_command=$compile_var$compile_command$compile_rpath
relink_command=$finalize_var$finalize_command$finalize_rpath
func_warning "this platform does not like uninstalled shared libraries"
func_warning "'$output' will be relinked during installation"
;;
*,yes)
link_command=$finalize_var$compile_command$finalize_rpath
relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
;;
*,no)
link_command=$compile_var$compile_command$compile_rpath
relink_command=$finalize_var$finalize_command$finalize_rpath
;;
*,needless)
link_command=$finalize_var$compile_command$finalize_rpath
relink_command=
;;
esac
# Replace the output file specification.
link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
# Delete the old output files.
$opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
func_show_eval "$link_command" 'exit $?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output_objdir/$outputname"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
# Now create the wrapper script.
func_verbose "creating $output"
# Quote the relink command for shipping.
if test -n "$relink_command"; then
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
func_quote_for_eval "$var_value"
relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
fi
done
relink_command="(cd `pwd`; $relink_command)"
relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
fi
# Only actually do things if not in dry run mode.
$opt_dry_run || {
# win32 will think the script is a binary if it has
# a .exe suffix, so we strip it off here.
case $output in
*.exe) func_stripname '' '.exe' "$output"
output=$func_stripname_result ;;
esac
# test for cygwin because mv fails w/o .exe extensions
case $host in
*cygwin*)
exeext=.exe
func_stripname '' '.exe' "$outputname"
outputname=$func_stripname_result ;;
*) exeext= ;;
esac
case $host in
*cygwin* | *mingw* )
func_dirname_and_basename "$output" "" "."
output_name=$func_basename_result
output_path=$func_dirname_result
cwrappersource=$output_path/$objdir/lt-$output_name.c
cwrapper=$output_path/$output_name.exe
$RM $cwrappersource $cwrapper
trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
func_emit_cwrapperexe_src > $cwrappersource
# The wrapper executable is built using the $host compiler,
# because it contains $host paths and files. If cross-
# compiling, it, like the target executable, must be
# executed on the $host or under an emulation environment.
$opt_dry_run || {
$LTCC $LTCFLAGS -o $cwrapper $cwrappersource
$STRIP $cwrapper
}
# Now, create the wrapper script for func_source use:
func_ltwrapper_scriptname $cwrapper
$RM $func_ltwrapper_scriptname_result
trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
$opt_dry_run || {
# note: this script will not be executed, so do not chmod.
if test "x$build" = "x$host"; then
$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
else
func_emit_wrapper no > $func_ltwrapper_scriptname_result
fi
}
;;
* )
$RM $output
trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
func_emit_wrapper no > $output
chmod +x $output
;;
esac
}
exit $EXIT_SUCCESS
;;
esac
# See if we need to build an old-fashioned archive.
for oldlib in $oldlibs; do
case $build_libtool_libs in
convenience)
oldobjs="$libobjs_save $symfileobj"
addlibs=$convenience
build_libtool_libs=no
;;
module)
oldobjs=$libobjs_save
addlibs=$old_convenience
build_libtool_libs=no
;;
*)
oldobjs="$old_deplibs $non_pic_objects"
$preload && test -f "$symfileobj" \
&& func_append oldobjs " $symfileobj"
addlibs=$old_convenience
;;
esac
if test -n "$addlibs"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $addlibs
func_append oldobjs " $func_extract_archives_result"
fi
# Do each command in the archive commands.
if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then
cmds=$old_archive_from_new_cmds
else
# Add any objects from preloaded convenience libraries
if test -n "$dlprefiles"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $dlprefiles
func_append oldobjs " $func_extract_archives_result"
fi
# POSIX demands no paths to be encoded in archives. We have
# to avoid creating archives with duplicate basenames if we
# might have to extract them afterwards, e.g., when creating a
# static archive out of a convenience library, or when linking
# the entirety of a libtool archive into another (currently
# not supported by libtool).
if (for obj in $oldobjs
do
func_basename "$obj"
$ECHO "$func_basename_result"
done | sort | sort -uc >/dev/null 2>&1); then
:
else
echo "copying selected object files to avoid basename conflicts..."
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_mkdir_p "$gentop"
save_oldobjs=$oldobjs
oldobjs=
counter=1
for obj in $save_oldobjs
do
func_basename "$obj"
objbase=$func_basename_result
case " $oldobjs " in
" ") oldobjs=$obj ;;
*[\ /]"$objbase "*)
while :; do
# Make sure we don't pick an alternate name that also
# overlaps.
newobj=lt$counter-$objbase
func_arith $counter + 1
counter=$func_arith_result
case " $oldobjs " in
*[\ /]"$newobj "*) ;;
*) if test ! -f "$gentop/$newobj"; then break; fi ;;
esac
done
func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
func_append oldobjs " $gentop/$newobj"
;;
*) func_append oldobjs " $obj" ;;
esac
done
fi
func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
tool_oldlib=$func_to_tool_file_result
eval cmds=\"$old_archive_cmds\"
func_len " $cmds"
len=$func_len_result
if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
cmds=$old_archive_cmds
elif test -n "$archiver_list_spec"; then
func_verbose "using command file archive linking..."
for obj in $oldobjs
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result"
done > $output_objdir/$libname.libcmd
func_to_tool_file "$output_objdir/$libname.libcmd"
oldobjs=" $archiver_list_spec$func_to_tool_file_result"
cmds=$old_archive_cmds
else
# the command line is too long to link in one step, link in parts
func_verbose "using piecewise archive linking..."
save_RANLIB=$RANLIB
RANLIB=:
objlist=
concat_cmds=
save_oldobjs=$oldobjs
oldobjs=
# Is there a better way of finding the last object in the list?
for obj in $save_oldobjs
do
last_oldobj=$obj
done
eval test_cmds=\"$old_archive_cmds\"
func_len " $test_cmds"
len0=$func_len_result
len=$len0
for obj in $save_oldobjs
do
func_len " $obj"
func_arith $len + $func_len_result
len=$func_arith_result
func_append objlist " $obj"
if test "$len" -lt "$max_cmd_len"; then
:
else
# the above command should be used before it gets too long
oldobjs=$objlist
if test "$obj" = "$last_oldobj"; then
RANLIB=$save_RANLIB
fi
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\$concat_cmds$old_archive_cmds\"
objlist=
len=$len0
fi
done
RANLIB=$save_RANLIB
oldobjs=$objlist
if test -z "$oldobjs"; then
eval cmds=\"\$concat_cmds\"
else
eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
fi
fi
fi
func_execute_cmds "$cmds" 'exit $?'
done
test -n "$generated" && \
func_show_eval "${RM}r$generated"
# Now create the libtool archive.
case $output in
*.la)
old_library=
test yes = "$build_old_libs" && old_library=$libname.$libext
func_verbose "creating $output"
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
func_quote_for_eval "$var_value"
relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
fi
done
# Quote the link command for shipping.
relink_command="(cd `pwd`; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
if test yes = "$hardcode_automatic"; then
relink_command=
fi
# Only create the output if not a dry run.
$opt_dry_run || {
for installed in no yes; do
if test yes = "$installed"; then
if test -z "$install_libdir"; then
break
fi
output=$output_objdir/${outputname}i
# Replace all uninstalled libtool libraries with the installed ones
newdependency_libs=
for deplib in $dependency_libs; do
case $deplib in
*.la)
func_basename "$deplib"
name=$func_basename_result
func_resolve_sysroot "$deplib"
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
test -z "$libdir" && \
func_fatal_error "'$deplib' is not a valid libtool archive"
func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
;;
-L*)
func_stripname -L '' "$deplib"
func_replace_sysroot "$func_stripname_result"
func_append newdependency_libs " -L$func_replace_sysroot_result"
;;
-R*)
func_stripname -R '' "$deplib"
func_replace_sysroot "$func_stripname_result"
func_append newdependency_libs " -R$func_replace_sysroot_result"
;;
*) func_append newdependency_libs " $deplib" ;;
esac
done
dependency_libs=$newdependency_libs
newdlfiles=
for lib in $dlfiles; do
case $lib in
*.la)
func_basename "$lib"
name=$func_basename_result
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
test -z "$libdir" && \
func_fatal_error "'$lib' is not a valid libtool archive"
func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
;;
*) func_append newdlfiles " $lib" ;;
esac
done
dlfiles=$newdlfiles
newdlprefiles=
for lib in $dlprefiles; do
case $lib in
*.la)
# Only pass preopened files to the pseudo-archive (for
# eventual linking with the app. that links it) if we
# didn't already link the preopened objects directly into
# the library:
func_basename "$lib"
name=$func_basename_result
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
test -z "$libdir" && \
func_fatal_error "'$lib' is not a valid libtool archive"
func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
;;
esac
done
dlprefiles=$newdlprefiles
else
newdlfiles=
for lib in $dlfiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
*) abs=`pwd`"/$lib" ;;
esac
func_append newdlfiles " $abs"
done
dlfiles=$newdlfiles
newdlprefiles=
for lib in $dlprefiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
*) abs=`pwd`"/$lib" ;;
esac
func_append newdlprefiles " $abs"
done
dlprefiles=$newdlprefiles
fi
$RM $output
# place dlname in correct position for cygwin
# In fact, it would be nice if we could use this code for all target
# systems that can't hard-code library paths into their executables
# and that have no shared library path variable independent of PATH,
# but it turns out we can't easily determine that from inspecting
# libtool variables, so we have to hard-code the OSs to which it
# applies here; at the moment, that means platforms that use the PE
# object format with DLL files. See the long comment at the top of
# tests/bindir.at for full details.
tdlname=$dlname
case $host,$output,$installed,$module,$dlname in
*cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
# If a -bindir argument was supplied, place the dll there.
if test -n "$bindir"; then
func_relative_path "$install_libdir" "$bindir"
tdlname=$func_relative_path_result/$dlname
else
# Otherwise fall back on heuristic.
tdlname=../bin/$dlname
fi
;;
esac
$ECHO > $output "\
# $outputname - a libtool library file
# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# The name that we can dlopen(3).
dlname='$tdlname'
# Names of this library.
library_names='$library_names'
# The name of the static archive.
old_library='$old_library'
# Linker flags that cannot go in dependency_libs.
inherited_linker_flags='$new_inherited_linker_flags'
# Libraries that this one depends upon.
dependency_libs='$dependency_libs'
# Names of additional weak libraries provided by this library
weak_library_names='$weak_libs'
# Version information for $libname.
current=$current
age=$age
revision=$revision
# Is this an already installed library?
installed=$installed
# Should we warn about portability when linking against -modules?
shouldnotlink=$module
# Files to dlopen/dlpreopen
dlopen='$dlfiles'
dlpreopen='$dlprefiles'
# Directory that this library needs to be installed in:
libdir='$install_libdir'"
if test no,yes = "$installed,$need_relink"; then
$ECHO >> $output "\
relink_command=\"$relink_command\""
fi
done
}
# Do a symbolic link so that the libtool archive can be found in
# LD_LIBRARY_PATH before the program is installed.
func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
;;
esac
exit $EXIT_SUCCESS
}
# Dispatch: both 'link' and 'relink' modes are implemented by func_mode_link
# (relink differs only in flags already recorded in $opt_mode).  ${1+"$@"}
# forwards the remaining arguments while staying safe on ancient shells
# where a bare "$@" expands to one empty arg when no args are present.
if test link = "$opt_mode" || test relink = "$opt_mode"; then
  func_mode_link ${1+"$@"}
fi
# func_mode_uninstall arg...
# Implements 'libtool --mode=uninstall' and 'libtool --mode=clean':
# removes each FILE given on the command line together with the
# companion files libtool generated for it (shared/static libraries,
# PIC and non-PIC objects, wrapper scripts and wrapper data).
func_mode_uninstall ()
{
    $debug_cmd

    RM=$nonopt
    files=
    rmforce=false
    exit_status=0

    # This variable tells wrapper scripts just to set variables rather
    # than running their programs.
    libtool_install_magic=$magic

    # Split remaining arguments into flags for $RM and files to remove.
    for arg
    do
      case $arg in
      -f) func_append RM " $arg"; rmforce=: ;;
      -*) func_append RM " $arg" ;;
      *) func_append files " $arg" ;;
      esac
    done

    test -z "$RM" && \
      func_fatal_help "you must specify an RM program"

    rmdirs=

    for file in $files; do
      func_dirname "$file" "" "."
      dir=$func_dirname_result
      if test . = "$dir"; then
        odir=$objdir
      else
        odir=$dir/$objdir
      fi
      func_basename "$file"
      name=$func_basename_result
      test uninstall = "$opt_mode" && odir=$dir

      # Remember odir for removal later, being careful to avoid duplicates
      if test clean = "$opt_mode"; then
        case " $rmdirs " in
        *" $odir "*) ;;
        *) func_append rmdirs " $odir" ;;
        esac
      fi

      # Don't error if the file doesn't exist and rm -f was used.
      if { test -L "$file"; } >/dev/null 2>&1 ||
         { test -h "$file"; } >/dev/null 2>&1 ||
         test -f "$file"; then
        :
      elif test -d "$file"; then
        exit_status=1
        continue
      elif $rmforce; then
        continue
      fi

      rmfiles=$file

      case $name in
      *.la)
        # Possibly a libtool archive, so verify it.
        if func_lalib_p "$file"; then
          func_source $dir/$name

          # Delete the libtool libraries and symlinks.
          for n in $library_names; do
            func_append rmfiles " $odir/$n"
          done
          test -n "$old_library" && func_append rmfiles " $odir/$old_library"

          case $opt_mode in
          clean)
            case " $library_names " in
            *" $dlname "*) ;;
            *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
            esac
            test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
            ;;
          uninstall)
            if test -n "$library_names"; then
              # Do each command in the postuninstall commands.
              func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1'
            fi

            if test -n "$old_library"; then
              # Do each command in the old_postuninstall commands.
              func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1'
            fi
            # FIXME: should reinstall the best remaining shared library.
            ;;
          esac
        fi
        ;;

      *.lo)
        # Possibly a libtool object, so verify it.
        if func_lalib_p "$file"; then

          # Read the .lo file
          func_source $dir/$name

          # Add PIC object to the list of files to remove.
          if test -n "$pic_object" && test none != "$pic_object"; then
            func_append rmfiles " $dir/$pic_object"
          fi

          # Add non-PIC object to the list of files to remove.
          if test -n "$non_pic_object" && test none != "$non_pic_object"; then
            func_append rmfiles " $dir/$non_pic_object"
          fi
        fi
        ;;

      *)
        if test clean = "$opt_mode"; then
          noexename=$name
          case $file in
          *.exe)
            func_stripname '' '.exe' "$file"
            file=$func_stripname_result
            func_stripname '' '.exe' "$name"
            noexename=$func_stripname_result
            # $file with .exe has already been added to rmfiles,
            # add $file without .exe
            func_append rmfiles " $file"
            ;;
          esac
          # Do a test to see if this is a libtool program.
          if func_ltwrapper_p "$file"; then
            if func_ltwrapper_executable_p "$file"; then
              func_ltwrapper_scriptname "$file"
              relink_command=
              func_source $func_ltwrapper_scriptname_result
              func_append rmfiles " $func_ltwrapper_scriptname_result"
            else
              relink_command=
              func_source $dir/$noexename
            fi

            # note $name still contains .exe if it was in $file originally
            # as does the version of $file that was added into $rmfiles
            func_append rmfiles " $odir/$name $odir/${name}S.$objext"
            if test yes = "$fast_install" && test -n "$relink_command"; then
              func_append rmfiles " $odir/lt-$name"
            fi
            if test "X$noexename" != "X$name"; then
              func_append rmfiles " $odir/lt-$noexename.c"
            fi
          fi
        fi
        ;;
      esac
      func_show_eval "$RM $rmfiles" 'exit_status=1'
    done

    # Try to remove the $objdir's in the directories where we deleted files
    for dir in $rmdirs; do
      if test -d "$dir"; then
        func_show_eval "rmdir $dir >/dev/null 2>&1"
      fi
    done

    exit $exit_status
}
# Dispatch to func_mode_uninstall for both 'uninstall' and 'clean' modes.
if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then
func_mode_uninstall ${1+"$@"}
fi

# A MODE must have been selected by now; fail with the generic help text.
test -z "$opt_mode" && {
help=$generic_help
func_fatal_help "you must specify a MODE"
}

test -z "$exec_cmd" && \
func_fatal_help "invalid operation mode '$opt_mode'"

# 'execute' mode leaves a command line in $exec_cmd: replace this shell
# process with it.
if test -n "$exec_cmd"; then
eval exec "$exec_cmd"
exit $EXIT_FAILURE
fi

exit $exit_status
# The TAGs below are defined such that we never get into a situation
# where we disable both kinds of libraries. Given conflicting
# choices, we go for a static library, that is the most portable,
# since we can't tell whether shared libraries were disabled because
# the user asked for that or because the platform doesn't support
# them. This is particularly important on AIX, because we don't
# support having both static and shared libraries enabled at the same
# time on that platform, so we default to a shared-only configuration.
# If a disable-shared tag is given, we'll fallback to a static-only
# configuration. But we'll never go from static-only to shared-only.
# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
build_libtool_libs=no
build_old_libs=yes
# ### END LIBTOOL TAG CONFIG: disable-shared
# ### BEGIN LIBTOOL TAG CONFIG: disable-static
build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
# ### END LIBTOOL TAG CONFIG: disable-static
# Local Variables:
# mode:shell-script
# sh-indentation:2
# End:
|
package com.company.example.hazelcast.repository.utils;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;
@Component
public class JsonUtils
        implements Serializable {

    private static final long serialVersionUID = 1L;

    private static final Logger LOGGER = LoggerFactory.getLogger(JsonUtils.class);

    /** Jackson mapper configured (elsewhere) for camelCase property naming. */
    @Autowired
    private ObjectMapper objectMapperCamelCase;

    /**
     * Serializes {@code object} to its JSON representation.
     *
     * @param object the value to serialize; may be {@code null}
     * @return the JSON string, or {@code null} when serialization fails
     */
    public String toJson(Object object) {
        try {
            return this.objectMapperCamelCase.writeValueAsString(object);
        } catch (Exception e) {
            // FIX: log the exception itself (stack trace), not only the value,
            // and avoid an NPE when object is null.
            LOGGER.error("Error al serializar la clase={} : {}",
                    object == null ? null : object.getClass(), object, e);
            return null;
        }
    }

    /**
     * Deserializes {@code json} into an instance of {@code clazz}.
     *
     * @param <T>   target type parameter
     * @param json  the JSON document to parse
     * @param clazz target type
     * @return the deserialized instance, or {@code null} when parsing fails
     */
    public <T> T toObject(String json, Class<T> clazz) {
        try {
            return this.objectMapperCamelCase.readValue(json, clazz);
        } catch (Exception e) {
            // FIX: the original had four "{}" placeholders for three arguments,
            // so SLF4J consumed the exception as a plain parameter and dropped
            // its stack trace. With matching placeholders, the trailing
            // Throwable is logged properly.
            LOGGER.error("Error al deserializar el json {} a la clase={}", json, clazz, e);
            return null;
        }
    }
}
|
class ParameterConfig:
    """Registry of named parameter values plus type-code constants.

    The class-level integers are discriminator codes callers use to tag
    parameter kinds; each instance stores arbitrary name -> value pairs.
    """

    INTEGER = 5
    DECIMAL = 6
    STRING = 7
    TOKEN = 8
    BINARY = 9
    BOOLEAN = 10
    LIST = 11
    INNER_LIST = 12

    def __init__(self):
        # Backing store mapping parameter name -> stored value.
        self.parameters = dict()

    def set_parameter(self, name, value):
        """Store ``value`` under ``name``, overwriting any previous entry."""
        self.parameters[name] = value

    def get_parameter(self, name):
        """Return the value stored under ``name``, or ``None`` when absent."""
        return self.parameters.get(name)
<gh_stars>1-10
package main
import (
"fmt"
"net/http"
"os"
log "github.com/sirupsen/logrus"
)
// main configures structured JSON logging and starts the HTTP game server,
// pre-seeded with a single game named "test". GameHolder, NewGame and
// Server are defined elsewhere in this package.
func main() {
	// Log as JSON instead of the default ASCII formatter.
	log.SetFormatter(&log.JSONFormatter{})

	// Output to stdout instead of the default stderr
	// Can be any io.Writer, see below for File example
	log.SetOutput(os.Stdout)

	// Only log the info severity or above.
	// (The original comment said "warning", but the code sets InfoLevel.)
	log.SetLevel(log.InfoLevel)

	m := map[string]*GameHolder{
		"test": &GameHolder{g: NewGame("test")},
	}

	server := &Server{
		Server: http.Server{
			Addr: ":80",
		},
	}

	// Start blocks; any serve/startup error is reported to stderr.
	if err := server.Start(m); err != nil {
		fmt.Fprintf(os.Stderr, "error: %s\n", err)
	}
}
|
#!/bin/bash
# Regenerate src/emoji-menu/emoji.js from the upstream emojione emoji.json.
# Abort on any failed step so a bad download never overwrites the parsed file.
set -e

# FIX: -f so a missing stale copy does not error on a fresh checkout.
rm -f ./emojione.json

# FIX: the original 'ls' only checked that the target existed (and failed on
# first run); ensure the output directory exists instead.
mkdir -p ./src/emoji-menu

# FIX: -f makes curl fail on HTTP errors instead of saving an error page as
# "JSON"; -sS stays quiet but still shows errors; -L follows redirects.
curl -fsSL https://raw.githubusercontent.com/emojione/emojione/master/emoji.json > emojione.json

node parse-emoji.js > src/emoji-menu/emoji.js
node_modules/.bin/prettier src/emoji-menu/emoji.js --write --single-quote

echo "Downloaded and parsed emojione.json!"
|
#! /bin/sh
# Copyright (C) 1998-2014 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Another sources-in-conditional test.  Report from Tim Goodwin.
# Verifies that _SOURCES assembled from AM_CONDITIONAL-guarded variables
# yields the correct object list for every combination of conditions.

. test-init.sh

cat >> configure.ac << 'END'
AC_SUBST([CC], [false])
AC_SUBST([OBJEXT], [o])
AM_CONDITIONAL([ONE], [test "x$CONDITION1" = "xtrue"])
AM_CONDITIONAL([TWO], [test "x$CONDITION2" = "xtrue"])
AC_OUTPUT
END

cat > Makefile.am << 'END'
bin_PROGRAMS = targ
if ONE
OPT1 = one.c
endif
if TWO
OPT2 = two.c
endif
targ_SOURCES = main.c $(OPT1) $(OPT2)
.PHONY: test
test:
	is $(exp) == $(targ_OBJECTS)
END

$ACLOCAL
$AUTOMAKE -i

# We should not output useless definitions.
grep '^@ONE_FALSE@' Makefile.in && exit 1
grep '^@TWO_FALSE@' Makefile.in && exit 1

$AUTOCONF

# Exercise all four combinations of the two conditionals and compare the
# resulting object list each time.
CONDITION1=true CONDITION2=true ./configure
$MAKE test exp='main.o one.o two.o'
CONDITION1=true CONDITION2=false ./configure
$MAKE test exp='main.o one.o'
CONDITION1=false CONDITION2=true ./configure
$MAKE test exp='main.o two.o'
CONDITION1=false CONDITION2=false ./configure
$MAKE test exp='main.o'
:
|
#include "TTFKerningSubtable.h"
BF::TTFKerningSubtable::TTFKerningSubtable()
{
    // Start from a fully cleared subtable: header fields, then the
    // coverage-derived fields that ParseCoverageValue() fills in later.
    Version = Length = 0;
    Horizontal = Minimum = Cross = Override = 0;
    Reserved = Format = 0;
}
void BF::TTFKerningSubtable::ParseCoverageValue(unsigned short coverage)
{
    // Decode the kerning-subtable coverage word, shifting each field down
    // to bit 0 before masking off its width (equivalent to the original
    // mask-then-shift form).
    Horizontal = coverage & 0x0001;      // bit 0
    Minimum = (coverage >> 1) & 0x0001;  // bit 1
    Cross = (coverage >> 2) & 0x0001;    // bit 2
    Override = (coverage >> 3) & 0x0001; // bit 3
    Reserved = (coverage >> 4) & 0x000F; // bits 4-7
    Format = (coverage >> 8) & 0x00FF;   // bits 8-15
}
#!/bin/bash -e
# Print command usage and exit with failure.
function printUsage
{
    echo ""
    echo "Usage:"
    echo "  $0 -i id_rsa -m 192.168.102.34 -u azureuser -o validation.json"
    echo ""
    echo "Options:"
    echo "  -u, --user           User name associated to the identity-file"
    echo "  -i, --identity-file  RSA private key tied to the public key used to create the Kubernetes cluster (usually named 'id_rsa')"
    echo "  -m, --vmd-host       The DVM's public IP or FQDN (host name starts with 'vmd-')"
    echo "  -a, --app-ip         Public IP of the deployed application (optional)"
    echo "  -o, --output-file    Output file"
    echo "  -h, --help           Print the command usage"
    exit 1
}
# No arguments at all: show usage and bail out.
if [ "$#" -eq 0 ]
then
    printUsage
fi

# Handle named parameters
while [[ "$#" -gt 0 ]]
do
    case $1 in
        -i|--identity-file)
            IDENTITYFILE="$2"
            shift 2
            ;;
        -m|--vmd-host)
            DVM_HOST="$2"
            shift 2
            ;;
        -u|--user)
            USER="$2"
            shift 2
            ;;
        -a|--app-ip)
            APP_IP="$2"
            shift 2
            ;;
        -o|--output-file)
            OUTPUT_SUMMARYFILE="$2"
            shift 2
            ;;
        -h|--help)
            printUsage
            ;;
        *)
            echo ""
            echo "[ERR] Incorrect option $1"
            printUsage
            ;;
    esac
done

# Validate input
if [ -z "$USER" ]
then
    echo ""
    echo "[ERR] --user is required"
    printUsage
fi

if [ -z "$IDENTITYFILE" ]
then
    echo ""
    echo "[ERR] --identity-file is required"
    printUsage
fi

if [ -z "$DVM_HOST" ]
then
    echo ""
    echo "[ERR] --vmd-host should be provided"
    printUsage
fi

# FIX: paths below are derived from OUTPUT_SUMMARYFILE; an unset value made
# dirname resolve to "." and silently wrote the summary to the current
# directory. Fail early instead.
if [ -z "$OUTPUT_SUMMARYFILE" ]
then
    echo ""
    echo "[ERR] --output-file is required"
    printUsage
fi

# FIX: quote the path so a filename with spaces doesn't break the test.
if [ ! -f "$IDENTITYFILE" ]
then
    echo ""
    echo "[ERR] identity-file not found at $IDENTITYFILE"
    printUsage
    exit 1
else
    # Sanity-check that the file looks like an RSA/OpenSSH private key.
    cat "$IDENTITYFILE" | grep -q "BEGIN \(RSA\|OPENSSH\) PRIVATE KEY" \
        || { echo "The identity file $IDENTITYFILE is not a RSA Private Key file."; echo "A RSA private key file starts with '-----BEGIN [RSA|OPENSSH] PRIVATE KEY-----''"; exit 1; }
fi

# Print user input
echo ""
echo "user:          $USER"
echo "identity-file: $IDENTITYFILE"
echo "vmd-host:      $DVM_HOST"
echo ""

OUTPUT_FOLDER=$(dirname "$OUTPUT_SUMMARYFILE")
LOG_FILENAME="$OUTPUT_FOLDER/mongo_availability_validation.log"
AVAILABILITY_FILENAME="$OUTPUT_FOLDER/mongo_availability.log"

# Tolerated number of connectivity failures before the check is failed.
MONGODB_ERROR_COUNT=10

{
    ROOT_PATH=/home/$USER

    # Pull the availability log off the DVM for local inspection.
    scp -q -i $IDENTITYFILE $USER@$DVM_HOST:$ROOT_PATH/mongo_availability_logs $AVAILABILITY_FILENAME

    # Count connection errors recorded on the DVM.
    MONGO_CONNECTIVITY_ERROR=$(ssh -t -i $IDENTITYFILE $USER@$DVM_HOST "grep -c \"Error: couldn't connect to server\" $ROOT_PATH/mongo_availability_logs")

    # Emit a one-line JSON summary: fail when errors exceed the threshold.
    if [[ "$MONGO_CONNECTIVITY_ERROR" -gt "$MONGODB_ERROR_COUNT" ]]; then
        printf '{"result":"fail","error":"%s"}\n' "$MONGO_CONNECTIVITY_ERROR is greater than $MONGODB_ERROR_COUNT threshold."> $OUTPUT_SUMMARYFILE
    else
        printf '{"result":"%s"}\n' "pass" > $OUTPUT_SUMMARYFILE
    fi
} 2>&1 | tee $LOG_FILENAME
/**
 * IPC channel identifiers shared between the two process sides
 * (presumably Electron main/renderer — confirm against the callers).
 * The first group names request channels; each has a mirrored REPLY_*
 * channel in the second group carrying the response. Members use the
 * default auto-incremented numeric enum values, so ordering matters.
 */
export enum EVENTS {
  // Invoke
  OPEN_FILE,
  REC_AUDIO,
  SAVE_FILE,
  OPEN_IMPORT_FILE,
  OPEN_DIST_FOLDER,
  DOWNLOAD_INFO,
  DOWNLOAD_FILE,
  EXEC_PRELOAD,
  EXEC_DOWNLOAD,
  OPEN_FILE_IN_DIR,
  EXEC_PAUSE,
  // On
  REPLY_OPEN_FILE,
  REPLY_REC_AUDIO,
  REPLY_SAVE_FILE,
  REPLY_OPEN_IMPORT_FILE,
  REPLY_OPEN_DIST_FOLDER,
  REPLY_DOWNLOAD_INFO,
  REPLY_DOWNLOAD_FILE,
  REPLY_EXEC_PRELOAD,
  REPLY_EXEC_DOWNLOAD,
  REPLY_OPEN_FILE_IN_DIR,
  REPLY_EXEC_PAUSE,
}
|
<filename>src/Client_Service1.java
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
/**
 * Cooperative-cache client: resolves a block's location via the meta
 * server, reads the block from a storage server or from another client's
 * cooperative cache, and registers newly cached blocks back with the
 * meta server (C-table update).
 *
 * NOTE(review): relies on members inherited from Client (hostname,
 * buff_size, arraybyt1, ConnectToStorageserverSIO) that are defined
 * elsewhere in this project — confirm their semantics there.
 */
public class Client_Service1 extends Client{
    int mport = 7777; // Meta server- getting block
    int mportu = 5555; //Meta server- updating C-table
    // Address/port this client advertises for cooperative-cache requests.
    static String Host_Client = "localhost";
    protected static int Port_Client;
    ServerSocket cachelisten;
    // Maps block name -> [blkid, host, port] of the client caching it.
    public static Map<String, Object> mapblk = new HashMap<String, Object>(); // blk->Copclient
    public ArrayList<Object> hport11;

    /** Allocates the read buffer; the cache-listener setup is disabled. */
    public Client_Service1() {
        //boolean cdir = (new File("/DHPFS/Client")).mkdir();
        arraybyt1 = new byte[buff_size];
        /*try {
        cachelisten = new ServerSocket(Port_Client);
        System.out.println ("Listening on port for cache req: " + cachelisten.getLocalPort());
        } catch(Exception e) {System.out.println("Client cache blk listen::"+ e.getMessage());}*/
    }

    /**
     * Fetches block {@code blkname}. Asks the meta server for the block's
     * location; a 2-element answer [host, port] means a storage server
     * (read it, then cache locally and update the C-table), a 3-element
     * answer [blkid, host, port] means another client's cooperative cache.
     */
    public void GetBlock(String blkname){
        //connect to meta server write blkname
        //get storage serv locn
        //connect to storage server, read blk
        //Read blk from storageB and map blk to coop client
        //update meta_service , update c-table
        //listen on port to get req from other clients
        try {
            //Connect to Meta-server
            Socket clnt = new Socket(hostname, mport);
            PrintWriter mout = new PrintWriter(clnt.getOutputStream (),true);
            //Cannot perform read and write both on socket simulty
            System.out.println("\nEstablishing connection with Meta server on port: " + mport);
            System.out.println("Client:" + clnt);
            //Write to socket
            System.out.println("\n------------------Getting Block location from Meta Server-------------------->");
            System.out.println("\nClient getMetadata() from Meta-Server :-->" + blkname);
            mout.write(blkname);
            mout.close();
            //Get block location from meta server
            // NOTE(review): a second connection is opened for the reply —
            // presumably the meta server's protocol; confirm server-side.
            Socket clnt5 = new Socket(hostname, mport);
            ObjectInputStream ooi = new ObjectInputStream(clnt5.getInputStream());
            HashMap<String,ArrayList<Object>> bport7 = (HashMap) ooi.readObject();
            System.out.println("\n\nReceived block location from Meta_service:" + bport7.get(blkname) );
            //System.out.println("host/port value:" + bport7.get(blkname).get(0)+ ":::" + bport7.get(blkname).get(1));
            //System.out.println("\n Array list::" + bport7.get(blkname).size());
            //??check if array list as 3 - read unique blkid else 2 - host/port
            // if 3 - write blkid instead of blkname and call cacheblk(blkid)
            //if 2 - create blkid (blkname+port) and call cacheblk(blkid)
            //Connect to storage server to read block
            if(bport7.get(blkname).size() == 2) {
                String hosts = bport7.get(blkname).get(0).toString();
                int ports = Integer.parseInt(bport7.get(blkname).get(1).toString());
                Client obj7 = new Client();
                System.out.println("\n\n Client connecting to storage server to read blocks...");
                obj7.ConnectToStorageserverSIO(blkname, hosts, ports);
                // If blk is read then only cache else exit
                clnt5.close();
                ooi.close();
                //Create a unique blkid and then Cache Blocks
                String blkid = blkname+ports;
                Cache_Block(blkname, blkid);
            }
            //Connect to Client cooperative cache
            else if(bport7.get(blkname).size() == 3) {
                String blkid = bport7.get(blkname).get(0).toString();
                String hosts = bport7.get(blkname).get(1).toString();
                int ports = Integer.parseInt(bport7.get(blkname).get(2).toString());
                Client obj7 = new Client();
                System.out.println("\n\n Client connecting to Client cooperative cache to read blocks...");
                // Time the cache read for the experiment report below.
                long start1 = System.currentTimeMillis();
                obj7.ConnectToStorageserverSIO(blkid, hosts, ports);
                long end1 = System.currentTimeMillis();
                System.out.println("\n*******Block access time (Client -> Cooperative cache)::::" + (end1-start1));
                // If blk is read then only cache else exit
                clnt5.close();
                ooi.close();
                //Create a unique blkid and then Cache Blocks
                //String blkid = blkname+ports;
                //** not needed when performing experiments to read blocks
                //String blkid1 = blkid+ports;
                //Cache_Block(blkname,blkid1);
            }
        }catch(Exception e){ System.out.println("client service Exception:" +e.getMessage());}
    }

    /**
     * Caches block {@code blkid} in local memory (via Blkcache), records
     * the blkname -> [blkid, host, port] mapping, and pushes the MRU list
     * plus mapping to the meta server's C-table port.
     */
    public void Cache_Block(String blkname,String blkid) {
        try {
            //Create tuple and Put Blocks in memory (Call Blkcache)
            Blkcache cblk = new Blkcache();
            System.out.println("Putting blocks in memory::::" + blkid);
            cblk.PutBlk(blkid);
            // Map cache blk
            hport11 = new ArrayList<Object>();
            //?? add blkid
            hport11.add(blkid);
            hport11.add(Host_Client);
            hport11.add(Port_Client);
            System.out.println("Client host/port::" + hport11);
            // ??Mapping blk to cop-client //** It should be unique blkid
            // It should mapblk to put blkname -> <blkID, host,port>
            // Also Mrublk should add blkname instead of ID, since its written to Meta
            mapblk.put(blkname, hport11);
            LinkedList<String> Mrublk2 = new LinkedList<String>(cblk.Mrublk);
            //Update Meta server or C-table
            Socket clntU = new Socket(hostname, mportu);
            //PrintWriter mout22 = new PrintWriter(clntU.getOutputStream (),true);
            System.out.println("\nEstablishing connection with Meta server to update C-table on port: " + mportu);
            ObjectOutputStream objw = new ObjectOutputStream(clntU.getOutputStream());
            System.out.println("\n\n Mapblks and Mrublk list:::" + Mrublk2 + mapblk );
            objw.writeObject(Mrublk2);
            objw.writeObject(mapblk);
            clntU.close();
            objw.close();
            //Listen on port to get cache client req
            Client_Service1 objcc = new Client_Service1();
            //objcc.run();
        }catch(Exception e){ System.out.println("Exception:" +e.getMessage());}
    }

    /*public void ClientReq_Cache(){
    try {
    cachelisten = new ServerSocket(Port_Client);
    System.out.println ("Listening on port for cache req: " + cachelisten.getLocalPort());
    for(;;){
    //Accept client connection for cache blk req
    Socket cacsock = cachelisten.accept();
    BufferedReader iread = new BufferedReader(new InputStreamReader(cacsock.getInputStream()));
    String blk = iread.readLine();
    System.out.println("\nClient requesting for cache block::" + blk);
    iread.close();
    cacsock.close();
    Blkcache cblk = new Blkcache();
    if(cblk.mblk.containsKey(blk)) {
    System.out.println("\n Cache blk exists in copclient::" + blk);
    Socket cacs2 = cachelisten.accept();
    OutputStream cacos = cacs2.getOutputStream();
    cacos.write(cblk.GetBlk(blk));
    }
    else {
    System.out.println("Cache blk expired / does not exist XX");
    }
    }
    } catch(Exception e) {System.out.println("Client cache blk listen::"+ e.getMessage());}
    }
    public void run() {
    ClientReq_Cache();
    }*/

    /**
     * Entry point.
     * args[0] = cooperative-cache listen port, args[1] = block name to fetch.
     */
    public static void main(String args[]) {
        //Coop client port that listen
        Port_Client = Integer.parseInt(args[0]);
        /*
        While performing experiments, to test client cooperative cache:
        after client1 read blk from storage server, put them in client cooperative cache.
        *wait for few seconds, before running client2 which try to access blk from cache
        */
        // get Meta-data
        Client_Service1 obj = new Client_Service1();
        String blkname = args[1];
        obj.GetBlock(blkname);
    }
}
|
#!/bin/sh
# With POSIXLY_CORRECT, id must not print context=...
# Copyright (C) 2009-2015 Free Software Foundation, Inc.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ id

# We don't need selinux *FS* support to test id,
# but this is as good a witness as any, in general.
require_selinux_

# Require the context=... part by default.
id > out || fail=1
grep context= out || fail=1

# Require no context=... part in conforming mode.
# (grep succeeding here means the forbidden output was present.)
POSIXLY_CORRECT=1 id > out || fail=1
grep context= out && fail=1

Exit $fail
|
def minMax(arr):
    """Return a two-element list [minimum, maximum] of ``arr``.

    Preserves the original sentinel behavior: an empty input yields
    ``[inf, -inf]``.

    Args:
        arr: iterable of mutually comparable values.

    Returns:
        list: ``[smallest element, largest element]``.
    """
    # FIX: the original bound locals named ``min``/``max``, shadowing the
    # builtins; use non-shadowing names and scan once tracking both extremes.
    lo = float("inf")
    hi = float("-inf")
    for el in arr:
        if el < lo:
            lo = el
        if el > hi:
            hi = el
    return [lo, hi]


print(minMax([5, 1, -2, 7, 4, 12]))
#!/bin/bash
# stigadm module scaffold for STIG V0022539 (Bluetooth protocol handler);
# validation/remediation logic is not yet implemented (see errors below).

# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f ${bootstrap} ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap
# NOTE(review): ${restore}, ${change}, ${verbose}, ${stigid}, ${log} and
# the report/print_array/module_* helpers used below are presumably
# provided by bootstrap.sh — confirm against tools/bootstrap.sh.
source ${bootstrap}

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Whos is calling? 0 = singular, 1 is as group
caller=$(ps $PPID | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
# Errors should go in ${errors[@]} array (which on remediation get handled)
# All inspected items should go in ${inspected[@]} array
# Seeding errors with the STIG id marks the module as failing until the
# real validation is written.
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

  # Create the backup env
  #backup_setup_env "${backup_path}"

  # Create a backup (configuration output, file/folde permissions output etc
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then

    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi

  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do
    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

  # Set ${results} error message
  #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independant
if [ ${caller} -eq 0 ]; then

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat ${log}
else

  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}
# Date: 2018-10-03
#
# Severity: CAT-II
# Classification: UNCLASSIFIED
# STIG_ID: V0022539
# STIG_Version: SV-63447r1
# Rule_ID: GEN007660
#
# OS: Oracle_Linux
# Version: 5
# Architecture:
#
# Title: The Bluetooth protocol handler must be disabled or not installed.
# Description: Bluetooth is a Personal Area Network (PAN) technology. Binding this protocol to the network stack increases the attack surface of the host. Unprivileged local processes may be able to cause the kernel to dynamically load a protocol handler by opening a socket using the protocol.
|
package controllers
import (
"context"
"encoding/json"
"fmt"
"net"
"net/http"
"net/http/httptest"
"reflect"
"testing"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/kubernetes/scheme"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/client/fake"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
powerv1alpha1 "gitlab.devtools.intel.com/OrchSW/CNO/power-operator.git/api/v1alpha1"
"gitlab.devtools.intel.com/OrchSW/CNO/power-operator.git/pkg/appqos"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
	// PowerNodeNamespace is the namespace test fixtures are created in.
	PowerNodeNamespace = "default"
	// AppQoSAddress is the fixed address createListeners binds its test
	// AppQoS HTTP server to.
	AppQoSAddress = "127.0.0.1:5000"
)
// createPowerNodeReconcilerObject builds a PowerNodeReconciler backed by a
// fake client pre-populated with objs, suitable for unit tests.
func createPowerNodeReconcilerObject(objs []runtime.Object) (*PowerNodeReconciler, error) {
	s := scheme.Scheme

	// Register the power-operator CRD types with the shared scheme so the
	// fake client can serve them.
	if err := powerv1alpha1.AddToScheme(s); err != nil {
		return nil, err
	}

	s.AddKnownTypes(powerv1alpha1.GroupVersion)

	cl := fake.NewFakeClient(objs...)
	appqosCl := appqos.NewDefaultAppQoSClient()

	r := &PowerNodeReconciler{Client: cl, Log: ctrl.Log.WithName("controllers").WithName("PowerProfile"), Scheme: s, AppQoSClient: appqosCl}

	return r, nil
}
// createListeners starts a test HTTP server bound to 127.0.0.1:5000 that
// serves appqosPools as JSON on GET /pools. The caller owns the returned
// server and must Close() it.
func createListeners(appqosPools []appqos.Pool) (*httptest.Server, error) {
	var err error

	newListener, err := net.Listen("tcp", "127.0.0.1:5000")
	if err != nil {
		return nil, fmt.Errorf("Failed to create Listerner: %v", err)
	}

	mux := http.NewServeMux()
	mux.HandleFunc("/pools", (func(w http.ResponseWriter, r *http.Request) {
		if r.Method == "GET" {
			b, err := json.Marshal(appqosPools)
			if err == nil {
				fmt.Fprintln(w, string(b[:]))
			}
		}
	}))

	// Swap the server's auto-assigned listener for the fixed-address one so
	// the client under test can reach it at AppQoSAddress.
	ts := httptest.NewUnstartedServer(mux)
	ts.Listener.Close()
	ts.Listener = newListener

	// Start the server.
	ts.Start()

	return ts, nil
}
func TestPowerNodeReconciler(t *testing.T) {
tcases := []struct {
testCase string
powerNode *powerv1alpha1.PowerNode
pools map[string][]int
powerProfileList *powerv1alpha1.PowerProfileList
powerWorkloadList *powerv1alpha1.PowerWorkloadList
expectedActiveProfiles map[string]bool
expectedActiveWorkloads []powerv1alpha1.WorkloadInfo
expectedPowerContainers []powerv1alpha1.Container
expectedSharedPools []powerv1alpha1.SharedPoolInfo
}{
{
testCase: "Test Case 1",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node1",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node1",
},
},
pools: map[string][]int{
"Default": []int{4, 5, 6, 7, 8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node1",
Epp: "performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node1-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node1",
Containers: []powerv1alpha1.Container{
{
Name: "example-container",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{0, 1, 2, 3},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
},
CpuIds: []int{0, 1, 2, 3},
},
PowerProfile: "performance-example-node1",
},
},
},
},
expectedActiveProfiles: map[string]bool{
"performance-example-node1": true,
},
expectedActiveWorkloads: []powerv1alpha1.WorkloadInfo{
{
Name: "performance-example-node1-workload",
CpuIds: []int{0, 1, 2, 3},
},
},
expectedPowerContainers: []powerv1alpha1.Container{
{
Name: "example-container",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{0, 1, 2, 3},
PowerProfile: "performance-example-node1",
Workload: "performance-example-node1-workload",
},
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{4, 5, 6, 7, 8, 9},
},
},
},
{
testCase: "Test Case 2",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node2",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node2",
},
},
pools: map[string][]int{
"Default": []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node2",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node2",
Max: 3200,
Min: 2800,
Epp: "performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{},
expectedActiveProfiles: map[string]bool{
"performance-example-node2": false,
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
},
},
},
{
testCase: "Test Case 3",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node3",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node3",
},
},
pools: map[string][]int{
"Default": []int{0, 1},
"Shared": []int{6, 7, 8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node3",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node3",
Max: 3200,
Min: 2800,
Epp: "performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node3-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node3",
Containers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node3",
Workload: "performance-example-node3-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node3",
Workload: "performance-example-node3-workload",
},
},
CpuIds: []int{2, 3, 4, 5},
},
PowerProfile: "performance-example-node3",
},
},
},
},
expectedActiveProfiles: map[string]bool{
"performance-example-node3": true,
},
expectedActiveWorkloads: []powerv1alpha1.WorkloadInfo{
{
Name: "performance-example-node3-workload",
CpuIds: []int{2, 3, 4, 5},
},
},
expectedPowerContainers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node3",
Workload: "performance-example-node3-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node3",
Workload: "performance-example-node3-workload",
},
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{0, 1},
},
{
Name: "Shared",
SharedPoolCpuIds: []int{6, 7, 8, 9},
},
},
},
{
testCase: "Test Case 4",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node4",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node4",
},
},
pools: map[string][]int{
"Default": []int{0, 1},
"Shared": []int{6, 7, 8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node4",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node4",
Max: 3200,
Min: 2800,
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "balance-performance-example-node4",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "balance-performance-example-node4",
Max: 2400,
Min: 2000,
Epp: "balance_performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node4-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node4",
Containers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node4",
Workload: "performance-example-node4-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node4",
Workload: "performance-example-node4-workload",
},
},
CpuIds: []int{2, 3, 4, 5},
},
PowerProfile: "performance-example-node4",
},
},
},
},
expectedActiveProfiles: map[string]bool{
"performance-example-node4": true,
"balance-performance-example-node4": false,
},
expectedActiveWorkloads: []powerv1alpha1.WorkloadInfo{
{
Name: "performance-example-node4-workload",
CpuIds: []int{2, 3, 4, 5},
},
},
expectedPowerContainers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node4",
Workload: "performance-example-node4-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node4",
Workload: "performance-example-node4-workload",
},
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{0, 1},
},
{
Name: "Shared",
SharedPoolCpuIds: []int{6, 7, 8, 9},
},
},
},
{
testCase: "Test Case 5",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node5",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node5",
},
},
pools: map[string][]int{
"Default": []int{0, 1},
"Shared": []int{8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "balance-performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "balance-performance",
Epp: "balance_performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node5",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node5",
Max: 3200,
Min: 2800,
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "balance-performance-example-node5",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "balance-performance-example-node5",
Max: 2400,
Min: 2000,
Epp: "balance_performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node5-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node5",
Containers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node5",
Workload: "performance-example-node5-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node5",
Workload: "performance-example-node5-workload",
},
},
CpuIds: []int{2, 3, 4, 5},
},
PowerProfile: "performance-example-node5",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "balance-performance-example-node5-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node5",
Containers: []powerv1alpha1.Container{
{
Name: "example-container3",
Id: "xyz",
Pod: "example-pod2",
ExclusiveCPUs: []int{6, 7},
PowerProfile: "balance-performance-example-node5",
Workload: "balance-performance-example-node5-workload",
},
},
CpuIds: []int{6, 7},
},
PowerProfile: "balance-performance-example-node5",
},
},
},
},
expectedActiveProfiles: map[string]bool{
"balance-performance-example-node5": true,
"performance-example-node5": true,
},
expectedActiveWorkloads: []powerv1alpha1.WorkloadInfo{
{
Name: "balance-performance-example-node5-workload",
CpuIds: []int{6, 7},
},
{
Name: "performance-example-node5-workload",
CpuIds: []int{2, 3, 4, 5},
},
},
expectedPowerContainers: []powerv1alpha1.Container{
{
Name: "example-container3",
Id: "xyz",
Pod: "example-pod2",
ExclusiveCPUs: []int{6, 7},
PowerProfile: "balance-performance-example-node5",
Workload: "balance-performance-example-node5-workload",
},
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node5",
Workload: "performance-example-node5-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node5",
Workload: "performance-example-node5-workload",
},
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{0, 1},
},
{
Name: "Shared",
SharedPoolCpuIds: []int{8, 9},
},
},
},
{
testCase: "Test Case 6",
powerNode: &powerv1alpha1.PowerNode{
ObjectMeta: metav1.ObjectMeta{
Name: "example-node6",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerNodeSpec{
NodeName: "example-node6",
},
},
pools: map[string][]int{
"Default": []int{0, 1},
"Shared": []int{6, 7, 8, 9},
},
powerProfileList: &powerv1alpha1.PowerProfileList{
Items: []powerv1alpha1.PowerProfile{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance",
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-incorrect-node",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-inccorect-node",
Max: 3700,
Min: 3400,
Epp: "performance",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node6",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerProfileSpec{
Name: "performance-example-node6",
Max: 3200,
Min: 2800,
Epp: "performance",
},
},
},
},
powerWorkloadList: &powerv1alpha1.PowerWorkloadList{
Items: []powerv1alpha1.PowerWorkload{
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-example-node6-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-workload",
Node: powerv1alpha1.NodeInfo{
Name: "example-node6",
Containers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node6",
Workload: "performance-example-node6-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node6",
Workload: "performance-example-node6-workload",
},
},
CpuIds: []int{2, 3, 4, 5},
},
PowerProfile: "performance-example-node5",
},
},
{
ObjectMeta: metav1.ObjectMeta{
Name: "performance-incorrect-node-workload",
Namespace: PowerNodeNamespace,
},
Spec: powerv1alpha1.PowerWorkloadSpec{
Name: "performance-incorrect-node-workload",
Node: powerv1alpha1.NodeInfo{
Name: "incorrect-node",
Containers: []powerv1alpha1.Container{
{
Name: "example-container3",
Id: "xyz",
Pod: "example-pod2",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-incorrect-node",
Workload: "performance-incorrect-node-workload",
},
},
CpuIds: []int{2, 3},
},
PowerProfile: "performance-incorrect-node",
},
},
},
},
expectedActiveProfiles: map[string]bool{
"performance-example-node6": true,
"performance-incorrect-node": false,
},
expectedActiveWorkloads: []powerv1alpha1.WorkloadInfo{
{
Name: "performance-example-node6-workload",
CpuIds: []int{2, 3, 4, 5},
},
},
expectedPowerContainers: []powerv1alpha1.Container{
{
Name: "example-container1",
Id: "abcdefg",
Pod: "example-pod",
ExclusiveCPUs: []int{2, 3},
PowerProfile: "performance-example-node6",
Workload: "performance-example-node6-workload",
},
{
Name: "example-container2",
Id: "hijklmop",
Pod: "example-pod",
ExclusiveCPUs: []int{4, 5},
PowerProfile: "performance-example-node6",
Workload: "performance-example-node6-workload",
},
},
expectedSharedPools: []powerv1alpha1.SharedPoolInfo{
{
Name: "Default",
SharedPoolCpuIds: []int{0, 1},
},
{
Name: "Shared",
SharedPoolCpuIds: []int{6, 7, 8, 9},
},
},
},
}
for _, tc := range tcases {
t.Setenv("NODE_NAME", tc.powerNode.Name)
AppQoSClientAddress = "http://127.0.0.1:5000"
appqosPools := make([]appqos.Pool, 0)
for name, cores := range tc.pools {
id := 1
// Necessary because of pointer...
newName := name
newCores := cores
pool := appqos.Pool{
Name: &newName,
ID: &id,
Cores: &newCores,
}
appqosPools = append(appqosPools, pool)
}
objs := make([]runtime.Object, 0)
objs = append(objs, tc.powerNode)
for i := range tc.powerProfileList.Items {
objs = append(objs, &tc.powerProfileList.Items[i])
}
for i := range tc.powerWorkloadList.Items {
objs = append(objs, &tc.powerWorkloadList.Items[i])
}
r, err := createPowerNodeReconcilerObject(objs)
if err != nil {
t.Error(err)
t.Fatal("error creating reconcile object")
}
server, err := createListeners(appqosPools)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error creating Listeners", tc.testCase))
}
req := reconcile.Request{
NamespacedName: client.ObjectKey{
Name: tc.powerNode.Name,
Namespace: PowerNodeNamespace,
},
}
_, err = r.Reconcile(req)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error reconciling PowerWorkload object", tc.testCase))
}
server.Close()
powerNode := &powerv1alpha1.PowerNode{}
err = r.Client.Get(context.TODO(), client.ObjectKey{
Name: tc.powerNode.Name,
Namespace: PowerNodeNamespace,
}, powerNode)
if err != nil {
t.Error(err)
t.Fatal(fmt.Sprintf("%s - error retrieving PowerNode object", tc.testCase))
}
if !reflect.DeepEqual(powerNode.Spec.ActiveProfiles, tc.expectedActiveProfiles) {
t.Errorf("%s - Failed: Expected Active Profiles to be %v, got %v", tc.testCase, tc.expectedActiveProfiles, powerNode.Spec.ActiveProfiles)
}
if !reflect.DeepEqual(powerNode.Spec.ActiveWorkloads, tc.expectedActiveWorkloads) {
t.Errorf("%s - Failed: Expected Active Workloads to be %v, got %v", tc.testCase, tc.expectedActiveWorkloads, powerNode.Spec.ActiveWorkloads)
}
if !reflect.DeepEqual(powerNode.Spec.PowerContainers, tc.expectedPowerContainers) {
t.Errorf("%s - Failed: Expected Power Containers to be %v, got %v", tc.testCase, tc.expectedPowerContainers, powerNode.Spec.PowerContainers)
}
if !reflect.DeepEqual(powerNode.Spec.SharedPools, tc.expectedSharedPools) {
t.Errorf("%s - Failed: Expected Shared Pools to be %v, got %v", tc.testCase, tc.expectedSharedPools, powerNode.Spec.SharedPools)
}
}
}
|
//
//  GenerationAScanningView.h
//  TwoDimensionCode
//
//  Created by xp on 2016/11/24.
//  Copyright © 2016 com.yunwangnet. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// Callback invoked with the decoded string when a scan succeeds.
typedef void (^SucBlock)(NSString *sucStr);

@interface GenerationAScanningView : UIView

@property (nonatomic,strong) UIColor *backgroundColor;/**< Background color of the generated QR code. NOTE(review): redeclares UIView's backgroundColor with a strong attribute — confirm this shadowing is intentional. */
@property (nonatomic,strong) UIColor *frontColor;/**< Foreground (fill) color of the generated QR code */
@property (nonatomic,strong) UIImage *centerImage;/**< Small image overlaid at the center of the generated QR code */

/**
 *  Start scanning a QR code directly with the camera.
 *
 *  @param sucBlock Invoked with the decoded string on a successful scan.
 */
-(void)scanningCodeBySucBlock:(SucBlock) sucBlock;

/**
 *  Scan (decode) a QR code from an existing image.
 *
 *  @param image The image to decode.
 *  @return The decoded string; presumably nil when no code is found — TODO confirm against the implementation.
 */
+(NSString *)scanningCodeByImg:(UIImage *)image;

/**
 *  Generate a QR code image from a string.
 *
 *  @param str  The content to encode.
 *  @param size The desired output image size.
 *  @return The generated QR code image.
 */
-(UIImage *)generationCodeByStr:(NSString *)str size:(CGSize)size;

@end
|
#!/bin/sh
# Demonstrate changing terminal text colors.
echo "Script Start."
# Set background to white (47) and foreground to red (31).
# Use printf instead of `echo -e`: POSIX sh's echo has no -e option,
# so under dash/sh the literal string "-e" would be printed.
printf '\033[47;31m Important Message \033[0m\n'
echo "Script End."
package models
import (
"github.com/astaxie/beego"
"github.com/astaxie/beego/orm"
_ "github.com/go-sql-driver/mysql"
)
// Init wires up the beego ORM: it builds a MySQL DSN from app.conf keys
// (db.host, db.port, db.user, db.password, db.name), registers the default
// database and the model structs, and enables ORM debug logging when
// runmode is "dev".
func Init() {
	dbhost := beego.AppConfig.String("db.host")
	dbport := beego.AppConfig.String("db.port")
	dbuser := beego.AppConfig.String("db.user")
	dbpassword := beego.AppConfig.String("db.password")
	dbname := beego.AppConfig.String("db.name")
	if dbport == "" {
		dbport = "3306" // MySQL default port
	}
	dsn := dbuser + ":" + dbpassword + "@tcp(" + dbhost + ":" + dbport + ")/" + dbname + "?charset=utf8&loc=Local"
	// Fail fast on startup if the database cannot be registered; this
	// error was previously ignored, hiding misconfiguration until the
	// first query.
	if err := orm.RegisterDataBase("default", "mysql", dsn); err != nil {
		panic(err)
	}
	orm.RegisterModel(new(User))
	// orm.RegisterModel(new(SdtBdiBase))
	orm.RegisterModel(new(AdtBdiAdm))
	//orm.RegisterModel(new(SdtBdiDomain))
	// SdtBdiSet is intentionally not registered: raw SQL is used for its CRUD.
	//orm.RegisterModel(new(SdtBdiSet))
	if beego.AppConfig.String("runmode") == "dev" {
		orm.Debug = true
	}
}
|
# Re-run the test suite whenever a Python source file changes.
# watchdog's `watchmedo` watches the current directory (recursively) and,
# on each *.py change, clears the screen and runs pytest with 3 parallel
# workers (pytest-xdist) and coverage collected for the `sources` package.
watchmedo shell-command \
--patterns="*.py" \
--recursive \
--command='clear && pytest -n 3 --cov sources' \
.
|
#!/usr/bin/env bash
set -e # exit on first error
set -u # exit on using unset variable

# Run relative to this script's directory so the relative paths below work
# no matter where the script is invoked from.
cd "$(dirname "$0")"

# Copy the pre-built CrystalCmd jar next to the Dockerfile so it is inside
# the docker build context.
cp ../java/CrystalCmd/build/CrystalCmd.jar .

#build
docker build --no-cache -f Dockerfile -t majorsilence/crystalcmd --rm=true .
|
/**
* @license
* Visual Blocks Editor
*
* Copyright 2012 Google Inc.
* https://developers.google.com/blockly/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Colour input field.
* @author <EMAIL> (<NAME>)
*/
'use strict';
goog.provide('Blockly.FieldColour');
goog.require('Blockly.Field');
goog.require('Blockly.utils');
goog.require('goog.dom');
goog.require('goog.events');
goog.require('goog.style');
goog.require('goog.ui.ColorPicker');
/**
 * Class for a colour input field.
 * @param {string} colour The initial colour in '#rrggbb' format.
 * @param {Function=} opt_validator A function that is executed when a new
 *     colour is selected. Its sole argument is the new colour value. Its
 *     return value becomes the selected colour, unless it is undefined, in
 *     which case the new colour stands, or it is null, in which case the
 *     change is aborted.
 * @extends {Blockly.Field}
 * @constructor
 */
Blockly.FieldColour = function(colour, opt_validator) {
  Blockly.FieldColour.superClass_.constructor.call(this, colour, opt_validator);
  // Tag the argument type so tooling can recognise this field as a colour.
  this.addArgType('colour');
};
goog.inherits(Blockly.FieldColour, Blockly.Field);
/**
 * Construct a FieldColour from a JSON arg object.
 * @param {!Object} options A JSON object with options (colour).
 * @returns {!Blockly.FieldColour} The new field instance.
 * @package
 * @nocollapse
 */
Blockly.FieldColour.fromJson = function(options) {
  var colour = options['colour'];
  return new Blockly.FieldColour(colour);
};
/**
 * Custom colour palette for this field, or null to fall back to the
 * global default (Blockly.FieldColour.COLOURS).
 * @type {Array.<string>}
 * @private
 */
Blockly.FieldColour.prototype.colours_ = null;

/**
 * Custom number of palette columns, or 0 to fall back to the global
 * default (Blockly.FieldColour.COLUMNS).
 * @type {number}
 * @private
 */
Blockly.FieldColour.prototype.columns_ = 0;

/**
 * The color picker widget; created by createWidget_ when the editor is shown.
 * @type {goog.ui.ColorPicker}
 * @private
 */
Blockly.FieldColour.prototype.colorPicker_ = null;
/**
 * Install this field on a block.
 * @param {!Blockly.Block} block The block containing this field.
 */
Blockly.FieldColour.prototype.init = function(block) {
  Blockly.FieldColour.superClass_.init.call(this, block);
  // Re-apply the current colour so the source block is tinted on first
  // render (setValue also calls sourceBlock_.setColour).
  this.setValue(this.getValue());
};
/**
 * Mouse cursor style when over the hotspot that initiates the editor.
 * Uses the default arrow rather than a text/pointer cursor.
 */
Blockly.FieldColour.prototype.CURSOR = 'default';
/**
 * Close the colour picker if this input is being deleted.
 * NOTE(review): this definition is shadowed by a second assignment to
 * Blockly.FieldColour.prototype.dispose later in this file, so it never
 * runs — the WidgetDiv cleanup here is dead code; consider merging the
 * two dispose implementations.
 */
Blockly.FieldColour.prototype.dispose = function() {
  Blockly.WidgetDiv.hideIfOwner(this);
  Blockly.FieldColour.superClass_.dispose.call(this);
};
/**
 * Return the current colour.
 * @return {string} Current colour in '#rrggbb' format.
 */
Blockly.FieldColour.prototype.getValue = function() {
  return this.colour_;
};
/**
 * Set the colour.
 * Fires a BlockChange event when the value actually changes, and tints
 * the source block to match.
 * @param {string} colour The new colour in '#rrggbb' format.
 */
Blockly.FieldColour.prototype.setValue = function(colour) {
  var block = this.sourceBlock_;
  var changed = this.colour_ != colour;
  if (block && Blockly.Events.isEnabled() && changed) {
    Blockly.Events.fire(new Blockly.Events.BlockChange(
        block, 'field', this.name, this.colour_, colour));
  }
  this.colour_ = colour;
  if (block) {
    // Tint the whole source block (fill, border, shadow) to the colour.
    block.setColour(colour, colour, colour);
  }
};
/**
 * Get the text from this field. Used when the block is collapsed.
 * Collapses '#rrggbb' to the short '#rgb' form when each channel repeats.
 * @return {string} Current text.
 */
Blockly.FieldColour.prototype.getText = function() {
  var hex = this.colour_;
  // '#aabbcc' -> '#abc' when every channel is a doubled digit.
  var shorthand = /^#(.)\1(.)\2(.)\3$/.exec(hex);
  return shorthand ? '#' + shorthand[1] + shorthand[2] + shorthand[3] : hex;
};
/**
 * Returns the fixed height and width.
 * Colour fields do not grow with content; they always render at the
 * standard field dimensions.
 * @return {!goog.math.Size} Height and width.
 */
Blockly.FieldColour.prototype.getSize = function() {
  return new goog.math.Size(Blockly.BlockSvg.FIELD_WIDTH, Blockly.BlockSvg.FIELD_HEIGHT);
};
/**
 * An array of colour strings for the palette.
 * Defaults to Closure's simple grid colours; see the bottom of this page:
 * http://docs.closure-library.googlecode.com/git/closure_goog_ui_colorpicker.js.source.html
 * @type {!Array.<string>}
 */
Blockly.FieldColour.COLOURS = goog.ui.ColorPicker.SIMPLE_GRID_COLORS;

/**
 * Number of columns in the palette.
 */
Blockly.FieldColour.COLUMNS = 7;
/**
 * Set a custom colour grid for this field.
 * @param {Array.<string>} colours Array of colours for this block,
 *     or null to use default (Blockly.FieldColour.COLOURS).
 * @return {!Blockly.FieldColour} Returns itself (for method chaining).
 */
Blockly.FieldColour.prototype.setColours = function(colours) {
  this.colours_ = colours;
  return this;
};
/**
 * Set a custom grid size for this field.
 * @param {number} columns Number of columns for this block,
 *     or 0 to use default (Blockly.FieldColour.COLUMNS).
 * @return {!Blockly.FieldColour} Returns itself (for method chaining).
 */
Blockly.FieldColour.prototype.setColumns = function(columns) {
  this.columns_ = columns;
  return this;
};
/**
 * Create a palette under the colour field.
 * Rebuilds the dropdown contents, positions it under the source block and
 * wires the picker's CHANGE event to update this field.
 * @private
 */
Blockly.FieldColour.prototype.showEditor_ = function() {
  Blockly.DropDownDiv.hideWithoutAnimation();
  Blockly.DropDownDiv.clearContent();
  this.colorPicker_ = this.createWidget_();
  Blockly.DropDownDiv.showPositionedByBlock(this, this.sourceBlock_);
  Blockly.DropDownDiv.setColour('#ffffff', '#dddddd');
  // Match the dropdown's styling to the parent block's category, if any.
  if (this.sourceBlock_.parentBlock_) {
    Blockly.DropDownDiv.setCategory(this.sourceBlock_.parentBlock_.getCategory());
  }
  // Re-apply the current colour so the block tint stays in sync.
  this.setValue(this.getValue());
  // Configure event handler.
  var thisField = this;
  Blockly.FieldColour.changeEventKey_ = goog.events.listen(this.colorPicker_,
      goog.ui.ColorPicker.EventType.CHANGE,
      function(event) {
        // Fall back to black when the picker reports no selection.
        var colour = event.target.getSelectedColor() || '#000000';
        Blockly.DropDownDiv.hide();
        if (thisField.sourceBlock_) {
          // Call any validation function, and allow it to override.
          colour = thisField.callValidator(colour);
        }
        // A null result from the validator aborts the change.
        if (colour !== null) {
          thisField.setValue(colour);
        }
      });
};
/**
 * Create a color picker widget and render it inside the dropdown div.
 * Uses any per-field colour/column overrides, falling back to the global
 * defaults, and pre-selects the field's current colour.
 * @return {!goog.ui.ColorPicker} The newly created color picker.
 * @private
 */
Blockly.FieldColour.prototype.createWidget_ = function() {
  // Create the palette using Closure.
  var picker = new goog.ui.ColorPicker();
  picker.setSize(this.columns_ || Blockly.FieldColour.COLUMNS);
  picker.setColors(this.colours_ || Blockly.FieldColour.COLOURS);
  var div = Blockly.DropDownDiv.getContentDiv();
  picker.render(div);
  // getValue() takes no parameters; the previous getValue(true) argument
  // was silently ignored.
  picker.setSelectedColor(this.getValue());
  return picker;
};
/**
 * Hide the colour palette and clean up editor state: detach the CHANGE
 * listener, close any open event group, then dispose the field itself.
 * @private
 */
Blockly.FieldColour.prototype.dispose = function() {
  if (Blockly.FieldColour.changeEventKey_) {
    goog.events.unlistenByKey(Blockly.FieldColour.changeEventKey_);
  }
  Blockly.Events.setGroup(false);
  Blockly.FieldColour.superClass_.dispose.call(this);
};

// Make this field constructible from JSON block definitions.
Blockly.Field.register('field_colour', Blockly.FieldColour);
|
#!/bin/bash
# Launch ChronicleReaderMain from the project's uber-jar.
SCRIPT_DIR=$(dirname "$0")
PROJECT_DIR="$SCRIPT_DIR/.."
UBER_JAR=$(find ${PROJECT_DIR}/target/chronicle-queue-*-all.jar | tail -n1)
# Checking "$?" here reflected `tail`, which succeeds even when `find`
# matched nothing — so the error branch never fired. Test the result
# string instead.
if [[ -z "$UBER_JAR" ]]; then
    echo "Could not find uber-jar, please run 'mvn package' in the project root"
    exit 1
fi
java -cp "$UBER_JAR" net.openhft.chronicle.queue.ChronicleReaderMain "$@"

# if running this in CQ source dir, beware of the default system.properties which is for unit testing,
# and enables resource tracing. This will lead to this tool dying after a while with OOME
#!/bin/sh

# Install and configure PHP (cli + fpm) on Debian/Ubuntu, either from the
# distro packages ("default") or from Ondrej Sury's PPA for a pinned version.
# Has to be run as admin
# @todo make it optional to install xdebug. It is fe. missing in sury's ppa for Xenial
# @todo make it optional to install fpm. It is not needed for the cd workflow
# @todo make it optional to disable xdebug ?

set -e

# Append web (cgi/fpm) settings to the php.ini given as $1 and disable xdebug.
configure_php_ini() {
    # note: these settings are not required for cli config
    echo "cgi.fix_pathinfo = 1" >> "${1}"
    echo "always_populate_raw_post_data = -1" >> "${1}"

    # we disable xdebug for speed for both cli and web mode
    phpdismod xdebug
}

# install php
PHP_VERSION="$1"
DEBIAN_VERSION="$(lsb_release -s -c)"

if [ "${PHP_VERSION}" = default ]; then
    # Old releases only ship "php5"-named packages; newer ones use plain "php".
    if [ "${DEBIAN_VERSION}" = jessie -o "${DEBIAN_VERSION}" = precise -o "${DEBIAN_VERSION}" = trusty ]; then
        PHPSUFFIX=5
    else
        PHPSUFFIX=
    fi

    # @todo check for mbstring presence in php5 (jessie) packages

    DEBIAN_FRONTEND=noninteractive apt-get install -y \
        php${PHPSUFFIX} \
        php${PHPSUFFIX}-cli \
        php${PHPSUFFIX}-dom \
        php${PHPSUFFIX}-curl \
        php${PHPSUFFIX}-fpm \
        php${PHPSUFFIX}-mbstring \
        php${PHPSUFFIX}-xdebug
else
    # on GHA runners ubuntu version, php 7.4 and 8.0 seem to be preinstalled. Remove them if found
    for PHP_CURRENT in $(dpkg -l | grep -E 'php.+-common' | awk '{print $2}'); do
        if [ "${PHP_CURRENT}" != "php${PHP_VERSION}-common" ]; then
            apt-get purge -y "${PHP_CURRENT}"
        fi
    done

    DEBIAN_FRONTEND=noninteractive apt-get install -y language-pack-en-base software-properties-common
    LC_ALL=en_US.UTF-8 add-apt-repository ppa:ondrej/php
    apt-get update
    DEBIAN_FRONTEND=noninteractive apt-get install -y \
        php${PHP_VERSION} \
        php${PHP_VERSION}-cli \
        php${PHP_VERSION}-dom \
        php${PHP_VERSION}-curl \
        php${PHP_VERSION}-fpm \
        php${PHP_VERSION}-mbstring \
        php${PHP_VERSION}-xdebug

    update-alternatives --set php /usr/bin/php${PHP_VERSION}
fi

# Resolve the effective "major.minor" version that ended up installed.
PHPVER=$(php -r 'echo implode(".",array_slice(explode(".",PHP_VERSION),0,2));' 2>/dev/null)

configure_php_ini /etc/php/${PHPVER}/fpm/php.ini

# use a nice name for the php-fpm service, so that it does not depend on php version running. Try to make that work
# both for docker and VMs
service "php${PHPVER}-fpm" stop
if [ -f "/etc/init.d/php${PHPVER}-fpm" ]; then
    ln -s "/etc/init.d/php${PHPVER}-fpm" /etc/init.d/php-fpm
fi
if [ -f "/lib/systemd/system/php${PHPVER}-fpm.service" ]; then
    ln -s "/lib/systemd/system/php${PHPVER}-fpm.service" /lib/systemd/system/php-fpm.service
    # No systemd inside docker containers, so skip the reload there.
    if [ ! -f /.dockerenv ]; then
        systemctl daemon-reload
    fi
fi

# @todo shall we configure php-fpm?
service php-fpm start

# configure apache (if installed)
if [ -n "$(dpkg --list | grep apache)" ]; then
    a2enconf php${PHPVER}-fpm
    service apache2 restart
fi
|
// Source: KantaCDA-API/src/main/java/fi/kela/kanta/cda/OstopalvelunvaltuutusKasaaja.java
/*
 * Copyright 2020 Kansaneläkelaitos
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package fi.kela.kanta.cda;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import org.hl7.v3.ANY;
import org.hl7.v3.BL;
import org.hl7.v3.CD;
import org.hl7.v3.CV;
import org.hl7.v3.II;
import org.hl7.v3.IVLTS;
import org.hl7.v3.ON;
import org.hl7.v3.POCDMT000040AssignedCustodian;
import org.hl7.v3.POCDMT000040AssignedEntity;
import org.hl7.v3.POCDMT000040Author;
import org.hl7.v3.POCDMT000040ClinicalDocument;
import org.hl7.v3.POCDMT000040Component2;
import org.hl7.v3.POCDMT000040Component3;
import org.hl7.v3.POCDMT000040Component5;
import org.hl7.v3.POCDMT000040EncompassingEncounter;
import org.hl7.v3.POCDMT000040Entry;
import org.hl7.v3.POCDMT000040InfrastructureRootTemplateId;
import org.hl7.v3.POCDMT000040Observation;
import org.hl7.v3.POCDMT000040Subject;
import org.hl7.v3.ParticipationTargetSubject;
import org.hl7.v3.StrucDocParagraph;
import org.hl7.v3.StrucDocText;
import org.hl7.v3.TS;
import org.hl7.v3.XActMoodDocumentObservation;
import org.hl7.v3.XActRelationshipDocument;
import org.hl7.v3.XDocumentSubject;
import fi.kela.kanta.cda.validation.OstopalvelunvaltuutusValidoija;
import fi.kela.kanta.cda.validation.OstoplavelunvaltuutuksenMitatointiValidoija;
import fi.kela.kanta.to.HenkilotiedotTO;
import fi.kela.kanta.to.OstopalvelunvaltuutusTO;
import fi.kela.kanta.util.JaxbUtil;
import fi.kela.kanta.util.KantaCDAUtil;
/**
 * Assembles (kasaa) CDA documents for a purchased-services authorization
 * (ostopalvelunvaltuutus): both the creation document and the voiding
 * (mitatointi) document.
 */
public class OstopalvelunvaltuutusKasaaja extends ArkistoKasaaja {

    // HL7 classCode value used for condition observations.
    private static final String classCodeCOND = "COND";
    // Prefix used in property keys for this document type.
    private static final String OSVA_PREFIX = "OSVA";
    // TODO: maarittelyVersio, BL_KYLLA and BL_EI could perhaps be fetched from properties instead.
    private static final String BL_KYLLA = "Kyllรค";
    private static final String BL_EI = "Ei";
    private static final String maarittelyVersio = "maarittelyversio";
    private static final String OSTOPALVELUN_TYYPPI_TEMPLATE = "ostopalvelunTyyppi.%d";
    private static final String POTILASASIAKIRJAN_REKISTERITUNNUS_TEMPLATE = "potilasasiakirjanRekisteritunnus.%d";
    private static final String henkilotunnusRoot = "henkilotunnus";
    private static final String yritysJaYhteisorekisteriRoot = "yritysJaYhteisorekisteri";
    // Code for "care process phase not specified".
    private static final String MAARITTAMATON_HOITOPROSESSIN_VAIHE = "99";
    // Code for "other entry".
    private static final String MUU_MERKINTA = "76";

    // Source transfer object for the authorization being assembled.
    private final OstopalvelunvaltuutusTO osva;
    // Version, OID and setId of the previous document version (used when replacing/voiding).
    private final int edellinenVersio;
    private final String edellinenOid;
    private final String edellinenSetId;
    private boolean tekninenProsessi = false;
    /**
     * Creates an assembler for a brand-new authorization document
     * (no previous document OID).
     *
     * @param properties configuration properties for the assembler
     * @param ostopalvelunvaltuutus source data for the authorization
     */
    public OstopalvelunvaltuutusKasaaja(Properties properties, OstopalvelunvaltuutusTO ostopalvelunvaltuutus) {
        this(properties, ostopalvelunvaltuutus, null);
    }
    /**
     * Creates an assembler that can replace or void a previous document version.
     *
     * @param properties configuration properties for the assembler
     * @param ostopalvelunvaltuutus source data for the authorization; its
     *        version and setId are captured as the "previous" values
     * @param edellinenOid OID of the previous document version, or null for a new document
     */
    public OstopalvelunvaltuutusKasaaja(Properties properties, OstopalvelunvaltuutusTO ostopalvelunvaltuutus, String edellinenOid) {
        super(properties);
        this.osva = ostopalvelunvaltuutus;
        this.edellinenOid = edellinenOid;
        this.edellinenVersio = osva.getVersio();
        this.edellinenSetId = osva.getSetId();
        validoija = new OstopalvelunvaltuutusValidoija(ostopalvelunvaltuutus);
    }
    /**
     * Sets whether the document is produced by a technical process.
     *
     * @param tekninenProsessi true when assembled by a technical process
     */
    public void setTekninenProsessi(boolean tekninenProsessi) {
        this.tekninenProsessi = tekninenProsessi;
    }
    /**
     * Assembles a new document from the OstopalvelunvaltuutusTO given in the constructor.
     *
     * @return the new document as XML
     * @throws JAXBException if marshalling the document fails
     */
    @Override
    public String kasaaAsiakirja() throws JAXBException {
        return JaxbUtil.getInstance().arkistomarshalloi(kasaaCDA(), "urn:hl7-org:v3 CDA_Fi.xsd");
        //return JaxbUtil.getInstance().marshalloi(kasaaCDA(), "urn:hl7-org:v3 CDA_Fi.xsd");
    }
    /**
     * Assembles a voiding (mitatointi) document for the authorization.
     * Swaps in the voiding-specific validator before building.
     *
     * @return the voiding document as XML
     * @throws JAXBException if marshalling the document fails
     */
    public String kasaaMitatointiAsiakirja() throws JAXBException {
        validoija = new OstoplavelunvaltuutuksenMitatointiValidoija(osva);
        return JaxbUtil.getInstance().arkistomarshalloi(mitatoiCDA(), "urn:hl7-org:v3 CDA_Fi.xsd");
    }
    /**
     * Builds the voiding CDA ClinicalDocument: validates the source data and
     * the voiding preconditions, rewrites header template ids and record
     * status, links to the document being replaced, and fills a minimal
     * structured body (patient, author, care-process phase).
     *
     * @return the assembled voiding ClinicalDocument
     */
    public POCDMT000040ClinicalDocument mitatoiCDA() {
        validoiAsiakirja();
        validoiMitatointitiedot();
        Calendar now = Calendar.getInstance(TimeZone.getTimeZone(ArkistoKasaaja.TIME_ZONE));
        POCDMT000040ClinicalDocument cda = of.createPOCDMT000040ClinicalDocument();
        String effectiveTime = getDateFormat().format(now.getTime());
        // Reuse the existing document id when the TO already carries one.
        if (!onkoNullTaiTyhja(osva.getOid())) {
            setDocumentId(osva.getOid());
        }
        addIdFields(cda, osva, effectiveTime);
        // When voiding, overwrite the template ids just in case.
        cda.getTemplateIds().clear();
        POCDMT000040InfrastructureRootTemplateId templateIdElement = of
                .createPOCDMT000040InfrastructureRootTemplateId();
        fetchAttributes("templateId1", templateIdElement);
        cda.getTemplateIds().add(templateIdElement);
        fetchRestTemplateIds(cda, 2);
        addRecordTarget(cda, osva.getPotilas());
        addAuthor(cda);
        addCustodian(cda);
        // Link this document as a replacement (RPLC) of the one being voided.
        addRelatedDocument(cda, edellinenOid, edellinenSetId, edellinenVersio, XActRelationshipDocument.RPLC);
        addComponentOf(cda);
        addLocalHeader(cda);
        // When voiding, overwrite the record status in the local header.
        CV recordStatus = of.createCV();
        if (fetchAttributes("OSVA.MITATOINTI.localHeader.recordStatus", recordStatus)) {
            cda.getLocalHeader().setRecordStatus(recordStatus);
        }
        POCDMT000040Component2 component2 = of.createPOCDMT000040Component2();
        cda.setComponent(component2);
        component2.setStructuredBody(of.createPOCDMT000040StructuredBody());
        component2.getStructuredBody().setID(getOID(osva));
        POCDMT000040Component3 component3 = of.createPOCDMT000040Component3();
        component2.getStructuredBody().getComponents().add(component3);
        component3.setSection(of.createPOCDMT000040Section());
        component3.getSection().setId(of.createII());
        component3.getSection().getId().setRoot(getDocumentId(osva));
        component3.getSection().setCode(of.createCE());
        fetchAttributes(Kasaaja.LM_CONTENTS, component3.getSection().getCode());
        component3.getSection().setTitle(of.createST());
        // Title mirrors the section code's display name.
        component3.getSection().getTitle().getContent().add(component3.getSection().getCode().getDisplayName());
        // Section body: patient, author and care-process phase for the voiding.
        component3.getSection().setSubject(luoMitatointiPotilas());
        component3.getSection().getAuthors().add(luoMitatointiAuthor());
        component3.getSection().getComponents().add(luoMitatointiHoitoprosessinVaihe());
        return cda;
    }
private void validoiMitatointitiedot() {
if (KantaCDAUtil.onkoNullTaiTyhja(edellinenOid)) {
throw new IllegalArgumentException("Mitรคtรถitรคessรค tarvitaan mitรคtรถitรคvรคn asiakirjan OID!");
}
}
/**
 * Assembles the complete purchased-service authorization (OSVA) CDA document:
 * header fields (ids, record target, author, custodian, componentOf, local
 * header) followed by a structured body whose single section contains the
 * OSVA-specific sub-components.
 *
 * @return the fully populated clinical document
 */
@Override
public POCDMT000040ClinicalDocument kasaaCDA() {
validoiAsiakirja();
Calendar now = Calendar.getInstance(TimeZone.getTimeZone(ArkistoKasaaja.TIME_ZONE));
POCDMT000040ClinicalDocument cda = of.createPOCDMT000040ClinicalDocument();
String effectiveTime = getDateFormat().format(now.getTime());
//String today = getTodayDateFormat().format(now.getTime());
// Reuse an already assigned document OID when one exists.
if (!onkoNullTaiTyhja(osva.getOid())) {
setDocumentId(osva.getOid());
}
addIdFields(cda, osva, effectiveTime);
addRecordTarget(cda, osva.getPotilas());
addAuthor(cda);
addCustodian(cda);
// Version > 1 together with a previous OID means this document replaces an earlier one (RPLC).
if (cda.getVersionNumber().getValue().intValue()>1 && !onkoNullTaiTyhja(edellinenOid)) { // replacement situation
addRelatedDocument(cda, edellinenOid, edellinenSetId, edellinenVersio, XActRelationshipDocument.RPLC);
}
addComponentOf(cda);
addLocalHeader(cda);
POCDMT000040Component2 component2 = of.createPOCDMT000040Component2();
cda.setComponent(component2);
component2.setStructuredBody(of.createPOCDMT000040StructuredBody());
// ID for the structured body
component2.getStructuredBody().setID(getOID(osva));
POCDMT000040Component3 component3 = of.createPOCDMT000040Component3();
component2.getStructuredBody().getComponents().add(component3);
//component3.getTemplateIds().add(of.createPOCDMT000040InfrastructureRootTemplateId());
// TemplateId
//fetchAttributes(OstopalvelunvaltuutusKasaaja.template_id, component3.getTemplateIds().get(0));
component3.setSection(of.createPOCDMT000040Section());
//component3.getSection().setAttributeID(getNextOID(osva));
// Section template id is filled from configuration.
component3.getSection().getTemplateIds().add(of.createPOCDMT000040InfrastructureRootTemplateId());
fetchAttributes(OstopalvelunvaltuutusKasaaja.template_id, component3.getSection().getTemplateIds().get(0));
component3.getSection().setId(of.createII());
component3.getSection().getId().setRoot(getDocumentId(osva));
component3.getSection().setCode(of.createCE());
fetchAttributes(Kasaaja.LM_CONTENTS, component3.getSection().getCode());
component3.getSection().setTitle(of.createST());
// Title text mirrors the display name of the section code.
component3.getSection().getTitle().getContent().add(component3.getSection().getCode().getDisplayName());
// Document identifier
component3.getSection().getComponents().add(luoAsiakirjanTunniste());
// Purchased-service type
component3.getSection().getComponents().add(luoOstopalvelunTyyppi());
// Authorization validity period
component3.getSection().getComponents().add(luoOstopalvelunValtuutuksenVoimassaolo());
// Service organizer
component3.getSection().getComponents().add(luoPalvelunJarjestaja());
// Service producer
component3.getSection().getComponents().add(luoPalvelunTuottaja());
// Producer's right to fetch documents (only when a fetch register keeper is given)
if (!KantaCDAUtil.onkoNullTaiTyhja(osva.getHakuRekisterinpitaja())) {
component3.getSection().getComponents().add(luoTuottajanOikeusHakeaAsiakirjoja());
}
// Producer's right to store documents
component3.getSection().getComponents().add(luoTuottajanOikeusTallentaaAsiakirjat());
// Patient
component3.getSection().getComponents().add(luoPotilas());
// Documents to be disclosed
component3.getSection().getComponents().add(luoLuovutettavatAsiakirjat());
// Document saver
component3.getSection().getComponents().add(luoAsiakirjanTallentaja());
// Form metadata (no longer required, kept for reference)
//String templateId = cda.getTemplateIds().iterator().next().getRoot();
//component3.getSection().getComponents().add(luoLomakkeenMetatiedot(templateId));
return cda;
}
/**
 * Adds the custodian element: the register keeper of the document, identified
 * by OID and, when available, by name.
 *
 * @see fi.kela.kanta.cda.Kasaaja#addCustodian(org.hl7.v3.POCDMT000040ClinicalDocument)
 */
@Override
protected void addCustodian(POCDMT000040ClinicalDocument clinicalDocument) {
    POCDMT000040Custodian custodian = of.createPOCDMT000040Custodian();
    clinicalDocument.setCustodian(custodian);
    POCDMT000040AssignedCustodian assigned = of.createPOCDMT000040AssignedCustodian();
    custodian.setAssignedCustodian(assigned);
    POCDMT000040CustodianOrganization organization = of.createPOCDMT000040CustodianOrganization();
    assigned.setRepresentedCustodianOrganization(organization);
    // Organization id is the register keeper OID from the authorization data.
    II organizationId = of.createII();
    organizationId.setRoot(osva.getAsiakirjanRekisterinpitaja());
    organization.getIds().add(organizationId);
    // The name element is optional and only added when a name is available.
    String rekisterinpitajanNimi = osva.getAsiakirjanRekisterinpitajaNimi();
    if (!onkoNullTaiTyhja(rekisterinpitajanNimi)) {
        ON organizationName = of.createON();
        organizationName.getContent().add(rekisterinpitajanNimi);
        organization.setName(organizationName);
    }
}
/**
 * Adds the componentOf/encompassingEncounter element. The encounter time and
 * the responsible party's entity id are marked NA; the represented
 * organization's id and name are read from configuration properties.
 *
 * @see fi.kela.kanta.cda.Kasaaja#addComponentOf(org.hl7.v3.POCDMT000040ClinicalDocument)
 */
@Override
protected void addComponentOf(POCDMT000040ClinicalDocument clinicalDocument) {
    POCDMT000040Component1 componentOf = of.createPOCDMT000040Component1();
    clinicalDocument.setComponentOf(componentOf);
    POCDMT000040EncompassingEncounter encounter = of.createPOCDMT000040EncompassingEncounter();
    componentOf.setEncompassingEncounter(encounter);
    // Encounter time is not applicable for this document type.
    IVLTS encounterTime = of.createIVLTS();
    encounterTime.getNullFlavors().add(KantaCDAConstants.NullFlavor.NA.getCode());
    encounter.setEffectiveTime(encounterTime);
    POCDMT000040ResponsibleParty responsibleParty = of.createPOCDMT000040ResponsibleParty();
    encounter.setResponsibleParty(responsibleParty);
    POCDMT000040AssignedEntity entity = of.createPOCDMT000040AssignedEntity();
    responsibleParty.setAssignedEntity(entity);
    // The assigned entity itself has no id; only its organization is identified.
    II entityId = of.createII();
    entityId.getNullFlavors().add(KantaCDAConstants.NullFlavor.NA.getCode());
    entity.getIds().add(entityId);
    POCDMT000040Organization organization = of.createPOCDMT000040Organization();
    entity.setRepresentedOrganization(organization);
    II organizationId = of.createII();
    organization.getIds().add(organizationId);
    fetchAttributes("componentOf.encompassingEncounter.responsibleParty.assignedEntity.representedOrganization.id",
            organizationId);
    ON organizationName = of.createON();
    organization.getNames().add(organizationName);
    String name = fetchProperty(
            "componentOf.encompassingEncounter.responsibleParty.assignedEntity.representedOrganization.name");
    if (!onkoNullTaiTyhja(name)) {
        organizationName.getContent().add(name);
    }
}
/**
 * Returns the property-key prefix used to resolve configuration attributes
 * specific to this document type.
 *
 * @see fi.kela.kanta.cda.ArkistoKasaaja#getTypeKey()
 */
@Override
protected String getTypeKey() {
return OSVA_PREFIX;
}
/**
 * Builds the document identifier (asiakirjan tunniste) section: a wrapper
 * component containing the unique OID of this document both as text and as a
 * structured II observation.
 *
 * @return component holding the document identifier sub-component
 */
private POCDMT000040Component5 luoAsiakirjanTunniste() {
    POCDMT000040Component5 wrapper = createComponent(KantaCDAConstants.OstopalvelunValtuutus.ASIAKIRJAN_TUNNISTE);
    POCDMT000040Component5 tunniste = createComponent(KantaCDAConstants.OstopalvelunValtuutus.ASIAKIRJAN_YKSILOIVA_TUNNISTE);
    String oid = osva.getOid();
    II oidValue = of.createII();
    oidValue.setRoot(oid);
    tunniste.getSection().setText(luoTextContent(oid));
    tunniste.getSection().getEntries().add(luoEntryObservation(oidValue));
    wrapper.getSection().getComponents().add(tunniste);
    return wrapper;
}
/**
 * Builds the purchased-service type (ostopalvelun tyyppi) section. The code
 * system attributes are looked up from configuration using the type value,
 * then the code itself is set from the authorization data.
 *
 * @return component holding the service type sub-component
 */
private POCDMT000040Component5 luoOstopalvelunTyyppi() {
    POCDMT000040Component5 wrapper = createComponent(KantaCDAConstants.OstopalvelunValtuutus.OSTOPALVELUN_TYYPPI);
    POCDMT000040Component5 tyyppi = createComponent(KantaCDAConstants.OstopalvelunValtuutus.OSTOPALVELUN_TYYPPI_KOODI);
    // Attribute lookup fills codeSystem/displayName etc.; the code is set afterwards.
    CV koodi = of.createCV();
    fetchAttributes(String.format(OSTOPALVELUN_TYYPPI_TEMPLATE, osva.getOstopalvelunTyyppi()), koodi);
    koodi.setCode(String.valueOf(osva.getOstopalvelunTyyppi()));
    tyyppi.getSection().setText(luoTextContent(koodi.getDisplayName()));
    tyyppi.getSection().getEntries().add(luoEntryObservation(koodi));
    wrapper.getSection().getComponents().add(tyyppi);
    return wrapper;
}
/**
 * Builds the authorization validity period (valtuutuksen voimassaolo) section.
 * Open-ended ranges are supported: low/high bounds are set only when present.
 *
 * @return component holding the validity period sub-component
 */
private POCDMT000040Component5 luoOstopalvelunValtuutuksenVoimassaolo() {
    POCDMT000040Component5 wrapper = createComponent(KantaCDAConstants.OstopalvelunValtuutus.OSTOPALVELUN_VALTUUTUKSEN_VOIMASSAOLO);
    POCDMT000040Component5 voimassaolo = createComponent(KantaCDAConstants.OstopalvelunValtuutus.ASIAKIRJA_VOIMASSA);
    Date alku = osva.getValtuutuksenVoimassaoloAlku();
    Date loppu = osva.getValtuutuksenVoimassaoloLoppu();
    IVLTS aikavali = of.createIVLTS();
    if (alku != null) {
        aikavali.setLow(of.createIVXBTS());
        aikavali.getLow().setValue(getShortDateFormat().format(alku));
    }
    if (loppu != null) {
        aikavali.setHigh(of.createIVXBTS());
        aikavali.getHigh().setValue(getShortDateFormat().format(loppu));
    }
    voimassaolo.getSection().setText(luoTextContent(muotoileAikavali(alku, loppu)));
    voimassaolo.getSection().getEntries().add(luoEntryObservation(aikavali));
    wrapper.getSection().getComponents().add(voimassaolo);
    return wrapper;
}
/**
 * Builds the service organizer (palvelun järjestäjä) section: organizer OID
 * and name, plus an optional service unit (palveluyksikkö) with its name.
 *
 * @return component holding the organizer sub-components
 */
private POCDMT000040Component5 luoPalvelunJarjestaja() {
POCDMT000040Component5 palvelunJarjestajaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_JARJESTAJA);
POCDMT000040Component5 palvelunJarjestajanYksiloivaTunnisteComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_JARJESTAJA_YKSILOINTITUNNUS);
POCDMT000040Component5 palvelunJarjestajanNimiComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_JARJESTAJAN_NIMI);
String palvelunJarjestajanTunniste = osva.getPalvelunJarjestaja();
String palvelunJarjestajanNimi = osva.getPalvelunJarjestajaNimi();
// The organizer OID is rendered both as text and as a structured II observation.
II palvelunJarjestajanOidValue = of.createII();
palvelunJarjestajanOidValue.setRoot(palvelunJarjestajanTunniste);
palvelunJarjestajanYksiloivaTunnisteComponent.getSection().setText(luoTextContent(palvelunJarjestajanTunniste));
palvelunJarjestajanYksiloivaTunnisteComponent.getSection().getEntries().add(luoEntryObservation(palvelunJarjestajanOidValue));
// The name is text-only; no structured entry is added for it.
palvelunJarjestajanNimiComponent.getSection().setText(luoTextContent(palvelunJarjestajanNimi));
palvelunJarjestajaComponent.getSection().getComponents().add(palvelunJarjestajanYksiloivaTunnisteComponent);
palvelunJarjestajaComponent.getSection().getComponents().add(palvelunJarjestajanNimiComponent);
// Optional service unit: added only when an id is present in the data.
if (!KantaCDAUtil.onkoNullTaiTyhja(osva.getPalvelunJarjestajanPalveluyksikko())) {
POCDMT000040Component5 palvelunJarjestajanPalveluyksikkoComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_JARJESTAJA_PALVELUYKSIKKO);
POCDMT000040Component5 palvelunJarjestajanPalveluyksikonNimiComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_JARJESTAJA_PALVELUYKSIKON_NIMI);
String palvelunJarjestajanPalveluyksikko = osva.getPalvelunJarjestajanPalveluyksikko();
String palvelunJarjestajanPalveluykikonNimi = osva.getPalvelunJarjestajanPalveluyksikonNimi();
II palvelunJarjestajanPalveluyksikonOidValue = of.createII();
palvelunJarjestajanPalveluyksikonOidValue.setRoot(palvelunJarjestajanPalveluyksikko);
palvelunJarjestajanPalveluyksikkoComponent.getSection().setText(luoTextContent(palvelunJarjestajanPalveluyksikko));
palvelunJarjestajanPalveluyksikkoComponent.getSection().getEntries().add(luoEntryObservation(palvelunJarjestajanPalveluyksikonOidValue));
palvelunJarjestajanPalveluyksikonNimiComponent.getSection().setText(luoTextContent(palvelunJarjestajanPalveluykikonNimi));
palvelunJarjestajaComponent.getSection().getComponents().add(palvelunJarjestajanPalveluyksikkoComponent);
palvelunJarjestajaComponent.getSection().getComponents().add(palvelunJarjestajanPalveluyksikonNimiComponent);
}
return palvelunJarjestajaComponent;
}
/**
 * Builds the service producer (palvelun tuottaja) section: producer OID as a
 * structured observation plus the producer name as plain text.
 *
 * @return component holding the producer identifier and name sub-components
 */
private POCDMT000040Component5 luoPalvelunTuottaja() {
    POCDMT000040Component5 wrapper = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_TUOTTAJA);
    POCDMT000040Component5 tunniste = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_TUOTTAJAN_YKSILOINTITUNNUS);
    POCDMT000040Component5 nimi = createComponent(KantaCDAConstants.OstopalvelunValtuutus.PALVELUN_TUOTTAJAN_NIMI);
    String tuottajanOid = osva.getPalvelunTuottaja();
    II oidValue = of.createII();
    oidValue.setRoot(tuottajanOid);
    tunniste.getSection().setText(luoTextContent(tuottajanOid));
    tunniste.getSection().getEntries().add(luoEntryObservation(oidValue));
    // The producer name is text-only; no structured entry is added for it.
    nimi.getSection().setText(luoTextContent(osva.getPalvelunTuottajanNimi()));
    wrapper.getSection().getComponents().add(tunniste);
    wrapper.getSection().getComponents().add(nimi);
    return wrapper;
}
/**
 * Builds the section granting the producer the right to fetch documents from
 * the organizer's register: register keeper OID, register code, and an
 * optional register specifier (business id or personal identity code) with
 * its name.
 *
 * @return component holding the fetch-right sub-components
 */
private POCDMT000040Component5 luoTuottajanOikeusHakeaAsiakirjoja() {
POCDMT000040Component5 tuottajanOikeusHakeaAsiakirjojaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.OSTOP_TUOTT_OIKEUS_HAKEA_PALVELUN_JARJ_REK);
POCDMT000040Component5 rekisterinpitajaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERINPITAJA_HAKU);
POCDMT000040Component5 rekisteriComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERI_HAKU);
String rekisterinpitaja = osva.getHakuRekisterinpitaja();
String rekisteri = String.valueOf(osva.getHakuRekisteri());
II rekisterinpitajaValue = of.createII();
CV rekisteriValue = of.createCV();
rekisterinpitajaValue.setRoot(rekisterinpitaja);
// Register code attributes (codeSystem, displayName, ...) are looked up per register.
fetchAttributes(String.format(POTILASASIAKIRJAN_REKISTERITUNNUS_TEMPLATE, osva.getHakuRekisteri()),rekisteriValue);
rekisteriValue.setCode(rekisteri);
rekisterinpitajaComponent.getSection().setText(luoTextContent(rekisterinpitaja));
rekisterinpitajaComponent.getSection().getEntries().add(luoEntryObservation(rekisterinpitajaValue));
rekisteriComponent.getSection().setText(luoTextContent(rekisteriValue.getDisplayName()));
rekisteriComponent.getSection().getEntries().add(luoEntryObservation(rekisteriValue));
tuottajanOikeusHakeaAsiakirjojaComponent.getSection().getComponents().add(rekisterinpitajaComponent);
tuottajanOikeusHakeaAsiakirjojaComponent.getSection().getComponents().add(rekisteriComponent);
// Optional register specifier: added only when present in the data.
if (!KantaCDAUtil.onkoNullTaiTyhja(osva.getHakuRekisterinTarkenne())) {
POCDMT000040Component5 rekisterinTarkenneComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERIN_TARKENNE_HAKU);
POCDMT000040Component5 rekisterinTarkentimenNimiComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERIN_TARKENTIMEN_NIMI_HAKU);
String rekisterinTarkenne = osva.getHakuRekisterinTarkenne();
String rekisterinTarkentimenNimi = osva.getHakuRekisterinTarkentimenNimi();
II rekisterinTarkenneValue = of.createII();
// Choose the II root: business id (Y-tunnus) registry or personal identity code.
if (KantaCDAUtil.onkoYTunnus(rekisterinTarkenne)) {
fetchAttributes(yritysJaYhteisorekisteriRoot, rekisterinTarkenneValue);
} else {
fetchAttributes(henkilotunnusRoot, rekisterinTarkenneValue);
}
rekisterinTarkenneValue.setExtension(rekisterinTarkenne);
rekisterinTarkenneComponent.getSection().setText(luoTextContent(rekisterinTarkenne));
rekisterinTarkenneComponent.getSection().getEntries().add(luoEntryObservation(rekisterinTarkenneValue));
// The specifier name is text-only; no structured entry is added for it.
rekisterinTarkentimenNimiComponent.getSection().setText(luoTextContent(rekisterinTarkentimenNimi));
tuottajanOikeusHakeaAsiakirjojaComponent.getSection().getComponents().add(rekisterinTarkenneComponent);
tuottajanOikeusHakeaAsiakirjojaComponent.getSection().getComponents().add(rekisterinTarkentimenNimiComponent);
}
return tuottajanOikeusHakeaAsiakirjojaComponent;
}
/**
 * Builds the section granting the producer the right to store documents into
 * the organizer's register. Mirrors {@code luoTuottajanOikeusHakeaAsiakirjoja}
 * but uses the storage (tallennus) register fields and constants.
 *
 * @return component holding the store-right sub-components
 */
private POCDMT000040Component5 luoTuottajanOikeusTallentaaAsiakirjat() {
POCDMT000040Component5 tuottajanOikeusTallentaaAsiakirjojaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.OSTOP_TUOTT_OIKEUS_TALLENTAA_PALV_JARJ_REK);
POCDMT000040Component5 rekisterinpitajaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERINPITAJA_TALLENNUS);
POCDMT000040Component5 rekisteriComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERI_TALLENNUS);
String rekisterinpitaja = osva.getTallennusRekisterinpitaja();
String rekisteri = String.valueOf(osva.getTallennusRekisteri());
II rekisterinpitajaValue = of.createII();
CV rekisteriValue = of.createCV();
rekisterinpitajaValue.setRoot(rekisterinpitaja);
// Register code attributes (codeSystem, displayName, ...) are looked up per register.
fetchAttributes(String.format(POTILASASIAKIRJAN_REKISTERITUNNUS_TEMPLATE, osva.getTallennusRekisteri()),rekisteriValue);
rekisteriValue.setCode(rekisteri);
rekisterinpitajaComponent.getSection().setText(luoTextContent(rekisterinpitaja));
rekisterinpitajaComponent.getSection().getEntries().add(luoEntryObservation(rekisterinpitajaValue));
rekisteriComponent.getSection().setText(luoTextContent(rekisteriValue.getDisplayName()));
rekisteriComponent.getSection().getEntries().add(luoEntryObservation(rekisteriValue));
tuottajanOikeusTallentaaAsiakirjojaComponent.getSection().getComponents().add(rekisterinpitajaComponent);
tuottajanOikeusTallentaaAsiakirjojaComponent.getSection().getComponents().add(rekisteriComponent);
// Optional register specifier: added only when present in the data.
if (!KantaCDAUtil.onkoNullTaiTyhja(osva.getTallennusRekisterinTarkenne())) {
POCDMT000040Component5 rekisterinTarkenneComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERIN_TARKENNE_TALLENNUS);
POCDMT000040Component5 rekisterinTarkentimenNimiComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.REKISTERIN_TARKENTIMEN_NIMI_TALLENNUS);
String rekisterinTarkenne = osva.getTallennusRekisterinTarkenne();
String rekisterinTarkentimenNimi = osva.getTallennusRekisterinTarkentimenNimi();
II rekisterinTarkenneValue = of.createII();
// Choose the II root: business id (Y-tunnus) registry or personal identity code.
if (KantaCDAUtil.onkoYTunnus(rekisterinTarkenne)) {
fetchAttributes(yritysJaYhteisorekisteriRoot, rekisterinTarkenneValue);
} else {
fetchAttributes(henkilotunnusRoot, rekisterinTarkenneValue);
}
rekisterinTarkenneValue.setExtension(rekisterinTarkenne);
rekisterinTarkenneComponent.getSection().setText(luoTextContent(rekisterinTarkenne));
rekisterinTarkenneComponent.getSection().getEntries().add(luoEntryObservation(rekisterinTarkenneValue));
// The specifier name is text-only; no structured entry is added for it.
rekisterinTarkentimenNimiComponent.getSection().setText(luoTextContent(rekisterinTarkentimenNimi));
tuottajanOikeusTallentaaAsiakirjojaComponent.getSection().getComponents().add(rekisterinTarkenneComponent);
tuottajanOikeusTallentaaAsiakirjojaComponent.getSection().getComponents().add(rekisterinTarkentimenNimiComponent);
}
return tuottajanOikeusTallentaaAsiakirjojaComponent;
}
/**
 * Builds the patient (potilas) section: personal identity code (hetu), full
 * name and birth date.
 *
 * @return component holding the patient's identifier, name and birth date
 *         sub-components
 */
private POCDMT000040Component5 luoPotilas() {
POCDMT000040Component5 potilasComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.POTILAS);
POCDMT000040Component5 hetuComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.HENKILOTUNNUS);
POCDMT000040Component5 nimetComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.SUKU_JA_ETUNIMET);
POCDMT000040Component5 syntymaaikaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.SYNTYMAAIKA);
HenkilotiedotTO henkilotiedot = osva.getPotilas();
// Identity code as an II: root comes from configuration, extension is the hetu itself.
II hetuValue = of.createII();
fetchAttributes(henkilotunnusRoot, hetuValue);
hetuValue.setExtension(henkilotiedot.getHetu());
TS syntymaaikaValue = of.createTS();
syntymaaikaValue.setValue(henkilotiedot.getSyntymaaika());
hetuComponent.getSection().setText(luoTextContent(henkilotiedot.getHetu()));
hetuComponent.getSection().getEntries().add(luoEntryObservation(hetuValue));
nimetComponent.getSection().setText(luoTextContent(henkilotiedot.getNimi().getSukunimi()+ ", "+henkilotiedot.getNimi().getEtunimi()));
nimetComponent.getSection().getEntries().add(luoEntryObservation(getNames(henkilotiedot.getNimi())));
// BUGFIX: pattern was "dd.MM.YYYY"; in SimpleDateFormat "YYYY" is the
// week-based year, which differs from the calendar year around year
// boundaries. "yyyy" is the calendar year intended here.
syntymaaikaComponent.getSection().setText(luoTextContent(KantaCDAUtil.hetuToBirthTime(henkilotiedot.getHetu(), "dd.MM.yyyy")));
syntymaaikaComponent.getSection().getEntries().add(luoEntryObservation(syntymaaikaValue));
potilasComponent.getSection().getComponents().add(hetuComponent);
potilasComponent.getSection().getComponents().add(nimetComponent);
potilasComponent.getSection().getComponents().add(syntymaaikaComponent);
return potilasComponent;
}
/**
 * Builds the "documents to be disclosed" (luovutettavat asiakirjat) section.
 * Always contains a boolean "all documents" flag; when that flag is false,
 * either the listed service events or a date range of the disclosed material
 * is added.
 *
 * @return component holding the disclosure sub-components
 */
private POCDMT000040Component5 luoLuovutettavatAsiakirjat() {
POCDMT000040Component5 luovutettavatAsiakirjatComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.LUOVUTETTAVAT_ASIAKIRJAT);
POCDMT000040Component5 kaikkiAsiakirjatComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.KAIKKI_ASIAKIRJAT);
BL value = of.createBL();
value.setValue(osva.isKaikkiAsiakirjat());
kaikkiAsiakirjatComponent.getSection().setText(luoTextContent(osva.isKaikkiAsiakirjat()?BL_KYLLA:BL_EI));
kaikkiAsiakirjatComponent.getSection().getEntries().add(luoEntryObservation(value));
luovutettavatAsiakirjatComponent.getSection().getComponents().add(kaikkiAsiakirjatComponent);
if (!osva.isKaikkiAsiakirjat()) {
// Specific service events win over a plain date range when both could apply.
if (null != osva.getLuovutettavatPalvelutapahtumat() && !osva.getLuovutettavatPalvelutapahtumat().isEmpty()) {
for (String palvelutapahtuma : osva.getLuovutettavatPalvelutapahtumat()) {
POCDMT000040Component5 luovutettavaPalvelutapahtumaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.LUOVUTETTAVAT_PALVELUTAPAHTUMAT);
II palvelutapahtumaValue = of.createII();
palvelutapahtumaValue.setRoot(palvelutapahtuma);
luovutettavaPalvelutapahtumaComponent.getSection().setText(luoTextContent(palvelutapahtuma));
luovutettavaPalvelutapahtumaComponent.getSection().getEntries().add(luoEntryObservation(palvelutapahtumaValue));
luovutettavatAsiakirjatComponent.getSection().getComponents().add(luovutettavaPalvelutapahtumaComponent);
}
} else {
// No service events: describe the material as a (possibly open-ended) date range.
POCDMT000040Component5 luovutettavanAineistoinAikavaliComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.LUOVUTETTAVAN_AINEISTON_AIKAVALI);
IVLTS aikavaliValue = of.createIVLTS();
if (null != osva.getLuovutettavanAineistonAlku()) {
aikavaliValue.setLow(of.createIVXBTS());
aikavaliValue.getLow().setValue(getShortDateFormat().format(osva.getLuovutettavanAineistonAlku()));
}
if (null != osva.getLuovutettavanAineistonLoppu()) {
aikavaliValue.setHigh(of.createIVXBTS());
aikavaliValue.getHigh().setValue(getShortDateFormat().format(osva.getLuovutettavanAineistonLoppu()));
}
// NOTE(review): aikavaliValue is built but never attached as an entry here,
// unlike the other range sections - confirm whether an entry is required.
luovutettavanAineistoinAikavaliComponent.getSection().setText(luoTextContent(muotoileAikavali(osva.getLuovutettavanAineistonAlku(), osva.getLuovutettavanAineistonLoppu())));
luovutettavatAsiakirjatComponent.getSection().getComponents().add(luovutettavanAineistoinAikavaliComponent);
}
}
return luovutettavatAsiakirjatComponent;
}
/**
 * Builds the "document saver" (asiakirjan tallentaja) section: the
 * professional's identifier, name, service unit and the creation timestamp.
 *
 * @return component holding the saver sub-components
 */
private POCDMT000040Component5 luoAsiakirjanTallentaja() {
POCDMT000040Component5 asiakirjanTallentajaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.ASIAKIRJAN_TALLENTAJA);
POCDMT000040Component5 ammattihenkilonTunnisteComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.AMMATTIHENKILON_TUNNISTE);
POCDMT000040Component5 ammattihenkilonNimiComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.AMMATTIHENKILON_NIMI);
POCDMT000040Component5 ammattihenkilonPalveluyksikkoComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.AMMATTIHENKILON_PALVELUYKSIKKO);
POCDMT000040Component5 asiakirjanTekemisenAjankohtaComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.ASIAKIRJAN_TEKEMISEN_AJANKOHTA);
HenkilotiedotTO henkilotiedot = osva.getAsiakirjanTallentaja();
String ammattihenkilonPalveluyksikko = osva.getAmmattihenkilonPalveluyksikko();
String ammattihenkilonPalveluyksikkoNimi = osva.getAmmattihenkilonPalveluyksikonNimi();
II ammattihenkilonTunnisteValue = of.createII();
II ammattihenkilonPalveluyksikkoValue = of.createII();
TS tekemisenAjankohtaValue = of.createTS();
StrucDocText tunnisteText;
// Identifier selection: technical process id, Katso id when present, otherwise
// the professional's personal identity code. The II root for each case is
// resolved from configuration.
if (tekninenProsessi) {
fetchAttributes("tekninenProsessi", ammattihenkilonTunnisteValue);
tunnisteText = luoTextContent(ammattihenkilonTunnisteValue.getRoot());
} else {
if (!onkoNullTaiTyhja(osva.getAmmattihenkilonKatsoTunnus())) {
fetchAttributes("katsotunnus", ammattihenkilonTunnisteValue);
ammattihenkilonTunnisteValue.setExtension(osva.getAmmattihenkilonKatsoTunnus());
tunnisteText = luoTextContent(osva.getAmmattihenkilonKatsoTunnus());
} else {
fetchAttributes("henkilotunnus", ammattihenkilonTunnisteValue);
ammattihenkilonTunnisteValue.setExtension(henkilotiedot.getHetu());
tunnisteText = luoTextContent(henkilotiedot.getHetu());
}
}
ammattihenkilonPalveluyksikkoValue.setRoot(ammattihenkilonPalveluyksikko);
tekemisenAjankohtaValue.setValue(getDateFormat().format(osva.getAikaleima()));
ammattihenkilonTunnisteComponent.getSection().setText(tunnisteText);
ammattihenkilonTunnisteComponent.getSection().getEntries().add(luoEntryObservation(ammattihenkilonTunnisteValue));
ammattihenkilonNimiComponent.getSection().setText(luoTextContent(henkilotiedot.getNimi().getSukunimi()+ ", "+henkilotiedot.getNimi().getEtunimi()))
;
ammattihenkilonNimiComponent.getSection().getEntries().add(luoEntryObservation(getNames(henkilotiedot.getNimi())));
ammattihenkilonPalveluyksikkoComponent.getSection().setText(luoTextContent(ammattihenkilonPalveluyksikkoNimi));
ammattihenkilonPalveluyksikkoComponent.getSection().getEntries().add(luoEntryObservation(ammattihenkilonPalveluyksikkoValue));
// Timestamp is shown as "dd.MM.yyyy HH:mm" text but stored in the full CDA format.
asiakirjanTekemisenAjankohtaComponent.getSection().setText(luoTextContent(getTekemisenAjankohtaDateFormat().format(osva.getAikaleima())));
asiakirjanTekemisenAjankohtaComponent.getSection().getEntries().add(luoEntryObservation(tekemisenAjankohtaValue));
asiakirjanTallentajaComponent.getSection().getComponents().add(ammattihenkilonTunnisteComponent);
asiakirjanTallentajaComponent.getSection().getComponents().add(ammattihenkilonNimiComponent);
asiakirjanTallentajaComponent.getSection().getComponents().add(ammattihenkilonPalveluyksikkoComponent);
asiakirjanTallentajaComponent.getSection().getComponents().add(asiakirjanTekemisenAjankohtaComponent);
return asiakirjanTallentajaComponent;
}
/**
 * Builds the form metadata section (template id and specification version).
 *
 * @deprecated the metadata section is no longer required on the document.
 * @param templateId template id (OID) of the document
 * @return component holding the metadata sub-components
 */
@Deprecated
private POCDMT000040Component5 luoLomakkeenMetatiedot(String templateId) {
POCDMT000040Component5 metatiedotComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.LOMAKKEEN_METATIEDOT);
POCDMT000040Component5 templateIdComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.TEMPLATEID);
POCDMT000040Component5 maarittelyversioComponent = createComponent(KantaCDAConstants.OstopalvelunValtuutus.MAARITTELYVERSIO);
//String templateId = osva.getTemplateIds().iterator().next();
II value = of.createII();
value.setRoot(templateId);
templateIdComponent.getSection().setText(luoTextContent(templateId));
templateIdComponent.getSection().getEntries().add(luoEntryObservation(value));
// The specification version is text-only; no structured entry is added for it.
maarittelyversioComponent.getSection().setText(luoTextContent(fetchProperty(maarittelyVersio)));
metatiedotComponent.getSection().getComponents().add(templateIdComponent);
metatiedotComponent.getSection().getComponents().add(maarittelyversioComponent);
return metatiedotComponent;
}
/**
 * Builds the patient subject block used when the document is invalidated.
 *
 * @return subject element describing the patient by identity code and name
 */
private POCDMT000040Subject luoMitatointiPotilas() {
POCDMT000040Subject subject = of.createPOCDMT000040Subject();
subject.setTypeCode(ParticipationTargetSubject.SBJ);
subject.setRelatedSubject(of.createPOCDMT000040RelatedSubject());
subject.getRelatedSubject().setClassCode(XDocumentSubject.PAT);
subject.getRelatedSubject().setCode(of.createCE());
subject.getRelatedSubject().getCode().setCode(osva.getPotilas().getHetu());
// TODO: the code system OID for the identity code should be looked up from
// configuration. NOTE(review): the hard-coded value below does not look like
// a valid OID - confirm the intended code system before relying on it.
subject.getRelatedSubject().getCode().setCodeSystem("172.16.17.32");
subject.getRelatedSubject().setSubject(of.createPOCDMT000040SubjectPerson());
subject.getRelatedSubject().getSubject().getClassCodes().add("PSN");
subject.getRelatedSubject().getSubject().getNames().add(getNames(osva.getPotilas().getNimi()));
return subject;
}
/**
 * Builds the author block for an invalidation document. No author details are
 * recorded in this situation, so the author, its time and its assigned-author
 * id all carry only the NA null flavor.
 *
 * @return author element populated with NA null flavors
 */
private POCDMT000040Author luoMitatointiAuthor() {
    String naCode = KantaCDAConstants.NullFlavor.NA.getCode();
    POCDMT000040Author author = of.createPOCDMT000040Author();
    author.getNullFlavors().add(naCode);
    TS aika = of.createTS();
    aika.getNullFlavors().add(naCode);
    author.setTime(aika);
    POCDMT000040AssignedAuthor assignedAuthor = of.createPOCDMT000040AssignedAuthor();
    II authorId = of.createII();
    authorId.getNullFlavors().add(naCode);
    assignedAuthor.getIds().add(authorId);
    author.setAssignedAuthor(assignedAuthor);
    return author;
}
/**
 * Builds the care-process phase section for an invalidated document. The body
 * consists of two fixed text paragraphs stating that the document is empty
 * because it has been invalidated, plus the invalidation reason.
 *
 * @return component holding the placeholder text
 */
private POCDMT000040Component5 luoMitatointiHoitoprosessinVaihe() {
POCDMT000040Component5 hoitoprosessinvaiheComponent = createComponent(MAARITTAMATON_HOITOPROSESSIN_VAIHE);
POCDMT000040Component5 muuMerkintaComponent= createComponent(MUU_MERKINTA);
StrucDocText text = of.createStrucDocText();
StrucDocParagraph paragraph1 = of.createStrucDocParagraph();
StrucDocParagraph paragraph2 = of.createStrucDocParagraph();
// TODO: the invalidation reason should probably be caller-supplied instead of fixed text.
paragraph1.getContent().add("Asiakirja on tyhjรค, koska se on mitรคtรถity");
paragraph2.getContent().add("Mitรคtรถinnin syy: Valinta on poistettu");
text.getContent().add(of.createStrucDocTextParagraph(paragraph1));
text.getContent().add(of.createStrucDocTextParagraph(paragraph2));
muuMerkintaComponent.getSection().setText(text);
hoitoprosessinvaiheComponent.getSection().getComponents().add(muuMerkintaComponent);
return hoitoprosessinvaiheComponent;
}
/**
 * Helper that wraps the given text into a StrucDocText element:
 *
 * <pre>
 * {@code
 * <text>[teksti]</text>
 * }
 * </pre>
 *
 * @param teksti text placed into the content of the element
 * @return the populated text element
 */
private StrucDocText luoTextContent(String teksti) {
    StrucDocText textElement = of.createStrucDocText();
    textElement.getContent().add(teksti);
    return textElement;
}
/**
 * Wraps the given value into an entry containing a single observation built
 * by {@link #luoObservation(ANY)}:
 *
 * <pre>
 * {@code
 * <entry>
 *   <observation>...</observation>
 * </entry>
 * }
 * </pre>
 *
 * @param value value carried by the observation; may be null
 * @return entry element wrapping the observation
 */
private POCDMT000040Entry luoEntryObservation(ANY value) {
POCDMT000040Entry entry = of.createPOCDMT000040Entry();
entry.setObservation(luoObservation(value));
return entry;
}
/**
 * Builds an EVN-mood observation that carries the given value. The code
 * element carries only the NA null flavor; the value is attached only when
 * non-null.
 *
 * @param value observation value, may be null
 * @return populated observation element
 */
private POCDMT000040Observation luoObservation(ANY value) {
    POCDMT000040Observation observation = of.createPOCDMT000040Observation();
    observation.getClassCodes().add(classCodeCOND);
    observation.setMoodCode(XActMoodDocumentObservation.EVN);
    CD naCode = of.createCD();
    naCode.getNullFlavors().add(KantaCDAConstants.NullFlavor.NA.getCode());
    observation.setCode(naCode);
    if (null != value) {
        observation.getValues().add(value);
    }
    return observation;
}
/**
 * Formats a date range as text, e.g. {@code "1.1.2020 - 31.12.2020"}. A
 * missing end is simply left out ({@code "1.1.2020 -"} / {@code "- 31.12.2020"});
 * when both ends are null an empty string is returned.
 *
 * @param alku range start, may be null
 * @param loppu range end, may be null
 * @return formatted range text, or an empty string
 */
private String muotoileAikavali(Date alku, Date loppu) {
    if (alku == null && loppu == null) {
        return "";
    }
    String alkuOsa = (alku != null) ? getTodayDateFormat().format(alku) + " " : "";
    String loppuOsa = (loppu != null) ? " " + getTodayDateFormat().format(loppu) : "";
    return alkuOsa + "-" + loppuOsa;
}
/**
 * Returns the formatter used for the human-readable "document created at"
 * timestamp (pattern {@code dd.MM.yyyy HH:mm}) in the configured time zone.
 * A new instance is created on every call because SimpleDateFormat is not
 * thread-safe.
 *
 * @return formatter for the creation timestamp text
 */
private final SimpleDateFormat getTekemisenAjankohtaDateFormat() {
SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy HH:mm");
sdf.setTimeZone(TimeZone.getTimeZone(Kasaaja.TIME_ZONE));
return sdf;
}
}
|
<gh_stars>10-100
require "rails_helper"
RSpec.describe VisitCountsByPrisonStateDateAndTimely do
  # The model is expected to be read-only (e.g. backed by a database view).
  it "is readonly" do
    expect(subject).to be_readonly
  end
end
|
import React, { useCallback, useState } from "react";
import "./App.css";
// querylab lib code
import { Dialect, allDialects, translate } from "./querylab";
// used for LZ compress query and store in query string
import LZString from "lz-string";
import { useQueryString } from "./use-query-string";
//
// code editor and syntax highlighter
//
import Editor from "react-simple-code-editor";
import { highlight, languages } from "prismjs";
import "prismjs/components/prism-sql";
import "prismjs/themes/prism-solarizedlight.css"; // change the theme here
import { format, FormatOptions } from "sql-formatter";
import {
AppBar,
Toolbar,
Select,
MenuItem,
Typography,
Table,
TableHead,
TableBody,
TableRow,
TableCell,
Link,
Paper,
TableContainer,
Button,
} from "@material-ui/core";
import { createMuiTheme, ThemeProvider } from "@material-ui/core/styles";
// decompresses to: knex('change').select('me').count();
const DEFAULT_QUERY = "NYOwpgHgFA5AxgCwIYgOZhgSgHQGcwA2YcALrALYY5wD2AriGZgNxA";
const DEFAULT_DIALECT = Dialect.mssql;

// sql-formatter options applied to every generated SQL preview.
const FORMAT_OPTIONS: FormatOptions = {
  uppercase: true,
  language: "sql",
};

// Dark app bar with a light accent; the MuiSelect override keeps the dialect
// picker legible against the dark toolbar.
export const theme = createMuiTheme({
  palette: {
    primary: {
      main: "#121113",
    },
    secondary: {
      main: "#f0f0f2",
    },
  },
  overrides: {
    MuiSelect: {
      icon: {
        color: "#f0f0f2",
      },
      select: {
        "&:focus": {
          background: "#3a3660",
        },
        color: "#f0f0f2",
      },
    },
  },
});
/**
 * Knex QueryLab: type a knex expression, see the generated SQL (formatted and
 * native) plus its bindings. Dialect and the LZ-compressed query live in the
 * URL query string so sessions are shareable.
 */
export default function App() {
  const [dialect, setDialect] = useQueryString("dialect", DEFAULT_DIALECT);
  const [query, setQuery] = useQueryString("query", DEFAULT_QUERY);
  // eslint-disable-next-line
  // NOTE(review): `message` is set by `copy` but never rendered anywhere —
  // either wire it to a Snackbar/toast or drop the state entirely.
  const [message, setMessage] = useState<string | null>(null);

  const updateDialect = useCallback(
    (value: string) => {
      setDialect(value);
    },
    [setDialect]
  );

  const updateQuery = useCallback(
    (value: string) => {
      // Compress before storing so long queries still fit in the URL.
      const compressed = LZString.compressToEncodedURIComponent(value);
      setQuery(compressed);
    },
    [setQuery]
  );

  const displayQuery = LZString.decompressFromEncodedURIComponent(query) || "";
  const [toQuery, sql, bindings] = translate(displayQuery, dialect);
  const noop = () => {};

  const copy = useCallback(
    (text: string) => {
      navigator.clipboard.writeText(text);
      setMessage("Copied to clipboard!");
    },
    [setMessage]
  );

  return (
    <ThemeProvider theme={theme}>
      <div className="App">
        {/* header */}
        <AppBar position="static">
          <Toolbar>
            <Typography variant="h6">Knex QueryLab</Typography>
            <div>
              <Select
                variant="outlined"
                labelId="dialect-label"
                value={dialect}
                onChange={(e) => updateDialect(e.target.value as string)}
                label="Dialect"
              >
                {/* dialect names are unique and stable — use them as keys
                    instead of the array index */}
                {allDialects().map((d) => (
                  <MenuItem key={d} value={d}>
                    {d}
                  </MenuItem>
                ))}
              </Select>
            </div>
          </Toolbar>
        </AppBar>
        <section>
          {/* body */}
          {/* body - select dialect */}
          <Typography className="subtitle" variant="h6">
            Expression
          </Typography>
          <Button style={{ float: "right" }} onClick={() => copy(displayQuery)}>
            Copy
          </Button>
          {/* body - input knex query*/}
          <Paper className="code">
            <Editor
              value={displayQuery}
              onValueChange={updateQuery}
              highlight={(code) =>
                highlight(code, languages.javascript, "javascript")
              }
            />
          </Paper>
          <Typography className="subtitle" variant="h6">
            Query
          </Typography>
          <Button style={{ float: "right" }} onClick={() => copy(toQuery)}>
            Copy
          </Button>
          {/* body - output sql*/}
          <Paper className="code">
            <Editor
              disabled={true}
              value={toQuery}
              onValueChange={noop}
              highlight={(code) =>
                highlight(format(code, FORMAT_OPTIONS), languages.sql, "sql")
              }
            />
          </Paper>
          <Typography className="subtitle" variant="h6">
            SQL - Native
          </Typography>
          {/* body - output native*/}
          <Paper className="code">
            <Editor
              disabled={true}
              value={sql}
              onValueChange={noop}
              highlight={(code) =>
                highlight(format(code, FORMAT_OPTIONS), languages.sql, "sql")
              }
            />
          </Paper>
          {/* body - output bindings*/}
          <TableContainer component={Paper} className="bindings">
            <Table>
              <TableHead>
                <TableRow>
                  <TableCell style={{ width: "20px" }}>Binding</TableCell>
                  <TableCell>Value</TableCell>
                </TableRow>
              </TableHead>
              <TableBody>
                {bindings.map((value, index) => {
                  return (
                    <TableRow key={index + 1}>
                      <TableCell>{index + 1}</TableCell>
                      <TableCell>{JSON.stringify(value)}</TableCell>
                    </TableRow>
                  );
                })}
              </TableBody>
            </Table>
          </TableContainer>
        </section>
        {/* footer */}
        <footer>
          <Typography variant="body1">
            Experiment with the <Link href="https://knexjs.org">KnexJS</Link>{" "}
            API to build SQL.{" "}
            <Link href="https://github.com/michaelavila/knex-querylab">
              View source.
            </Link>
          </Typography>
        </footer>
      </div>
    </ThemeProvider>
  );
}
|
<reponame>sgmap/aplus
package views
import cats.syntax.all._
import helpers.forms.CSRFInput
import helper.TwirlImports.toHtml
import models.{Authorization, User}
import org.webjars.play.WebJarsUtil
import play.api.mvc.{Flash, RequestHeader}
import play.twirl.api.Html
import scalatags.Text.all._
/** Admin page listing France Services structures: a Tabulator table with
  * CSV/XLSX export plus an "add" table supporting empty rows, CSV import,
  * CSV export, dedup and upload.
  */
object franceServices {

  def page(
      currentUser: User,
      currentUserRights: Authorization.UserRights,
  )(implicit
      flash: Flash,
      request: RequestHeader,
      webJarsUtil: WebJarsUtil,
      mainInfos: MainInfos
  ): Html =
    views.html.main(currentUser, currentUserRights, maxWidth = false)(
      // plain literal: the original used an s-interpolator with nothing to interpolate
      "France Services"
    )(Nil)(
      frag(
        h5(cls := "title--addline", "France Services"),
        CSRFInput,
        // main listing table, filled client-side by Tabulator
        div(cls := "mdl-cell mdl-cell--12-col", id := "tabulator-france-services-table"),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "france-services-download-btn-csv",
            href := "#",
            i(cls := "fas fa-download"),
            " Tรฉlรฉchargement au format CSV"
          )
        ),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "france-services-download-btn-xlsx",
            href := "#",
            i(cls := "fas fa-download"),
            " Tรฉlรฉchargement au format XLSX"
          )
        ),
        div(id := "france-services-alerts", cls := "mdl-cell mdl-cell--12-col"),
        h5(cls := "title--addline", "Ajout"),
        // staging table for rows to add
        div(cls := "mdl-cell mdl-cell--12-col", id := "tabulator-france-services-add-table"),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "add-france-services-new-line",
            href := "#",
            i(cls := "fas fa-plus"),
            " Ajouter une ligne vide"
          )
        ),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "add-france-services-csv",
            href := "#",
            i(cls := "fas fa-file"),
            " Ajouter un CSV"
          )
        ),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "add-france-services-download-csv",
            href := "#",
            i(cls := "fas fa-download"),
            " Tรฉlรฉcharger en CSV"
          )
        ),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          a(
            id := "add-france-services-dedup",
            href := "#",
            "Dรฉduplication"
          )
        ),
        div(id := "france-services-add-alerts", cls := "mdl-cell mdl-cell--12-col"),
        div(
          cls := "mdl-cell mdl-cell--3-col",
          button(
            id := "add-france-services-upload",
            cls := "mdl-button mdl-button--raised",
            i(cls := "fas fa-upload"),
            " Envoyer les ajouts"
          )
        )
      )
    )(
      // XLSX export needs SheetJS on the page
      views.helpers.head.publicScript("generated-js/xlsx.full.min.js")
    )
}
|
const Discord = require('discord.js');
const nekoclient = require('nekos.life');
const neko = new nekoclient();
module.exports = {
name: 'neko',
description: "Nekos UwU",
aliases: ['catgirl'],
usage: '',
cooldown: 2,
args: 0,
catergory: 'Imagenes',
async execute(client, message, args) {
message.react('๐ฑ');
var superagent = require('superagent')
superagent.get('https://nekobot.xyz/api/image').query({ type: 'neko'}).end((err, response) => {
async function nekof() {
const GIF = await neko.sfw.neko();
const GUF = await neko.sfw.nekoGif();
const randomaction = [
GIF.url,
GUF.url,
response.body.message
] //Respuestas posibles
randomsg = randomaction[Math.floor(Math.random() * Math.floor(randomaction.length))];
const embed = new Discord.MessageEmbed()
.setColor('RANDOM')
.setDescription(`Nya, nyah!!! UwU`)
.setImage(randomsg)
return message.channel.send(embed);
}
nekof();
});
}
}; |
<reponame>zhoujiagen/learning-algorithms<filename>codes/compiler/dragon/IR/src/main/java/com/spike/compiler/dragon/ir/iloc/gen/ILOCBaseVisitor.java
// Generated from ILOC.g4 by ANTLR 4.10.1
package com.spike.compiler.dragon.ir.iloc.gen;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
/**
* This class provides an empty implementation of {@link ILOCVisitor},
* which can be extended to create a visitor which only needs to handle a subset
* of the available methods.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
// NOTE: this class is generated by ANTLR (see the header comment) — do not
// edit by hand; regenerate from ILOC.g4 instead.
public class ILOCBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements ILOCVisitor<T> {
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitProgram(ILOCParser.ProgramContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitDecl(ILOCParser.DeclContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitSingleInstr(ILOCParser.SingleInstrContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitInstrList(ILOCParser.InstrListContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitComment(ILOCParser.CommentContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitRealOp(ILOCParser.RealOpContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitSources(ILOCParser.SourcesContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitTargets(ILOCParser.TargetsContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitLabel(ILOCParser.LabelContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitOperand(ILOCParser.OperandContext ctx) { return visitChildren(ctx); }
	/**
	 * {@inheritDoc}
	 *
	 * <p>The default implementation returns the result of calling
	 * {@link #visitChildren} on {@code ctx}.</p>
	 */
	@Override public T visitOpCode(ILOCParser.OpCodeContext ctx) { return visitChildren(ctx); }
}
<reponame>orz365/http-mpcloud
const {hcloud, db} = require('./head')
const jasmine = require("jasmine");
// Smoke tests for the mini-program cloud HTTP wrapper (db + hcloud from ./head).
// These hit a live cloud backend, so each case just asserts the round trip
// completes (done()) rather than checking payloads.
describe("ไบๅผๅๆไฝๆฐๆฎๅบ", function () {
    // CRUD round trip: add a doc, update it by id, remove it, then read back.
    it("ๅขๅ ๆนๆฅ", function (done) {
        db.collection('tb_test').add({
            data: {
                name: 'mp-cloud-http',
            },
        }).then(res => {
            let id = res.id_list[0]
            db.collection('tb_test').where({
                _id: id,
            }).update({
                data: {
                    name: 'mp-cloud-http2',
                },
            }).then(res => {
                db.collection('tb_test').where({
                    _id: id,
                }).remove().then(() => {
                    db.collection('tb_test').get().then(res => {
                        done()
                        console.log(res)
                    })
                })
            })
        })
    });

    // Aggregation: join comments to their posts via lookup + sub-pipeline.
    it("่ๅๅฝๆฐ", function (done) {
        let _ = db.command
        let $ = db.command.aggregate
        db.collection('tb_comment').aggregate().lookup({
            from: 'tb_post',
            let: {
                post_id: '$target_id',
            },
            pipeline: $.pipeline()
                .match(
                    _.expr(
                        $.and([
                            $.eq(['$_id', '$$post_id']),
                        ]),
                    ),
                ).project({
                    _id: 1,
                    content: 1,
                }).done(),
            as: 'postList',
        }).limit(1).end().then(res => {
            done()
        }).catch(err => {
            done.fail(err)
        })
    });

    // Cloud function invocation.
    it('ไบๅฝๆฐ่ฐ็จ', function (done) {
        hcloud.callFunction({
            name: 'getComment',
        }).then(res => {
            done()
        }).catch(err => {
            done.fail(err)
        })
    })

    // List collection metadata.
    it('่ทๅ้ๅไฟกๆฏ', function (done) {
        hcloud.collections().get().then(res => {
            console.log(res)
            done()
        }).catch(err => {
            done.fail(err)
        })
    })
});
#!/bin/bash
# Packages the SmallestHTTPServer into dist/smallest.http.jar.
echo -e "Smallest HTTP Server packaging script"
# Quoted so paths containing spaces survive the round trip.
FROM_DIR="$(pwd)"
echo -e "Packaging the server..."
rm -rf classes
rm -rf dist
mkdir classes
#
# No debug option (to keep it small)
# Remove the '-g:none' from the javac command to have it.
# Add -verbose if interested
#
javac -g:none -d classes -s src/main/java \
     src/main/java/http/httpserver/SmallestHTTPServer.java
mkdir dist
echo "Main-Class: http.httpserver.SmallestHTTPServer" > manifest.txt
echo "Compile-date: $(date)" >> manifest.txt
# Guard the cd: running jar from the wrong directory would package garbage.
cd classes || exit 1
jar -cvfm ../dist/smallest.http.jar ../manifest.txt *
#
echo -e "To run the server:"
echo -e "cd ../dist"
echo -e "java [-Dhttp.port:8888] -jar smallest.http.jar --verbose:false"
#
cd "${FROM_DIR}" || exit 1
rm manifest.txt
echo -e "------------------------"
echo -e "Done. Jar is ready:"
echo -e "------------------------"
ls -lisah dist
echo -e "------------------------"
def find_max(arr):
    """Return the largest element of a sequence via divide and conquer.

    Args:
        arr: non-empty sequence of mutually comparable items.

    Returns:
        The maximum element.

    Raises:
        ValueError: if ``arr`` is empty (the original recursed to a
            slicing dead-end instead of failing clearly).
    """
    if not arr:
        raise ValueError("find_max() arg is an empty sequence")
    if len(arr) == 1:
        return arr[0]
    mid = len(arr) // 2
    left_max = find_max(arr[:mid])
    right_max = find_max(arr[mid:])
    return max(left_max, right_max)


if __name__ == "__main__":
    # Demo call; the original called find_max(arr) with ``arr`` undefined,
    # which raised NameError on import.
    print(find_max([3, 7, 2, 9, 4]))
#!/bin/bash
# SPDX-FileCopyrightText: 2021 "SAP SE or an SAP affiliate company and Gardener contributors"
#
# SPDX-License-Identifier: Apache-2.0

set -e

K8S_VERSION="1.21.x"

echo "> Setup Test Environment for K8s Version ${K8S_VERSION}"

# Quoted throughout so the script works from a checkout path containing spaces.
CURRENT_DIR="$(dirname "$0")"
PROJECT_ROOT="${CURRENT_DIR}"/..

# TODO: setup-envtest currently doesnt support darwin/arm64 / force amd64
ARCH_ARG=""
if [[ $(go env GOOS) == "darwin" && $(go env GOARCH) == "arm64" ]]; then
  ARCH_ARG="--arch amd64"
fi

# ARCH_ARG is intentionally unquoted: when set it must split into two words.
export KUBEBUILDER_ASSETS=$(setup-envtest use -p path ${K8S_VERSION} ${ARCH_ARG})

mkdir -p "${PROJECT_ROOT}/tmp/test"
rm -f "${PROJECT_ROOT}/tmp/test/bin"
ln -s "${KUBEBUILDER_ASSETS}" "${PROJECT_ROOT}/tmp/test/bin"

# TODO: The landscaper crd files used for testing are currently not exported via landscaper api module.
# To avoid adding the landscaper module, download the needed crd files directly.
LANDSCAPER_APIS_VERSION=$(go list -m -mod=readonly -f {{.Version}} github.com/gardener/landscaper/apis)
LANDSCAPER_CRD_URL="https://raw.githubusercontent.com/gardener/landscaper/${LANDSCAPER_APIS_VERSION}/pkg/landscaper/crdmanager/crdresources"
LANDSCAPER_CRD_DIR="${PROJECT_ROOT}/tmp/landscapercrd"
LANDSCAPER_CRDS="landscaper.gardener.cloud_installations.yaml landscaper.gardener.cloud_targets.yaml landscaper.gardener.cloud_dataobjects.yaml"
mkdir -p "${LANDSCAPER_CRD_DIR}"
for crd in $LANDSCAPER_CRDS; do
  (cd "${LANDSCAPER_CRD_DIR}" && curl -s -O "${LANDSCAPER_CRD_URL}/${crd}")
done
|
#!/bin/bash
# Builds the all-in-one image, runs the integration test against it, and
# (for master / release tags) pushes the image to Docker Hub.

set -e

BRANCH=${BRANCH:?'missing BRANCH env var'}

source ~/.nvm/nvm.sh
nvm use 8
make build-all-in-one-linux
export REPO=jaegertracing/all-in-one
docker build -f cmd/all-in-one/Dockerfile -t $REPO:latest cmd/all-in-one
export CID=$(docker run -d -p 16686:16686 -p 5778:5778 $REPO:latest)
# With `set -e`, a failing integration test used to exit before the explicit
# `docker kill`, leaking the container; the EXIT trap guarantees cleanup.
trap 'docker kill "$CID" > /dev/null 2>&1 || true' EXIT
make integration-test

# Only push the docker container to Docker Hub for master branch
if [[ ("$BRANCH" == "master" || $BRANCH =~ ^v[0-9]+\.[0-9]+\.[0-9]+$) && "$TRAVIS_SECURE_ENV_VARS" == "true" ]]; then
  echo 'upload to Docker Hub'
else
  echo 'skip docker upload for PR'
  exit 0
fi

bash ./scripts/travis/upload-to-docker.sh
|
def max_group_calls(consumption: int = 0) -> int:
    """Return how many group calls a fixed 1000-unit budget covers.

    Args:
        consumption: units consumed per call; non-positive values yield 0.

    Returns:
        Floor of 1000 divided by ``consumption``, or 0 when the rate is
        missing or non-positive.
    """
    if consumption > 0:
        return 1000 // consumption
    return 0
import random

def randomElement(list):
    """Return one element of ``list`` chosen uniformly at random.

    Raises IndexError when the sequence is empty (random.choice behavior).
    Note: the parameter shadows the ``list`` builtin; kept for interface
    compatibility.
    """
    return random.choice(list)

# Demo: print a random element from a sample list.
print(randomElement([1,2,3,4,5]))
<reponame>zhaohainan666/Adlik
#!/usr/bin/env python3
import os
import shutil
import subprocess
import sys
def main(args):
    """Build the adlik_serving binary with bazel and stage it for packaging.

    Args:
        args: extra command-line flags forwarded verbatim to ``bazel build``.
    """
    # Fails loudly (CalledProcessError) if the bazel build breaks.
    subprocess.check_call(['bazel', 'build', *args, '//adlik_serving'])
    # Stage the built binary into __adlik_serving/ (idempotent).
    os.makedirs('__adlik_serving', exist_ok=True)
    shutil.copy2('bazel-bin/adlik_serving/adlik_serving', '__adlik_serving/adlik_serving')

if __name__ == "__main__":
    main(sys.argv[1:])
|
<filename>lib/resources/libvirt/libvirt_domain.rb
# InSpec resource for inspecting a libvirt domain (fetched through Fog).
# Fix: the original defined `persistent?` twice with identical bodies; the
# duplicate has been removed.
class Libvirtdomain < Inspec.resource(1)
  name 'libvirt_domain'
  desc 'Verifies settings for a Libvirt domain.'
  supports platform: 'libvirt'
  example <<-RUBY
    describe libvirt_domain(name:'ubuntu-16.04') do
      it { should exist }
      it { should be_active }
      it { should be_persistent }
      its(:cpus) { should eq(2) }
      its(:memory) { should eq(2048) }
      its(:arch) { should eq('x86_64') }
      its(:networks) { should include('default') }
    end
  RUBY

  include LibvirtBase

  # Whether the domain is currently running.
  def active?
    domain.active
  end

  # Whether the domain definition survives host restarts.
  def persistent?
    domain.persistent
  end

  def arch
    domain.arch
  end

  def autostarted?
    domain.autostart
  end

  def cpus
    domain.cpus
  end

  def exists?
    domain && domain.is_a?(Fog::Compute::Libvirt::Server)
  end

  # Memory in MiB (Fog reports KiB).
  def memory
    domain.memory_size / 1024
  end

  def name
    domain.name
  end

  def networks
    domain.nics.map(&:network)
  end

  def state
    domain.state
  end

  private

  # Lazily resolves the domain from the resource options.
  def domain
    @domain ||= domain_for(@options[:name], @options[:pool])
  end

  def domain_for(name, pool)
    client.servers.all(name: name, pool_name: pool).first
  end
end
|
<gh_stars>1-10
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "InstrumentPanel.h"
#include "DevicePanel.h"
// ------------------------------------------------------------------------------------------------
// Builds the main window: a Devices tab and an Instruments tab. Panels
// collected in `widgets` are wired to the device panel's MIDI device-change
// signals; the device panel itself intentionally is not in that list.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    QList<QWidget*> widgets;
    auto *devicePanel = new DevicePanel();
    ui->tabWidget->addTab(devicePanel, tr("Devices"));
    auto *instrumentPanel = new InstrumentPanel();
    widgets.append(instrumentPanel);
    ui->tabWidget->addTab(instrumentPanel, tr("Instruments"));
    // String-based SIGNAL/SLOT connects: signature mismatches are only
    // reported at runtime, not at compile time.
    for (QWidget *& widget: widgets)
    {
        connect(devicePanel, SIGNAL(inputChanged(MIDIInput*)), widget, SLOT(setInputDevice(MIDIInput*)));
        connect(devicePanel, SIGNAL(outputChanged(MIDIOutput*)), widget, SLOT(setOutputDevice(MIDIOutput*)));
    }
}
// ------------------------------------------------------------------------------------------------
MainWindow::~MainWindow()
{
    // Only the Designer-generated ui needs explicit deletion; the child
    // widgets are freed by Qt's parent/child ownership.
    delete ui;
}
|
import asyncio
import discord
import logging
import os
import random
import time
import traceback
from discord.ext import tasks
import army_date_calculate as army
import spread_sheet_reader as excel
import bang_dream_score_caluclator as bangdream
import mltd_border as border
TOKEN = os.environ["TOKEN"]
client = discord.Client()
cool_time = 1800 # ๊ธฐ๋ณธ๊ฐ 30๋ถ ๋จ์
msg = '' # ๋ด์ด ๊ฐ์ฅ ๋ง์ง๋ง์ผ๋ก ๋ณด๋ธ ๋ณด๋ ๋ฉ์ธ์ง embed
reaction_message_id = int # ๋ด์ด ๊ฐ์ฅ ๋ง์ง๋ง์ผ๋ก ๋ณด๋ ์ด๋ฒคํธ ์ ์ก์ ํ ๋ฉ์ธ์ง ID
channel = int # ๋ณด๋ ์ด๋ฒคํธ ์ ์ก ์ฒด๋ ๋ฆฌ์คํธ
class MyClient(discord.Client):
    async def on_ready(self):
        # Bot startup complete (log message is intentionally left in its
        # original language — it is a runtime string).
        print('{0}์ค๋น ์๋ฃ!'.format(self.user))
        # Start the recurring border-polling task (runtime_get_mili_border).
        self.runtime_get_mili_border.start()
# ๋ฐ๋ฆฌ์ํ ๋ณด๋๋ฅผ ์ ์ ๊ฐ ์ ํ ์ฃผ๊ธฐ๋งํผ ์์ง์
๋๋ค.
@tasks.loop()
async def runtime_get_mili_border(self):
global msg, reaction_message_id
# ํ์ฌ ์๊ฐ์ ์ด ๋จ์๋ก ํ์ฐํ ํ cool_time ์ ๊ฐ์ ์๋ ค์ค๋๋ค. ๋ด์ ๊ณผ๋ถํ๋ฅผ ๋ง๊ธฐ ์ํด XX๋ถ 00์ด๋ก ์๋ ค์ฃผ๋๊ฑด ์๋ตํฉ๋๋ค.
await asyncio.sleep(60)
if (int(time.time()) + 32400) % cool_time < 60: # UTC ๊ธฐ์ค์ด๋ฏ๋ก UTC+9๋ก ํ์ฐํด์ค๋๋ค, XX๋ถ 59์ด๊น์ง๋ ๋ณด๋ผ ์ ์์ต๋๋ค.
embed_border = border.get_embed()
if embed_border == -1:
return
else:
event_send = client.get_channel(channel)
# ๋ณด๋ผ ์ฒด๋์ด ์์ผ๋ฉด return ํฉ๋๋ค.
if not event_send:
return
msg = await event_send.send(embed=embed_border)
reaction_message_id = msg.id
reactions = ['1๏ธโฃ', '2๏ธโฃ', '3๏ธโฃ', '4๏ธโฃ']
for emoji in reactions:
await msg.add_reaction(emoji)
# ๋๊ตฐ๊ฐ๊ฐ ์ด๋ชจ์ง์ ๋ฐ์์ ํด ์คฌ์ ๋ ์ก์
์ ์ทจํฉ๋๋ค.
async def on_raw_reaction_add(self, payload):
global cool_time
# ๋ฐ์ํด์ค ์ ์ ๊ฐ ๋ด์ด ์๋๋ ๊ทธ๋ฆฌ๊ณ ๋ฐ์์ ์ถ๊ฐํ ๋ฉ์ธ์ง๊ฐ ๊ฐ์ฅ ์ต์ ์ผ๋ก ๋ด์ด ๋ณด๋ธ ๋ฉ์ธ์ง ์ผ๋ ๋ค์๊ณผ ๊ฐ์ด ํ๋ํฉ๋๋ค.
if payload.message_id == reaction_message_id and payload.user_id != self.user.id:
if payload.emoji.name in '1๏ธโฃ':
await msg.reply("์
๋ฐ์ดํธ ์ฃผ๊ธฐ๊ฐ 30๋ถ์ผ๋ก ๋ณ๊ฒฝ๋์์ด์!")
cool_time = 1800
elif payload.emoji.name in '2๏ธโฃ':
await msg.reply("์
๋ฐ์ดํธ ์ฃผ๊ธฐ๊ฐ 1์๊ฐ์ผ๋ก ๋ณ๊ฒฝ๋์์ด์!")
cool_time = 3600
elif payload.emoji.name in '3๏ธโฃ':
await msg.reply("์
๋ฐ์ดํธ ์ฃผ๊ธฐ๊ฐ 12์๊ฐ์ผ๋ก ๋ณ๊ฒฝ๋์์ด์!")
cool_time = 43200
elif payload.emoji.name in '4๏ธโฃ':
await msg.reply("์
๋ฐ์ดํธ ์ฃผ๊ธฐ๊ฐ 24์๊ฐ์ผ๋ก ๋ณ๊ฒฝ๋์์ด์!")
cool_time = 86400
else:
return
self.runtime_get_mili_border.restart()
async def on_message(self, message):
# DM์์ ๋ฌด์จ ๋ฉ์ธ์ง๋ฅผ ๋ณด๋๋๋ฐ log๋ก ํ์ธํฉ๋๋ค.
print("{0.author}์๊ฒ์ ์จ DM : {0.content}".format(message))
async def on_message(self, message):
# ๋ฉ์ธ์ง๋ฅผ ๋ณด๋ธ ์ ์ ๊ฐ ๋ด์ด ์๋ ์ ์ ์ธ์ง ํ์ธํด์ค๋๋ค.
if message.author.id == self.user.id:
return
# ์ธ์ฌ๋ฅผ ๊ฑด๋
๋๋ค.
if message.content.startswith('-์๋
'):
await message.reply('์๋
ํ์ธ์!!', mention_author=True)
# ์ ์์์ ๋จ์ ๊ตฐ์ํ์ ์๋ ค์ค๋๋ค.
if message.content.startswith('-๋ณต๋ฌด์ผ'):
date = army.remain_days()
date_percent = army.remain_days_percent()
await message.channel.send("์น๋น๋์ ๋จ์ ์ ์ญ์ผ ์๋ " + date + "์ผ์ด๋ฉฐ ํ์ฌ๊น์ง " + date_percent + "%๋งํผ ํ์ด์!")
# ๋ก๋ ๋ฒํธ๋ฅผ ๋ฝ์์ค๋๋ค. ๋์ฒจ๋๋ ์ฑ
์์ ์ง์ง ์์ต๋๋ค...
if message.content.startswith('-๋ก๋'):
# ๋ก๋ ๋ฒํธ 1~45๊น์ง์ ์ซ์๋ฅผ ๋ฝ์์ค๋๋ค.
possible_number_list = random.sample(range(1, 45), 6)
# ๋ก๋ ๋ฒํธ๋ฅผ ๋ณด๊ธฐ ํธํ๊ฒ ์ซ์ ํฌ๊ธฐ ์์ผ๋ก ์ ๋ ฌํด์ค๋๋ค.
possible_number_list = sorted(possible_number_list)
# ๋ก๋ ๋ฒํธ๋ฅผ ์ถ๋ ฅํ๊ธฐ ์ํด ๋ฌธ์์ด๋ก ๋ฐ๊พธ๊ณ ์ถ๋ ฅํ๊ธฐ ํธํ๊ฒ ๋ฐ๊ฟ์ค๋๋ค.
number_list = ", ".join([str(numb) for numb in possible_number_list])
await message.channel.send("์ด๋ฒ ๋ก๋ ์ถ์ฒจ๋ฒํธ๋ `" + number_list + "` ๊ฐ ์ข์ ๊ฒ ๊ฐ์์!")
# 0~10 ์ค์์์ ์ซ์๋ฅผ ๋ฝ์์ค๋๋ค.
if message.content.startswith('-์ถ์ฒจ'):
# ๋์ค์ฝ๋ ์ด๋ชจ์ง์ธ ์ซ์๋ค์ ๋ฆฌ์คํธ
possible_numb = [":zero:", ":one:", ":two:", ":three:", ":four:", ":five:", ":six:", ":seven:", ":eight:",
":nine:", ":keycap_ten:"]
# ์ซ์๋ค์ ๋ฆฌ์คํธ์์ ํ๋๋ฅผ ๋ฝ์์ค๋๋ค.
picked = random.choice(possible_numb)
await message.channel.send("์ซ์ " + picked + "๊ฐ ๋์์ด์!")
if message.content.startswith('-์ ์ '):
nickname = message.content[4:]
nickname = nickname.replace(" ", "%20")
opggwebsite = 'https://www.op.gg/summoner/userName='
await message.channel.send(opggwebsite + nickname)
# ์ ํ์ฅ์ ๋ฅผ ์ํด ์ ํ์ ๋์ ํด์ค๋๋ค.
if message.content.startswith('-์ ํ'):
# ๋ฉ์ธ์ง๋ฅผ ๋ถ๋ฌ์ต๋๋ค
choice_msg = message.content
# .split()์ ์ํด "-์ ํ" ์ด๋ผ๋ ๋จ์ด๋ฅผ ์ ๊ฑฐํฉ๋๋ค.
wanted_choice = choice_msg[3:]
# ์ ํ์ง๋ฅผ ๋๋ ๋ฆฌ์คํธํ์ํต๋๋ค.
select_choose = wanted_choice.split()
# ๋ฆฌ์คํธํ ๋ ์ ํ์ง์์ ํ๋๋ฅผ ๊ณจ๋ผ์ค๋๋ค.
choose = random.choice(select_choose)
await message.channel.send(choose + "(์ด)๊ฐ ์ข์ ๊ฒ ๊ฐ์์!")
# ๊ตฌ๊ธ ์คํ๋ ๋์ํธ์ ์ ์ฅํด๋ ๋ฆฌ๋ฌ๊ฒ์์ ์ต๊ณ ๊ธฐ๋ก๋ค์ ๊ฐ์ ธ์ต๋๋ค. ๊ฐ์ธ์ ์ผ๋ก ์ฌ์ฉํ๋ ๊ธฐ๋ฅ์
๋๋ค.
if message.content.startswith('-์์
'):
try:
# ๊ตฌ๊ธ ์คํ๋ ๋์ํธ์์ ์คํ๋ ๋์ํธ ํ์ผ์ ๊ฐ์ ธ์ต๋๋ค.
excel.sync_spread()
# ๋ฉ์ธ์ง๋ฅผ ๋ณด๋ด๋ ์ฌ๋์ด ์ํ๋ ๊ฒ์ ์ด๋ฆ๊ณผ ๊ณก์ ๊ฐ์ ธ์ต๋๋ค. ์ด ๋ ๊ณก์ ์ ๋ชฉ์ ์น๊ธฐ์ ๊ณต์์ ์ผ๋ก ๊ท์ ๋ ํ๊ตญ์ด ๋ฒ์ญ์ด ์๊ธฐ์ ์ซ์๋ก ๋์ฒดํฉ๋๋ค.
get_game_title = message.content.split()
# ์ฐพ๊ณ ์ ํ๋ ๋ฆฌ๋ฌ ๊ฒ์ ์ ๋ชฉ์ ๋ถ๋ฌ์ต๋๋ค.
gametitle = get_game_title[1]
# ์ฐพ๊ณ ์ ํ๋ ๋ฆฌ๋ฌ ๊ฒ์ ๊ณก์ ๋ถ๋ฌ์ต๋๋ค.
gamesong = get_game_title[2]
# ์ ์ฅํด๋ ๋ฆฌ๋ฌ๊ฒ์๊ณผ ๊ทธ ๊ฒ์ ๋ด๋ถ์ ๊ณก์ ๊ฐ์ ธ์ค๋ ์ญํ์ ํฉ๋๋ค.
imform = excel.spread_information(gametitle, gamesong)
# embed๋ก ๋ฉ์ธ์ง ์ถ๋ ฅํฉ๋๋ค.
await message.reply(embed=imform)
except Exception:
# ๋ฌธ์ ๊ฐ ์์ผ๋ฉด ๋์ค์ฝ๋ ์ฑํ
์ ๋ก๊ทธ๋ฅผ ์ถ๋ ฅํฉ๋๋ค. ๋ง์ฝ ์๋ฌ๊ฐ ๋์จ๋ค๋ฉด ๊ผญ ์๋ ค์ฃผ์ธ์.
await message.reply("๋ฌธ์ ๊ฐ ๋ฐ์ํ์ด์!")
await message.reply(traceback.format_exc())
# ๋ฑ
๋๋ฆผ! ๊ฑธ์ฆ ๋ฐด๋ ํํฐ์ ์์ ์ค์ฝ์ด ๊ณ์ฐ๊ธฐ์
๋๋ค.
if message.content.startswith('-๋ฐฉ๋๋ฆฌ'):
try:
# ์ ์ ์ ์ ๋ณด๋ฅผ ๊ฐ์ ธ์ต๋๋ค. ๊ฐ๊ฐ ๊ณก์ ๋์ด๋, ๋
ธํธ ๊ฐ์, ๋ฐด๋ ์ข
ํฉ๋ ฅ.
get_information = message.content.split()
song_difficulty = int(get_information[1])
song_notes = int(get_information[2])
total_power = int(get_information[3])
# ๋
ธํธ๋น ์ค์ฝ์ด๋ฅผ ๊ฐ์ ธ์ต๋๋ค.
notes_score = bangdream.note_score_calculate(song_difficulty, song_notes, total_power)
# ์ ์ฒด ๊ณก์ ์ค์ฝ์ด๋ฅผ ์์ํด์ ๊ฐ์ ธ์ต๋๋ค.
total_score = bangdream.total_score_calculate(notes_score, song_notes)
await message.reply("์ ์๊ฐ ๊ทธ ๊ณก์์๋ " + str(total_score) + " ์ ๋ ๋์ฌ ๊ฒ ๊ฐ์์!")
except IndexError as e:
# ๋๋ฌด ๋์ ์ซ์๋ฅผ ์ฃผ๊ฑฐ๋ ๋ฌด์ธ๊ฐ๊ฐ ํ๋ฆฌ๋ฉด ์๋ฌ ๋ฉ์ธ์ง๊ฐ ๋์ต๋๋ค.
await message.reply("๋ฌด์ธ๊ฐ๊ฐ ์ ๋ชป ์ฃผ์ ๊ฒ ๊ฐ์์! -๋ฐฉ๋๋ฆฌ [๋์ด๋] [๋
ธํธ ๊ฐฏ์] [์ข
ํฉ๋ ฅ]์์ผ๋ก ๋ฃ์ด์ฃผ์ธ์!")
except TypeError as e:
# ์ ์๊ฐ ์๋ ๋ฌธ์์ด์ ์ฃผ๋ฉด ์๋ฌ ๋ฉ์ธ์ง๊ฐ ๋์ต๋๋ค. ์ถ๊ฐ๋ก ์๋ฌ ๋ก๊ทธ๊น์ง ๊ฐ์ด ๋์ต๋๋ค.
await message.reply("๊ธ์๋ง๊ณ ์ซ์๋ฅผ ๋ฃ์ด์ฃผ์ธ์!")
await message.reply(traceback.format_exc())
# ๋ฐ๋ฆฌ์ํ ์ด๋ฒคํธ ๋ณด๋๋ฅผ ์ค์๊ฐ์ผ๋ก ์ ์กํ ์ฒด๋์ ์ธํ
ํฉ๋๋ค.
if message.content.startswith('-๋ณด๋์ฒด๋์ค์ '):
global channel
event_border_channel = int(message.content.replace('-๋ณด๋์ฒด๋์ค์ ', ''))
channel = event_border_channel
event_border_channel = client.get_channel(event_border_channel)
await event_border_channel.send('์ด์ ์ด๊ณณ์ ์ด๋ฒคํธ ๋ณด๋๋ฅผ ๋ฐ์์!')
# ๋์ค์ฝ๋ ๋ด์์ ์ฌ์ฉํ ์ ์๋ ๊ธฐ๋ฅ์ ์๊ฐํด์ค๋๋ค.
if message.content.startswith('-์ค๋ช
'):
await message.channel.send(
"์๋
ํ์ธ์! ์น๋น๋์ ์ธ๊ณต๋น์ ํ์ฝ์ํค ์ธ๋ฆฌ์นด์์! \n\nํ์ฌ ์ฌ์ฉํ ์ ์๋ ๊ธฐ๋ฅ์ผ๋ก๋\n```fix\n-๋ณต๋ฌด์ผ : ๊ฐ๋ฐ์์ ๋จ์ ๋ณต๋ฌด์ผ์๋ฅผ ์๋ ค์ฃผ๋ ๊ธฐ๋ฅ์ด์์!.\n"
"-๋ก๋ : ๋ก๋ ๋ฒํธ๋ฅผ 6๊ฐ ์ ํํด๋๋ ค์! ๋ก๋ ๋ฒํธ๋ฅผ ์ถ์ฒจํด์ ๋ฝ์ผ๋ฉด ๋ฝํ ๋ ๊ธฐ๋ถ์ด ๋ ์ข์์ง๋ ํจ๊ณผ๊ฐ ์๋ค๊ณ ํด์!\n"
"-์ถ์ฒจ : ๋ง์์ ๋๋ ์ซ์ 0 ์์ 10๊น์ง ๋ฌด์์๋ก ๊ณจ๋ผ๋๋ ค์!.\n"
"-์ ํ [์ ํ์ง1, ์ ํ์ง2, ...] : ์ ํ์ ์ฝ๊ฒ ๋ชปํ๋ ์ฌ๋ฌ๋ถ์ ์ํด ์ ๊ฐ ์ ํํด์! ๊ทธ๋ฐ๋ฐ ์ ๋ ์ ํ์ง์์ ์ข์ ๊ฒ์ ๊ณ ๋ฅด๊ณ ์๋๊ฑธ๊น์...?\n"
"-์์
[๊ฒ์ ์ด๋ฆ] [์์
์ด ๋ฒํธ] : ๊ฐ๋ฐ์์ ์ฌ๋ฌ๊ฐ์ง ์ง์
(?)์์ ๊ฐ ๊ณก์์ ์ ์ผ ์ข์๋ ๊ธฐ๋ก์ ์๋ ค์ฃผ์ด์! ์์ง ์์
์ ์์ฑ์ด ๋์ง ์์์ ๋น์ด์๋ ๊ณก๋ค์ด ๋ง์๊ฑฐ์์!\n"
"-๋ฐฉ๋๋ฆฌ [๊ณก ๋์ด๋] [๋
ธํธ ๊ฐ์] [๋ฐด๋ ์ข
ํฉ๋ ฅ(์์ด๋ฆฌ์ด ์์ดํ
ํฌํจ)] : ๋ฑ
๋๋ฆผ์ด๋ผ๋ ๊ฒ์์์ ๋์ค๋ ์ ์๋ฅผ ๋์ถฉ์ด๋๋ง ์์์ ํด์ฃผ์ด์! ์ด ๊ฐ๋ฐ์๋ ์ด๋ฐ ๊ณณ์์ ์์ด๋ ๊ด์ฐฎ์๊ฑธ๊น์?\n"
"-๋ฐ๋ฆฌ์ํ๋ณด๋ : ํ์ฌ ๋ฐ๋ฆฌ์ธ๋ผ์ด๋ธ ์์ดํฐ๋ฐ์ด์ฆ ์ผ๋ณธ ์๋ฒ์ ์ด๋ฒคํธ ํฌ์ธํธ ์์๋ฅผ ์๋ ค๋๋ ค์!```\n๊ฐ ์์ด์!")
# ๊ธฐ๋ณธ์ ์ธ ์ ๋ณด๋ค์ ๋ก๊ทธ์ ์ถ๋ ฅํด์ค๋๋ค.
logging.basicConfig(level=logging.INFO)
client = MyClient()
client.run(TOKEN)
|
<gh_stars>0
package edu.fiuba.algo3.modelo;
import java.util.ArrayList;
import java.util.List;
public class Dibujo {
private List<Tramo> tramos = new ArrayList<>();
public Dibujo() {}
public void crearTramo(Posicion inicio, Posicion fin) throws TramoInvalidoPosicionDeInicioYFinIgualesException {
try {
Tramo tramo = new Tramo(inicio, fin);
if (!this.contieneTramo(tramo)) {
this.tramos.add(tramo);
}
} catch (TramoInvalidoPosicionDeInicioYFinIgualesException e) {
throw new TramoInvalidoPosicionDeInicioYFinIgualesException();
}
}
public List<Tramo> obtenerTramos() {
return this.tramos;
}
public boolean contieneTramo(Tramo tramo) {
return (this.tramos.stream().anyMatch(unTramo -> unTramo.equals(tramo)));
}
} |
<gh_stars>1-10
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.client.lookup.configuration;
import java.util.Arrays;
import java.util.List;
/**
* Defines the interface between the server and client for the TaxAuthorityLookupService
* Technically not a java interface, but it defines all the constants needed on both sides
* which makes the code more robust.
*/
public abstract class TaxAuthorityLookupConfiguration {

    /** Constants-only holder shared by client and server; not instantiable. */
    private TaxAuthorityLookupConfiguration() { }

    /** Endpoint name used for tax authority autocomplete suggestions. */
    public static final String URL_SUGGEST = "gwtSuggestTaxAuth";

    /** Record field: geo identifier of the authority. */
    public static final String OUT_GEO_ID = "geoId";
    /** Record field: human-readable geo name. */
    public static final String OUT_GEO_NAME = "geoName";
    /** Record field: tax authority group name. */
    public static final String OUT_TAX_NAME = "groupName";
    /** Record field: party id of the tax authority. */
    public static final String OUT_TAX_ID = "taxAuthPartyId";

    /** Fields returned by the suggest service. */
    public static final List<String> LIST_OUT_FIELDS = Arrays.asList(
        OUT_TAX_NAME,
        OUT_TAX_ID
    );

    /** Fields usable as lookup criteria. */
    public static final List<String> LIST_LOOKUP_FIELDS = Arrays.asList(
        OUT_GEO_ID,
        OUT_GEO_NAME,
        OUT_TAX_NAME,
        OUT_TAX_ID
    );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.